/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "langhooks.h"
#include "ggc.h"

static bool prefer_and_bit_test (enum machine_mode, int);
static void do_jump_by_parts_greater (tree, int, rtx, rtx);
static void do_jump_by_parts_equality (tree, rtx, rtx);
static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
                                 rtx);

/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}

/* When exiting from a function, if it is safe, clear out any pending
   stack adjust so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      stack_pointer_delta -= pending_stack_adjust;
      pending_stack_adjust = 0;
    }
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
\f
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label)
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label)
{
  do_jump (exp, NULL_RTX, label);
}

/* Used internally by prefer_and_bit_test.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

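/* These rtxes are built once and then recycled: each call to
   prefer_and_bit_test merely updates their mode and bit-position
   constants, so no new RTL is allocated per query.  The GTY markers
   keep the garbage collector from discarding them between calls.  */
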
/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

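  /* Cost both forms as the condition of a conditional branch
     (IF_THEN_ELSE).  The "<=" means that on a tie we keep the
     single-bit AND form.  */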
  return (rtx_cost (and_test, IF_THEN_ELSE)
          <= rtx_cost (shift_test, IF_THEN_ELSE));
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case UNSAVE_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      TREE_OPERAND (exp, 0)
        = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
      break;

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
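      /* Otherwise, fall through and handle like any other conversion.  */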
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
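      /* Fall through.  */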
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

#if 0
      /* This never takes fewer insns than evaluating the PLUS_EXPR followed
         by a test, and can take more if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
                   TREE_OPERAND (exp, 0),
                   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
                                 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
                                  TREE_OPERAND (exp, 0),
                                  TREE_OPERAND (exp, 1)),
                           NE, NE, if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
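      /* For example, "x & 0x10" will have been folded to "(x >> 4) & 1";
         when the AND form is the cheaper branch condition on this target,
         rebuild it here.  */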
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == RSHIFT_EXPR
          && integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree arg = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          tree shift = TREE_OPERAND (TREE_OPERAND (exp, 0), 1);
          tree one = TREE_OPERAND (exp, 1);
          tree argtype = TREE_TYPE (arg);
          if (TREE_CODE (shift) == INTEGER_CST
              && compare_tree_int (shift, 0) > 0
              && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
              && prefer_and_bit_test (TYPE_MODE (argtype),
                                      TREE_INT_CST_LOW (shift)))
            {
              do_jump (build (BIT_AND_EXPR, argtype, arg,
                              fold (build (LSHIFT_EXPR, argtype, one, shift))),
                       if_false_label, if_true_label);
              break;
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

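      /* For example, "x & 0xff" with x an int can be tested with a
         QImode comparison if the target provides one.  */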
      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
        if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
        if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
        {
          rtx label1 = gen_label_rtx ();
          drop_through_label = gen_label_rtx ();

          do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

          start_cleanup_deferral ();
          /* Now the THEN-expression.  */
          do_jump (TREE_OPERAND (exp, 1),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          /* In case the do_jump just above never jumps.  */
          do_pending_stack_adjust ();
          emit_label (label1);

          /* Now the ELSE-expression.  */
          do_jump (TREE_OPERAND (exp, 2),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          end_cleanup_deferral ();
        }
      break;

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

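        /* For complex operands, test the real and imaginary parts
           separately; both must compare equal.  */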
        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
                       fold (build (EQ_EXPR, TREE_TYPE (exp),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))),
                       fold (build (EQ_EXPR, TREE_TYPE (exp),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

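        /* For complex operands, the values differ if either the real
           or the imaginary parts do.  */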
        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
                       fold (build (NE_EXPR, TREE_TYPE (exp),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))),
                       fold (build (NE_EXPR, TREE_TYPE (exp),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or ORDERED
                   comparisons, canonicalize on UNORDERED for the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

      {
        enum rtx_code rcode1;
        enum tree_code tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into UNORDERED + comparison.  */
            cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
            cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
            exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
            do_jump (exp, if_false_label, if_true_label);
          }
      }
      break;

      /* Special case:
         __builtin_expect (<test>, 0) and
         __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
        tree fndecl = get_callee_fndecl (exp);
        tree arglist = TREE_OPERAND (exp, 1);

        if (fndecl
            && DECL_BUILT_IN (fndecl)
            && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
            && arglist != NULL_TREE
            && TREE_CHAIN (arglist) != NULL_TREE)
          {
            rtx seq = expand_builtin_expect_jump (exp, if_false_label,
                                                  if_true_label);

            if (seq != NULL_RTX)
              {
                emit_insn (seq);
                return;
              }
          }
      }
      /* Fall through and generate the normal code.  */

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

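      /* If the expression evaluated to a constant, the branch direction
         is known at compile time, so jump (or fall through) directly.  */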
      if (GET_CODE (temp) == CONST_INT
          || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
          || GET_CODE (temp) == LABEL_REF)
        {
          rtx target = temp == const0_rtx ? if_false_label : if_true_label;
          if (target)
            emit_jump (target);
        }
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        {
          /* The RTL optimizers prefer comparisons against pseudos.  */
          if (GET_CODE (temp) == SUBREG)
            {
              /* Compare promoted variables in their promoted mode.  */
              if (SUBREG_PROMOTED_VAR_P (temp)
                  && GET_CODE (XEXP (temp, 0)) == REG)
                temp = XEXP (temp, 0);
              else
                temp = copy_to_reg (temp);
            }
          do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                                   NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                                   GET_MODE (temp), NULL_RTX,
                                   if_false_label, if_true_label);
        }
      else
        abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
\f
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

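/* do_jump uses this for all four orderings: GT directly, LT via SWAP,
   and LE/GE by also exchanging the two labels, so that the jump is
   taken on the complementary condition.  */
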
static void
do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
                          rtx if_true_label)
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label);
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but the high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

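  /* If we fall out of the loop, all words compared equal, so OP0 is not
     greater than OP1; take the false branch.  */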
  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, TYPE_UNSIGNED (TREE_TYPE (exp)),
                             word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (rtx op0, rtx if_false_label, rtx if_true_label)
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared) and set (CC0)
   according to the result.  The decision as to signed or unsigned
   comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                  enum machine_mode mode, rtx size)
{
  enum rtx_code ucode;
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

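  /* Try to evaluate the comparison at compile time; if it folds to a
     constant, return that rtx without emitting a compare insn.  */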
  ucode = unsignedp ? unsigned_condition (code) : code;
  tem = simplify_const_relational_operation (ucode, mode, op0, op1);
  if (tem != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label)
{
  enum rtx_code ucode;
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

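  /* If the comparison folds to a constant, its outcome is known at
     compile time; emit an unconditional jump to the surviving label,
     if any, and return.  */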
  ucode = unsignedp ? unsigned_condition (code) : code;
  tem = simplify_const_relational_operation (ucode, mode, op0, op1);
  if (tem != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree exp, enum rtx_code signed_code,
                     enum rtx_code unsigned_code, rtx if_false_label,
                     rtx if_true_label)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}

#include "gt-dojump.h"