usage.adb: Change "pragma inline" to "pragma Inline" in information and error messages
[gcc.git] / gcc / dojump.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "insn-config.h"
31 #include "insn-attr.h"
32 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
33 #include "expr.h"
34 #include "optabs.h"
35 #include "langhooks.h"
36 #include "ggc.h"
37
/* Forward declarations for the file-local helpers defined below.  */
static bool prefer_and_bit_test (enum machine_mode, int);
static void do_jump_by_parts_greater (tree, int, rtx, rtx);
static void do_jump_by_parts_equality (tree, rtx, rtx);
static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
				 rtx);
43
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  /* pending_stack_adjust counts bytes of pushed-but-unpopped call
     arguments; a fresh function body starts with none outstanding.  */
  pending_stack_adjust = 0;
}
52
53 /* When exiting from function, if safe, clear out any pending stack adjust
54 so the adjustment won't get done.
55
56 Note, if the current function calls alloca, then it must have a
57 frame pointer regardless of the value of flag_omit_frame_pointer. */
58
59 void
60 clear_pending_stack_adjust (void)
61 {
62 if (optimize > 0
63 && (! flag_omit_frame_pointer || current_function_calls_alloca)
64 && EXIT_IGNORE_STACK
65 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
66 && ! flag_inline_functions)
67 {
68 stack_pointer_delta -= pending_stack_adjust,
69 pending_stack_adjust = 0;
70 }
71 }
72
/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  /* inhibit_defer_pop nonzero means we are inside a call sequence and
     must not pop arguments now.  */
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
85 \f
86 /* Expand conditional expressions. */
87
/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label)
{
  /* A null true-label means "fall through when EXP is nonzero".  */
  do_jump (exp, label, NULL_RTX);
}
97
/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label)
{
  /* A null false-label means "fall through when EXP is zero".  */
  do_jump (exp, NULL_RTX, label);
}
105
/* Used internally by prefer_and_bit_test.  These are GC-rooted rtx
   templates, built once and then re-used (with their modes and constant
   operands patched in place) on every subsequent query.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
				const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  /* Ask the target's cost model which form is cheaper in a condition.  */
  return (rtx_cost (and_test, IF_THEN_ELSE)
	  <= rtx_cost (shift_test, IF_THEN_ELSE));
}
144
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.  */

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

  switch (code)
    {
    case ERROR_MARK:
      /* An error was already reported; emit nothing.  */
      break;

    case INTEGER_CST:
      /* A constant condition: jump unconditionally to whichever label
	 matches its truth value, if that label exists.  */
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
    /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      /* A conversion wrapped around a memory reference must be expanded
	 normally, so any narrowing done below goes through memory.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
      /* Fall through.  */
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
      /* Fall through.  */
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      do_compare_and_jump (build2 (NE_EXPR, TREE_TYPE (exp),
				   TREE_OPERAND (exp, 0),
				   TREE_OPERAND (exp, 1)),
                           NE, NE, if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
	 See if the former is preferred for jump tests and restore it
	 if so.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == RSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (exp, 1)))
	{
	  tree arg = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  tree shift = TREE_OPERAND (TREE_OPERAND (exp, 0), 1);
	  tree one = TREE_OPERAND (exp, 1);
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (shift) == INTEGER_CST
	      && compare_tree_int (shift, 0) > 0
	      && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
	      && prefer_and_bit_test (TYPE_MODE (argtype),
				      TREE_INT_CST_LOW (shift)))
	    {
	      /* Rebuild the mask form (X & (1 << C)) and test that.  */
	      do_jump (build2 (BIT_AND_EXPR, argtype, arg,
			       fold (build2 (LSHIFT_EXPR, argtype,
					     one, shift))),
		       if_false_label, if_true_label);
	      break;
	    }
	}

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      /* Logical negation: just swap the two labels.  */
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case COMPOUND_EXPR:
    case COND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep);

	/* If a narrower type covers the referenced bits and the target
	   can compare in that mode, test in the narrower type.  */
        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
		!= CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	/* Complex comparisons are lowered before this point.  */
	gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_FLOAT);
	gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_INT);
	
        if (integer_zerop (TREE_OPERAND (exp, 1)))
	  /* X == 0 is just X with the labels swapped.  */
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
	  /* Mode too wide for one compare insn: go word by word.  */
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	/* Complex comparisons are lowered before this point.  */
	gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_FLOAT);
	gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_INT);
	
        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
	  /* Equality by parts with labels swapped gives not-equal.  */
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
	/* a <= b is !(a > b): test GT with the labels swapped.  */
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
	/* a >= b is !(a < b): test LT with the labels swapped.  */
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
		/* If the target doesn't provide either UNORDERED or ORDERED
		   comparisons, canonicalize on UNORDERED for the library.  */
		|| rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    {
      enum rtx_code rcode1;
      enum tree_code tcode1, tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode1 = UNORDERED_EXPR;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;
      case LTGT_EXPR:
	/* It is ok for LTGT_EXPR to trap when the result is unordered,
	   so expand to (a < b) || (a > b).  */
	rcode1 = LTGT;
	tcode1 = LT_EXPR;
	tcode2 = GT_EXPR;
	goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;
            rtx drop_through_label = 0;

            /* If the target doesn't support combined unordered
               compares, decompose into two comparisons.  */
            if (if_true_label == 0)
              drop_through_label = if_true_label = gen_label_rtx ();

	    /* Jump to the true label if either sub-comparison holds.  */
            cmp0 = fold (build2 (tcode1, TREE_TYPE (exp), op0, op1));
            cmp1 = fold (build2 (tcode2, TREE_TYPE (exp), op0, op1));
            do_jump (cmp0, 0, if_true_label);
            do_jump (cmp1, if_false_label, if_true_label);

            if (drop_through_label)
              {
                do_pending_stack_adjust ();
                emit_label (drop_through_label);
              }
          }
      }
      break;

      /* Special case:
          __builtin_expect (<test>, 0)	and
          __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
	tree fndecl = get_callee_fndecl (exp);
	tree arglist = TREE_OPERAND (exp, 1);

	if (fndecl
	    && DECL_BUILT_IN (fndecl)
	    && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	    && arglist != NULL_TREE
	    && TREE_CHAIN (arglist) != NULL_TREE)
	  {
	    rtx seq = expand_builtin_expect_jump (exp, if_false_label,
						  if_true_label);

	    if (seq != NULL_RTX)
	      {
		emit_insn (seq);
		return;
	      }
	  }
      }
      /* Fall through and generate the normal code.  */

    default:
    normal:
      /* Expand the condition to RTL and compare the result against zero.  */
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      do_pending_stack_adjust ();

      if (GET_CODE (temp) == CONST_INT
          || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
          || GET_CODE (temp) == LABEL_REF)
        {
	  /* Constant result: the jump (if any) is unconditional.  */
          rtx target = temp == const0_rtx ? if_false_label : if_true_label;
          if (target)
            emit_jump (target);
        }
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else
	{
	  gcc_assert (GET_MODE (temp) != VOIDmode);
	  
	  /* The RTL optimizers prefer comparisons against pseudos.  */
	  if (GET_CODE (temp) == SUBREG)
	    {
	      /* Compare promoted variables in their promoted mode.  */
	      if (SUBREG_PROMOTED_VAR_P (temp)
		  && REG_P (XEXP (temp, 0)))
		temp = XEXP (temp, 0);
	      else
		temp = copy_to_reg (temp);
	    }
	  do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
				   NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
				   GET_MODE (temp), NULL_RTX,
				   if_false_label, if_true_label);
	}
    }
}
539 \f
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
			  rtx if_true_label)
{
  /* SWAP selects which operand becomes op0: expanding operand SWAP as
     op0 and operand !SWAP as op1 turns the fixed GT test into LT.  */
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
				if_true_label);
}
557
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
			      rtx op1, rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  /* Supply a fall-through label for whichever caller label is missing,
     so both outcomes have somewhere to go.  */
  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      /* Pick subword I so that we always visit the most significant
	 word first, whatever the target's word endianness.  */
      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

  /* All words compared equal, so OP0 is not greater than OP1.  */
  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
608
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  /* Need somewhere to branch on the first unequal word even when the
     caller wants fall-through for "false".  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  /* Any unequal word proves the operands unequal.  */
  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, TYPE_UNSIGNED (TREE_TYPE (exp)),
                             word_mode, NULL_RTX, if_false_label, NULL_RTX);

  /* All words were equal.  */
  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
636 \f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (rtx op0, rtx if_false_label, rtx if_true_label)
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  /* If the IOR chain succeeded, a single compare of the result
     against zero decides the whole test.  */
  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  /* Any nonzero word means OP0 is nonzero.  */
  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
684 \f
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   Returns a comparison rtx: against cc0 on cc0 targets, otherwise
   directly over OP0/OP1.  */

rtx
compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
		  enum machine_mode mode, rtx size)
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      /* Swapping operands requires the matching comparison, e.g.
	 GT becomes LT.  */
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  /* Let the simplifier fold or canonicalize the comparison first; it
     may produce a constant or an equivalent comparison.  */
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
						 op0, op1)))
    {
      if (CONSTANT_P (tem))
	return tem;

      /* Continue with the simplified comparison's pieces.  */
      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}
743
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
			 enum machine_mode mode, rtx size, rtx if_false_label,
			 rtx if_true_label)
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  (Not safe for floats: reversing may change the result when
     the operands are unordered.)  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      /* Swapping operands requires the matching comparison, e.g.
	 GT becomes LT.  */
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  /* Let the simplifier fold or canonicalize the comparison first.  */
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
						 op0, op1)))
    {
      if (CONSTANT_P (tem))
	{
	  /* The comparison folded to a constant: emit at most one
	     unconditional jump.  */
	  rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
		      ? if_false_label : if_true_label;
	  if (label)
	    emit_jump (label);
	  return;
	}

      /* Continue with the simplified comparison's pieces.  */
      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  /* emit_cmp_and_jump_insns needs a true-label; make a throwaway one
     if the caller asked for fall-through on true.  */
  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
820
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree exp, enum rtx_code signed_code,
		     enum rtx_code unsigned_code, rtx if_false_label,
		     rtx if_true_label)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* BLKmode operands need an explicit size for the comparison.  */
  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}
898
899 #include "gt-dojump.h"