/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"

static rtx break_out_memory_refs (rtx);
static void emit_stack_probe (rtx);


/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_BITSIZE (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
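
/* Illustrative sketch, not part of GCC: a standalone version of the
   branch-free sign extension used above, assuming a 64-bit long; the
   helper name is hypothetical.  For WIDTH == 8 and C == 0xff it returns
   -1, and for C == 5 it returns 5.  */
#if 0
static long
sign_extend_field (long c, int width)
{
  long sign = 1L << (width - 1);  /* Isolated sign bit of the field.  */
  c &= (sign << 1) - 1;           /* Mask down to the low WIDTH bits.  */
  c ^= sign;                      /* Flip the sign bit...  */
  return c - sign;                /* ...and recenter into
                                     [-2^(width-1), 2^(width-1) - 1].  */
}
#endif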

/* Return an rtx for the sum of X and the integer C.  */

rtx
plus_constant (rtx x, HOST_WIDE_INT c)
{
  RTX_CODE code;
  rtx y;
  enum machine_mode mode;
  rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        unsigned HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
        unsigned HOST_WIDE_INT lv;
        HOST_WIDE_INT hv;

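        /* add_double performs the two-word addition with carry
           propagation: in effect (with unsigned wraparound)
           LV = L1 + L2 and HV = H1 + H2 + (LV < L1).  */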
        add_double (l1, h1, l2, h2, &lv, &hv);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem
            = force_const_mem (GET_MODE (x),
                               plus_constant (get_pool_constant (XEXP (x, 0)),
                                              c));
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.  Look for
         a constant term in the sum and combine it with C.  For an integer
         constant term, we make a combined integer.  For a constant term
         that is not an explicit integer, we cannot really combine, but
         group them together anyway.

         Restart or use a recursive call in case the remaining operand is
         something that we handle specially, such as a SYMBOL_REF.

         We may not immediately return from the recursive call here, lest
         all_constant be lost.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          c += INTVAL (XEXP (x, 1));

          if (GET_MODE (x) != VOIDmode)
            c = trunc_int_for_mode (c, GET_MODE (x));

          x = XEXP (x, 0);
          goto restart;
        }
      else if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
          c = 0;
        }
      else if (find_constant_term_loc (&y))
        {
          /* We need to be careful since X may be shared and we can't
             modify it in place.  */
          rtx copy = copy_rtx (x);
          rtx *const_loc = find_constant_term_loc (&copy);

          *const_loc = plus_constant (*const_loc, c);
          x = copy;
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
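
/* For example, adding 3 to (const (plus (symbol_ref "x") (const_int 4)))
   strips the CONST, folds the integer terms via the PLUS case, and
   rewraps the result, yielding
   (const (plus (symbol_ref "x") (const_int 7))).  */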
\f
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (GET_CODE (XEXP (x, 1)) == CONST_INT
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
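
/* Worked example: with *CONSTPTR starting at const0_rtx, passing
   (plus (plus (reg) (const_int 8)) (const_int -4)) returns (reg)
   and leaves *CONSTPTR equal to (const_int 4).  */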

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lang_hooks.expr_size (exp), exp);

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), 0);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    size = lang_hooks.expr_size (exp);

  if (size == 0 || !host_integerp (size, 0))
    return -1;

  return tree_low_cst (size, 0);
}
\f
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
    }

  return x;
}
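
/* For instance, (plus (mem A) (mult (mem B) (const_int 4))) becomes
   (plus (reg X) (mult (reg Y) (const_int 4))), with loads of the two
   memory locations into the fresh pseudos X and Y emitted first.  */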

/* Given X, a memory address in ptr_mode, convert it to an address
   in Pmode, or vice versa (TO_MODE says which way).  We take advantage of
   the fact that pointers are not allowed to overflow by commuting arithmetic
   operations over conversions so that address arithmetic insns can be
   used.  */

rtx
convert_memory_address (enum machine_mode to_mode ATTRIBUTE_UNUSED,
                        rtx x)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address (to_mode, XEXP (x, 0)));
      break;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
         operation if one operand is a constant and converting the constant
         does not change it.  We can always safely permute them if we are
         making the address narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && GET_CODE (XEXP (x, 1)) == CONST_INT
              && XEXP (x, 1) == convert_memory_address (to_mode, XEXP (x, 1))))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address (to_mode, XEXP (x, 0)),
                               XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
\f
/* Return something equivalent to X but valid as a memory address
   for something of mode MODE.  When X is not itself valid, this
   works by copying X or subexpressions of it into registers.  */

rtx
memory_address (enum machine_mode mode, rtx x)
{
  rtx oldx = x;

  x = convert_memory_address (Pmode, x);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (Pmode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_p (mode, x))
        goto win;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_p (mode, oldx))
        goto win2;

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      LEGITIMIZE_ADDRESS (x, oldx, mode, win);

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_p (mode, y))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_p (mode, y))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (Pmode, x);

      goto done;

    win2:
      x = oldx;
    win:
      if (flag_force_addr && ! cse_not_expected && !REG_P (x)
          /* Don't copy an addr via a reg if it is one of our stack slots.  */
          && ! (GET_CODE (x) == PLUS
                && (XEXP (x, 0) == virtual_stack_vars_rtx
                    || XEXP (x, 0) == virtual_incoming_args_rtx)))
        {
          if (general_operand (x, Pmode))
            x = force_reg (Pmode, x);
          else
            x = force_operand (x, NULL_RTX);
        }
    }

 done:

  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

/* Like `memory_address' but pretend `flag_force_addr' is 0.  */

rtx
memory_address_noforce (enum machine_mode mode, rtx x)
{
  int ambient_force_addr = flag_force_addr;
  rtx val;

  flag_force_addr = 0;
  val = memory_address (mode, x);
  flag_force_addr = ambient_force_addr;
  return val;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  if (! (flag_force_addr && CONSTANT_ADDRESS_P (XEXP (ref, 0)))
      && memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}
\f
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

        ca = exact_log2 (INTVAL (c) & -INTVAL (c)) * BITS_PER_UNIT;

        align = MIN (sa, ca);
      }

    if (align)
      mark_reg_pointer (temp, align);
  }

  return temp;
}
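
/* Note on the alignment computation above: INTVAL (c) & -INTVAL (c)
   isolates the lowest set bit of the offset, so exact_log2 of that value
   gives the number of trailing zero bits; e.g. for an offset of 12
   (binary 1100), c & -c yields 4 and exact_log2 returns 2.  */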

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}
\f
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_CALL is nonzero if this call is promoting args for a call.  */

#if defined(PROMOTE_MODE) && !defined(PROMOTE_FUNCTION_MODE)
#define PROMOTE_FUNCTION_MODE PROMOTE_MODE
#endif

enum machine_mode
promote_mode (tree type, enum machine_mode mode, int *punsignedp,
              int for_call ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

#ifndef PROMOTE_MODE
  if (! for_call)
    return mode;
#endif

  switch (code)
    {
#ifdef PROMOTE_FUNCTION_MODE
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case CHAR_TYPE:      case REAL_TYPE:       case OFFSET_TYPE:
#ifdef PROMOTE_MODE
      if (for_call)
        {
#endif
          PROMOTE_FUNCTION_MODE (mode, unsignedp, type);
#ifdef PROMOTE_MODE
        }
      else
        {
          PROMOTE_MODE (mode, unsignedp, type);
        }
#endif
      break;
#endif

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      mode = Pmode;
      unsignedp = POINTERS_EXTEND_UNSIGNED;
      break;
#endif

    default:
      break;
    }

  *punsignedp = unsignedp;
  return mode;
}
\f
/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta -= INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       add_optab,
#else
                       sub_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta += INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       sub_optab,
#else
                       add_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (align == 1)
    return size;

  if (GET_CODE (size) == CONST_INT)
    {
      HOST_WIDE_INT new = (INTVAL (size) + align - 1) / align * align;

      if (INTVAL (size) != new)
        size = GEN_INT (new);
    }
  else
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't, so do the addition ourselves and then
         use TRUNC_DIV_EXPR.  */
      size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
      size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
                            NULL_RTX, 1);
      size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
    }

  return size;
}
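
/* Illustrative sketch, not part of GCC: the constant case above is the
   classic round-up-to-a-multiple computation, safe whenever
   N + ALIGN - 1 cannot overflow.  The helper name is hypothetical.  */
#if 0
static long
round_up_to_multiple (long n, long align)
{
  /* e.g. round_up_to_multiple (37, 16) == (37 + 15) / 16 * 16 == 48.  */
  return (n + align - 1) / align * align;
}
#endif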
\f
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */

void
emit_stack_save (enum save_level save_level, rtx *psave, rtx after)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      do_pending_stack_adjust ();
      /* We must validize inside the sequence, to ensure that any instructions
         created by the validize call also get moved to the right place.  */
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      do_pending_stack_adjust ();
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at
   current position.  */

void
emit_stack_restore (enum save_level save_level, rtx sa, rtx after)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                  gen_rtx_MEM (BLKmode,
                                               gen_rtx_SCRATCH (VOIDmode))));
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                  gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
    }

  discard_pending_stack_adjust ();

  if (after)
    {
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save, NULL_RTX);
}
\f
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.  This region of memory is always aligned to
   a multiple of BIGGEST_ALIGNMENT.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.
   TARGET is a place in which the address can be placed.

   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */

rtx
allocate_dynamic_stack_space (rtx size, rtx target, int known_align)
{
  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  current_function_calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif

  if (MUST_ALIGN)
    size
      = force_operand (plus_constant (size,
                                      BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
                       NULL_RTX);

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.

     What used to happen is that, since we did not know for sure
     whether setjmp() was invoked until after RTL generation, we
     would use reg notes to store the "optimized" size and fix things
     up later.  These days we know this information before we ever
     start building RTL so the reg notes are unnecessary.  */
  if (!current_function_calls_setjmp)
    {
      int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

      /* ??? Code below assumes that the save area needs maximal
         alignment.  This constraint may be too strong.  */
      gcc_assert (PREFERRED_STACK_BOUNDARY == BIGGEST_ALIGNMENT);

      if (GET_CODE (size) == CONST_INT)
        {
          HOST_WIDE_INT new = INTVAL (size) / align * align;

          if (INTVAL (size) != new)
            size = GEN_INT (new);
        }
      else
        {
          /* Since we know overflow is not possible, we avoid using
             CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead.  */
          size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
                                GEN_INT (align), NULL_RTX, 1);
          size = expand_mult (Pmode, size,
                              GEN_INT (align), NULL_RTX, 1);
        }
    }
  else
    {
      rtx dynamic_offset
        = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
                        stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

      size = expand_binop (Pmode, add_optab, size, dynamic_offset,
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
#endif
    size = round_push (size);

  do_pending_stack_adjust ();

  /* We ought always to be called at the top level, and the stack ought
     to be properly aligned at this point.  */
  gcc_assert (!(stack_pointer_delta
                % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);

  /* Don't use a TARGET that isn't a pseudo or is the wrong mode.  */
  if (target == 0 || !REG_P (target)
      || REGNO (target) < FIRST_PSEUDO_REGISTER
      || GET_MODE (target) != Pmode)
    target = gen_reg_rtx (Pmode);

  mark_reg_pointer (target, known_align);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;
      insn_operand_predicate_fn pred;

      /* We don't have to check against the predicate for operand 0 since
         TARGET is known to be a pseudo of the proper mode, which must
         be valid for the operand.  For operand 1, convert to the
         proper mode and validate.  */
      if (mode == VOIDmode)
        mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode;

      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
      if (pred && ! ((*pred) (size, mode)))
        size = copy_to_mode_reg (mode, convert_to_mode (mode, size, 1));

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (current_function_limit_stack)
        {
          rtx available;
          rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
          available = expand_binop (Pmode, sub_optab,
                                    stack_pointer_rtx, stack_limit_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#else
          available = expand_binop (Pmode, sub_optab,
                                    stack_limit_rtx, stack_pointer_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#endif
          emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
                                   space_available);
#ifdef HAVE_trap
          if (HAVE_trap)
            emit_insn (gen_trap ());
          else
#endif
            error ("stack limits not supported on this target");
          emit_barrier ();
          emit_label (space_available);
        }

      anti_adjust_stack (size);

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't, so do the addition ourselves and then
         use TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
                             GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
                              NULL_RTX, 1);
      target = expand_mult (Pmode, target,
                            GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
                            NULL_RTX, 1);
    }
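
  /* Net effect of the MUST_ALIGN handling: SIZE was padded by
     BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1 bytes before the allocation,
     and TARGET has now been rounded up to the next multiple of
     BIGGEST_ALIGNMENT / BITS_PER_UNIT, so the aligned block always fits
     inside the allocated region.  With a 16-byte BIGGEST_ALIGNMENT, for
     example, a 100-byte request allocates 115 bytes and the returned
     address skips at most 15 of them.  */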

  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  return target;
}
\f
/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (rtx libfunc)
{
  stack_check_libfunc = libfunc;
}
\f
/* Emit one stack probe at ADDRESS, an address within the stack.  */

static void
emit_stack_probe (rtx address)
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  if (STACK_CHECK_PROBE_LOAD)
    emit_move_insn (gen_reg_rtx (word_mode), memref);
  else
    emit_move_insn (memref, const0_rtx);
}

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from the
   current stack pointer.  STACK_GROWS_DOWNWARD says whether to add or
   subtract from the stack.  If SIZE is constant, this is done
   with a fixed number of probes.  Otherwise, we must make a loop.  */

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#else
#define STACK_GROW_OP PLUS
#endif

void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if the front end has set up a function for us to call to
     check the stack.  */
  if (stack_check_libfunc != 0)
    {
      rtx addr = memory_address (QImode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (size, first)));

      addr = convert_memory_address (ptr_mode, addr);
      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
                         ptr_mode);
    }

  /* Next see if we have an insn to check the stack.  Use it if so.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      insn_operand_predicate_fn pred;
      rtx last_addr
        = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                         stack_pointer_rtx,
                                         plus_constant (size, first)),
                         NULL_RTX);

      pred = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
      if (pred && ! ((*pred) (last_addr, Pmode)))
        last_addr = copy_to_mode_reg (Pmode, last_addr);

      emit_insn (gen_check_stack (last_addr));
    }
#endif

  /* If we have to generate explicit probes, see if we have a constant
     small number of them to generate.  If so, that's the easy case.  */
  else if (GET_CODE (size) == CONST_INT
           && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
    {
      HOST_WIDE_INT offset;

      /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
         for values of N from 1 until it exceeds LAST.  If only one
         probe is needed, this will not generate any code.  Then probe
         at LAST.  */
      for (offset = first + STACK_CHECK_PROBE_INTERVAL;
           offset < INTVAL (size);
           offset = offset + STACK_CHECK_PROBE_INTERVAL)
        emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                          stack_pointer_rtx,
                                          GEN_INT (offset)));

      emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                        stack_pointer_rtx,
                                        plus_constant (size, first)));
    }
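
  /* Worked example of the constant case, assuming
     STACK_CHECK_PROBE_INTERVAL is 4096: with FIRST == 0 and
     SIZE == 10000, the loop probes at offsets 4096 and 8192, and the
     final emit_stack_probe call probes at FIRST + SIZE == 10000.  */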

  /* In the variable case, do the same as above, but in a loop.  We emit loop
     notes so that loop optimization can be done.  */
  else
    {
      rtx test_addr
        = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                         stack_pointer_rtx,
                                         GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
                         NULL_RTX);
      rtx last_addr
        = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                         stack_pointer_rtx,
                                         plus_constant (size, first)),
                         NULL_RTX);
      rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
      rtx loop_lab = gen_label_rtx ();
      rtx test_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();
      rtx temp;

      if (!REG_P (test_addr)
          || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
        test_addr = force_reg (Pmode, test_addr);

      emit_jump (test_lab);

      emit_label (loop_lab);
      emit_stack_probe (test_addr);

#ifdef STACK_GROWS_DOWNWARD
#define CMP_OPCODE GTU
      temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
                           1, OPTAB_WIDEN);
#else
#define CMP_OPCODE LTU
      temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
                           1, OPTAB_WIDEN);
#endif

      gcc_assert (temp == test_addr);

      emit_label (test_lab);
      emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
                               NULL_RTX, Pmode, 1, loop_lab);
      emit_jump (end_lab);
      emit_label (end_lab);

      emit_stack_probe (last_addr);
    }
}
\f
/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL node if the precise function is known,
   otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (tree valtype, tree func ATTRIBUTE_UNUSED,
                     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

#ifdef FUNCTION_OUTGOING_VALUE
  if (outgoing)
    val = FUNCTION_OUTGOING_VALUE (valtype, func);
  else
#endif
    val = FUNCTION_VALUE (valtype, func);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
         since the value of bytes will then be large enough that no
         mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != VOIDmode;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (enum machine_mode mode)
{
  return LIBCALL_VALUE (mode);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}

#include "gt-explow.h"