gcc/explow.c
1 /* Subroutines for manipulating rtx's in semantically interesting ways.
2 Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "tm_p.h"
31 #include "flags.h"
32 #include "function.h"
33 #include "expr.h"
34 #include "optabs.h"
35 #include "hard-reg-set.h"
36 #include "insn-config.h"
37 #include "ggc.h"
38 #include "recog.h"
39 #include "langhooks.h"
40 #include "target.h"
41 #include "output.h"
42
43 static rtx break_out_memory_refs (rtx);
44 static void emit_stack_probe (rtx);
45
46
47 /* Truncate and perhaps sign-extend C as appropriate for MODE. */
48
49 HOST_WIDE_INT
50 trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
51 {
52 int width = GET_MODE_BITSIZE (mode);
53
54 /* You want to truncate to a _what_? */
55 gcc_assert (SCALAR_INT_MODE_P (mode));
56
57 /* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */
58 if (mode == BImode)
59 return c & 1 ? STORE_FLAG_VALUE : 0;
60
61 /* Sign-extend for the requested mode. */
62
63 if (width < HOST_BITS_PER_WIDE_INT)
64 {
65 HOST_WIDE_INT sign = 1;
66 sign <<= width - 1;
67 c &= (sign << 1) - 1;
68 c ^= sign;
69 c -= sign;
70 }
71
72 return c;
73 }
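
/* A worked example of the mask/xor/subtract trick above, added for
   illustration: truncating c = 0x1ff to QImode (width 8) gives
   sign = 0x80; c &= 0xff yields 0xff; c ^= 0x80 yields 0x7f; and
   c -= 0x80 yields -1, the correct sign-extension of the low byte.  */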
74
75 /* Return an rtx for the sum of X and the integer C. */
76
77 rtx
78 plus_constant (rtx x, HOST_WIDE_INT c)
79 {
80 RTX_CODE code;
81 rtx y;
82 enum machine_mode mode;
83 rtx tem;
84 int all_constant = 0;
85
86 if (c == 0)
87 return x;
88
89 restart:
90
91 code = GET_CODE (x);
92 mode = GET_MODE (x);
93 y = x;
94
95 switch (code)
96 {
97 case CONST_INT:
98 return GEN_INT (INTVAL (x) + c);
99
100 case CONST_DOUBLE:
101 {
102 unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
103 HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
104 unsigned HOST_WIDE_INT l2 = c;
105 HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
106 unsigned HOST_WIDE_INT lv;
107 HOST_WIDE_INT hv;
108
109 add_double (l1, h1, l2, h2, &lv, &hv);
110
111 return immed_double_const (lv, hv, VOIDmode);
112 }
113
114 case MEM:
115 /* If this is a reference to the constant pool, try replacing it with
116 a reference to a new constant. If the resulting address isn't
117 valid, don't return it because we have no way to validize it. */
118 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
119 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
120 {
121 tem
122 = force_const_mem (GET_MODE (x),
123 plus_constant (get_pool_constant (XEXP (x, 0)),
124 c));
125 if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
126 return tem;
127 }
128 break;
129
130 case CONST:
131 /* If adding to something entirely constant, set a flag
132 so that we can add a CONST around the result. */
133 x = XEXP (x, 0);
134 all_constant = 1;
135 goto restart;
136
137 case SYMBOL_REF:
138 case LABEL_REF:
139 all_constant = 1;
140 break;
141
142 case PLUS:
143 /* The interesting case is adding the integer to a sum.
144 Look for a constant term in the sum and combine it
145 with C. For an integer constant term, we make a combined
146 integer. For a constant term that is not an explicit integer,
147 we cannot really combine, but group them together anyway.
148
149 Restart or use a recursive call in case the remaining operand is
150 something that we handle specially, such as a SYMBOL_REF.
151
152 We may not immediately return from the recursive call here, lest
153 all_constant gets lost. */
154
155 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
156 {
157 c += INTVAL (XEXP (x, 1));
158
159 if (GET_MODE (x) != VOIDmode)
160 c = trunc_int_for_mode (c, GET_MODE (x));
161
162 x = XEXP (x, 0);
163 goto restart;
164 }
165 else if (CONSTANT_P (XEXP (x, 1)))
166 {
167 x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
168 c = 0;
169 }
170 else if (find_constant_term_loc (&y))
171 {
172 /* We need to be careful since X may be shared and we can't
173 modify it in place. */
174 rtx copy = copy_rtx (x);
175 rtx *const_loc = find_constant_term_loc (&copy);
176
177 *const_loc = plus_constant (*const_loc, c);
178 x = copy;
179 c = 0;
180 }
181 break;
182
183 default:
184 break;
185 }
186
187 if (c != 0)
188 x = gen_rtx_PLUS (mode, x, GEN_INT (c));
189
190 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
191 return x;
192 else if (all_constant)
193 return gen_rtx_CONST (mode, x);
194 else
195 return x;
196 }
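
/* An illustrative trace: for x = (const (plus (symbol_ref "s")
   (const_int 4))) and c = 8, the CONST case strips the wrapper and sets
   all_constant, the PLUS case folds the two integers together, and the
   final result is (const (plus (symbol_ref "s") (const_int 12))).  */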
197 \f
198 /* If X is a sum, return a new sum like X but lacking any constant terms.
199 Add all the removed constant terms into *CONSTPTR.
200 X itself is not altered. The result != X if and only if
201 it is not isomorphic to X. */
202
203 rtx
204 eliminate_constant_term (rtx x, rtx *constptr)
205 {
206 rtx x0, x1;
207 rtx tem;
208
209 if (GET_CODE (x) != PLUS)
210 return x;
211
212 /* First handle constants appearing at this level explicitly. */
213 if (GET_CODE (XEXP (x, 1)) == CONST_INT
214 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
215 XEXP (x, 1)))
216 && GET_CODE (tem) == CONST_INT)
217 {
218 *constptr = tem;
219 return eliminate_constant_term (XEXP (x, 0), constptr);
220 }
221
222 tem = const0_rtx;
223 x0 = eliminate_constant_term (XEXP (x, 0), &tem);
224 x1 = eliminate_constant_term (XEXP (x, 1), &tem);
225 if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
226 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
227 *constptr, tem))
228 && GET_CODE (tem) == CONST_INT)
229 {
230 *constptr = tem;
231 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
232 }
233
234 return x;
235 }
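
/* Example: given x = (plus (plus (reg) (const_int 4)) (const_int 8)) and
   *constptr = const0_rtx, the integer 8 is folded into *constptr, the
   recursive call folds in the 4, and the bare (reg) is returned with
   *constptr = (const_int 12).  */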
236
237 /* Return an rtx for the size in bytes of the value of EXP. */
238
239 rtx
240 expr_size (tree exp)
241 {
242 tree size;
243
244 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
245 size = TREE_OPERAND (exp, 1);
246 else
247 {
248 size = lang_hooks.expr_size (exp);
249 gcc_assert (size);
250 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp);
251 }
252
253 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
254 }
255
256 /* Return a wide integer for the size in bytes of the value of EXP, or -1
257 if the size can vary or is larger than an integer. */
258
259 HOST_WIDE_INT
260 int_expr_size (tree exp)
261 {
262 tree size;
263
264 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
265 size = TREE_OPERAND (exp, 1);
266 else
267 {
268 size = lang_hooks.expr_size (exp);
269 gcc_assert (size);
270 }
271
272 if (size == 0 || !host_integerp (size, 0))
273 return -1;
274
275 return tree_low_cst (size, 0);
276 }
277 \f
278 /* Return a copy of X in which all memory references
279 and all constants that involve symbol refs
280 have been replaced with new temporary registers.
281 Also emit code to load the memory locations and constants
282 into those registers.
283
284 If X contains no such constants or memory references,
285 X itself (not a copy) is returned.
286
287 If a constant is found in the address that is not a legitimate constant
288 in an insn, it is left alone in the hope that it might be valid in the
289 address.
290
291 X may contain no arithmetic except addition, subtraction and multiplication.
292 Values returned by expand_expr with 1 for sum_ok fit this constraint. */
293
294 static rtx
295 break_out_memory_refs (rtx x)
296 {
297 if (MEM_P (x)
298 || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
299 && GET_MODE (x) != VOIDmode))
300 x = force_reg (GET_MODE (x), x);
301 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
302 || GET_CODE (x) == MULT)
303 {
304 rtx op0 = break_out_memory_refs (XEXP (x, 0));
305 rtx op1 = break_out_memory_refs (XEXP (x, 1));
306
307 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
308 x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
309 }
310
311 return x;
312 }
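
/* Example: for x = (plus (mem (reg)) (const_int 4)), the MEM is forced
   into a fresh pseudo (emitting a load), the CONST_INT is left alone
   because its mode is VOIDmode, and the rebuilt result is
   (plus (reg) (const_int 4)).  */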
313
314 /* Given X, a memory address in ptr_mode, convert it to an address
315 in Pmode, or vice versa (TO_MODE says which way). We take advantage of
316 the fact that pointers are not allowed to overflow by commuting arithmetic
317 operations over conversions so that address arithmetic insns can be
318 used. */
319
320 rtx
321 convert_memory_address (enum machine_mode to_mode ATTRIBUTE_UNUSED,
322 rtx x)
323 {
324 #ifndef POINTERS_EXTEND_UNSIGNED
325 gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
326 return x;
327 #else /* defined(POINTERS_EXTEND_UNSIGNED) */
328 enum machine_mode from_mode;
329 rtx temp;
330 enum rtx_code code;
331
332 /* If X already has the right mode, just return it. */
333 if (GET_MODE (x) == to_mode)
334 return x;
335
336 from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;
337
338 /* Here we handle some special cases. If none of them apply, fall through
339 to the default case. */
340 switch (GET_CODE (x))
341 {
342 case CONST_INT:
343 case CONST_DOUBLE:
344 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
345 code = TRUNCATE;
346 else if (POINTERS_EXTEND_UNSIGNED < 0)
347 break;
348 else if (POINTERS_EXTEND_UNSIGNED > 0)
349 code = ZERO_EXTEND;
350 else
351 code = SIGN_EXTEND;
352 temp = simplify_unary_operation (code, to_mode, x, from_mode);
353 if (temp)
354 return temp;
355 break;
356
357 case SUBREG:
358 if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
359 && GET_MODE (SUBREG_REG (x)) == to_mode)
360 return SUBREG_REG (x);
361 break;
362
363 case LABEL_REF:
364 temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
365 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
366 return temp;
367 break;
368
369 case SYMBOL_REF:
370 temp = shallow_copy_rtx (x);
371 PUT_MODE (temp, to_mode);
372 return temp;
373 break;
374
375 case CONST:
376 return gen_rtx_CONST (to_mode,
377 convert_memory_address (to_mode, XEXP (x, 0)));
378 break;
379
380 case PLUS:
381 case MULT:
382 /* For addition we can safely permute the conversion and addition
383 operation if one operand is a constant and converting the constant
384 does not change it or if one operand is a constant and we are
385 using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
386 We can always safely permute them if we are making the address
387 narrower. */
388 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
389 || (GET_CODE (x) == PLUS
390 && GET_CODE (XEXP (x, 1)) == CONST_INT
391 && (XEXP (x, 1) == convert_memory_address (to_mode, XEXP (x, 1))
392 || POINTERS_EXTEND_UNSIGNED < 0)))
393 return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
394 convert_memory_address (to_mode, XEXP (x, 0)),
395 XEXP (x, 1));
396 break;
397
398 default:
399 break;
400 }
401
402 return convert_modes (to_mode, from_mode,
403 x, POINTERS_EXTEND_UNSIGNED);
404 #endif /* defined(POINTERS_EXTEND_UNSIGNED) */
405 }
406 \f
407 /* Return something equivalent to X but valid as a memory address
408 for something of mode MODE. When X is not itself valid, this
409 works by copying X or subexpressions of it into registers. */
410
411 rtx
412 memory_address (enum machine_mode mode, rtx x)
413 {
414 rtx oldx = x;
415
416 x = convert_memory_address (Pmode, x);
417
418 /* By passing constant addresses through registers
419 we get a chance to cse them. */
420 if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
421 x = force_reg (Pmode, x);
422
423 /* We get better cse by rejecting indirect addressing at this stage.
424 Let the combiner create indirect addresses where appropriate.
425 For now, generate the code so that the subexpressions useful to share
426 are visible. But not if cse won't be done! */
427 else
428 {
429 if (! cse_not_expected && !REG_P (x))
430 x = break_out_memory_refs (x);
431
432 /* At this point, any valid address is accepted. */
433 if (memory_address_p (mode, x))
434 goto win;
435
436 /* If it was valid before but breaking out memory refs invalidated it,
437 use it the old way. */
438 if (memory_address_p (mode, oldx))
439 goto win2;
440
441 /* Perform machine-dependent transformations on X
442 in certain cases. This is not necessary since the code
443 below can handle all possible cases, but machine-dependent
444 transformations can make better code. */
445 LEGITIMIZE_ADDRESS (x, oldx, mode, win);
446
447 /* PLUS and MULT can appear in special ways
448 as the result of attempts to make an address usable for indexing.
449 Usually they are dealt with by calling force_operand, below.
450 But a sum containing constant terms is special
451 if removing them makes the sum a valid address:
452 then we generate that address in a register
453 and index off of it. We do this because it often makes
454 shorter code, and because the addresses thus generated
455 in registers often become common subexpressions. */
456 if (GET_CODE (x) == PLUS)
457 {
458 rtx constant_term = const0_rtx;
459 rtx y = eliminate_constant_term (x, &constant_term);
460 if (constant_term == const0_rtx
461 || ! memory_address_p (mode, y))
462 x = force_operand (x, NULL_RTX);
463 else
464 {
465 y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
466 if (! memory_address_p (mode, y))
467 x = force_operand (x, NULL_RTX);
468 else
469 x = y;
470 }
471 }
472
473 else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
474 x = force_operand (x, NULL_RTX);
475
476 /* If we have a register that's an invalid address,
477 it must be a hard reg of the wrong class. Copy it to a pseudo. */
478 else if (REG_P (x))
479 x = copy_to_reg (x);
480
481 /* Last resort: copy the value to a register, since
482 the register is a valid address. */
483 else
484 x = force_reg (Pmode, x);
485
486 goto done;
487
488 win2:
489 x = oldx;
490 win:
491 if (flag_force_addr && ! cse_not_expected && !REG_P (x))
492 {
493 x = force_operand (x, NULL_RTX);
494 x = force_reg (Pmode, x);
495 }
496 }
497
498 done:
499
500 /* If we didn't change the address, we are done. Otherwise, mark
501 a reg as a pointer if we have REG or REG + CONST_INT. */
502 if (oldx == x)
503 return x;
504 else if (REG_P (x))
505 mark_reg_pointer (x, BITS_PER_UNIT);
506 else if (GET_CODE (x) == PLUS
507 && REG_P (XEXP (x, 0))
508 && GET_CODE (XEXP (x, 1)) == CONST_INT)
509 mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);
510
511 /* OLDX may have been the address on a temporary. Update the address
512 to indicate that X is now used. */
513 update_temp_slot_address (oldx, x);
514
515 return x;
516 }
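
/* A sketch of the PLUS special case, assuming a target where reg+reg is
   a valid address: if x = (plus (plus (reg r1) (reg r2)) (const_int 4))
   is itself invalid, eliminate_constant_term yields y = (plus r1 r2)
   with constant_term = (const_int 4); the sum is then copied into a
   fresh register r3 and the address becomes (plus (reg r3)
   (const_int 4)).  */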
517
518 /* Like `memory_address' but pretend `flag_force_addr' is 0. */
519
520 rtx
521 memory_address_noforce (enum machine_mode mode, rtx x)
522 {
523 int ambient_force_addr = flag_force_addr;
524 rtx val;
525
526 flag_force_addr = 0;
527 val = memory_address (mode, x);
528 flag_force_addr = ambient_force_addr;
529 return val;
530 }
531
532 /* Convert a mem ref into one with a valid memory address.
533 Pass through anything else unchanged. */
534
535 rtx
536 validize_mem (rtx ref)
537 {
538 if (!MEM_P (ref))
539 return ref;
540 ref = use_anchored_address (ref);
541 if (! (flag_force_addr && CONSTANT_ADDRESS_P (XEXP (ref, 0)))
542 && memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
543 return ref;
544
545 /* Don't alter REF itself, since that is probably a stack slot. */
546 return replace_equiv_address (ref, XEXP (ref, 0));
547 }
548
549 /* If X is a memory reference to a member of an object block, try rewriting
550 it to use an anchor instead. Return the new memory reference on success
551 and the old one on failure. */
552
553 rtx
554 use_anchored_address (rtx x)
555 {
556 rtx base;
557 HOST_WIDE_INT offset;
558
559 if (!flag_section_anchors)
560 return x;
561
562 if (!MEM_P (x))
563 return x;
564
565 /* Split the address into a base and offset. */
566 base = XEXP (x, 0);
567 offset = 0;
568 if (GET_CODE (base) == CONST
569 && GET_CODE (XEXP (base, 0)) == PLUS
570 && GET_CODE (XEXP (XEXP (base, 0), 1)) == CONST_INT)
571 {
572 offset += INTVAL (XEXP (XEXP (base, 0), 1));
573 base = XEXP (XEXP (base, 0), 0);
574 }
575
576 /* Check whether BASE is suitable for anchors. */
577 if (GET_CODE (base) != SYMBOL_REF
578 || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
579 || SYMBOL_REF_ANCHOR_P (base)
580 || SYMBOL_REF_BLOCK (base) == NULL
581 || !targetm.use_anchors_for_symbol_p (base))
582 return x;
583
584 /* Decide where BASE is going to be. */
585 place_block_symbol (base);
586
587 /* Get the anchor we need to use. */
588 offset += SYMBOL_REF_BLOCK_OFFSET (base);
589 base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
590 SYMBOL_REF_TLS_MODEL (base));
591
592 /* Work out the offset from the anchor. */
593 offset -= SYMBOL_REF_BLOCK_OFFSET (base);
594
595 /* If we're going to run a CSE pass, force the anchor into a register.
596 We will then be able to reuse registers for several accesses, if the
597 target costs say that that's worthwhile. */
598 if (!cse_not_expected)
599 base = force_reg (GET_MODE (base), base);
600
601 return replace_equiv_address (x, plus_constant (base, offset));
602 }
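
/* Offset bookkeeping example: for a MEM whose address is
   (const (plus (symbol_ref "s") (const_int 4))), where "s" sits at byte
   16 of its object block, the accumulated offset is 4 + 16 = 20.  If the
   chosen anchor happens to sit at block offset 0, the rewritten address
   is anchor + 20.  */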
603 \f
604 /* Copy the value or contents of X to a new temp reg and return that reg. */
605
606 rtx
607 copy_to_reg (rtx x)
608 {
609 rtx temp = gen_reg_rtx (GET_MODE (x));
610
611 /* If not an operand, must be an address with PLUS and MULT so
612 do the computation. */
613 if (! general_operand (x, VOIDmode))
614 x = force_operand (x, temp);
615
616 if (x != temp)
617 emit_move_insn (temp, x);
618
619 return temp;
620 }
621
622 /* Like copy_to_reg but always give the new register mode Pmode
623 in case X is a constant. */
624
625 rtx
626 copy_addr_to_reg (rtx x)
627 {
628 return copy_to_mode_reg (Pmode, x);
629 }
630
631 /* Like copy_to_reg but always give the new register mode MODE
632 in case X is a constant. */
633
634 rtx
635 copy_to_mode_reg (enum machine_mode mode, rtx x)
636 {
637 rtx temp = gen_reg_rtx (mode);
638
639 /* If not an operand, must be an address with PLUS and MULT so
640 do the computation. */
641 if (! general_operand (x, VOIDmode))
642 x = force_operand (x, temp);
643
644 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
645 if (x != temp)
646 emit_move_insn (temp, x);
647 return temp;
648 }
649
650 /* Load X into a register if it is not already one.
651 Use mode MODE for the register.
652 X should be valid for mode MODE, but it may be a constant which
653 is valid for all integer modes; that's why the caller must specify MODE.
654
655 The caller must not alter the value in the register we return,
656 since we mark it as a "constant" register. */
657
658 rtx
659 force_reg (enum machine_mode mode, rtx x)
660 {
661 rtx temp, insn, set;
662
663 if (REG_P (x))
664 return x;
665
666 if (general_operand (x, mode))
667 {
668 temp = gen_reg_rtx (mode);
669 insn = emit_move_insn (temp, x);
670 }
671 else
672 {
673 temp = force_operand (x, NULL_RTX);
674 if (REG_P (temp))
675 insn = get_last_insn ();
676 else
677 {
678 rtx temp2 = gen_reg_rtx (mode);
679 insn = emit_move_insn (temp2, temp);
680 temp = temp2;
681 }
682 }
683
684 /* Let optimizers know that TEMP's value never changes
685 and that X can be substituted for it. Don't get confused
686 if INSN set something else (such as a SUBREG of TEMP). */
687 if (CONSTANT_P (x)
688 && (set = single_set (insn)) != 0
689 && SET_DEST (set) == temp
690 && ! rtx_equal_p (x, SET_SRC (set)))
691 set_unique_reg_note (insn, REG_EQUAL, x);
692
693 /* Let optimizers know that TEMP is a pointer, and if so, the
694 known alignment of that pointer. */
695 {
696 unsigned align = 0;
697 if (GET_CODE (x) == SYMBOL_REF)
698 {
699 align = BITS_PER_UNIT;
700 if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
701 align = DECL_ALIGN (SYMBOL_REF_DECL (x));
702 }
703 else if (GET_CODE (x) == LABEL_REF)
704 align = BITS_PER_UNIT;
705 else if (GET_CODE (x) == CONST
706 && GET_CODE (XEXP (x, 0)) == PLUS
707 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
708 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
709 {
710 rtx s = XEXP (XEXP (x, 0), 0);
711 rtx c = XEXP (XEXP (x, 0), 1);
712 unsigned sa, ca;
713
714 sa = BITS_PER_UNIT;
715 if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
716 sa = DECL_ALIGN (SYMBOL_REF_DECL (s));
717
718 ca = exact_log2 (INTVAL (c) & -INTVAL (c)) * BITS_PER_UNIT;
719
720 align = MIN (sa, ca);
721 }
722 else if (MEM_P (x) && MEM_POINTER (x))
723 align = MEM_ALIGN (x);
724
725 if (align)
726 mark_reg_pointer (temp, align);
727 }
728
729 return temp;
730 }
731
732 /* If X is a memory ref, copy its contents to a new temp reg and return
733 that reg. Otherwise, return X. */
734
735 rtx
736 force_not_mem (rtx x)
737 {
738 rtx temp;
739
740 if (!MEM_P (x) || GET_MODE (x) == BLKmode)
741 return x;
742
743 temp = gen_reg_rtx (GET_MODE (x));
744
745 if (MEM_POINTER (x))
746 REG_POINTER (temp) = 1;
747
748 emit_move_insn (temp, x);
749 return temp;
750 }
751
752 /* Copy X to TARGET (if it's nonzero and a reg)
753 or to a new temp reg and return that reg.
754 MODE is the mode to use for X in case it is a constant. */
755
756 rtx
757 copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
758 {
759 rtx temp;
760
761 if (target && REG_P (target))
762 temp = target;
763 else
764 temp = gen_reg_rtx (mode);
765
766 emit_move_insn (temp, x);
767 return temp;
768 }
769 \f
770 /* Return the mode to use to store a scalar of TYPE and MODE.
771 PUNSIGNEDP points to the signedness of the type and may be adjusted
772 to show what signedness to use on extension operations.
773
774 FOR_CALL is nonzero if this call is promoting args for a call. */
775
776 #if defined(PROMOTE_MODE) && !defined(PROMOTE_FUNCTION_MODE)
777 #define PROMOTE_FUNCTION_MODE PROMOTE_MODE
778 #endif
779
780 enum machine_mode
781 promote_mode (const_tree type, enum machine_mode mode, int *punsignedp,
782 int for_call ATTRIBUTE_UNUSED)
783 {
784 const enum tree_code code = TREE_CODE (type);
785 int unsignedp = *punsignedp;
786
787 #ifndef PROMOTE_MODE
788 if (! for_call)
789 return mode;
790 #endif
791
792 switch (code)
793 {
794 #ifdef PROMOTE_FUNCTION_MODE
795 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
796 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
797 #ifdef PROMOTE_MODE
798 if (for_call)
799 {
800 #endif
801 PROMOTE_FUNCTION_MODE (mode, unsignedp, type);
802 #ifdef PROMOTE_MODE
803 }
804 else
805 {
806 PROMOTE_MODE (mode, unsignedp, type);
807 }
808 #endif
809 break;
810 #endif
811
812 #ifdef POINTERS_EXTEND_UNSIGNED
813 case REFERENCE_TYPE:
814 case POINTER_TYPE:
815 mode = Pmode;
816 unsignedp = POINTERS_EXTEND_UNSIGNED;
817 break;
818 #endif
819
820 default:
821 break;
822 }
823
824 *punsignedp = unsignedp;
825 return mode;
826 }
827 \f
828 /* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
829 This pops when ADJUST is positive. ADJUST need not be constant. */
830
831 void
832 adjust_stack (rtx adjust)
833 {
834 rtx temp;
835
836 if (adjust == const0_rtx)
837 return;
838
839 /* We expect all variable-sized adjustments to be a multiple of
840 PREFERRED_STACK_BOUNDARY. */
841 if (GET_CODE (adjust) == CONST_INT)
842 stack_pointer_delta -= INTVAL (adjust);
843
844 temp = expand_binop (Pmode,
845 #ifdef STACK_GROWS_DOWNWARD
846 add_optab,
847 #else
848 sub_optab,
849 #endif
850 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
851 OPTAB_LIB_WIDEN);
852
853 if (temp != stack_pointer_rtx)
854 emit_move_insn (stack_pointer_rtx, temp);
855 }
856
857 /* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
858 This pushes when ADJUST is positive. ADJUST need not be constant. */
859
860 void
861 anti_adjust_stack (rtx adjust)
862 {
863 rtx temp;
864
865 if (adjust == const0_rtx)
866 return;
867
868 /* We expect all variable-sized adjustments to be a multiple of
869 PREFERRED_STACK_BOUNDARY. */
870 if (GET_CODE (adjust) == CONST_INT)
871 stack_pointer_delta += INTVAL (adjust);
872
873 temp = expand_binop (Pmode,
874 #ifdef STACK_GROWS_DOWNWARD
875 sub_optab,
876 #else
877 add_optab,
878 #endif
879 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
880 OPTAB_LIB_WIDEN);
881
882 if (temp != stack_pointer_rtx)
883 emit_move_insn (stack_pointer_rtx, temp);
884 }
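
/* For instance, anti_adjust_stack (GEN_INT (16)) allocates 16 bytes:
   on a STACK_GROWS_DOWNWARD target it emits sp := sp - 16, otherwise
   sp := sp + 16, and stack_pointer_delta grows by 16 either way.  */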
885
886 /* Round the size of a block to be pushed up to the boundary required
887 by this machine. SIZE is the desired size, which need not be constant. */
888
889 static rtx
890 round_push (rtx size)
891 {
892 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
893
894 if (align == 1)
895 return size;
896
897 if (GET_CODE (size) == CONST_INT)
898 {
899 HOST_WIDE_INT new = (INTVAL (size) + align - 1) / align * align;
900
901 if (INTVAL (size) != new)
902 size = GEN_INT (new);
903 }
904 else
905 {
906 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
907 but we know it can't. So do the addition ourselves and then
908 use TRUNC_DIV_EXPR. */
909 size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
910 NULL_RTX, 1, OPTAB_LIB_WIDEN);
911 size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
912 NULL_RTX, 1);
913 size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
914 }
915
916 return size;
917 }
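
/* Example with PREFERRED_STACK_BOUNDARY == 128 (align = 16 bytes): a
   constant size of 13 becomes (13 + 15) / 16 * 16 = 16, while a variable
   size is rewritten as ((size + 15) / 16) * 16 by the add, divide and
   multiply sequence above.  */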
918 \f
919 /* Save the stack pointer for the purpose in SAVE_LEVEL. PSAVE is a pointer
920 to a previously-created save area. If no save area has been allocated,
921 this function will allocate one. If a save area is specified, it
922 must be of the proper mode.
923
924 The insns are emitted after insn AFTER, if nonzero, otherwise the insns
925 are emitted at the current position. */
926
927 void
928 emit_stack_save (enum save_level save_level, rtx *psave, rtx after)
929 {
930 rtx sa = *psave;
931 /* The default is that we use a move insn and save in a Pmode object. */
932 rtx (*fcn) (rtx, rtx) = gen_move_insn;
933 enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);
934
935 /* See if this machine has anything special to do for this kind of save. */
936 switch (save_level)
937 {
938 #ifdef HAVE_save_stack_block
939 case SAVE_BLOCK:
940 if (HAVE_save_stack_block)
941 fcn = gen_save_stack_block;
942 break;
943 #endif
944 #ifdef HAVE_save_stack_function
945 case SAVE_FUNCTION:
946 if (HAVE_save_stack_function)
947 fcn = gen_save_stack_function;
948 break;
949 #endif
950 #ifdef HAVE_save_stack_nonlocal
951 case SAVE_NONLOCAL:
952 if (HAVE_save_stack_nonlocal)
953 fcn = gen_save_stack_nonlocal;
954 break;
955 #endif
956 default:
957 break;
958 }
959
960 /* If there is no save area and we have to allocate one, do so. Otherwise
961 verify the save area is the proper mode. */
962
963 if (sa == 0)
964 {
965 if (mode != VOIDmode)
966 {
967 if (save_level == SAVE_NONLOCAL)
968 *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
969 else
970 *psave = sa = gen_reg_rtx (mode);
971 }
972 }
973
974 if (after)
975 {
976 rtx seq;
977
978 start_sequence ();
979 do_pending_stack_adjust ();
980 /* We must validize inside the sequence, to ensure that any instructions
981 created by the validize call also get moved to the right place. */
982 if (sa != 0)
983 sa = validize_mem (sa);
984 emit_insn (fcn (sa, stack_pointer_rtx));
985 seq = get_insns ();
986 end_sequence ();
987 emit_insn_after (seq, after);
988 }
989 else
990 {
991 do_pending_stack_adjust ();
992 if (sa != 0)
993 sa = validize_mem (sa);
994 emit_insn (fcn (sa, stack_pointer_rtx));
995 }
996 }
997
998 /* Restore the stack pointer for the purpose in SAVE_LEVEL. SA is the save
999 area made by emit_stack_save. If it is zero, we have nothing to do.
1000
1001 Put any emitted insns after insn AFTER, if nonzero, otherwise at
1002 current position. */
1003
1004 void
1005 emit_stack_restore (enum save_level save_level, rtx sa, rtx after)
1006 {
1007 /* The default is that we use a move insn. */
1008 rtx (*fcn) (rtx, rtx) = gen_move_insn;
1009
1010 /* See if this machine has anything special to do for this kind of save. */
1011 switch (save_level)
1012 {
1013 #ifdef HAVE_restore_stack_block
1014 case SAVE_BLOCK:
1015 if (HAVE_restore_stack_block)
1016 fcn = gen_restore_stack_block;
1017 break;
1018 #endif
1019 #ifdef HAVE_restore_stack_function
1020 case SAVE_FUNCTION:
1021 if (HAVE_restore_stack_function)
1022 fcn = gen_restore_stack_function;
1023 break;
1024 #endif
1025 #ifdef HAVE_restore_stack_nonlocal
1026 case SAVE_NONLOCAL:
1027 if (HAVE_restore_stack_nonlocal)
1028 fcn = gen_restore_stack_nonlocal;
1029 break;
1030 #endif
1031 default:
1032 break;
1033 }
1034
1035 if (sa != 0)
1036 {
1037 sa = validize_mem (sa);
1038 /* These clobbers prevent the scheduler from moving
1039 references to variable arrays below the code
1040 that deletes (pops) the arrays. */
1041 emit_insn (gen_rtx_CLOBBER (VOIDmode,
1042 gen_rtx_MEM (BLKmode,
1043 gen_rtx_SCRATCH (VOIDmode))));
1044 emit_insn (gen_rtx_CLOBBER (VOIDmode,
1045 gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
1046 }
1047
1048 discard_pending_stack_adjust ();
1049
1050 if (after)
1051 {
1052 rtx seq;
1053
1054 start_sequence ();
1055 emit_insn (fcn (stack_pointer_rtx, sa));
1056 seq = get_insns ();
1057 end_sequence ();
1058 emit_insn_after (seq, after);
1059 }
1060 else
1061 emit_insn (fcn (stack_pointer_rtx, sa));
1062 }
1063
1064 /* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
1065 function. This function should be called whenever we allocate or
1066 deallocate dynamic stack space. */
1067
1068 void
1069 update_nonlocal_goto_save_area (void)
1070 {
1071 tree t_save;
1072 rtx r_save;
1073
1074 /* The nonlocal_goto_save_area object is an array of N pointers. The
1075 first one is used for the frame pointer save; the rest are sized by
1076 STACK_SAVEAREA_MODE. Create a reference to array index 1, the first
1077 of the stack save area slots. */
1078 t_save = build4 (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
1079 integer_one_node, NULL_TREE, NULL_TREE);
1080 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
1081
1082 emit_stack_save (SAVE_NONLOCAL, &r_save, NULL_RTX);
1083 }
1084 \f
1085 /* Return an rtx representing the address of an area of memory dynamically
1086 pushed on the stack. This region of memory is always aligned to
1087 a multiple of BIGGEST_ALIGNMENT.
1088
1089 Any required stack pointer alignment is preserved.
1090
1091 SIZE is an rtx representing the size of the area.
1092 TARGET is a place in which the address can be placed.
1093
1094 KNOWN_ALIGN is the alignment (in bits) that we know SIZE has. */
1095
1096 rtx
1097 allocate_dynamic_stack_space (rtx size, rtx target, int known_align)
1098 {
1099 /* If we're asking for zero bytes, it doesn't matter what we point
1100 to since we can't dereference it. But return a reasonable
1101 address anyway. */
1102 if (size == const0_rtx)
1103 return virtual_stack_dynamic_rtx;
1104
1105 /* Otherwise, show we're calling alloca or equivalent. */
1106 current_function_calls_alloca = 1;
1107
1108 /* Ensure the size is in the proper mode. */
1109 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1110 size = convert_to_mode (Pmode, size, 1);
1111
1112 /* We can't attempt to minimize the alignment necessary, because we don't
1113 know the final value of preferred_stack_boundary yet while executing
1114 this code. */
1115 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1116
1117 /* We will need to ensure that the address we return is aligned to
1118 BIGGEST_ALIGNMENT. If STACK_DYNAMIC_OFFSET is defined, we don't
1119 always know its final value at this point in the compilation (it
1120 might depend on the size of the outgoing parameter lists, for
1121 example), so we must align the value to be returned in that case.
1122 (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
1123 STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
1124 We must also do an alignment operation on the returned value if
1125 the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.
1126
1127 If we have to align, we must leave space in SIZE for the hole
1128 that might result from the alignment operation. */
1129
1130 #if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
1131 #define MUST_ALIGN 1
1132 #else
1133 #define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
1134 #endif
1135
1136 if (MUST_ALIGN)
1137 size
1138 = force_operand (plus_constant (size,
1139 BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
1140 NULL_RTX);
1141
1142 #ifdef SETJMP_VIA_SAVE_AREA
1143 /* If setjmp restores regs from a save area in the stack frame,
1144 avoid clobbering the reg save area. Note that the offset of
1145 virtual_incoming_args_rtx includes the preallocated stack args space.
1146 It would be no problem to clobber that, but it's on the wrong side
1147 of the old save area.
1148
1149 What used to happen is that, since we did not know for sure
1150 whether setjmp() was invoked until after RTL generation, we
1151 would use reg notes to store the "optimized" size and fix things
1152 up later. These days we know this information before we ever
1153 start building RTL so the reg notes are unnecessary. */
1154 if (!current_function_calls_setjmp)
1155 {
1156 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1157
1158 /* ??? Code below assumes that the save area needs maximal
1159 alignment. This constraint may be too strong. */
1160 gcc_assert (PREFERRED_STACK_BOUNDARY == BIGGEST_ALIGNMENT);
1161
1162 if (GET_CODE (size) == CONST_INT)
1163 {
1164 HOST_WIDE_INT new = INTVAL (size) / align * align;
1165
1166 if (INTVAL (size) != new)
1167 size = GEN_INT (new);
1168 }
1169 else
1170 {
1171 /* Since we know overflow is not possible, we avoid using
1172 CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead. */
1173 size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
1174 GEN_INT (align), NULL_RTX, 1);
1175 size = expand_mult (Pmode, size,
1176 GEN_INT (align), NULL_RTX, 1);
1177 }
1178 }
1179 else
1180 {
1181 rtx dynamic_offset
1182 = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
1183 stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);
1184
1185 size = expand_binop (Pmode, add_optab, size, dynamic_offset,
1186 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1187 }
1188 #endif /* SETJMP_VIA_SAVE_AREA */
1189
1190 /* Round the size to a multiple of the required stack alignment.
1191 Since the stack is presumed to be rounded before this allocation,
1192 this will maintain the required alignment.
1193
1194 If the stack grows downward, we could save an insn by subtracting
1195 SIZE from the stack pointer and then aligning the stack pointer.
1196 The problem with this is that the stack pointer may be unaligned
1197 between the execution of the subtraction and alignment insns and
1198 some machines do not allow this. Even on those that do, some
1199 signal handlers malfunction if a signal should occur between those
1200 insns. Since this is an extremely rare event, we have no reliable
1201 way of knowing which systems have this problem. So we avoid even
1202 momentarily mis-aligning the stack. */
1203
1204 /* If we added a variable amount to SIZE,
1205 we can no longer assume it is aligned. */
1206 #if !defined (SETJMP_VIA_SAVE_AREA)
1207 if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
1208 #endif
1209 size = round_push (size);
1210
1211 do_pending_stack_adjust ();
1212
1213 /* We ought always to be called at the top level, and the stack ought
1214 to be properly aligned.  */
1215 gcc_assert (!(stack_pointer_delta
1216 % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));
1217
1218 /* If needed, check that we have the required amount of stack. Take into
1219 account what has already been checked. */
1220 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1221 probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);
1222
1223 /* Don't use a TARGET that isn't a pseudo or is the wrong mode. */
1224 if (target == 0 || !REG_P (target)
1225 || REGNO (target) < FIRST_PSEUDO_REGISTER
1226 || GET_MODE (target) != Pmode)
1227 target = gen_reg_rtx (Pmode);
1228
1229 mark_reg_pointer (target, known_align);
1230
1231 /* Perform the required allocation from the stack. Some systems do
1232 this differently than simply incrementing/decrementing from the
1233 stack pointer, such as acquiring the space by calling malloc(). */
1234 #ifdef HAVE_allocate_stack
1235 if (HAVE_allocate_stack)
1236 {
1237 enum machine_mode mode = STACK_SIZE_MODE;
1238 insn_operand_predicate_fn pred;
1239
1240 /* We don't have to check against the predicate for operand 0 since
1241 TARGET is known to be a pseudo of the proper mode, which must
1242 be valid for the operand. For operand 1, convert to the
1243 proper mode and validate. */
1244 if (mode == VOIDmode)
1245 mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode;
1246
1247 pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
1248 if (pred && ! ((*pred) (size, mode)))
1249 size = copy_to_mode_reg (mode, convert_to_mode (mode, size, 1));
1250
1251 emit_insn (gen_allocate_stack (target, size));
1252 }
1253 else
1254 #endif
1255 {
1256 #ifndef STACK_GROWS_DOWNWARD
1257 emit_move_insn (target, virtual_stack_dynamic_rtx);
1258 #endif
1259
1260 /* Check stack bounds if necessary. */
1261 if (current_function_limit_stack)
1262 {
1263 rtx available;
1264 rtx space_available = gen_label_rtx ();
1265 #ifdef STACK_GROWS_DOWNWARD
1266 available = expand_binop (Pmode, sub_optab,
1267 stack_pointer_rtx, stack_limit_rtx,
1268 NULL_RTX, 1, OPTAB_WIDEN);
1269 #else
1270 available = expand_binop (Pmode, sub_optab,
1271 stack_limit_rtx, stack_pointer_rtx,
1272 NULL_RTX, 1, OPTAB_WIDEN);
1273 #endif
1274 emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
1275 space_available);
1276 #ifdef HAVE_trap
1277 if (HAVE_trap)
1278 emit_insn (gen_trap ());
1279 else
1280 #endif
1281 error ("stack limits not supported on this target");
1282 emit_barrier ();
1283 emit_label (space_available);
1284 }
1285
1286 anti_adjust_stack (size);
1287
1288 #ifdef STACK_GROWS_DOWNWARD
1289 emit_move_insn (target, virtual_stack_dynamic_rtx);
1290 #endif
1291 }
1292
1293 if (MUST_ALIGN)
1294 {
1295 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
1296 but we know it can't. So do the addition ourselves and then
1297 use TRUNC_DIV_EXPR. */
1298 target = expand_binop (Pmode, add_optab, target,
1299 GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
1300 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1301 target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
1302 GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
1303 NULL_RTX, 1);
1304 target = expand_mult (Pmode, target,
1305 GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
1306 NULL_RTX, 1);
1307 }
1308
1309 /* Record the new stack level for nonlocal gotos. */
1310 if (cfun->nonlocal_goto_save_area != 0)
1311 update_nonlocal_goto_save_area ();
1312
1313 return target;
1314 }
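
/* To make the MUST_ALIGN arithmetic concrete: with BIGGEST_ALIGNMENT of
   128 bits (16 bytes), SIZE is first padded by 15 bytes, and the address
   finally returned is rounded up as (target + 15) / 16 * 16, so the
   caller always sees a 16-byte-aligned block of at least the requested
   size.  */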
1315 \f
1316 /* A front end may want to override GCC's stack checking by providing a
1317 run-time routine to call to check the stack, so provide a mechanism for
1318 calling that routine. */
1319
1320 static GTY(()) rtx stack_check_libfunc;
1321
1322 void
1323 set_stack_check_libfunc (rtx libfunc)
1324 {
1325 stack_check_libfunc = libfunc;
1326 }
1327 \f
1328 /* Emit one stack probe at ADDRESS, an address within the stack. */
1329
1330 static void
1331 emit_stack_probe (rtx address)
1332 {
1333 rtx memref = gen_rtx_MEM (word_mode, address);
1334
1335 MEM_VOLATILE_P (memref) = 1;
1336
1337 if (STACK_CHECK_PROBE_LOAD)
1338 emit_move_insn (gen_reg_rtx (word_mode), memref);
1339 else
1340 emit_move_insn (memref, const0_rtx);
1341 }
1342
1343 /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
1344 FIRST is a constant and SIZE is a Pmode RTX. These are offsets from the
1345 current stack pointer. STACK_GROWS_DOWNWARD says whether to add or
1346 subtract from the stack. If SIZE is constant, this is done
1347 with a fixed number of probes. Otherwise, we must make a loop. */
1348
1349 #ifdef STACK_GROWS_DOWNWARD
1350 #define STACK_GROW_OP MINUS
1351 #else
1352 #define STACK_GROW_OP PLUS
1353 #endif
1354
1355 void
1356 probe_stack_range (HOST_WIDE_INT first, rtx size)
1357 {
1358 /* First ensure SIZE is Pmode. */
1359 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1360 size = convert_to_mode (Pmode, size, 1);
1361
1362 /* Next see if the front end has set up a function for us to call to
1363 check the stack. */
1364 if (stack_check_libfunc != 0)
1365 {
1366 rtx addr = memory_address (QImode,
1367 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1368 stack_pointer_rtx,
1369 plus_constant (size, first)));
1370
1371 addr = convert_memory_address (ptr_mode, addr);
1372 emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
1373 ptr_mode);
1374 }
1375
1376 /* Next see if we have an insn to check the stack. Use it if so. */
1377 #ifdef HAVE_check_stack
1378 else if (HAVE_check_stack)
1379 {
1380 insn_operand_predicate_fn pred;
1381 rtx last_addr
1382 = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1383 stack_pointer_rtx,
1384 plus_constant (size, first)),
1385 NULL_RTX);
1386
1387 pred = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
1388 if (pred && ! ((*pred) (last_addr, Pmode)))
1389 last_addr = copy_to_mode_reg (Pmode, last_addr);
1390
1391 emit_insn (gen_check_stack (last_addr));
1392 }
1393 #endif
1394
1395 /* If we have to generate explicit probes, see if we have a constant
1396 small number of them to generate. If so, that's the easy case. */
1397 else if (GET_CODE (size) == CONST_INT
1398 && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
1399 {
1400 HOST_WIDE_INT offset;
1401
1402 /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
1403 for values of N from 1 until it exceeds LAST. If only one
1404 probe is needed, this will not generate any code. Then probe
1405 at LAST. */
1406 for (offset = first + STACK_CHECK_PROBE_INTERVAL;
1407 offset < INTVAL (size);
1408 offset = offset + STACK_CHECK_PROBE_INTERVAL)
1409 emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1410 stack_pointer_rtx,
1411 GEN_INT (offset)));
1412
1413 emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1414 stack_pointer_rtx,
1415 plus_constant (size, first)));
1416 }
1417
1418 /* In the variable case, do the same as above, but in a loop. We emit loop
1419 notes so that loop optimization can be done. */
1420 else
1421 {
1422 rtx test_addr
1423 = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1424 stack_pointer_rtx,
1425 GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
1426 NULL_RTX);
1427 rtx last_addr
1428 = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1429 stack_pointer_rtx,
1430 plus_constant (size, first)),
1431 NULL_RTX);
1432 rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
1433 rtx loop_lab = gen_label_rtx ();
1434 rtx test_lab = gen_label_rtx ();
1435 rtx end_lab = gen_label_rtx ();
1436 rtx temp;
1437
1438 if (!REG_P (test_addr)
1439 || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
1440 test_addr = force_reg (Pmode, test_addr);
1441
1442 emit_jump (test_lab);
1443
1444 emit_label (loop_lab);
1445 emit_stack_probe (test_addr);
1446
1447 #ifdef STACK_GROWS_DOWNWARD
1448 #define CMP_OPCODE GTU
1449 temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
1450 1, OPTAB_WIDEN);
1451 #else
1452 #define CMP_OPCODE LTU
1453 temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
1454 1, OPTAB_WIDEN);
1455 #endif
1456
1457 gcc_assert (temp == test_addr);
1458
1459 emit_label (test_lab);
1460 emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
1461 NULL_RTX, Pmode, 1, loop_lab);
1462 emit_jump (end_lab);
1463 emit_label (end_lab);
1464
1465 emit_stack_probe (last_addr);
1466 }
1467 }
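
/* Probe spacing example, assuming STACK_CHECK_PROBE_INTERVAL == 4096:
   with FIRST == 0 and a constant SIZE of 8192, the constant case above
   probes at one interval past the stack pointer and the trailing
   emit_stack_probe covers the 8192-byte mark, so consecutive probes are
   never more than one interval apart.  */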
1468 \f
1469 /* Return an rtx representing the register or memory location
1470 in which a scalar value of data type VALTYPE
1471 was returned by a function call to function FUNC.
1472 FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
1473 function is known, otherwise 0.
1474 OUTGOING is 1 if on a machine with register windows this function
1475 should return the register in which the function will put its result
1476 and 0 otherwise. */
1477
1478 rtx
1479 hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
1480 int outgoing ATTRIBUTE_UNUSED)
1481 {
1482 rtx val;
1483
1484 val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);
1485
1486 if (REG_P (val)
1487 && GET_MODE (val) == BLKmode)
1488 {
1489 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
1490 enum machine_mode tmpmode;
1491
1492 /* int_size_in_bytes can return -1. We don't need a check here
1493 since the value of bytes will then be large enough that no
1494 mode will match anyway. */
1495
1496 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1497 tmpmode != VOIDmode;
1498 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1499 {
1500 /* Have we found a large enough mode? */
1501 if (GET_MODE_SIZE (tmpmode) >= bytes)
1502 break;
1503 }
1504
1505 /* No suitable mode found. */
1506 gcc_assert (tmpmode != VOIDmode);
1507
1508 PUT_MODE (val, tmpmode);
1509 }
1510 return val;
1511 }
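
/* Mode search example: for a 6-byte BLKmode return value on a target
   whose integer modes are QI/HI/SI/DI (1/2/4/8 bytes), the loop settles
   on DImode, the narrowest integer mode of at least 6 bytes, and stamps
   that mode onto VAL.  */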
1512
1513 /* Return an rtx representing the register or memory location
1514 in which a scalar value of mode MODE was returned by a library call. */
1515
1516 rtx
1517 hard_libcall_value (enum machine_mode mode)
1518 {
1519 return LIBCALL_VALUE (mode);
1520 }
1521
1522 /* Look up the tree code for a given rtx code
1523 to provide the arithmetic operation for REAL_ARITHMETIC.
1524 The function returns an int because the caller may not know
1525 what `enum tree_code' means. */
1526
1527 int
1528 rtx_to_tree_code (enum rtx_code code)
1529 {
1530 enum tree_code tcode;
1531
1532 switch (code)
1533 {
1534 case PLUS:
1535 tcode = PLUS_EXPR;
1536 break;
1537 case MINUS:
1538 tcode = MINUS_EXPR;
1539 break;
1540 case MULT:
1541 tcode = MULT_EXPR;
1542 break;
1543 case DIV:
1544 tcode = RDIV_EXPR;
1545 break;
1546 case SMIN:
1547 tcode = MIN_EXPR;
1548 break;
1549 case SMAX:
1550 tcode = MAX_EXPR;
1551 break;
1552 default:
1553 tcode = LAST_AND_UNUSED_TREE_CODE;
1554 break;
1555 }
1556 return ((int) tcode);
1557 }
1558
1559 #include "gt-explow.h"