gcc/explow.c
/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);

/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_BITSIZE (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
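
/* Worked example (illustrative, not part of the original source): for
   QImode, WIDTH is 8, so SIGN is 0x80.  trunc_int_for_mode (0xff, QImode)
   masks C to 0xff, XORs to 0x7f, subtracts 0x80 and returns -1, while
   trunc_int_for_mode (0x7f, QImode) returns 127 unchanged.  */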

/* Return an rtx for the sum of X and the integer C.  */

rtx
plus_constant (rtx x, HOST_WIDE_INT c)
{
  RTX_CODE code;
  rtx y;
  enum machine_mode mode;
  rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        unsigned HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
        unsigned HOST_WIDE_INT lv;
        HOST_WIDE_INT hv;

        add_double (l1, h1, l2, h2, &lv, &hv);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem
            = force_const_mem (GET_MODE (x),
                               plus_constant (get_pool_constant (XEXP (x, 0)),
                                              c));
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
         Look for constant term in the sum and combine
         with C.  For an integer constant term, we make a combined
         integer.  For a constant term that is not an explicit integer,
         we cannot really combine, but group them together anyway.

         Restart or use a recursive call in case the remaining operand is
         something that we handle specially, such as a SYMBOL_REF.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (CONST_INT_P (XEXP (x, 1)))
        {
          c += INTVAL (XEXP (x, 1));

          if (GET_MODE (x) != VOIDmode)
            c = trunc_int_for_mode (c, GET_MODE (x));

          x = XEXP (x, 0);
          goto restart;
        }
      else if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
          c = 0;
        }
      else if (find_constant_term_loc (&y))
        {
          /* We need to be careful since X may be shared and we can't
             modify it in place.  */
          rtx copy = copy_rtx (x);
          rtx *const_loc = find_constant_term_loc (&copy);

          *const_loc = plus_constant (*const_loc, c);
          x = copy;
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
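
/* Illustrative behavior (not part of the original source):
   plus_constant (GEN_INT (4), 3) folds to (const_int 7), and applying
   C = 3 to (plus (reg) (const_int 4)) combines the constant terms to
   give (plus (reg) (const_int 7)) instead of nesting another PLUS.  */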
\f
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
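
/* Illustrative trace (not part of the original source): starting with
   *CONSTPTR = const0_rtx, eliminate_constant_term applied to
   (plus (plus (reg) (const_int 8)) (const_int 4)) peels both constants,
   returns (reg) and leaves *CONSTPTR = (const_int 12).  */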

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !host_integerp (size, 0))
    return -1;

  return tree_low_cst (size, 0);
}
\f
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and
   multiplication.  Values returned by expand_expr with 1 for sum_ok fit
   this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (enum machine_mode to_mode ATTRIBUTE_UNUSED,
                                   rtx x, addr_space_t as ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address_addr_space
                              (to_mode, XEXP (x, 0), as));
      break;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
         operation if one operand is a constant and converting the constant
         does not change it or if one operand is a constant and we are
         using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
         We can always safely permute them if we are making the address
         narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && CONST_INT_P (XEXP (x, 1))
              && (XEXP (x, 1) == convert_memory_address_addr_space
                                   (to_mode, XEXP (x, 1), as)
                  || POINTERS_EXTEND_UNSIGNED < 0)))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address_addr_space
                                 (to_mode, XEXP (x, 0), as),
                               XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
\f
/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
        goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
        {
          x = oldx;
          goto done;
        }

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      {
        rtx orig_x = x;
        x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
        if (orig_x != x && memory_address_addr_space_p (mode, x, as))
          goto done;
      }

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_addr_space_p (mode, y, as))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_addr_space_p (mode, y, as))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
                                   MEM_ADDR_SPACE (ref)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  if (!cse_not_expected)
    base = force_reg (GET_MODE (base), base);

  return replace_equiv_address (x, plus_constant (base, offset));
}
\f
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

        if (INTVAL (c) == 0)
          align = sa;
        else
          {
            ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
            align = MIN (sa, ca);
          }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}
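
/* Illustrative alignment computation (not part of the original source):
   for X = (const (plus (symbol_ref "a") (const_int 4))) where "a" has
   DECL_ALIGN of 256 bits, SA is 256 and CA is ctz (4) * 8 = 16, so the
   returned pseudo is marked as a pointer with 16-bit known alignment.  */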

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}
\f
/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

enum machine_mode
promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
                       const_tree funtype, int for_return)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp,
                                                  funtype, for_return);

    default:
      return mode;
    }
}

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

enum machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
              int *punsignedp ATTRIBUTE_UNUSED)
{
  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  const enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;
      return mode;
      break;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
               (TYPE_ADDR_SPACE (TREE_TYPE (type)));
      break;
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}


/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

enum machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode mode = DECL_MODE (decl);
  enum machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL
      || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}
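
/* For instance (illustrative, target-dependent): on a target whose
   PROMOTE_MODE widens sub-word integers, promote_decl_mode on a QImode
   PARM_DECL typically yields SImode, with *PUNSIGNEDP reflecting the
   signedness of the parameter's type.  */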

\f
/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       add_optab,
#else
                       sub_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       sub_optab,
#else
                       add_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
        return size;

      if (CONST_INT_P (size))
        {
          HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

          if (INTVAL (size) != new_size)
            size = GEN_INT (new_size);
          return size;
        }

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
         virtual_preferred_stack_boundary_rtx instead.  This will be
         substituted by the right value in vregs pass and optimized
         during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (align_rtx, -1), NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
                        NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}
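
/* Illustrative arithmetic (not part of the original source): with a
   16-byte boundary, a constant SIZE of 40 rounds to
   (40 + 15) / 16 * 16 = 48; the add/divide/multiply sequence above
   computes the same result when SIZE is only known at run time.  */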
\f
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */

void
emit_stack_save (enum save_level save_level, rtx *psave)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }

  do_pending_stack_adjust ();
  if (sa != 0)
    sa = validize_mem (sa);
  emit_insn (fcn (sa, stack_pointer_rtx));
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

void
emit_stack_restore (enum save_level save_level, rtx sa)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  emit_insn (fcn (stack_pointer_rtx, sa));
}

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
}
\f
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */

rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
                              unsigned required_align, bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx final_label, final_target, target;
  unsigned extra_align = 0;
  bool must_align;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage)
    {
      if (CONST_INT_P (size))
        stack_usage_size = INTVAL (size);
      else if (REG_P (size))
        {
          /* Look into the last emitted insn and see if we can deduce
             something for the register.  */
          rtx insn, set, note;
          insn = get_last_insn ();
          if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
            {
              if (CONST_INT_P (SET_SRC (set)))
                stack_usage_size = INTVAL (SET_SRC (set));
              else if ((note = find_reg_equal_equiv_note (insn))
                       && CONST_INT_P (XEXP (note, 0)))
                stack_usage_size = INTVAL (XEXP (note, 0));
            }
        }

      /* If the size is not constant, we can't say anything.  */
      if (stack_usage_size == -1)
        {
          current_function_has_unbounded_dynamic_stack_size = 1;
          stack_usage_size = 0;
        }
    }

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Adjust SIZE_ALIGN, if needed.  */
  if (CONST_INT_P (size))
    {
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);
      lsb &= -lsb;

      /* Watch out for overflow truncating to "unsigned".  */
      if (lsb > UINT_MAX / BITS_PER_UNIT)
        size_align = 1u << (HOST_BITS_PER_INT - 1);
      else
        size_align = (unsigned)lsb * BITS_PER_UNIT;
    }
  else if (size_align < BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     REQUIRED_ALIGN.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than REQUIRED_ALIGN.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

  must_align = (crtl->preferred_stack_boundary < required_align);
  if (must_align)
    {
      if (required_align > PREFERRED_STACK_BOUNDARY)
        extra_align = PREFERRED_STACK_BOUNDARY;
      else if (required_align > STACK_BOUNDARY)
        extra_align = STACK_BOUNDARY;
      else
        extra_align = BITS_PER_UNIT;
    }

  /* ??? STACK_POINTER_OFFSET is always defined now.  */
#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
  must_align = true;
  extra_align = BITS_PER_UNIT;
#endif

  if (must_align)
    {
      unsigned extra = (required_align - extra_align) / BITS_PER_UNIT;

      size = plus_constant (size, extra);
      size = force_operand (size, NULL_RTX);

      if (flag_stack_usage)
        stack_usage_size += extra;

      if (extra && size_align > extra_align)
        size_align = extra_align;
    }

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.

     What used to happen is that, since we did not know for sure
     whether setjmp() was invoked until after RTL generation, we
     would use reg notes to store the "optimized" size and fix things
     up later.  These days we know this information before we ever
     start building RTL so the reg notes are unnecessary.  */
  if (cfun->calls_setjmp)
    {
      rtx dynamic_offset
        = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
                        stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

      size = expand_binop (Pmode, add_optab, size, dynamic_offset,
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);

      /* The above dynamic offset cannot be computed statically at this
         point, but it will be possible to do so after RTL expansion is
         done.  Record how many times we will need to add it.  */
      if (flag_stack_usage)
        current_function_dynamic_alloc_count++;

      /* ??? Can we infer a minimum of STACK_BOUNDARY here?  */
      size_align = BITS_PER_UNIT;
    }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */
  if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
    {
      size = round_push (size);

      if (flag_stack_usage)
        {
          int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
          stack_usage_size = (stack_usage_size + align - 1) / align * align;
        }
    }

  target = gen_reg_rtx (Pmode);

  /* The size is supposed to be fully adjusted at this point so record it
     if stack usage info is requested.  */
  if (flag_stack_usage)
    {
      current_function_dynamic_stack_size += stack_usage_size;

      /* ??? This is gross but the only safe stance in the absence
         of stack usage oriented flow analysis.  */
      if (!cannot_accumulate)
        current_function_has_unbounded_dynamic_stack_size = 1;
    }

  final_label = NULL_RTX;
  final_target = NULL_RTX;

  /* If we are splitting the stack, we need to ask the backend whether
     there is enough room on the current stack.  If there isn't, or if
     the backend doesn't know how to tell us, then we need to call a
     function to allocate memory in some other way.  This memory will
     be released when we release the current stack segment.  The
     effect is that stack allocation becomes less efficient, but at
     least it doesn't cause a stack overflow.  */
  if (flag_split_stack)
    {
      rtx available_label, ask, space, func;

      available_label = NULL_RTX;

#ifdef HAVE_split_stack_space_check
      if (HAVE_split_stack_space_check)
        {
          available_label = gen_label_rtx ();

          /* This instruction will branch to AVAILABLE_LABEL if there
             are SIZE bytes available on the stack.  */
          emit_insn (gen_split_stack_space_check (size, available_label));
        }
#endif

      /* The __morestack_allocate_stack_space function will allocate
         memory using malloc.  If the alignment of the memory returned
         by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
         make sure we allocate enough space.  */
      if (MALLOC_ABI_ALIGNMENT >= required_align)
        ask = size;
      else
        {
          ask = expand_binop (Pmode, add_optab, size,
                              GEN_INT (required_align / BITS_PER_UNIT - 1),
                              NULL_RTX, 1, OPTAB_LIB_WIDEN);
          must_align = true;
        }

      func = init_one_libfunc ("__morestack_allocate_stack_space");

      space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
                                       1, ask, Pmode);

      if (available_label == NULL_RTX)
        return space;

      final_target = gen_reg_rtx (Pmode);

      emit_move_insn (final_target, space);

      final_label = gen_label_rtx ();
      emit_jump (final_label);

      emit_label (available_label);
    }

  do_pending_stack_adjust ();

  /* We should always be called at top level, and the stack should be
     properly aligned at this point.  */
  gcc_assert (!(stack_pointer_delta
                % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
    ;
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
                       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (STACK_CHECK_PROTECT, size);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;
      insn_operand_predicate_fn pred;

      /* We don't have to check against the predicate for operand 0 since
         TARGET is known to be a pseudo of the proper mode, which must
         be valid for the operand.  For operand 1, convert to the
         proper mode and validate.  */
      if (mode == VOIDmode)
        mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode;

      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
      if (pred && ! ((*pred) (size, mode)))
        size = copy_to_mode_reg (mode, convert_to_mode (mode, size, 1));

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
      int saved_stack_pointer_delta;

#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
        {
          rtx available;
          rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
          available = expand_binop (Pmode, sub_optab,
                                    stack_pointer_rtx, stack_limit_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#else
          available = expand_binop (Pmode, sub_optab,
                                    stack_limit_rtx, stack_pointer_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#endif
          emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
                                   space_available);
#ifdef HAVE_trap
          if (HAVE_trap)
            emit_insn (gen_trap ());
          else
#endif
            error ("stack limits not supported on this target");
          emit_barrier ();
          emit_label (space_available);
        }

      saved_stack_pointer_delta = stack_pointer_delta;
      if (flag_stack_check && STACK_CHECK_MOVING_SP)
        anti_adjust_stack_and_probe (size, false);
      else
        anti_adjust_stack (size);
      /* Even if size is constant, don't modify stack_pointer_delta.
         The constant size alloca should preserve
         crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  if (must_align)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
                             GEN_INT (required_align / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              GEN_INT (required_align / BITS_PER_UNIT),
                              NULL_RTX, 1);
      target = expand_mult (Pmode, target,
                            GEN_INT (required_align / BITS_PER_UNIT),
                            NULL_RTX, 1);
    }

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  return target;
}
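
/* Illustrative numbers (not part of the original source): if
   REQUIRED_ALIGN is 256 bits (32 bytes), the must_align fix-up above
   computes TARGET = (TARGET + 31) / 32 * 32, rounding the returned
   address up to the next 32-byte boundary.  */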
\f
/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (const char *libfunc_name)
{
  gcc_assert (stack_check_libfunc == NULL_RTX);
  stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
}
\f
/* Emit one stack probe at ADDRESS, an address within the stack.  */

void
emit_stack_probe (rtx address)
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  /* See if we have an insn to probe the stack.  */
#ifdef HAVE_probe_stack
  if (HAVE_probe_stack)
    emit_insn (gen_probe_stack (memref));
  else
#endif
    emit_move_insn (memref, const0_rtx);
}

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and size is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.  */

#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#define STACK_GROW_OPTAB sub_optab
#define STACK_GROW_OFF(off) -(off)
#else
#define STACK_GROW_OP PLUS
#define STACK_GROW_OPTAB add_optab
#define STACK_GROW_OFF(off) (off)
#endif

void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if we have a function to check the stack.  */
  if (stack_check_libfunc)
    {
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (size, first)));
      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
                         Pmode);
    }

  /* Next see if we have an insn to check the stack.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (size, first)));
      insn_operand_predicate_fn pred
        = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
      if (pred && !((*pred) (addr, Pmode)))
        addr = copy_to_mode_reg (Pmode, addr);

      emit_insn (gen_check_stack (addr));
    }
#endif

  /* Otherwise we have to generate explicit probes.  If we have a constant
     small number of them to generate, that's the easy case.  */
  else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      rtx addr;

      /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
         it exceeds SIZE.  If only one probe is needed, this will not
         generate any code.  Then probe at FIRST + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          addr = memory_address (Pmode,
                                 plus_constant (stack_pointer_rtx,
                                                STACK_GROW_OFF (first + i)));
          emit_stack_probe (addr);
        }

      addr = memory_address (Pmode,
                             plus_constant (stack_pointer_rtx,
                                            STACK_GROW_OFF (first + isize)));
      emit_stack_probe (addr);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 GEN_INT (first)), NULL_RTX);

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 test_addr,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (TEST_ADDR != LAST_ADDR)
           {
             TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
             probe at TEST_ADDR
           }

         probes at FIRST + N * PROBE_INTERVAL for values of N from 1
         until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
                               end_lab);

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
                           GEN_INT (PROBE_INTERVAL), test_addr,
                           1, OPTAB_WIDEN);

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
         that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          rtx addr;

          if (CONST_INT_P (temp))
            {
              /* Use [base + disp] addressing mode if supported.  */
              HOST_WIDE_INT offset = INTVAL (temp);
              addr = memory_address (Pmode,
                                     plus_constant (last_addr,
                                                    STACK_GROW_OFF (offset)));
            }
          else
            {
              /* Manual CSE if the difference is not known at compile-time.  */
              temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
              addr = memory_address (Pmode,
                                     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                     last_addr, temp));
            }

          emit_stack_probe (addr);
        }
    }
}
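
/* Illustrative probe sequence (not part of the original source): with a
   4096-byte PROBE_INTERVAL, FIRST = 0 and a constant SIZE of 10000, the
   constant case above emits probes at offsets 4096, 8192 and 10000 from
   the stack pointer, in the direction the stack grows.  */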

/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */

void
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
{
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;

  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* If we have a constant small number of probes to generate, that's the
     easy case.  */
  if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it exceeds SIZE.  If only one probe is
         needed, this will not generate any code.  Then adjust and probe
         to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          if (first_probe)
            {
              anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
              first_probe = false;
            }
          else
            anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
          emit_stack_probe (stack_pointer_rtx);
        }

      if (first_probe)
        anti_adjust_stack (plus_constant (size, PROBE_INTERVAL + dope));
      else
        anti_adjust_stack (plus_constant (size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (SP != LAST_ADDR)
           {
             SP = SP + PROBE_INTERVAL
             probe at SP
           }

         adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
                               Pmode, 1, end_lab);

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
         assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          /* Manual CSE if the difference is not known at compile-time.  */
          if (GET_CODE (temp) != CONST_INT)
            temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
          anti_adjust_stack (temp);
          emit_stack_probe (stack_pointer_rtx);
        }
    }

  /* Adjust back and account for the additional first interval.  */
  if (adjust_back)
    adjust_stack (plus_constant (size, PROBE_INTERVAL + dope));
  else
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
}
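
/* Illustrative accounting (not part of the original source): on a
   64-bit target DOPE is 32 bytes; for a constant SIZE of 10000 with a
   4096-byte PROBE_INTERVAL, the loop above adjusts by 8224 then 4096
   bytes (probing after each), the tail adjusts by the remaining 1808
   bytes, and the total 14128 equals SIZE + PROBE_INTERVAL + DOPE.  */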

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
                     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
         since the value of bytes will then be large enough that no
         mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != VOIDmode;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (enum machine_mode mode, rtx fun)
{
  return targetm.calls.libcall_value (mode, fun);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}

#include "gt-explow.h"