/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);


/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_BITSIZE (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
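
/* As a worked example of the sign-extension trick above: for MODE == QImode
   (WIDTH == 8) and C == 0x1ff,

     sign = 0x80
     c &= 0xff  -> 0xff   (truncate to the mode's width)
     c ^= 0x80  -> 0x7f   (flip the sign bit)
     c -= 0x80  -> -1     (re-apply the sign)

   so 0x1ff truncated to QImode yields the sign-extended value -1.  */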

/* Return an rtx for the sum of X and the integer C.  */

rtx
plus_constant (rtx x, HOST_WIDE_INT c)
{
  RTX_CODE code;
  rtx y;
  enum machine_mode mode;
  rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        unsigned HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
        unsigned HOST_WIDE_INT lv;
        HOST_WIDE_INT hv;

        add_double (l1, h1, l2, h2, &lv, &hv);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem
            = force_const_mem (GET_MODE (x),
                               plus_constant (get_pool_constant (XEXP (x, 0)),
                                              c));
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.  Look for
         a constant term in the sum and combine it with C.  For an integer
         constant term, we make a combined integer.  For a constant term
         that is not an explicit integer, we cannot really combine, but
         group them together anyway.

         Restart or use a recursive call in case the remaining operand is
         something that we handle specially, such as a SYMBOL_REF.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (CONST_INT_P (XEXP (x, 1)))
        {
          c += INTVAL (XEXP (x, 1));

          if (GET_MODE (x) != VOIDmode)
            c = trunc_int_for_mode (c, GET_MODE (x));

          x = XEXP (x, 0);
          goto restart;
        }
      else if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
          c = 0;
        }
      else if (find_constant_term_loc (&y))
        {
          /* We need to be careful since X may be shared and we can't
             modify it in place.  */
          rtx copy = copy_rtx (x);
          rtx *const_loc = find_constant_term_loc (&copy);

          *const_loc = plus_constant (*const_loc, c);
          x = copy;
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
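
/* For instance, plus_constant on (const (plus (symbol_ref "x") (const_int 4)))
   with C == 8 strips the CONST, folds the integer terms via the PLUS case,
   and returns (const (plus (symbol_ref "x") (const_int 12))).  */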
\f
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
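
/* For instance, with X = (plus (plus (reg) (const_int 4)) (const_int 8))
   and *CONSTPTR == const0_rtx on entry, both integer terms are stripped:
   the result is (reg) and *CONSTPTR ends up as (const_int 12).  */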

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !host_integerp (size, 0))
    return -1;

  return tree_low_cst (size, 0);
}
\f
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}
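
/* For instance, given X = (plus (mem (reg)) (symbol_ref "x")), the MEM and
   the SYMBOL_REF are each forced into a fresh pseudo (emitting the loads)
   and a new sum of the two pseudos is returned; X itself is unchanged.  */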

/* Given X, a memory address in address space AS's pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (enum machine_mode to_mode ATTRIBUTE_UNUSED,
                                   rtx x, addr_space_t as ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address_addr_space
                              (to_mode, XEXP (x, 0), as));
      break;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
         operation if one operand is a constant and converting the constant
         does not change it or if one operand is a constant and we are
         using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
         We can always safely permute them if we are making the address
         narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && CONST_INT_P (XEXP (x, 1))
              && (XEXP (x, 1) == convert_memory_address_addr_space
                                   (to_mode, XEXP (x, 1), as)
                  || POINTERS_EXTEND_UNSIGNED < 0)))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address_addr_space
                                 (to_mode, XEXP (x, 0), as),
                               XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
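
/* As an example of the PLUS case above: narrowing
   (plus:DI (reg:DI) (const_int 4)) to SImode rebuilds the sum in SImode
   around the low part of the register, so a single SImode add can do the
   address arithmetic instead of converting the whole DImode sum.  */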
\f
/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
        goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
        {
          x = oldx;
          goto done;
        }

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      {
        rtx orig_x = x;
        x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
        if (orig_x != x && memory_address_addr_space_p (mode, x, as))
          goto done;
      }

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_addr_space_p (mode, y, as))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_addr_space_p (mode, y, as))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
                                   MEM_ADDR_SPACE (ref)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  if (!cse_not_expected)
    base = force_reg (GET_MODE (base), base);

  return replace_equiv_address (x, plus_constant (base, offset));
}
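
/* For instance, suppose symbol "b" is placed at offset 12 of its object
   block and the anchor chosen for that position sits at block offset 8.
   Then (mem (symbol_ref "b")) is rewritten in terms of the anchor plus the
   residual offset 4 (with the anchor living in a pseudo when a CSE pass is
   still to run), so nearby block members can share one base address.  */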
\f
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

        if (INTVAL (c) == 0)
          align = sa;
        else
          {
            ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
            align = MIN (sa, ca);
          }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}
\f
/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

enum machine_mode
promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
                       const_tree funtype, int for_return)
{
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
    {
      if (INTEGRAL_MODE_P (mode))
        return targetm.calls.promote_function_mode (NULL_TREE, mode,
                                                    punsignedp, funtype,
                                                    for_return);
      else
        return mode;
    }

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp,
                                                  funtype, for_return);

    default:
      return mode;
    }
}

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

enum machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
              int *punsignedp ATTRIBUTE_UNUSED)
{
  /* For libcalls this is invoked without TYPE from the backend's
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     case.  */
  if (type == NULL_TREE)
    return mode;

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  const enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;
      return mode;
      break;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
               (TYPE_ADDR_SPACE (TREE_TYPE (type)));
      break;
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}
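
/* For instance, on a target whose PROMOTE_MODE widens small integer modes
   to the word mode (typical of RISC ports), promote_mode for a QImode
   INTEGER_TYPE returns the widened mode and updates *PUNSIGNEDP with the
   signedness PROMOTE_MODE selects, so the value is kept in a full register
   between operations.  */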


/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

enum machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode mode = DECL_MODE (decl);
  enum machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL
      || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}

\f
/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       add_optab,
#else
                       sub_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       sub_optab,
#else
                       add_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
        return size;

      if (CONST_INT_P (size))
        {
          HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

          if (INTVAL (size) != new_size)
            size = GEN_INT (new_size);
          return size;
        }

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
         virtual_preferred_stack_boundary_rtx instead.  This will be
         substituted by the right value in the vregs pass and optimized
         during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (align_rtx, -1), NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
                        NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}
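
/* For instance, with a preferred boundary of 128 bits (ALIGN == 16 bytes),
   a constant SIZE of 40 rounds to (40 + 15) / 16 * 16 == 48; the variable
   case emits the same add/divide/multiply computation at run time.  */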
\f
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */

void
emit_stack_save (enum save_level save_level, rtx *psave)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }

  do_pending_stack_adjust ();
  if (sa != 0)
    sa = validize_mem (sa);
  emit_insn (fcn (sa, stack_pointer_rtx));
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

void
emit_stack_restore (enum save_level save_level, rtx sa)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of
     restore.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  emit_insn (fcn (stack_pointer_rtx, sa));
}

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
}
\f
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */

rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
                              unsigned required_align, bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx final_label, final_target, target;
  unsigned extra_align = 0;
  bool must_align;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage)
    {
      if (CONST_INT_P (size))
        stack_usage_size = INTVAL (size);
      else if (REG_P (size))
        {
          /* Look into the last emitted insn and see if we can deduce
             something for the register.  */
          rtx insn, set, note;
          insn = get_last_insn ();
          if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
            {
              if (CONST_INT_P (SET_SRC (set)))
                stack_usage_size = INTVAL (SET_SRC (set));
              else if ((note = find_reg_equal_equiv_note (insn))
                       && CONST_INT_P (XEXP (note, 0)))
                stack_usage_size = INTVAL (XEXP (note, 0));
            }
        }

      /* If the size is not constant, we can't say anything.  */
      if (stack_usage_size == -1)
        {
          current_function_has_unbounded_dynamic_stack_size = 1;
          stack_usage_size = 0;
        }
    }

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Adjust SIZE_ALIGN, if needed.  */
  if (CONST_INT_P (size))
    {
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);
      lsb &= -lsb;

      /* Watch out for overflow truncating to "unsigned".  */
      if (lsb > UINT_MAX / BITS_PER_UNIT)
        size_align = 1u << (HOST_BITS_PER_INT - 1);
      else
        size_align = (unsigned)lsb * BITS_PER_UNIT;
    }
  else if (size_align < BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     REQUIRED_ALIGN.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than REQUIRED_ALIGN.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

  must_align = (crtl->preferred_stack_boundary < required_align);
  if (must_align)
    {
      if (required_align > PREFERRED_STACK_BOUNDARY)
        extra_align = PREFERRED_STACK_BOUNDARY;
      else if (required_align > STACK_BOUNDARY)
        extra_align = STACK_BOUNDARY;
      else
        extra_align = BITS_PER_UNIT;
    }

  /* ??? STACK_POINTER_OFFSET is always defined now.  */
#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
  must_align = true;
  extra_align = BITS_PER_UNIT;
#endif

  if (must_align)
    {
      unsigned extra = (required_align - extra_align) / BITS_PER_UNIT;

      size = plus_constant (size, extra);
      size = force_operand (size, NULL_RTX);

      if (flag_stack_usage)
        stack_usage_size += extra;

      if (extra && size_align > extra_align)
        size_align = extra_align;
    }

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.

     What used to happen is that, since we did not know for sure
     whether setjmp() was invoked until after RTL generation, we
     would use reg notes to store the "optimized" size and fix things
     up later.  These days we know this information before we ever
     start building RTL so the reg notes are unnecessary.  */
  if (cfun->calls_setjmp)
    {
      rtx dynamic_offset
        = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
                        stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

      size = expand_binop (Pmode, add_optab, size, dynamic_offset,
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);

      /* The above dynamic offset cannot be computed statically at this
         point, but it will be possible to do so after RTL expansion is
         done.  Record how many times we will need to add it.  */
      if (flag_stack_usage)
        current_function_dynamic_alloc_count++;

      /* ??? Can we infer a minimum of STACK_BOUNDARY here?  */
      size_align = BITS_PER_UNIT;
    }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */
  if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
    {
      size = round_push (size);

      if (flag_stack_usage)
        {
          int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
          stack_usage_size = (stack_usage_size + align - 1) / align * align;
        }
    }

  target = gen_reg_rtx (Pmode);

  /* The size is supposed to be fully adjusted at this point so record it
     if stack usage info is requested.  */
  if (flag_stack_usage)
    {
      current_function_dynamic_stack_size += stack_usage_size;

      /* ??? This is gross but the only safe stance in the absence
         of stack usage oriented flow analysis.  */
      if (!cannot_accumulate)
        current_function_has_unbounded_dynamic_stack_size = 1;
    }

  final_label = NULL_RTX;
  final_target = NULL_RTX;

  /* If we are splitting the stack, we need to ask the backend whether
     there is enough room on the current stack.  If there isn't, or if
     the backend doesn't know how to tell us, then we need to call a
     function to allocate memory in some other way.  This memory will
     be released when we release the current stack segment.  The
     effect is that stack allocation becomes less efficient, but at
     least it doesn't cause a stack overflow.  */
  if (flag_split_stack)
    {
      rtx available_label, ask, space, func;

      available_label = NULL_RTX;

#ifdef HAVE_split_stack_space_check
      if (HAVE_split_stack_space_check)
        {
          available_label = gen_label_rtx ();

          /* This instruction will branch to AVAILABLE_LABEL if there
             are SIZE bytes available on the stack.  */
          emit_insn (gen_split_stack_space_check (size, available_label));
        }
#endif

      /* The __morestack_allocate_stack_space function will allocate
         memory using malloc.  If the alignment of the memory returned
         by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
         make sure we allocate enough space.  */
      if (MALLOC_ABI_ALIGNMENT >= required_align)
        ask = size;
      else
        {
          ask = expand_binop (Pmode, add_optab, size,
                              GEN_INT (required_align / BITS_PER_UNIT - 1),
                              NULL_RTX, 1, OPTAB_LIB_WIDEN);
          must_align = true;
        }

      func = init_one_libfunc ("__morestack_allocate_stack_space");

      space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
                                       1, ask, Pmode);

      if (available_label == NULL_RTX)
        return space;

      final_target = gen_reg_rtx (Pmode);

      emit_move_insn (final_target, space);

      final_label = gen_label_rtx ();
      emit_jump (final_label);

      emit_label (available_label);
    }

  do_pending_stack_adjust ();

  /* We ought always to be called on the top level and the stack ought to
     be aligned properly.  */
  gcc_assert (!(stack_pointer_delta
                % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
    ;
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
                       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (STACK_CHECK_PROTECT, size);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      struct expand_operand ops[2];
      /* We don't have to check against the predicate for operand 0 since
         TARGET is known to be a pseudo of the proper mode, which must
         be valid for the operand.  */
      create_fixed_operand (&ops[0], target);
      create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
      expand_insn (CODE_FOR_allocate_stack, 2, ops);
    }
  else
#endif
    {
      int saved_stack_pointer_delta;

#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
        {
          rtx available;
          rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
          available = expand_binop (Pmode, sub_optab,
                                    stack_pointer_rtx, stack_limit_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#else
          available = expand_binop (Pmode, sub_optab,
                                    stack_limit_rtx, stack_pointer_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#endif
          emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
                                   space_available);
#ifdef HAVE_trap
          if (HAVE_trap)
            emit_insn (gen_trap ());
          else
#endif
            error ("stack limits not supported on this target");
          emit_barrier ();
          emit_label (space_available);
        }

      saved_stack_pointer_delta = stack_pointer_delta;
      if (flag_stack_check && STACK_CHECK_MOVING_SP)
        anti_adjust_stack_and_probe (size, false);
      else
        anti_adjust_stack (size);
      /* Even if size is constant, don't modify stack_pointer_delta.
         The constant size alloca should preserve
         crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  if (must_align)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
                             GEN_INT (required_align / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              GEN_INT (required_align / BITS_PER_UNIT),
                              NULL_RTX, 1);
      target = expand_mult (Pmode, target,
                            GEN_INT (required_align / BITS_PER_UNIT),
                            NULL_RTX, 1);
    }

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  return target;
}
\f
/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (const char *libfunc_name)
{
  gcc_assert (stack_check_libfunc == NULL_RTX);
  stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
}
\f
/* Emit one stack probe at ADDRESS, an address within the stack.  */

void
emit_stack_probe (rtx address)
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  /* See if we have an insn to probe the stack.  */
#ifdef HAVE_probe_stack
  if (HAVE_probe_stack)
    emit_insn (gen_probe_stack (memref));
  else
#endif
    emit_move_insn (memref, const0_rtx);
}

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and size is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.  */

#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#define STACK_GROW_OPTAB sub_optab
#define STACK_GROW_OFF(off) -(off)
#else
#define STACK_GROW_OP PLUS
#define STACK_GROW_OPTAB add_optab
#define STACK_GROW_OFF(off) (off)
#endif

void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if we have a function to check the stack.  */
  if (stack_check_libfunc)
    {
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (size, first)));
      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
                         Pmode);
      return;
    }

  /* Next see if we have an insn to check the stack.  */
#ifdef HAVE_check_stack
  if (HAVE_check_stack)
    {
      struct expand_operand ops[1];
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (size, first)));

      create_input_operand (&ops[0], addr, Pmode);
      if (maybe_expand_insn (CODE_FOR_check_stack, 1, ops))
        return;
    }
#endif

  /* Otherwise we have to generate explicit probes.  If we have a constant
     small number of them to generate, that's the easy case.  */
  else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      rtx addr;

      /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
         it exceeds SIZE.  If only one probe is needed, this will not
         generate any code.  Then probe at FIRST + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          addr = memory_address (Pmode,
                                 plus_constant (stack_pointer_rtx,
                                                STACK_GROW_OFF (first + i)));
          emit_stack_probe (addr);
        }

      addr = memory_address (Pmode,
                             plus_constant (stack_pointer_rtx,
                                            STACK_GROW_OFF (first + isize)));
      emit_stack_probe (addr);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 GEN_INT (first)), NULL_RTX);

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 test_addr,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (TEST_ADDR != LAST_ADDR)
           {
             TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
             probe at TEST_ADDR
           }

         probes at FIRST + N * PROBE_INTERVAL for values of N from 1
         until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
                               end_lab);

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
                           GEN_INT (PROBE_INTERVAL), test_addr,
                           1, OPTAB_WIDEN);

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
         that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          rtx addr;

          if (CONST_INT_P (temp))
            {
              /* Use [base + disp] addressing mode if supported.  */
              HOST_WIDE_INT offset = INTVAL (temp);
              addr = memory_address (Pmode,
                                     plus_constant (last_addr,
                                                    STACK_GROW_OFF (offset)));
            }
          else
            {
              /* Manual CSE if the difference is not known at compile-time.  */
              temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
              addr = memory_address (Pmode,
                                     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                     last_addr, temp));
            }

          emit_stack_probe (addr);
        }
    }
}
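
/* For instance, with PROBE_INTERVAL == 4096, FIRST == 0 and a constant
   SIZE of 10000, the constant case above emits probes at offsets 4096,
   8192 and 10000 from the stack pointer, in the direction the stack
   grows; three touches cover every page in the range.  */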

/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */

void
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
{
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;

  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* If we have a constant small number of probes to generate, that's the
     easy case.  */
  if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it exceeds SIZE.  If only one probe is
         needed, this will not generate any code.  Then adjust and probe
         to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          if (first_probe)
            {
              anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
              first_probe = false;
            }
          else
            anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
          emit_stack_probe (stack_pointer_rtx);
        }

      if (first_probe)
        anti_adjust_stack (plus_constant (size, PROBE_INTERVAL + dope));
      else
        anti_adjust_stack (plus_constant (size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (SP != LAST_ADDR)
           {
             SP = SP + PROBE_INTERVAL
             probe at SP
           }

         adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
                               Pmode, 1, end_lab);

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
         assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          /* Manual CSE if the difference is not known at compile-time.  */
          if (GET_CODE (temp) != CONST_INT)
            temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
          anti_adjust_stack (temp);
          emit_stack_probe (stack_pointer_rtx);
        }
    }

  /* Adjust back and account for the additional first interval.  */
  if (adjust_back)
    adjust_stack (plus_constant (size, PROBE_INTERVAL + dope));
  else
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
}

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
                     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
         since the value of bytes will then be large enough that no
         mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != VOIDmode;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}
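
/* For instance, for a 6-byte BLKmode value returned in a register, the
   loop above walks the MODE_INT class (QImode, HImode, SImode, DImode, ...)
   and stops at the first mode of at least 6 bytes, so on a target with
   8-byte DImode the return rtx is re-stamped as a DImode register.  */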

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (enum machine_mode mode, rtx fun)
{
  return targetm.calls.libcall_value (mode, fun);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}

#include "gt-explow.h"