/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
   2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);

/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_PRECISION (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

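  /* The mask/xor/subtract sequence below implements sign extension from
     WIDTH bits: e.g. with width == 8 and c == 0xff, sign == 0x80; the mask
     keeps 0xff, the XOR gives 0x7f, and subtracting sign yields -1.  */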
  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}

/* Return an rtx for the sum of X and the integer C.  */

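/* For example, applying this to (const (plus (symbol_ref X) (const_int 4)))
   with C == 4 unwraps the CONST, folds the integer terms, and re-wraps the
   result, yielding (const (plus (symbol_ref X) (const_int 8))).  */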
rtx
plus_constant (rtx x, HOST_WIDE_INT c)
{
  RTX_CODE code;
  rtx y;
  enum machine_mode mode;
  rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        unsigned HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
        unsigned HOST_WIDE_INT lv;
        HOST_WIDE_INT hv;

        add_double (l1, h1, l2, h2, &lv, &hv);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem
            = force_const_mem (GET_MODE (x),
                               plus_constant (get_pool_constant (XEXP (x, 0)),
                                              c));
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
         Look for constant term in the sum and combine
         with C.  For an integer constant term, we make a combined
         integer.  For a constant term that is not an explicit integer,
         we cannot really combine, but group them together anyway.

         Restart or use a recursive call in case the remaining operand is
         something that we handle specially, such as a SYMBOL_REF.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (CONST_INT_P (XEXP (x, 1)))
        {
          c += INTVAL (XEXP (x, 1));

          if (GET_MODE (x) != VOIDmode)
            c = trunc_int_for_mode (c, GET_MODE (x));

          x = XEXP (x, 0);
          goto restart;
        }
      else if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
          c = 0;
        }
      else if (find_constant_term_loc (&y))
        {
          /* We need to be careful since X may be shared and we can't
             modify it in place.  */
          rtx copy = copy_rtx (x);
          rtx *const_loc = find_constant_term_loc (&copy);

          *const_loc = plus_constant (*const_loc, c);
          x = copy;
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
\f
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

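/* For example, with *CONSTPTR == const0_rtx, applying this to
   (plus (plus (reg) (const_int 8)) (const_int 4)) returns (reg) and
   leaves (const_int 12) in *CONSTPTR.  */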
rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !host_integerp (size, 0))
    return -1;

  return tree_low_cst (size, 0);
}
\f
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}

/* Given X, a memory address in address space AS's pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (enum machine_mode to_mode ATTRIBUTE_UNUSED,
                                   rtx x, addr_space_t as ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address_addr_space
                              (to_mode, XEXP (x, 0), as));
      break;

    case PLUS:
    case MULT:
      /* FIXME: For addition, we used to permute the conversion and
         addition operation only if one operand is a constant and
         converting the constant does not change it or if one operand
         is a constant and we are using a ptr_extend instruction
         (POINTERS_EXTEND_UNSIGNED < 0) even if the resulting address
         may overflow/underflow.  We relax the condition to include
         zero-extend (POINTERS_EXTEND_UNSIGNED > 0) since the other
         parts of the compiler depend on it.  See PR 49721.

         We can always safely permute them if we are making the address
         narrower.  */
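      /* For instance, on a target where ptr_mode is SImode, Pmode is DImode
         and POINTERS_EXTEND_UNSIGNED > 0, converting
         (plus:SI (reg:SI) (const_int -1)) to DImode zero-extends only the
         register operand and reuses the constant, yielding the equivalent
         of (plus:DI (zero_extend:DI (reg:SI)) (const_int -1)).  That is not
         the same value as zero-extending the SImode sum when the addition
         wraps, which is exactly the relaxation described above.  */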
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && CONST_INT_P (XEXP (x, 1))
              && (POINTERS_EXTEND_UNSIGNED != 0
                  || XEXP (x, 1) == convert_memory_address_addr_space
                                      (to_mode, XEXP (x, 1), as))))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address_addr_space
                                 (to_mode, XEXP (x, 0), as),
                               XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
\f
/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
        goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
        {
          x = oldx;
          goto done;
        }

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      {
        rtx orig_x = x;
        x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
        if (orig_x != x && memory_address_addr_space_p (mode, x, as))
          goto done;
      }

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_addr_space_p (mode, y, as))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_addr_space_p (mode, y, as))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
                                   MEM_ADDR_SPACE (ref)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  if (!cse_not_expected)
    base = force_reg (GET_MODE (base), base);

  return replace_equiv_address (x, plus_constant (base, offset));
}
\f
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

        if (INTVAL (c) == 0)
          align = sa;
        else
          {
            ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
            align = MIN (sa, ca);
          }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}
\f
/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

enum machine_mode
promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
                       const_tree funtype, int for_return)
{
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
    {
      if (INTEGRAL_MODE_P (mode))
        return targetm.calls.promote_function_mode (NULL_TREE, mode,
                                                    punsignedp, funtype,
                                                    for_return);
      else
        return mode;
    }

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
                                                  for_return);

    default:
      return mode;
    }
}
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */
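/* As an illustration, many targets define PROMOTE_MODE so that QImode and
   HImode values are computed in SImode registers; the exact choice is
   target-specific.  */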

enum machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
              int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
  enum tree_code code;
  int unsignedp;
#endif

  /* For libcalls this is invoked without TYPE from the backend's
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     case.  */
  if (type == NULL_TREE)
    return mode;

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  code = TREE_CODE (type);
  unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;
      return mode;
      break;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
               (TYPE_ADDR_SPACE (TREE_TYPE (type)));
      break;
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}


/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

enum machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode mode = DECL_MODE (decl);
  enum machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL
      || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}

\f
/* Controls the behaviour of {anti_,}adjust_stack.  */
static bool suppress_reg_args_size;

/* A helper for adjust_stack and anti_adjust_stack.  */

static void
adjust_stack_1 (rtx adjust, bool anti_p)
{
  rtx temp, insn;

#ifndef STACK_GROWS_DOWNWARD
  /* Hereafter anti_p means subtract_p.  */
  anti_p = !anti_p;
#endif

  temp = expand_binop (Pmode,
                       anti_p ? sub_optab : add_optab,
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    insn = emit_move_insn (stack_pointer_rtx, temp);
  else
    {
      insn = get_last_insn ();
      temp = single_set (insn);
      gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
    }

  if (!suppress_reg_args_size)
    add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
}

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiples of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  adjust_stack_1 (adjust, false);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiples of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  adjust_stack_1 (adjust, true);
}

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */
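/* For example, with a preferred stack boundary of 128 bits (16 bytes),
   a constant SIZE of 37 is rounded up to 48.  */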

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
        return size;

      if (CONST_INT_P (size))
        {
          HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

          if (INTVAL (size) != new_size)
            size = GEN_INT (new_size);
          return size;
        }

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
         virtual_preferred_stack_boundary_rtx instead.  This will be
         substituted by the right value in vregs pass and optimized
         during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (align_rtx, -1), NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
                        NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}
\f
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */

void
emit_stack_save (enum save_level save_level, rtx *psave)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }

  do_pending_stack_adjust ();
  if (sa != 0)
    sa = validize_mem (sa);
  emit_insn (fcn (sa, stack_pointer_rtx));
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

void
emit_stack_restore (enum save_level save_level, rtx sa)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  emit_insn (fcn (stack_pointer_rtx, sa));
}

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF,
                   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
                   cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
}
\f
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */

rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
                              unsigned required_align, bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx final_label, final_target, target;
  unsigned extra_align = 0;
  bool must_align;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage_info)
    {
      if (CONST_INT_P (size))
        stack_usage_size = INTVAL (size);
      else if (REG_P (size))
        {
          /* Look into the last emitted insn and see if we can deduce
             something for the register.  */
          rtx insn, set, note;
          insn = get_last_insn ();
          if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
            {
              if (CONST_INT_P (SET_SRC (set)))
                stack_usage_size = INTVAL (SET_SRC (set));
              else if ((note = find_reg_equal_equiv_note (insn))
                       && CONST_INT_P (XEXP (note, 0)))
                stack_usage_size = INTVAL (XEXP (note, 0));
            }
        }

      /* If the size is not constant, we can't say anything.  */
      if (stack_usage_size == -1)
        {
          current_function_has_unbounded_dynamic_stack_size = 1;
          stack_usage_size = 0;
        }
    }

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Adjust SIZE_ALIGN, if needed.  */
  if (CONST_INT_P (size))
    {
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);
      lsb &= -lsb;
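      /* The &= -lsb trick isolates the lowest set bit of SIZE: e.g. a SIZE
         of 48 (0b110000) gives lsb == 16, i.e. a known 16-byte alignment.  */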

      /* Watch out for overflow truncating to "unsigned".  */
      if (lsb > UINT_MAX / BITS_PER_UNIT)
        size_align = 1u << (HOST_BITS_PER_INT - 1);
      else
        size_align = (unsigned)lsb * BITS_PER_UNIT;
    }
  else if (size_align < BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     REQUIRED_ALIGN.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than REQUIRED_ALIGN.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

  must_align = (crtl->preferred_stack_boundary < required_align);
  if (must_align)
    {
      if (required_align > PREFERRED_STACK_BOUNDARY)
        extra_align = PREFERRED_STACK_BOUNDARY;
      else if (required_align > STACK_BOUNDARY)
        extra_align = STACK_BOUNDARY;
      else
        extra_align = BITS_PER_UNIT;
    }

  /* ??? STACK_POINTER_OFFSET is always defined now.  */
#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
  must_align = true;
  extra_align = BITS_PER_UNIT;
#endif

  if (must_align)
    {
      unsigned extra = (required_align - extra_align) / BITS_PER_UNIT;
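      /* E.g. REQUIRED_ALIGN == 256 bits with EXTRA_ALIGN == BITS_PER_UNIT
         reserves 31 bytes of slack, enough to round the returned address
         up to a 32-byte boundary afterwards.  */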

      size = plus_constant (size, extra);
      size = force_operand (size, NULL_RTX);

      if (flag_stack_usage_info)
        stack_usage_size += extra;

      if (extra && size_align > extra_align)
        size_align = extra_align;
    }

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */
  if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
    {
      size = round_push (size);

      if (flag_stack_usage_info)
        {
          int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
          stack_usage_size = (stack_usage_size + align - 1) / align * align;
        }
    }

  target = gen_reg_rtx (Pmode);

  /* The size is supposed to be fully adjusted at this point so record it
     if stack usage info is requested.  */
  if (flag_stack_usage_info)
    {
      current_function_dynamic_stack_size += stack_usage_size;

      /* ??? This is gross but the only safe stance in the absence
         of stack usage oriented flow analysis.  */
      if (!cannot_accumulate)
        current_function_has_unbounded_dynamic_stack_size = 1;
    }

  final_label = NULL_RTX;
  final_target = NULL_RTX;

  /* If we are splitting the stack, we need to ask the backend whether
     there is enough room on the current stack.  If there isn't, or if
     the backend doesn't know how to tell us, then we need to call a
     function to allocate memory in some other way.  This memory will
     be released when we release the current stack segment.  The
     effect is that stack allocation becomes less efficient, but at
     least it doesn't cause a stack overflow.  */
  if (flag_split_stack)
    {
      rtx available_label, ask, space, func;

      available_label = NULL_RTX;

#ifdef HAVE_split_stack_space_check
      if (HAVE_split_stack_space_check)
        {
          available_label = gen_label_rtx ();

          /* This instruction will branch to AVAILABLE_LABEL if there
             are SIZE bytes available on the stack.  */
          emit_insn (gen_split_stack_space_check (size, available_label));
        }
#endif

      /* The __morestack_allocate_stack_space function will allocate
         memory using malloc.  If the alignment of the memory returned
         by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
         make sure we allocate enough space.  */
      if (MALLOC_ABI_ALIGNMENT >= required_align)
        ask = size;
      else
        {
          ask = expand_binop (Pmode, add_optab, size,
                              GEN_INT (required_align / BITS_PER_UNIT - 1),
                              NULL_RTX, 1, OPTAB_LIB_WIDEN);
          must_align = true;
        }

      func = init_one_libfunc ("__morestack_allocate_stack_space");

      space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
                                       1, ask, Pmode);

      if (available_label == NULL_RTX)
        return space;

      final_target = gen_reg_rtx (Pmode);

      emit_move_insn (final_target, space);

      final_label = gen_label_rtx ();
      emit_jump (final_label);

      emit_label (available_label);
    }

  do_pending_stack_adjust ();

  /* We ought to be called always on the toplevel and stack ought to be aligned
     properly.  */
  gcc_assert (!(stack_pointer_delta
                % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
    ;
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
                       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (STACK_CHECK_PROTECT, size);

  /* Don't let anti_adjust_stack emit notes.  */
  suppress_reg_args_size = true;

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      struct expand_operand ops[2];
      /* We don't have to check against the predicate for operand 0 since
         TARGET is known to be a pseudo of the proper mode, which must
         be valid for the operand.  */
      create_fixed_operand (&ops[0], target);
      create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
      expand_insn (CODE_FOR_allocate_stack, 2, ops);
    }
  else
#endif
    {
      int saved_stack_pointer_delta;

#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
        {
          rtx available;
          rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
          available = expand_binop (Pmode, sub_optab,
                                    stack_pointer_rtx, stack_limit_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#else
          available = expand_binop (Pmode, sub_optab,
                                    stack_limit_rtx, stack_pointer_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#endif
          emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
                                   space_available);
#ifdef HAVE_trap
          if (HAVE_trap)
            emit_insn (gen_trap ());
          else
#endif
            error ("stack limits not supported on this target");
          emit_barrier ();
          emit_label (space_available);
        }

      saved_stack_pointer_delta = stack_pointer_delta;

      if (flag_stack_check && STACK_CHECK_MOVING_SP)
        anti_adjust_stack_and_probe (size, false);
      else
        anti_adjust_stack (size);

      /* Even if size is constant, don't modify stack_pointer_delta.
         The constant size alloca should preserve
         crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  suppress_reg_args_size = false;

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  if (must_align)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
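      /* E.g. with REQUIRED_ALIGN == 64 bits this computes
         ((target + 7) / 8) * 8 in byte units.  */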
      target = expand_binop (Pmode, add_optab, target,
                             GEN_INT (required_align / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              GEN_INT (required_align / BITS_PER_UNIT),
                              NULL_RTX, 1);
      target = expand_mult (Pmode, target,
                            GEN_INT (required_align / BITS_PER_UNIT),
                            NULL_RTX, 1);
    }

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  return target;
}
\f
/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (const char *libfunc_name)
{
  gcc_assert (stack_check_libfunc == NULL_RTX);
  stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
}
\f
/* Emit one stack probe at ADDRESS, an address within the stack.  */

void
emit_stack_probe (rtx address)
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  /* See if we have an insn to probe the stack.  */
#ifdef HAVE_probe_stack
  if (HAVE_probe_stack)
    emit_insn (gen_probe_stack (memref));
  else
#endif
    emit_move_insn (memref, const0_rtx);
}

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.  */

#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
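/* With the default STACK_CHECK_PROBE_INTERVAL_EXP of 12 (targets may
   override it), PROBE_INTERVAL is 4096, i.e. one probe per 4K page.  */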

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#define STACK_GROW_OPTAB sub_optab
#define STACK_GROW_OFF(off) -(off)
#else
#define STACK_GROW_OP PLUS
#define STACK_GROW_OPTAB add_optab
#define STACK_GROW_OFF(off) (off)
#endif

void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if we have a function to check the stack.  */
  if (stack_check_libfunc)
    {
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (size, first)));
      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
                         Pmode);
      return;
    }

  /* Next see if we have an insn to check the stack.  */
#ifdef HAVE_check_stack
  if (HAVE_check_stack)
    {
      struct expand_operand ops[1];
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (size, first)));

      create_input_operand (&ops[0], addr, Pmode);
      if (maybe_expand_insn (CODE_FOR_check_stack, 1, ops))
        return;
    }
#endif

  /* Otherwise we have to generate explicit probes.  If we have a constant
     small number of them to generate, that's the easy case.  */
  else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      rtx addr;

      /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
         it exceeds SIZE.  If only one probe is needed, this will not
         generate any code.  Then probe at FIRST + SIZE.  */
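      /* E.g. with FIRST == 0, PROBE_INTERVAL == 4096 and SIZE == 10000,
         this emits probes at offsets 4096, 8192 and 10000.  */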
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          addr = memory_address (Pmode,
                                 plus_constant (stack_pointer_rtx,
                                                STACK_GROW_OFF (first + i)));
          emit_stack_probe (addr);
        }

      addr = memory_address (Pmode,
                             plus_constant (stack_pointer_rtx,
                                            STACK_GROW_OFF (first + isize)));
      emit_stack_probe (addr);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 GEN_INT (first)), NULL_RTX);

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 test_addr,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (TEST_ADDR != LAST_ADDR)
           {
             TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
             probe at TEST_ADDR
           }

         probes at FIRST + N * PROBE_INTERVAL for values of N from 1
         until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
                               end_lab);

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
                           GEN_INT (PROBE_INTERVAL), test_addr,
                           1, OPTAB_WIDEN);

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
         that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          rtx addr;

          if (CONST_INT_P (temp))
            {
              /* Use [base + disp] addressing mode if supported.  */
              HOST_WIDE_INT offset = INTVAL (temp);
              addr = memory_address (Pmode,
                                     plus_constant (last_addr,
                                                    STACK_GROW_OFF (offset)));
            }
          else
            {
              /* Manual CSE if the difference is not known at compile-time.  */
              temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
              addr = memory_address (Pmode,
                                     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                     last_addr, temp));
            }

          emit_stack_probe (addr);
        }
    }
}

/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */

void
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
{
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;
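  /* E.g. 32 bytes on a target with 8-byte words; UNITS_PER_WORD is
     target-defined.  */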

  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* If we have a constant small number of probes to generate, that's the
     easy case.  */
  if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it exceeds SIZE.  If only one probe is
         needed, this will not generate any code.  Then adjust and probe
         to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          if (first_probe)
            {
              anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
              first_probe = false;
            }
          else
            anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
          emit_stack_probe (stack_pointer_rtx);
        }

      if (first_probe)
        anti_adjust_stack (plus_constant (size, PROBE_INTERVAL + dope));
      else
        anti_adjust_stack (plus_constant (size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (SP != LAST_ADDR)
           {
             SP = SP + PROBE_INTERVAL
             probe at SP
           }

         adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
                               Pmode, 1, end_lab);

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
         assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          /* Manual CSE if the difference is not known at compile-time.  */
          if (GET_CODE (temp) != CONST_INT)
            temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
          anti_adjust_stack (temp);
          emit_stack_probe (stack_pointer_rtx);
        }
    }

  /* Adjust back and account for the additional first interval.  */
  if (adjust_back)
    adjust_stack (plus_constant (size, PROBE_INTERVAL + dope));
  else
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
}

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
                     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
         since the value of bytes will then be large enough that no
         mode will match anyway.  */

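      /* E.g. a 3-byte BLKmode value selects SImode on a typical target
         providing 1, 2, 4 and 8 byte integer modes.  */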
      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != VOIDmode;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (enum machine_mode mode, rtx fun)
{
  return targetm.calls.libcall_value (mode, fun);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}

#include "gt-explow.h"