gcc/explow.c
1 /* Subroutines for manipulating rtx's in semantically interesting ways.
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "target.h"
25 #include "function.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "expmed.h"
31 #include "profile-count.h"
32 #include "optabs.h"
33 #include "emit-rtl.h"
34 #include "recog.h"
35 #include "diagnostic-core.h"
36 #include "stor-layout.h"
37 #include "except.h"
38 #include "dojump.h"
39 #include "explow.h"
40 #include "expr.h"
41 #include "stringpool.h"
42 #include "common/common-target.h"
43 #include "output.h"
44 #include "params.h"
45
46 static rtx break_out_memory_refs (rtx);
47 static void anti_adjust_stack_and_probe_stack_clash (rtx);
48
49
50 /* Truncate and perhaps sign-extend C as appropriate for MODE. */
51
52 HOST_WIDE_INT
53 trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
54 {
55 /* Not scalar_int_mode because we also allow pointer bound modes. */
56 scalar_mode smode = as_a <scalar_mode> (mode);
57 int width = GET_MODE_PRECISION (smode);
58
59 /* You want to truncate to a _what_? */
60 gcc_assert (SCALAR_INT_MODE_P (mode));
61
62 /* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */
63 if (smode == BImode)
64 return c & 1 ? STORE_FLAG_VALUE : 0;
65
66 /* Sign-extend for the requested mode. */
67
68 if (width < HOST_BITS_PER_WIDE_INT)
69 {
70 HOST_WIDE_INT sign = 1;
71 sign <<= width - 1;
72 c &= (sign << 1) - 1;
73 c ^= sign;
74 c -= sign;
75 }
76
77 return c;
78 }
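
/* Illustrative examples (values chosen for illustration, assuming an
   8-bit QImode): trunc_int_for_mode (0xff, QImode) masks to 0xff, then
   flips and subtracts the sign bit 0x80, yielding -1; likewise
   trunc_int_for_mode (0x80, QImode) yields -128, while 0x7f is
   returned unchanged.  */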
79
80 /* Likewise for polynomial values, using the sign-extended representation
81 for each individual coefficient. */
82
83 poly_int64
84 trunc_int_for_mode (poly_int64 x, machine_mode mode)
85 {
86 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
87 x.coeffs[i] = trunc_int_for_mode (x.coeffs[i], mode);
88 return x;
89 }
90
91 /* Return an rtx for the sum of X and the integer C, given that X has
92 mode MODE. INPLACE is true if X can be modified inplace or false
93 if it must be treated as immutable. */
94
95 rtx
96 plus_constant (machine_mode mode, rtx x, poly_int64 c, bool inplace)
97 {
98 RTX_CODE code;
99 rtx y;
100 rtx tem;
101 int all_constant = 0;
102
103 gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
104
105 if (known_eq (c, 0))
106 return x;
107
108 restart:
109
110 code = GET_CODE (x);
111 y = x;
112
113 switch (code)
114 {
115 CASE_CONST_SCALAR_INT:
116 return immed_wide_int_const (wi::add (rtx_mode_t (x, mode), c), mode);
117 case MEM:
118 /* If this is a reference to the constant pool, try replacing it with
119 a reference to a new constant. If the resulting address isn't
120 valid, don't return it because we have no way to validize it. */
121 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
122 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
123 {
124 rtx cst = get_pool_constant (XEXP (x, 0));
125
126 if (GET_CODE (cst) == CONST_VECTOR
127 && GET_MODE_INNER (GET_MODE (cst)) == mode)
128 {
129 cst = gen_lowpart (mode, cst);
130 gcc_assert (cst);
131 }
132 if (GET_MODE (cst) == VOIDmode || GET_MODE (cst) == mode)
133 {
134 tem = plus_constant (mode, cst, c);
135 tem = force_const_mem (GET_MODE (x), tem);
136 /* Targets may disallow some constants in the constant pool, thus
137 force_const_mem may return NULL_RTX. */
138 if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
139 return tem;
140 }
141 }
142 break;
143
144 case CONST:
145 /* If adding to something entirely constant, set a flag
146 so that we can add a CONST around the result. */
147 if (inplace && shared_const_p (x))
148 inplace = false;
149 x = XEXP (x, 0);
150 all_constant = 1;
151 goto restart;
152
153 case SYMBOL_REF:
154 case LABEL_REF:
155 all_constant = 1;
156 break;
157
158 case PLUS:
159 /* The interesting case is adding the integer to a sum. Look
160 for constant term in the sum and combine with C. For an
161 integer constant term or a constant term that is not an
162 explicit integer, we combine or group them together anyway.
163
164 We may not immediately return from the recursive call here, lest
165 all_constant gets lost. */
166
167 if (CONSTANT_P (XEXP (x, 1)))
168 {
169 rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
170 if (term == const0_rtx)
171 x = XEXP (x, 0);
172 else if (inplace)
173 XEXP (x, 1) = term;
174 else
175 x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
176 c = 0;
177 }
178 else if (rtx *const_loc = find_constant_term_loc (&y))
179 {
180 if (!inplace)
181 {
182 /* We need to be careful since X may be shared and we can't
183 modify it in place. */
184 x = copy_rtx (x);
185 const_loc = find_constant_term_loc (&x);
186 }
187 *const_loc = plus_constant (mode, *const_loc, c, true);
188 c = 0;
189 }
190 break;
191
192 default:
193 if (CONST_POLY_INT_P (x))
194 return immed_wide_int_const (const_poly_int_value (x) + c, mode);
195 break;
196 }
197
198 if (maybe_ne (c, 0))
199 x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));
200
201 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
202 return x;
203 else if (all_constant)
204 return gen_rtx_CONST (mode, x);
205 else
206 return x;
207 }
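
/* A worked sketch with made-up operands: given
     x = (plus:SI (reg:SI 100) (const_int 4))
   the call plus_constant (SImode, x, 3) folds the constant terms and
   returns
     (plus:SI (reg:SI 100) (const_int 7))
   while plus_constant (SImode, x, -4) drops the constant term entirely
   and returns just (reg:SI 100).  */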
208 \f
209 /* If X is a sum, return a new sum like X but lacking any constant terms.
210 Add all the removed constant terms into *CONSTPTR.
211 X itself is not altered. The result != X if and only if
212 it is not isomorphic to X. */
213
214 rtx
215 eliminate_constant_term (rtx x, rtx *constptr)
216 {
217 rtx x0, x1;
218 rtx tem;
219
220 if (GET_CODE (x) != PLUS)
221 return x;
222
223 /* First handle constants appearing at this level explicitly. */
224 if (CONST_INT_P (XEXP (x, 1))
225 && (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
226 XEXP (x, 1))) != 0
227 && CONST_INT_P (tem))
228 {
229 *constptr = tem;
230 return eliminate_constant_term (XEXP (x, 0), constptr);
231 }
232
233 tem = const0_rtx;
234 x0 = eliminate_constant_term (XEXP (x, 0), &tem);
235 x1 = eliminate_constant_term (XEXP (x, 1), &tem);
236 if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
237 && (tem = simplify_binary_operation (PLUS, GET_MODE (x),
238 *constptr, tem)) != 0
239 && CONST_INT_P (tem))
240 {
241 *constptr = tem;
242 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
243 }
244
245 return x;
246 }
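
/* A worked sketch with made-up operands: starting from
     *constptr = const0_rtx
     x = (plus:SI (plus:SI (reg:SI 100) (reg:SI 101)) (const_int 8))
   the function returns (plus:SI (reg:SI 100) (reg:SI 101)) and leaves
   (const_int 8) in *constptr; a sum without constant terms is
   returned unchanged.  */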
247
248 \f
249 /* Return a copy of X in which all memory references
250 and all constants that involve symbol refs
251 have been replaced with new temporary registers.
252 Also emit code to load the memory locations and constants
253 into those registers.
254
255 If X contains no such constants or memory references,
256 X itself (not a copy) is returned.
257
258 If a constant is found in the address that is not a legitimate constant
259 in an insn, it is left alone in the hope that it might be valid in the
260 address.
261
262 X may contain no arithmetic except addition, subtraction and multiplication.
263 Values returned by expand_expr with 1 for sum_ok fit this constraint. */
264
265 static rtx
266 break_out_memory_refs (rtx x)
267 {
268 if (MEM_P (x)
269 || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
270 && GET_MODE (x) != VOIDmode))
271 x = force_reg (GET_MODE (x), x);
272 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
273 || GET_CODE (x) == MULT)
274 {
275 rtx op0 = break_out_memory_refs (XEXP (x, 0));
276 rtx op1 = break_out_memory_refs (XEXP (x, 1));
277
278 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
279 x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
280 }
281
282 return x;
283 }
284
285 /* Given X, a memory address in address space AS' pointer mode, convert it to
286 an address in the address space's address mode, or vice versa (TO_MODE says
287 which way). We take advantage of the fact that pointers are not allowed to
288 overflow by commuting arithmetic operations over conversions so that address
289 arithmetic insns can be used. IN_CONST is true if this conversion is inside
290 a CONST. NO_EMIT is true if no insns should be emitted, and instead
291 it should return NULL if it can't be simplified without emitting insns. */
292
293 rtx
294 convert_memory_address_addr_space_1 (scalar_int_mode to_mode ATTRIBUTE_UNUSED,
295 rtx x, addr_space_t as ATTRIBUTE_UNUSED,
296 bool in_const ATTRIBUTE_UNUSED,
297 bool no_emit ATTRIBUTE_UNUSED)
298 {
299 #ifndef POINTERS_EXTEND_UNSIGNED
300 gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
301 return x;
302 #else /* defined(POINTERS_EXTEND_UNSIGNED) */
303 scalar_int_mode pointer_mode, address_mode, from_mode;
304 rtx temp;
305 enum rtx_code code;
306
307 /* If X already has the right mode, just return it. */
308 if (GET_MODE (x) == to_mode)
309 return x;
310
311 pointer_mode = targetm.addr_space.pointer_mode (as);
312 address_mode = targetm.addr_space.address_mode (as);
313 from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;
314
315 /* Here we handle some special cases. If none of them apply, fall through
316 to the default case. */
317 switch (GET_CODE (x))
318 {
319 CASE_CONST_SCALAR_INT:
320 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
321 code = TRUNCATE;
322 else if (POINTERS_EXTEND_UNSIGNED < 0)
323 break;
324 else if (POINTERS_EXTEND_UNSIGNED > 0)
325 code = ZERO_EXTEND;
326 else
327 code = SIGN_EXTEND;
328 temp = simplify_unary_operation (code, to_mode, x, from_mode);
329 if (temp)
330 return temp;
331 break;
332
333 case SUBREG:
334 if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
335 && GET_MODE (SUBREG_REG (x)) == to_mode)
336 return SUBREG_REG (x);
337 break;
338
339 case LABEL_REF:
340 temp = gen_rtx_LABEL_REF (to_mode, label_ref_label (x));
341 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
342 return temp;
343
344 case SYMBOL_REF:
345 temp = shallow_copy_rtx (x);
346 PUT_MODE (temp, to_mode);
347 return temp;
348
349 case CONST:
350 temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0), as,
351 true, no_emit);
352 return temp ? gen_rtx_CONST (to_mode, temp) : temp;
353
354 case PLUS:
355 case MULT:
356 /* For addition we can safely permute the conversion and addition
357 operation if one operand is a constant and converting the constant
358 does not change it or if one operand is a constant and we are
359 using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
360 We can always safely permute them if we are making the address
361 narrower. Inside a CONST RTL, this is safe for both pointers
362 zero or sign extended as pointers cannot wrap. */
363 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
364 || (GET_CODE (x) == PLUS
365 && CONST_INT_P (XEXP (x, 1))
366 && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
367 || XEXP (x, 1) == convert_memory_address_addr_space_1
368 (to_mode, XEXP (x, 1), as, in_const,
369 no_emit)
370 || POINTERS_EXTEND_UNSIGNED < 0)))
371 {
372 temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0),
373 as, in_const, no_emit);
374 return (temp ? gen_rtx_fmt_ee (GET_CODE (x), to_mode,
375 temp, XEXP (x, 1))
376 : temp);
377 }
378 break;
379
380 default:
381 break;
382 }
383
384 if (no_emit)
385 return NULL_RTX;
386
387 return convert_modes (to_mode, from_mode,
388 x, POINTERS_EXTEND_UNSIGNED);
389 #endif /* defined(POINTERS_EXTEND_UNSIGNED) */
390 }
391
392 /* Given X, a memory address in address space AS' pointer mode, convert it to
393 an address in the address space's address mode, or vice versa (TO_MODE says
394 which way). We take advantage of the fact that pointers are not allowed to
395 overflow by commuting arithmetic operations over conversions so that address
396 arithmetic insns can be used. */
397
398 rtx
399 convert_memory_address_addr_space (scalar_int_mode to_mode, rtx x,
400 addr_space_t as)
401 {
402 return convert_memory_address_addr_space_1 (to_mode, x, as, false, false);
403 }
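
/* An illustrative sketch, assuming a target where the generic address
   space has SImode pointers but DImode addresses (e.g. a 32-bit ABI on
   a 64-bit machine with POINTERS_EXTEND_UNSIGNED defined): converting
   (symbol_ref:SI ("x")) to DImode returns a shallow copy with its mode
   changed to DImode, while a CONST_INT is zero- or sign-extended
   according to POINTERS_EXTEND_UNSIGNED.  Anything the special cases
   cannot handle falls through to convert_modes, which may emit an
   extension or truncation insn.  */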
404 \f
405
406 /* Return something equivalent to X but valid as a memory address for something
407 of mode MODE in the named address space AS. When X is not itself valid,
408 this works by copying X or subexpressions of it into registers. */
409
410 rtx
411 memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
412 {
413 rtx oldx = x;
414 scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
415
416 x = convert_memory_address_addr_space (address_mode, x, as);
417
418 /* By passing constant addresses through registers
419 we get a chance to cse them. */
420 if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
421 x = force_reg (address_mode, x);
422
423 /* We get better cse by rejecting indirect addressing at this stage.
424 Let the combiner create indirect addresses where appropriate.
425 For now, generate the code so that the subexpressions useful to share
426 are visible. But not if cse won't be done! */
427 else
428 {
429 if (! cse_not_expected && !REG_P (x))
430 x = break_out_memory_refs (x);
431
432 /* At this point, any valid address is accepted. */
433 if (memory_address_addr_space_p (mode, x, as))
434 goto done;
435
436 /* If it was valid before but breaking out memory refs invalidated it,
437 use it the old way. */
438 if (memory_address_addr_space_p (mode, oldx, as))
439 {
440 x = oldx;
441 goto done;
442 }
443
444 /* Perform machine-dependent transformations on X
445 in certain cases. This is not necessary since the code
446 below can handle all possible cases, but machine-dependent
447 transformations can make better code. */
448 {
449 rtx orig_x = x;
450 x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
451 if (orig_x != x && memory_address_addr_space_p (mode, x, as))
452 goto done;
453 }
454
455 /* PLUS and MULT can appear in special ways
456 as the result of attempts to make an address usable for indexing.
457 Usually they are dealt with by calling force_operand, below.
458 But a sum containing constant terms is special
459 if removing them makes the sum a valid address:
460 then we generate that address in a register
461 and index off of it. We do this because it often makes
462 shorter code, and because the addresses thus generated
463 in registers often become common subexpressions. */
464 if (GET_CODE (x) == PLUS)
465 {
466 rtx constant_term = const0_rtx;
467 rtx y = eliminate_constant_term (x, &constant_term);
468 if (constant_term == const0_rtx
469 || ! memory_address_addr_space_p (mode, y, as))
470 x = force_operand (x, NULL_RTX);
471 else
472 {
473 y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
474 if (! memory_address_addr_space_p (mode, y, as))
475 x = force_operand (x, NULL_RTX);
476 else
477 x = y;
478 }
479 }
480
481 else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
482 x = force_operand (x, NULL_RTX);
483
484 /* If we have a register that's an invalid address,
485 it must be a hard reg of the wrong class. Copy it to a pseudo. */
486 else if (REG_P (x))
487 x = copy_to_reg (x);
488
489 /* Last resort: copy the value to a register, since
490 the register is a valid address. */
491 else
492 x = force_reg (address_mode, x);
493 }
494
495 done:
496
497 gcc_assert (memory_address_addr_space_p (mode, x, as));
498 /* If we didn't change the address, we are done. Otherwise, mark
499 a reg as a pointer if we have REG or REG + CONST_INT. */
500 if (oldx == x)
501 return x;
502 else if (REG_P (x))
503 mark_reg_pointer (x, BITS_PER_UNIT);
504 else if (GET_CODE (x) == PLUS
505 && REG_P (XEXP (x, 0))
506 && CONST_INT_P (XEXP (x, 1)))
507 mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);
508
509 /* OLDX may have been the address on a temporary. Update the address
510 to indicate that X is now used. */
511 update_temp_slot_address (oldx, x);
512
513 return x;
514 }
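
/* A worked sketch with made-up operands: if
     x = (plus:SI (plus:SI (reg:SI 100) (reg:SI 101)) (const_int 100))
   is not a valid address but the sum without its constant term is, the
   PLUS case above copies (plus (reg 100) (reg 101)) into a fresh
   pseudo R and returns (plus:SI R (const_int 100)), provided that form
   is itself a valid address; otherwise the whole expression is handed
   to force_operand.  */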
515
516 /* Convert a mem ref into one with a valid memory address.
517 Pass through anything else unchanged. */
518
519 rtx
520 validize_mem (rtx ref)
521 {
522 if (!MEM_P (ref))
523 return ref;
524 ref = use_anchored_address (ref);
525 if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
526 MEM_ADDR_SPACE (ref)))
527 return ref;
528
529 /* Don't alter REF itself, since that is probably a stack slot. */
530 return replace_equiv_address (ref, XEXP (ref, 0));
531 }
532
533 /* If X is a memory reference to a member of an object block, try rewriting
534 it to use an anchor instead. Return the new memory reference on success
535 and the old one on failure. */
536
537 rtx
538 use_anchored_address (rtx x)
539 {
540 rtx base;
541 HOST_WIDE_INT offset;
542 machine_mode mode;
543
544 if (!flag_section_anchors)
545 return x;
546
547 if (!MEM_P (x))
548 return x;
549
550 /* Split the address into a base and offset. */
551 base = XEXP (x, 0);
552 offset = 0;
553 if (GET_CODE (base) == CONST
554 && GET_CODE (XEXP (base, 0)) == PLUS
555 && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
556 {
557 offset += INTVAL (XEXP (XEXP (base, 0), 1));
558 base = XEXP (XEXP (base, 0), 0);
559 }
560
561 /* Check whether BASE is suitable for anchors. */
562 if (GET_CODE (base) != SYMBOL_REF
563 || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
564 || SYMBOL_REF_ANCHOR_P (base)
565 || SYMBOL_REF_BLOCK (base) == NULL
566 || !targetm.use_anchors_for_symbol_p (base))
567 return x;
568
569 /* Decide where BASE is going to be. */
570 place_block_symbol (base);
571
572 /* Get the anchor we need to use. */
573 offset += SYMBOL_REF_BLOCK_OFFSET (base);
574 base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
575 SYMBOL_REF_TLS_MODEL (base));
576
577 /* Work out the offset from the anchor. */
578 offset -= SYMBOL_REF_BLOCK_OFFSET (base);
579
580 /* If we're going to run a CSE pass, force the anchor into a register.
581 We will then be able to reuse registers for several accesses, if the
582 target costs say that that's worthwhile. */
583 mode = GET_MODE (base);
584 if (!cse_not_expected)
585 base = force_reg (mode, base);
586
587 return replace_equiv_address (x, plus_constant (mode, base, offset));
588 }
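
/* A worked sketch with made-up numbers: for a reference to
     (mem:SI (const:SI (plus:SI (symbol_ref "b") (const_int 4))))
   where "b" is placed 24 bytes into its object block and the chosen
   section anchor sits at the start of that block, the address is
   rewritten as the anchor symbol plus (const_int 28); if a CSE pass is
   still expected, the anchor is first forced into a register so later
   references can share it.  */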
589 \f
590 /* Copy the value or contents of X to a new temp reg and return that reg. */
591
592 rtx
593 copy_to_reg (rtx x)
594 {
595 rtx temp = gen_reg_rtx (GET_MODE (x));
596
597 /* If not an operand, must be an address with PLUS and MULT so
598 do the computation. */
599 if (! general_operand (x, VOIDmode))
600 x = force_operand (x, temp);
601
602 if (x != temp)
603 emit_move_insn (temp, x);
604
605 return temp;
606 }
607
608 /* Like copy_to_reg but always give the new register mode Pmode
609 in case X is a constant. */
610
611 rtx
612 copy_addr_to_reg (rtx x)
613 {
614 return copy_to_mode_reg (Pmode, x);
615 }
616
617 /* Like copy_to_reg but always give the new register mode MODE
618 in case X is a constant. */
619
620 rtx
621 copy_to_mode_reg (machine_mode mode, rtx x)
622 {
623 rtx temp = gen_reg_rtx (mode);
624
625 /* If not an operand, must be an address with PLUS and MULT so
626 do the computation. */
627 if (! general_operand (x, VOIDmode))
628 x = force_operand (x, temp);
629
630 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
631 if (x != temp)
632 emit_move_insn (temp, x);
633 return temp;
634 }
635
636 /* Load X into a register if it is not already one.
637 Use mode MODE for the register.
638 X should be valid for mode MODE, but it may be a constant which
639 is valid for all integer modes; that's why caller must specify MODE.
640
641 The caller must not alter the value in the register we return,
642 since we mark it as a "constant" register. */
643
644 rtx
645 force_reg (machine_mode mode, rtx x)
646 {
647 rtx temp, set;
648 rtx_insn *insn;
649
650 if (REG_P (x))
651 return x;
652
653 if (general_operand (x, mode))
654 {
655 temp = gen_reg_rtx (mode);
656 insn = emit_move_insn (temp, x);
657 }
658 else
659 {
660 temp = force_operand (x, NULL_RTX);
661 if (REG_P (temp))
662 insn = get_last_insn ();
663 else
664 {
665 rtx temp2 = gen_reg_rtx (mode);
666 insn = emit_move_insn (temp2, temp);
667 temp = temp2;
668 }
669 }
670
671 /* Let optimizers know that TEMP's value never changes
672 and that X can be substituted for it. Don't get confused
673 if INSN set something else (such as a SUBREG of TEMP). */
674 if (CONSTANT_P (x)
675 && (set = single_set (insn)) != 0
676 && SET_DEST (set) == temp
677 && ! rtx_equal_p (x, SET_SRC (set)))
678 set_unique_reg_note (insn, REG_EQUAL, x);
679
680 /* Let optimizers know that TEMP is a pointer, and if so, the
681 known alignment of that pointer. */
682 {
683 unsigned align = 0;
684 if (GET_CODE (x) == SYMBOL_REF)
685 {
686 align = BITS_PER_UNIT;
687 if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
688 align = DECL_ALIGN (SYMBOL_REF_DECL (x));
689 }
690 else if (GET_CODE (x) == LABEL_REF)
691 align = BITS_PER_UNIT;
692 else if (GET_CODE (x) == CONST
693 && GET_CODE (XEXP (x, 0)) == PLUS
694 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
695 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
696 {
697 rtx s = XEXP (XEXP (x, 0), 0);
698 rtx c = XEXP (XEXP (x, 0), 1);
699 unsigned sa, ca;
700
701 sa = BITS_PER_UNIT;
702 if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
703 sa = DECL_ALIGN (SYMBOL_REF_DECL (s));
704
705 if (INTVAL (c) == 0)
706 align = sa;
707 else
708 {
709 ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
710 align = MIN (sa, ca);
711 }
712 }
713
714 if (align || (MEM_P (x) && MEM_POINTER (x)))
715 mark_reg_pointer (temp, align);
716 }
717
718 return temp;
719 }
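
/* Alignment sketch with made-up numbers: forcing
     (const (plus (symbol_ref "x") (const_int 12)))
   into a register, where the decl behind "x" is 16-byte aligned
   (sa == 128 bits), gives ca == ctz_hwi (12) * BITS_PER_UNIT == 16
   bits, so the new pseudo is marked as a pointer with the more
   conservative 16-bit (2-byte) known alignment.  */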
720
721 /* If X is a memory ref, copy its contents to a new temp reg and return
722 that reg. Otherwise, return X. */
723
724 rtx
725 force_not_mem (rtx x)
726 {
727 rtx temp;
728
729 if (!MEM_P (x) || GET_MODE (x) == BLKmode)
730 return x;
731
732 temp = gen_reg_rtx (GET_MODE (x));
733
734 if (MEM_POINTER (x))
735 REG_POINTER (temp) = 1;
736
737 emit_move_insn (temp, x);
738 return temp;
739 }
740
741 /* Copy X to TARGET (if it's nonzero and a reg)
742 or to a new temp reg and return that reg.
743 MODE is the mode to use for X in case it is a constant. */
744
745 rtx
746 copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
747 {
748 rtx temp;
749
750 if (target && REG_P (target))
751 temp = target;
752 else
753 temp = gen_reg_rtx (mode);
754
755 emit_move_insn (temp, x);
756 return temp;
757 }
758 \f
759 /* Return the mode to use to pass or return a scalar of TYPE and MODE.
760 PUNSIGNEDP points to the signedness of the type and may be adjusted
761 to show what signedness to use on extension operations.
762
763 FOR_RETURN is nonzero if the caller is promoting the return value
764 of FNDECL, else it is for promoting args. */
765
766 machine_mode
767 promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
768 const_tree funtype, int for_return)
769 {
770 /* Called without a type node for a libcall. */
771 if (type == NULL_TREE)
772 {
773 if (INTEGRAL_MODE_P (mode))
774 return targetm.calls.promote_function_mode (NULL_TREE, mode,
775 punsignedp, funtype,
776 for_return);
777 else
778 return mode;
779 }
780
781 switch (TREE_CODE (type))
782 {
783 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
784 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
785 case POINTER_TYPE: case REFERENCE_TYPE:
786 return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
787 for_return);
788
789 default:
790 return mode;
791 }
792 }
793 /* Return the mode to use to store a scalar of TYPE and MODE.
794 PUNSIGNEDP points to the signedness of the type and may be adjusted
795 to show what signedness to use on extension operations. */
796
797 machine_mode
798 promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
799 int *punsignedp ATTRIBUTE_UNUSED)
800 {
801 #ifdef PROMOTE_MODE
802 enum tree_code code;
803 int unsignedp;
804 scalar_mode smode;
805 #endif
806
807 /* For libcalls this is invoked without TYPE from the backends
808 TARGET_PROMOTE_FUNCTION_MODE hooks. Don't do anything in that
809 case. */
810 if (type == NULL_TREE)
811 return mode;
812
813 /* FIXME: this is the same logic that was there until GCC 4.4, but we
814 probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
815 is not defined. The affected targets are M32C, S390, SPARC. */
816 #ifdef PROMOTE_MODE
817 code = TREE_CODE (type);
818 unsignedp = *punsignedp;
819
820 switch (code)
821 {
822 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
823 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
824 /* Values of these types always have scalar mode. */
825 smode = as_a <scalar_mode> (mode);
826 PROMOTE_MODE (smode, unsignedp, type);
827 *punsignedp = unsignedp;
828 return smode;
829
830 #ifdef POINTERS_EXTEND_UNSIGNED
831 case REFERENCE_TYPE:
832 case POINTER_TYPE:
833 *punsignedp = POINTERS_EXTEND_UNSIGNED;
834 return targetm.addr_space.address_mode
835 (TYPE_ADDR_SPACE (TREE_TYPE (type)));
836 #endif
837
838 default:
839 return mode;
840 }
841 #else
842 return mode;
843 #endif
844 }
845
846
847 /* Use one of promote_mode or promote_function_mode to find the promoted
848 mode of DECL. If PUNSIGNEDP is not NULL, store there the unsignedness
849 of DECL after promotion. */
850
851 machine_mode
852 promote_decl_mode (const_tree decl, int *punsignedp)
853 {
854 tree type = TREE_TYPE (decl);
855 int unsignedp = TYPE_UNSIGNED (type);
856 machine_mode mode = DECL_MODE (decl);
857 machine_mode pmode;
858
859 if (TREE_CODE (decl) == RESULT_DECL && !DECL_BY_REFERENCE (decl))
860 pmode = promote_function_mode (type, mode, &unsignedp,
861 TREE_TYPE (current_function_decl), 1);
862 else if (TREE_CODE (decl) == RESULT_DECL || TREE_CODE (decl) == PARM_DECL)
863 pmode = promote_function_mode (type, mode, &unsignedp,
864 TREE_TYPE (current_function_decl), 2);
865 else
866 pmode = promote_mode (type, mode, &unsignedp);
867
868 if (punsignedp)
869 *punsignedp = unsignedp;
870 return pmode;
871 }
872
873 /* Return the promoted mode for name. If it is a named SSA_NAME, it
874 is the same as promote_decl_mode. Otherwise, it is the promoted
875 mode of a temp decl of same type as the SSA_NAME, if we had created
876 one. */
877
878 machine_mode
879 promote_ssa_mode (const_tree name, int *punsignedp)
880 {
881 gcc_assert (TREE_CODE (name) == SSA_NAME);
882
883 /* Partitions holding parms and results must be promoted as expected
884 by function.c. */
885 if (SSA_NAME_VAR (name)
886 && (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
887 || TREE_CODE (SSA_NAME_VAR (name)) == RESULT_DECL))
888 {
889 machine_mode mode = promote_decl_mode (SSA_NAME_VAR (name), punsignedp);
890 if (mode != BLKmode)
891 return mode;
892 }
893
894 tree type = TREE_TYPE (name);
895 int unsignedp = TYPE_UNSIGNED (type);
896 machine_mode pmode = promote_mode (type, TYPE_MODE (type), &unsignedp);
897 if (punsignedp)
898 *punsignedp = unsignedp;
899
900 return pmode;
901 }
902
903
904 \f
905 /* Controls the behavior of {anti_,}adjust_stack. */
906 static bool suppress_reg_args_size;
907
908 /* A helper for adjust_stack and anti_adjust_stack. */
909
910 static void
911 adjust_stack_1 (rtx adjust, bool anti_p)
912 {
913 rtx temp;
914 rtx_insn *insn;
915
916 /* Hereafter anti_p means subtract_p. */
917 if (!STACK_GROWS_DOWNWARD)
918 anti_p = !anti_p;
919
920 temp = expand_binop (Pmode,
921 anti_p ? sub_optab : add_optab,
922 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
923 OPTAB_LIB_WIDEN);
924
925 if (temp != stack_pointer_rtx)
926 insn = emit_move_insn (stack_pointer_rtx, temp);
927 else
928 {
929 insn = get_last_insn ();
930 temp = single_set (insn);
931 gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
932 }
933
934 if (!suppress_reg_args_size)
935 add_args_size_note (insn, stack_pointer_delta);
936 }
937
938 /* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
939 This pops when ADJUST is positive. ADJUST need not be constant. */
940
941 void
942 adjust_stack (rtx adjust)
943 {
944 if (adjust == const0_rtx)
945 return;
946
947 /* We expect all variable sized adjustments to be multiple of
948 PREFERRED_STACK_BOUNDARY. */
949 poly_int64 const_adjust;
950 if (poly_int_rtx_p (adjust, &const_adjust))
951 stack_pointer_delta -= const_adjust;
952
953 adjust_stack_1 (adjust, false);
954 }
955
956 /* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
957 This pushes when ADJUST is positive. ADJUST need not be constant. */
958
959 void
960 anti_adjust_stack (rtx adjust)
961 {
962 if (adjust == const0_rtx)
963 return;
964
965 /* We expect all variable sized adjustments to be multiple of
966 PREFERRED_STACK_BOUNDARY. */
967 poly_int64 const_adjust;
968 if (poly_int_rtx_p (adjust, &const_adjust))
969 stack_pointer_delta += const_adjust;
970
971 adjust_stack_1 (adjust, true);
972 }
973
974 /* Round the size of a block to be pushed up to the boundary required
975 by this machine. SIZE is the desired size, which need not be constant. */
976
977 static rtx
978 round_push (rtx size)
979 {
980 rtx align_rtx, alignm1_rtx;
981
982 if (!SUPPORTS_STACK_ALIGNMENT
983 || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
984 {
985 int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
986
987 if (align == 1)
988 return size;
989
990 if (CONST_INT_P (size))
991 {
992 HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;
993
994 if (INTVAL (size) != new_size)
995 size = GEN_INT (new_size);
996 return size;
997 }
998
999 align_rtx = GEN_INT (align);
1000 alignm1_rtx = GEN_INT (align - 1);
1001 }
1002 else
1003 {
1004 /* If crtl->preferred_stack_boundary might still grow, use
1005 virtual_preferred_stack_boundary_rtx instead. This will be
1006 substituted by the right value in vregs pass and optimized
1007 during combine. */
1008 align_rtx = virtual_preferred_stack_boundary_rtx;
1009 alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
1010 NULL_RTX);
1011 }
1012
1013 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
1014 but we know it can't. So do the addition ourselves and then do
1015 TRUNC_DIV_EXPR. */
1016 size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
1017 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1018 size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
1019 NULL_RTX, 1);
1020 size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);
1021
1022 return size;
1023 }
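
/* Example, assuming a fixed 128-bit preferred stack boundary
   (align == 16 bytes): a constant request of 20 bytes becomes
   GEN_INT (32), while a non-constant SIZE expands to the equivalent
   of (SIZE + 15) / 16 * 16 using the add/div/mult sequence above.  */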
1024 \f
1025 /* Save the stack pointer for the purpose in SAVE_LEVEL. PSAVE is a pointer
1026 to a previously-created save area. If no save area has been allocated,
1027 this function will allocate one. If a save area is specified, it
1028 must be of the proper mode. */
1029
1030 void
1031 emit_stack_save (enum save_level save_level, rtx *psave)
1032 {
1033 rtx sa = *psave;
1034 /* The default is that we use a move insn and save in a Pmode object. */
1035 rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
1036 machine_mode mode = STACK_SAVEAREA_MODE (save_level);
1037
1038 /* See if this machine has anything special to do for this kind of save. */
1039 switch (save_level)
1040 {
1041 case SAVE_BLOCK:
1042 if (targetm.have_save_stack_block ())
1043 fcn = targetm.gen_save_stack_block;
1044 break;
1045 case SAVE_FUNCTION:
1046 if (targetm.have_save_stack_function ())
1047 fcn = targetm.gen_save_stack_function;
1048 break;
1049 case SAVE_NONLOCAL:
1050 if (targetm.have_save_stack_nonlocal ())
1051 fcn = targetm.gen_save_stack_nonlocal;
1052 break;
1053 default:
1054 break;
1055 }
1056
1057 /* If there is no save area and we have to allocate one, do so. Otherwise
1058 verify the save area is the proper mode. */
1059
1060 if (sa == 0)
1061 {
1062 if (mode != VOIDmode)
1063 {
1064 if (save_level == SAVE_NONLOCAL)
1065 *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
1066 else
1067 *psave = sa = gen_reg_rtx (mode);
1068 }
1069 }
1070
1071 do_pending_stack_adjust ();
1072 if (sa != 0)
1073 sa = validize_mem (sa);
1074 emit_insn (fcn (sa, stack_pointer_rtx));
1075 }
1076
1077 /* Restore the stack pointer for the purpose in SAVE_LEVEL. SA is the save
1078 area made by emit_stack_save. If it is zero, we have nothing to do. */
1079
1080 void
1081 emit_stack_restore (enum save_level save_level, rtx sa)
1082 {
1083 /* The default is that we use a move insn. */
1084 rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
1085
1086 /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
1087 STACK_POINTER and HARD_FRAME_POINTER.
1088 If stack_realign_fp, the x86 backend emits a prologue that aligns only
1089 STACK_POINTER. This renders the HARD_FRAME_POINTER unusable for accessing
1090 aligned variables, which is reflected in ix86_can_eliminate.
1091 We normally still have the realigned STACK_POINTER that we can use.
1092 But if there is a stack restore still present at reload, it can trigger
1093 mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
1094 FRAME_POINTER into a hard reg.
1095 To prevent this situation, we force need_drap if we emit a stack
1096 restore. */
1097 if (SUPPORTS_STACK_ALIGNMENT)
1098 crtl->need_drap = true;
1099
1100 /* See if this machine has anything special to do for this kind of save. */
1101 switch (save_level)
1102 {
1103 case SAVE_BLOCK:
1104 if (targetm.have_restore_stack_block ())
1105 fcn = targetm.gen_restore_stack_block;
1106 break;
1107 case SAVE_FUNCTION:
1108 if (targetm.have_restore_stack_function ())
1109 fcn = targetm.gen_restore_stack_function;
1110 break;
1111 case SAVE_NONLOCAL:
1112 if (targetm.have_restore_stack_nonlocal ())
1113 fcn = targetm.gen_restore_stack_nonlocal;
1114 break;
1115 default:
1116 break;
1117 }
1118
1119 if (sa != 0)
1120 {
1121 sa = validize_mem (sa);
1122 /* These clobbers prevent the scheduler from moving
1123 references to variable arrays below the code
1124 that deletes (pops) the arrays. */
1125 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1126 emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
1127 }
1128
1129 discard_pending_stack_adjust ();
1130
1131 emit_insn (fcn (stack_pointer_rtx, sa));
1132 }
1133
1134 /* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
1135 function. This should be called whenever we allocate or deallocate
1136 dynamic stack space. */
1137
1138 void
1139 update_nonlocal_goto_save_area (void)
1140 {
1141 tree t_save;
1142 rtx r_save;
1143
1144 /* The nonlocal_goto_save_area object is an array of N pointers. The
1145 first one is used for the frame pointer save; the rest are sized by
1146 STACK_SAVEAREA_MODE. Create a reference to array index 1, the first
1147 of the stack save area slots. */
1148 t_save = build4 (ARRAY_REF,
1149 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
1150 cfun->nonlocal_goto_save_area,
1151 integer_one_node, NULL_TREE, NULL_TREE);
1152 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
1153
1154 emit_stack_save (SAVE_NONLOCAL, &r_save);
1155 }
1156
1157 /* Record a new stack level for the current function. This should be called
1158 whenever we allocate or deallocate dynamic stack space. */
1159
1160 void
1161 record_new_stack_level (void)
1162 {
1163 /* Record the new stack level for nonlocal gotos. */
1164 if (cfun->nonlocal_goto_save_area)
1165 update_nonlocal_goto_save_area ();
1166
1167 /* Record the new stack level for SJLJ exceptions. */
1168 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
1169 update_sjlj_context ();
1170 }
1171
1172 /* Return an rtx doing runtime alignment to REQUIRED_ALIGN on TARGET. */
1173
1174 rtx
1175 align_dynamic_address (rtx target, unsigned required_align)
1176 {
1177 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
1178 but we know it can't. So do the addition ourselves and then do
1179 TRUNC_DIV_EXPR. */
1180 target = expand_binop (Pmode, add_optab, target,
1181 gen_int_mode (required_align / BITS_PER_UNIT - 1,
1182 Pmode),
1183 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1184 target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
1185 gen_int_mode (required_align / BITS_PER_UNIT,
1186 Pmode),
1187 NULL_RTX, 1);
1188 target = expand_mult (Pmode, target,
1189 gen_int_mode (required_align / BITS_PER_UNIT,
1190 Pmode),
1191 NULL_RTX, 1);
1192
1193 return target;
1194 }
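
/* Example, for REQUIRED_ALIGN of 256 bits (32 bytes): the returned
   value is effectively (TARGET + 31) / 32 * 32, so an address of
   0x1004 is rounded up to 0x1020.  */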
1195
1196 /* Return an rtx through *PSIZE, representing the size of an area of memory to
1197 be dynamically pushed on the stack.
1198
1199 *PSIZE is an rtx representing the size of the area.
1200
1201 SIZE_ALIGN is the alignment (in bits) that we know SIZE has. This
1202 parameter may be zero. If so, a proper value will be extracted
1203 from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.
1204
1205 REQUIRED_ALIGN is the alignment (in bits) required for the region
1206 of memory.
1207
1208 If PSTACK_USAGE_SIZE is not NULL it points to a value that is increased for
1209 the additional size returned. */
1210 void
1211 get_dynamic_stack_size (rtx *psize, unsigned size_align,
1212 unsigned required_align,
1213 HOST_WIDE_INT *pstack_usage_size)
1214 {
1215 rtx size = *psize;
1216
1217 /* Ensure the size is in the proper mode. */
1218 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1219 size = convert_to_mode (Pmode, size, 1);
1220
1221 if (CONST_INT_P (size))
1222 {
1223 unsigned HOST_WIDE_INT lsb;
1224
1225 lsb = INTVAL (size);
1226 lsb &= -lsb;
1227
1228 /* Watch out for overflow truncating to "unsigned". */
1229 if (lsb > UINT_MAX / BITS_PER_UNIT)
1230 size_align = 1u << (HOST_BITS_PER_INT - 1);
1231 else
1232 size_align = (unsigned)lsb * BITS_PER_UNIT;
1233 }
1234 else if (size_align < BITS_PER_UNIT)
1235 size_align = BITS_PER_UNIT;
1236
1237 /* We can't attempt to minimize alignment necessary, because we don't
1238 know the final value of preferred_stack_boundary yet while executing
1239 this code. */
1240 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
1241 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1242
1243 /* We will need to ensure that the address we return is aligned to
1244 REQUIRED_ALIGN. At this point in the compilation, we don't always
1245 know the final value of the STACK_DYNAMIC_OFFSET used in function.c
1246 (it might depend on the size of the outgoing parameter lists, for
1247 example), so we must preventively align the value. We leave space
1248 in SIZE for the hole that might result from the alignment operation. */
1249
1250 unsigned known_align = REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM);
1251 if (known_align == 0)
1252 known_align = BITS_PER_UNIT;
1253 if (required_align > known_align)
1254 {
1255 unsigned extra = (required_align - known_align) / BITS_PER_UNIT;
1256 size = plus_constant (Pmode, size, extra);
1257 size = force_operand (size, NULL_RTX);
1258 if (size_align > known_align)
1259 size_align = known_align;
1260
1261 if (flag_stack_usage_info && pstack_usage_size)
1262 *pstack_usage_size += extra;
1263 }
1264
1265 /* Round the size to a multiple of the required stack alignment.
1266 Since the stack is presumed to be rounded before this allocation,
1267 this will maintain the required alignment.
1268
1269 If the stack grows downward, we could save an insn by subtracting
1270 SIZE from the stack pointer and then aligning the stack pointer.
1271 The problem with this is that the stack pointer may be unaligned
1272 between the execution of the subtraction and alignment insns and
1273 some machines do not allow this. Even on those that do, some
1274 signal handlers malfunction if a signal should occur between those
1275 insns. Since this is an extremely rare event, we have no reliable
1276 way of knowing which systems have this problem. So we avoid even
1277 momentarily mis-aligning the stack. */
1278 if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
1279 {
1280 size = round_push (size);
1281
1282 if (flag_stack_usage_info && pstack_usage_size)
1283 {
1284 int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
1285 *pstack_usage_size =
1286 (*pstack_usage_size + align - 1) / align * align;
1287 }
1288 }
1289
1290 *psize = size;
1291 }
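
/* Sizing sketch with made-up numbers: a request of 100 bytes with
   REQUIRED_ALIGN of 256 bits, when the dynamic area is only known to
   be 64-bit aligned, is first padded by (256 - 64) / 8 == 24 bytes to
   leave room for the later alignment step, and the 124-byte total is
   then rounded up by round_push unless SIZE_ALIGN is already a
   multiple of MAX_SUPPORTED_STACK_ALIGNMENT.  */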
1292
1293 /* Return the number of bytes to "protect" on the stack for -fstack-check.
1294
1295 "protect" in the context of -fstack-check means how many bytes we
1296 should always ensure are available on the stack. More importantly
1297 this is how many bytes are skipped when probing the stack.
1298
1299 On some targets we want to reuse the -fstack-check prologue support
1300 to give a degree of protection against stack clashing style attacks.
1301
1302 In that scenario we do not want to skip bytes before probing as that
1303 would render the stack clash protections useless.
1304
1305 So we never use STACK_CHECK_PROTECT directly. Instead we indirect through
1306 this helper which allows us to provide different values for
1307 -fstack-check and -fstack-clash-protection. */
1308 HOST_WIDE_INT
1309 get_stack_check_protect (void)
1310 {
1311 if (flag_stack_clash_protection)
1312 return 0;
1313 return STACK_CHECK_PROTECT;
1314 }
1315
1316 /* Return an rtx representing the address of an area of memory dynamically
1317 pushed on the stack.
1318
1319 Any required stack pointer alignment is preserved.
1320
1321 SIZE is an rtx representing the size of the area.
1322
1323 SIZE_ALIGN is the alignment (in bits) that we know SIZE has. This
1324 parameter may be zero. If so, a proper value will be extracted
1325 from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.
1326
1327 REQUIRED_ALIGN is the alignment (in bits) required for the region
1328 of memory.
1329
1330 MAX_SIZE is an upper bound for SIZE, if SIZE is not constant, or -1 if
1331 no such upper bound is known.
1332
1333 If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
1334 stack space allocated by the generated code cannot be added with itself
1335 in the course of the execution of the function. It is always safe to
1336 pass FALSE here and the following criterion is sufficient in order to
1337 pass TRUE: every path in the CFG that starts at the allocation point and
1338 loops to it executes the associated deallocation code. */
1339
1340 rtx
1341 allocate_dynamic_stack_space (rtx size, unsigned size_align,
1342 unsigned required_align,
1343 HOST_WIDE_INT max_size,
1344 bool cannot_accumulate)
1345 {
1346 HOST_WIDE_INT stack_usage_size = -1;
1347 rtx_code_label *final_label;
1348 rtx final_target, target;
1349
1350 /* If we're asking for zero bytes, it doesn't matter what we point
1351 to since we can't dereference it. But return a reasonable
1352 address anyway. */
1353 if (size == const0_rtx)
1354 return virtual_stack_dynamic_rtx;
1355
1356 /* Otherwise, show we're calling alloca or equivalent. */
1357 cfun->calls_alloca = 1;
1358
1359 /* If stack usage info is requested, look into the size we are passed.
1360 We need to do so this early to avoid the obfuscation that may be
1361 introduced later by the various alignment operations. */
1362 if (flag_stack_usage_info)
1363 {
1364 if (CONST_INT_P (size))
1365 stack_usage_size = INTVAL (size);
1366 else if (REG_P (size))
1367 {
1368 /* Look into the last emitted insn and see if we can deduce
1369 something for the register. */
1370 rtx_insn *insn;
1371 rtx set, note;
1372 insn = get_last_insn ();
1373 if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
1374 {
1375 if (CONST_INT_P (SET_SRC (set)))
1376 stack_usage_size = INTVAL (SET_SRC (set));
1377 else if ((note = find_reg_equal_equiv_note (insn))
1378 && CONST_INT_P (XEXP (note, 0)))
1379 stack_usage_size = INTVAL (XEXP (note, 0));
1380 }
1381 }
1382
1383 /* If the size is not constant, try the maximum size. */
1384 if (stack_usage_size < 0)
1385 stack_usage_size = max_size;
1386
1387 /* If the size is still not constant, we can't say anything. */
1388 if (stack_usage_size < 0)
1389 {
1390 current_function_has_unbounded_dynamic_stack_size = 1;
1391 stack_usage_size = 0;
1392 }
1393 }
1394
1395 get_dynamic_stack_size (&size, size_align, required_align, &stack_usage_size);
1396
1397 target = gen_reg_rtx (Pmode);
1398
1399 /* The size is supposed to be fully adjusted at this point so record it
1400 if stack usage info is requested. */
1401 if (flag_stack_usage_info)
1402 {
1403 current_function_dynamic_stack_size += stack_usage_size;
1404
1405 /* ??? This is gross but the only safe stance in the absence
1406 of stack usage oriented flow analysis. */
1407 if (!cannot_accumulate)
1408 current_function_has_unbounded_dynamic_stack_size = 1;
1409 }
1410
1411 do_pending_stack_adjust ();
1412
1413 final_label = NULL;
1414 final_target = NULL_RTX;
1415
1416 /* If we are splitting the stack, we need to ask the backend whether
1417 there is enough room on the current stack. If there isn't, or if
1418 the backend doesn't know how to tell us, then we need to call a
1419 function to allocate memory in some other way. This memory will
1420 be released when we release the current stack segment. The
1421 effect is that stack allocation becomes less efficient, but at
1422 least it doesn't cause a stack overflow. */
1423 if (flag_split_stack)
1424 {
1425 rtx_code_label *available_label;
1426 rtx ask, space, func;
1427
1428 available_label = NULL;
1429
1430 if (targetm.have_split_stack_space_check ())
1431 {
1432 available_label = gen_label_rtx ();
1433
1434 /* This instruction will branch to AVAILABLE_LABEL if there
1435 are SIZE bytes available on the stack. */
1436 emit_insn (targetm.gen_split_stack_space_check
1437 (size, available_label));
1438 }
1439
1440 /* The __morestack_allocate_stack_space function will allocate
1441 memory using malloc. If the alignment of the memory returned
1442 by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
1443 make sure we allocate enough space. */
1444 if (MALLOC_ABI_ALIGNMENT >= required_align)
1445 ask = size;
1446 else
1447 ask = expand_binop (Pmode, add_optab, size,
1448 gen_int_mode (required_align / BITS_PER_UNIT - 1,
1449 Pmode),
1450 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1451
1452 func = init_one_libfunc ("__morestack_allocate_stack_space");
1453
1454 space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
1455 ask, Pmode);
1456
1457 if (available_label == NULL_RTX)
1458 return space;
1459
1460 final_target = gen_reg_rtx (Pmode);
1461
1462 emit_move_insn (final_target, space);
1463
1464 final_label = gen_label_rtx ();
1465 emit_jump (final_label);
1466
1467 emit_label (available_label);
1468 }
1469
1470 /* We ought to be called always on the toplevel and stack ought to be aligned
1471 properly. */
1472 gcc_assert (multiple_p (stack_pointer_delta,
1473 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
1474
1475 /* If needed, check that we have the required amount of stack. Take into
1476 account what has already been checked. */
1477 if (STACK_CHECK_MOVING_SP)
1478 ;
1479 else if (flag_stack_check == GENERIC_STACK_CHECK)
1480 probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
1481 size);
1482 else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
1483 probe_stack_range (get_stack_check_protect (), size);
1484
1485 /* Don't let anti_adjust_stack emit notes. */
1486 suppress_reg_args_size = true;
1487
1488 /* Perform the required allocation from the stack. Some systems do
1489 this differently than simply incrementing/decrementing from the
1490 stack pointer, such as acquiring the space by calling malloc(). */
1491 if (targetm.have_allocate_stack ())
1492 {
1493 class expand_operand ops[2];
1494 /* We don't have to check against the predicate for operand 0 since
1495 TARGET is known to be a pseudo of the proper mode, which must
1496 be valid for the operand. */
1497 create_fixed_operand (&ops[0], target);
1498 create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
1499 expand_insn (targetm.code_for_allocate_stack, 2, ops);
1500 }
1501 else
1502 {
1503 poly_int64 saved_stack_pointer_delta;
1504
1505 if (!STACK_GROWS_DOWNWARD)
1506 emit_move_insn (target, virtual_stack_dynamic_rtx);
1507
1508 /* Check stack bounds if necessary. */
1509 if (crtl->limit_stack)
1510 {
1511 rtx available;
1512 rtx_code_label *space_available = gen_label_rtx ();
1513 if (STACK_GROWS_DOWNWARD)
1514 available = expand_binop (Pmode, sub_optab,
1515 stack_pointer_rtx, stack_limit_rtx,
1516 NULL_RTX, 1, OPTAB_WIDEN);
1517 else
1518 available = expand_binop (Pmode, sub_optab,
1519 stack_limit_rtx, stack_pointer_rtx,
1520 NULL_RTX, 1, OPTAB_WIDEN);
1521
1522 emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
1523 space_available);
1524 if (targetm.have_trap ())
1525 emit_insn (targetm.gen_trap ());
1526 else
1527 error ("stack limits not supported on this target");
1528 emit_barrier ();
1529 emit_label (space_available);
1530 }
1531
1532 saved_stack_pointer_delta = stack_pointer_delta;
1533
1534 if (flag_stack_check && STACK_CHECK_MOVING_SP)
1535 anti_adjust_stack_and_probe (size, false);
1536 else if (flag_stack_clash_protection)
1537 anti_adjust_stack_and_probe_stack_clash (size);
1538 else
1539 anti_adjust_stack (size);
1540
1541 /* Even if size is constant, don't modify stack_pointer_delta.
1542 The constant size alloca should preserve
1543 crtl->preferred_stack_boundary alignment. */
1544 stack_pointer_delta = saved_stack_pointer_delta;
1545
1546 if (STACK_GROWS_DOWNWARD)
1547 emit_move_insn (target, virtual_stack_dynamic_rtx);
1548 }
1549
1550 suppress_reg_args_size = false;
1551
1552 /* Finish up the split stack handling. */
1553 if (final_label != NULL_RTX)
1554 {
1555 gcc_assert (flag_split_stack);
1556 emit_move_insn (final_target, target);
1557 emit_label (final_label);
1558 target = final_target;
1559 }
1560
1561 target = align_dynamic_address (target, required_align);
1562
1563 /* Now that we've committed to a return value, mark its alignment. */
1564 mark_reg_pointer (target, required_align);
1565
1566 /* Record the new stack level. */
1567 record_new_stack_level ();
1568
1569 return target;
1570 }
1571
1572 /* Return an rtx representing the address of an area of memory already
1573 statically pushed onto the stack in the virtual stack vars area. (It is
1574 assumed that the area is allocated in the function prologue.)
1575
1576 Any required stack pointer alignment is preserved.
1577
1578 OFFSET is the offset of the area into the virtual stack vars area.
1579
1580 REQUIRED_ALIGN is the alignment (in bits) required for the region
1581 of memory. */
1582
1583 rtx
1584 get_dynamic_stack_base (poly_int64 offset, unsigned required_align)
1585 {
1586 rtx target;
1587
1588 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
1589 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1590
1591 target = gen_reg_rtx (Pmode);
1592 emit_move_insn (target, virtual_stack_vars_rtx);
1593 target = expand_binop (Pmode, add_optab, target,
1594 gen_int_mode (offset, Pmode),
1595 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1596 target = align_dynamic_address (target, required_align);
1597
1598 /* Now that we've committed to a return value, mark its alignment. */
1599 mark_reg_pointer (target, required_align);
1600
1601 return target;
1602 }
1603 \f
1604 /* A front end may want to override GCC's stack checking by providing a
1605 run-time routine to call to check the stack, so provide a mechanism for
1606 calling that routine. */
1607
1608 static GTY(()) rtx stack_check_libfunc;
1609
1610 void
1611 set_stack_check_libfunc (const char *libfunc_name)
1612 {
1613 gcc_assert (stack_check_libfunc == NULL_RTX);
1614 stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
1615 tree decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
1616 get_identifier (libfunc_name), void_type_node);
1617 DECL_EXTERNAL (decl) = 1;
1618 SET_SYMBOL_REF_DECL (stack_check_libfunc, decl);
1619 }
1620 \f
1621 /* Emit one stack probe at ADDRESS, an address within the stack. */
1622
1623 void
1624 emit_stack_probe (rtx address)
1625 {
1626 if (targetm.have_probe_stack_address ())
1627 {
1628 class expand_operand ops[1];
1629 insn_code icode = targetm.code_for_probe_stack_address;
1630 create_address_operand (ops, address);
1631 maybe_legitimize_operands (icode, 0, 1, ops);
1632 expand_insn (icode, 1, ops);
1633 }
1634 else
1635 {
1636 rtx memref = gen_rtx_MEM (word_mode, address);
1637
1638 MEM_VOLATILE_P (memref) = 1;
1639 memref = validize_mem (memref);
1640
1641 /* See if we have an insn to probe the stack. */
1642 if (targetm.have_probe_stack ())
1643 emit_insn (targetm.gen_probe_stack (memref));
1644 else
1645 emit_move_insn (memref, const0_rtx);
1646 }
1647 }
1648
1649 /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
1650 FIRST is a constant and size is a Pmode RTX. These are offsets from
1651 the current stack pointer. STACK_GROWS_DOWNWARD says whether to add
1652 or subtract them from the stack pointer. */
1653
1654 #define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
1655
1656 #if STACK_GROWS_DOWNWARD
1657 #define STACK_GROW_OP MINUS
1658 #define STACK_GROW_OPTAB sub_optab
1659 #define STACK_GROW_OFF(off) -(off)
1660 #else
1661 #define STACK_GROW_OP PLUS
1662 #define STACK_GROW_OPTAB add_optab
1663 #define STACK_GROW_OFF(off) (off)
1664 #endif
1665
1666 void
1667 probe_stack_range (HOST_WIDE_INT first, rtx size)
1668 {
1669 /* First ensure SIZE is Pmode. */
1670 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1671 size = convert_to_mode (Pmode, size, 1);
1672
1673 /* Next see if we have a function to check the stack. */
1674 if (stack_check_libfunc)
1675 {
1676 rtx addr = memory_address (Pmode,
1677 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1678 stack_pointer_rtx,
1679 plus_constant (Pmode,
1680 size, first)));
1681 emit_library_call (stack_check_libfunc, LCT_THROW, VOIDmode,
1682 addr, Pmode);
1683 }
1684
1685 /* Next see if we have an insn to check the stack. */
1686 else if (targetm.have_check_stack ())
1687 {
1688 class expand_operand ops[1];
1689 rtx addr = memory_address (Pmode,
1690 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1691 stack_pointer_rtx,
1692 plus_constant (Pmode,
1693 size, first)));
1694 bool success;
1695 create_input_operand (&ops[0], addr, Pmode);
1696 success = maybe_expand_insn (targetm.code_for_check_stack, 1, ops);
1697 gcc_assert (success);
1698 }
1699
1700 /* Otherwise we have to generate explicit probes. If we have a constant
1701 small number of them to generate, that's the easy case. */
1702 else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
1703 {
1704 HOST_WIDE_INT isize = INTVAL (size), i;
1705 rtx addr;
1706
1707 /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
1708 it exceeds SIZE. If only one probe is needed, this will not
1709 generate any code. Then probe at FIRST + SIZE. */
1710 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1711 {
1712 addr = memory_address (Pmode,
1713 plus_constant (Pmode, stack_pointer_rtx,
1714 STACK_GROW_OFF (first + i)));
1715 emit_stack_probe (addr);
1716 }
1717
1718 addr = memory_address (Pmode,
1719 plus_constant (Pmode, stack_pointer_rtx,
1720 STACK_GROW_OFF (first + isize)));
1721 emit_stack_probe (addr);
1722 }
1723
1724 /* In the variable case, do the same as above, but in a loop. Note that we
1725 must be extra careful with variables wrapping around because we might be
1726 at the very top (or the very bottom) of the address space and we have to
1727 be able to handle this case properly; in particular, we use an equality
1728 test for the loop condition. */
1729 else
1730 {
1731 rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
1732 rtx_code_label *loop_lab = gen_label_rtx ();
1733 rtx_code_label *end_lab = gen_label_rtx ();
1734
1735 /* Step 1: round SIZE to the previous multiple of the interval. */
1736
1737 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
1738 rounded_size
1739 = simplify_gen_binary (AND, Pmode, size,
1740 gen_int_mode (-PROBE_INTERVAL, Pmode));
1741 rounded_size_op = force_operand (rounded_size, NULL_RTX);
1742
1743
1744 /* Step 2: compute initial and final value of the loop counter. */
1745
1746 /* TEST_ADDR = SP + FIRST. */
1747 test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1748 stack_pointer_rtx,
1749 gen_int_mode (first, Pmode)),
1750 NULL_RTX);
1751
1752 /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE. */
1753 last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1754 test_addr,
1755 rounded_size_op), NULL_RTX);
1756
1757
1758 /* Step 3: the loop
1759
1760 while (TEST_ADDR != LAST_ADDR)
1761 {
1762 TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
1763 probe at TEST_ADDR
1764 }
1765
1766 probes at FIRST + N * PROBE_INTERVAL for values of N from 1
1767 until it is equal to ROUNDED_SIZE. */
1768
1769 emit_label (loop_lab);
1770
1771 /* Jump to END_LAB if TEST_ADDR == LAST_ADDR. */
1772 emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
1773 end_lab);
1774
1775 /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL. */
1776 temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
1777 gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
1778 1, OPTAB_WIDEN);
1779
1780 gcc_assert (temp == test_addr);
1781
1782 /* Probe at TEST_ADDR. */
1783 emit_stack_probe (test_addr);
1784
1785 emit_jump (loop_lab);
1786
1787 emit_label (end_lab);
1788
1789
1790 /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
1791 that SIZE is equal to ROUNDED_SIZE. */
1792
1793 /* TEMP = SIZE - ROUNDED_SIZE. */
1794 temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
1795 if (temp != const0_rtx)
1796 {
1797 rtx addr;
1798
1799 if (CONST_INT_P (temp))
1800 {
1801 /* Use [base + disp] addressing mode if supported. */
1802 HOST_WIDE_INT offset = INTVAL (temp);
1803 addr = memory_address (Pmode,
1804 plus_constant (Pmode, last_addr,
1805 STACK_GROW_OFF (offset)));
1806 }
1807 else
1808 {
1809 /* Manual CSE if the difference is not known at compile-time. */
1810 temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
1811 addr = memory_address (Pmode,
1812 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1813 last_addr, temp));
1814 }
1815
1816 emit_stack_probe (addr);
1817 }
1818 }
1819
1820 /* Make sure nothing is scheduled before we are done. */
1821 emit_insn (gen_blockage ());
1822 }
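
/* Illustrative sketch, not part of the compiler: a stand-alone C model of the
   probing sequence emitted above, assuming a downward-growing stack.  The
   names touch_byte and probe_stack_range_model, and the plain integer types,
   are hypothetical stand-ins for emit_stack_probe and the rtx machinery.  */
#if 0
#include <stdint.h>

/* Model of a single probe: touch one byte at the given address.  */
static void
touch_byte (volatile char *p)
{
  *p = 0;
}

static void
probe_stack_range_model (char *sp, uint64_t first, uint64_t size,
			 uint64_t probe_interval)
{
  /* Round SIZE down to a multiple of the probe interval.  */
  uint64_t rounded = size & -probe_interval;

  /* Probe at FIRST + N * PROBE_INTERVAL for N = 1, 2, ...  */
  for (uint64_t i = probe_interval; i <= rounded; i += probe_interval)
    touch_byte (sp - (first + i));

  /* Probe the residual at FIRST + SIZE, if any.  */
  if (size != rounded)
    touch_byte (sp - (first + size));
}
#endif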
1823
1824 /* Compute parameters for stack clash probing a dynamic stack
1825 allocation of SIZE bytes.
1826
1827 We compute ROUNDED_SIZE, LAST_ADDR, RESIDUAL and PROBE_INTERVAL.
1828
1829 Additionally we conditionally dump the type of probing that will
1830 be needed given the values computed. */
1831
1832 void
1833 compute_stack_clash_protection_loop_data (rtx *rounded_size, rtx *last_addr,
1834 rtx *residual,
1835 HOST_WIDE_INT *probe_interval,
1836 rtx size)
1837 {
1838 /* Round SIZE down to a multiple of STACK_CLASH_PROTECTION_PROBE_INTERVAL. */
1839 *probe_interval
1840 = 1 << param_stack_clash_protection_probe_interval;
1841 *rounded_size = simplify_gen_binary (AND, Pmode, size,
1842 GEN_INT (-*probe_interval));
1843
1844 /* Compute the value of the stack pointer for the last iteration.
1845 It's just SP + ROUNDED_SIZE. */
1846 rtx rounded_size_op = force_operand (*rounded_size, NULL_RTX);
1847 *last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1848 stack_pointer_rtx,
1849 rounded_size_op),
1850 NULL_RTX);
1851
1852 /* Compute any residuals not allocated by the loop above. Residuals
1853 are just SIZE - ROUNDED_SIZE. */
1854 *residual = simplify_gen_binary (MINUS, Pmode, size, *rounded_size);
1855
1856 /* Dump key information to make writing tests easy. */
1857 if (dump_file)
1858 {
1859 if (*rounded_size == CONST0_RTX (Pmode))
1860 fprintf (dump_file,
1861 "Stack clash skipped dynamic allocation and probing loop.\n");
1862 else if (CONST_INT_P (*rounded_size)
1863 && INTVAL (*rounded_size) <= 4 * *probe_interval)
1864 fprintf (dump_file,
1865 "Stack clash dynamic allocation and probing inline.\n");
1866 else if (CONST_INT_P (*rounded_size))
1867 fprintf (dump_file,
1868 "Stack clash dynamic allocation and probing in "
1869 "rotated loop.\n");
1870 else
1871 fprintf (dump_file,
1872 "Stack clash dynamic allocation and probing in loop.\n");
1873
1874 if (*residual != CONST0_RTX (Pmode))
1875 fprintf (dump_file,
1876 "Stack clash dynamic allocation and probing residuals.\n");
1877 else
1878 fprintf (dump_file,
1879 "Stack clash skipped dynamic allocation and "
1880 "probing residuals.\n");
1881 }
1882 }
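
/* Illustrative sketch, not part of the compiler: the arithmetic performed
   above, written out for a compile-time-constant SIZE.  The function and
   parameter names are hypothetical; the probe interval is
   2**param_stack_clash_protection_probe_interval.  */
#if 0
#include <stdint.h>

static void
stack_clash_loop_data_model (uint64_t size, unsigned log2_interval,
			     uint64_t *rounded_size, uint64_t *residual,
			     uint64_t *probe_interval)
{
  *probe_interval = (uint64_t) 1 << log2_interval;
  /* Round SIZE down to a multiple of the interval; the loop allocates
     and probes this much.  */
  *rounded_size = size & -*probe_interval;
  /* Whatever is left over is handled separately as the residual.  */
  *residual = size - *rounded_size;
}
#endif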
1883
1884 /* Emit the start of an allocate/probe loop for stack
1885 clash protection.
1886
1887 LOOP_LAB and END_LAB are returned for use when we emit the
1888 end of the loop.
1889
1890 LAST_ADDR is the value of SP which stops the loop. */
1891 void
1892 emit_stack_clash_protection_probe_loop_start (rtx *loop_lab,
1893 rtx *end_lab,
1894 rtx last_addr,
1895 bool rotated)
1896 {
1897 /* Essentially we want to emit any setup code, the top of loop
1898 label and the comparison at the top of the loop. */
1899 *loop_lab = gen_label_rtx ();
1900 *end_lab = gen_label_rtx ();
1901
1902 emit_label (*loop_lab);
1903 if (!rotated)
1904 emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
1905 Pmode, 1, *end_lab);
1906 }
1907
1908 /* Emit the end of a stack clash probing loop.
1909
1910 This consists of just the jump back to LOOP_LAB and
1911 emitting END_LOOP after the loop. */
1912
1913 void
1914 emit_stack_clash_protection_probe_loop_end (rtx loop_lab, rtx end_loop,
1915 rtx last_addr, bool rotated)
1916 {
1917 if (rotated)
1918 emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, NE, NULL_RTX,
1919 Pmode, 1, loop_lab);
1920 else
1921 emit_jump (loop_lab);
1922
1923 emit_label (end_loop);
1924
1925 }
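
/* Illustrative sketch, not part of the compiler: the two loop shapes that the
   start/end helpers above bracket, with the per-iteration allocation and
   probe that the caller emits in between shown inline.  A rotated loop tests
   only at the bottom and so assumes at least one iteration.  The names and
   the downward-growing stack are assumptions of this sketch.  */
#if 0
static void
probe_loop_shape_model (char *sp, char *last_addr, long interval, int rotated)
{
  if (rotated)
    {
      do
	{
	  sp -= interval;                 /* allocate one interval */
	  *(volatile char *) sp = 0;      /* probe within it */
	}
      while (sp != last_addr);
    }
  else
    {
      while (sp != last_addr)
	{
	  sp -= interval;
	  *(volatile char *) sp = 0;
	}
    }
}
#endif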
1926
1927 /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
1928 while probing it. This pushes when SIZE is positive. SIZE need not
1929 be constant.
1930
1931 This is subtly different from anti_adjust_stack_and_probe, in order to
1932 prevent stack-clash attacks:
1933
1934 1. It must assume no knowledge of the probing state; any allocation
1935 must probe.
1936
1937 Consider the case of a 1 byte alloca in a loop. If the sum of the
1938 allocations is large, then this could be used to jump the guard if
1939 probes were not emitted.
1940
1941 2. It never skips probes, whereas anti_adjust_stack_and_probe will
1942 skip probes on the first couple of PROBE_INTERVALs on the assumption
1943 they're done elsewhere.
1944
1945 3. It only allocates and probes SIZE bytes, it does not need to
1946 allocate/probe beyond that because this probing style does not
1947 guarantee signal handling capability if the guard is hit. */
1948
1949 static void
1950 anti_adjust_stack_and_probe_stack_clash (rtx size)
1951 {
1952 /* First ensure SIZE is Pmode. */
1953 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1954 size = convert_to_mode (Pmode, size, 1);
1955
1956 /* We can get here with a constant size on some targets. */
1957 rtx rounded_size, last_addr, residual;
1958 HOST_WIDE_INT probe_interval, probe_range;
1959 bool target_probe_range_p = false;
1960 compute_stack_clash_protection_loop_data (&rounded_size, &last_addr,
1961 &residual, &probe_interval, size);
1962
1963 /* Get the back-end specific probe ranges. */
1964 probe_range = targetm.stack_clash_protection_alloca_probe_range ();
1965 target_probe_range_p = probe_range != 0;
1966 gcc_assert (probe_range >= 0);
1967
1968 /* If no back-end specific range is defined, default to the top of the newly
1969 allocated range. */
1970 if (probe_range == 0)
1971 probe_range = probe_interval - GET_MODE_SIZE (word_mode);
1972
1973 if (rounded_size != CONST0_RTX (Pmode))
1974 {
1975 if (CONST_INT_P (rounded_size)
1976 && INTVAL (rounded_size) <= 4 * probe_interval)
1977 {
1978 for (HOST_WIDE_INT i = 0;
1979 i < INTVAL (rounded_size);
1980 i += probe_interval)
1981 {
1982 anti_adjust_stack (GEN_INT (probe_interval));
1983 /* The prologue does not probe residuals. Thus the offset
1984 here is to probe just beyond what the prologue had already
1985 allocated. */
1986 emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
1987 probe_range));
1988
1989 emit_insn (gen_blockage ());
1990 }
1991 }
1992 else
1993 {
1994 rtx loop_lab, end_loop;
1995 bool rotate_loop = CONST_INT_P (rounded_size);
1996 emit_stack_clash_protection_probe_loop_start (&loop_lab, &end_loop,
1997 last_addr, rotate_loop);
1998
1999 anti_adjust_stack (GEN_INT (probe_interval));
2000
2001 /* The prologue does not probe residuals. Thus the offset here
2002 is to probe just beyond what the prologue had already
2003 allocated. */
2004 emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
2005 probe_range));
2006
2007 emit_stack_clash_protection_probe_loop_end (loop_lab, end_loop,
2008 last_addr, rotate_loop);
2009 emit_insn (gen_blockage ());
2010 }
2011 }
2012
2013 if (residual != CONST0_RTX (Pmode))
2014 {
2015 rtx label = NULL_RTX;
2016 /* RESIDUAL could be zero at runtime and in that case *sp could
2017 hold live data. Furthermore, we do not want to probe into the
2018 red zone.
2019
2020 If TARGET_PROBE_RANGE_P then the target has promised it's safe to
2021 probe at offset 0, in which case we no longer have to check for
2022 RESIDUAL == 0. However we still need to probe at the right offset
2023 when RESIDUAL > PROBE_RANGE, in which case we probe at PROBE_RANGE.
2024
2025 If !TARGET_PROBE_RANGE_P then just guard the probe at *sp on
2026 RESIDUAL != 0 at runtime if RESIDUAL is not a compile-time constant.
2027 */
2028 anti_adjust_stack (residual);
2029
2030 if (!CONST_INT_P (residual))
2031 {
2032 label = gen_label_rtx ();
2033 rtx_code op = target_probe_range_p ? LT : EQ;
2034 rtx probe_cmp_value = target_probe_range_p
2035 ? gen_rtx_CONST_INT (GET_MODE (residual), probe_range)
2036 : CONST0_RTX (GET_MODE (residual));
2037
2038 if (target_probe_range_p)
2039 emit_stack_probe (stack_pointer_rtx);
2040
2041 emit_cmp_and_jump_insns (residual, probe_cmp_value,
2042 op, NULL_RTX, Pmode, 1, label);
2043 }
2044
2045 rtx x = NULL_RTX;
2046
2047 /* If RESIDUAL isn't a constant and TARGET_PROBE_RANGE_P then we probe up
2048 by the ABI-defined safe value. */
2049 if (!CONST_INT_P (residual) && target_probe_range_p)
2050 x = GEN_INT (probe_range);
2051 /* If RESIDUAL is a constant but smaller than the ABI-defined safe value,
2052 we still want to probe up, but the safest amount is a word. */
2053 else if (target_probe_range_p)
2054 {
2055 if (INTVAL (residual) <= probe_range)
2056 x = GEN_INT (GET_MODE_SIZE (word_mode));
2057 else
2058 x = GEN_INT (probe_range);
2059 }
2060 else
2061 /* If nothing else, probe at the top of the new allocation. */
2062 x = plus_constant (Pmode, residual, -GET_MODE_SIZE (word_mode));
2063
2064 emit_stack_probe (gen_rtx_PLUS (Pmode, stack_pointer_rtx, x));
2065
2066 emit_insn (gen_blockage ());
2067 if (!CONST_INT_P (residual))
2068 emit_label (label);
2069 }
2070 }
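
/* Illustrative sketch, not part of the compiler: a stand-alone C model of the
   allocate-and-probe sequence above for a downward-growing stack.
   PROBE_RANGE plays the role of the target's safe probing offset into each
   newly allocated interval; the residual handling is simplified to the
   !TARGET_PROBE_RANGE_P, compile-time-constant case, and all names are
   hypothetical.  */
#if 0
#include <stdint.h>

static void
touch_byte (volatile char *p)
{
  *p = 0;
}

static char *
alloca_stack_clash_model (char *sp, uint64_t size, uint64_t interval,
			  uint64_t probe_range)
{
  uint64_t rounded = size & -interval;
  uint64_t residual = size - rounded;

  /* Allocate and probe one interval at a time; every allocation probes.  */
  for (uint64_t done = 0; done < rounded; done += interval)
    {
      sp -= interval;
      touch_byte (sp + probe_range);
    }

  /* Allocate the residual and probe at its top (one word below the old SP).  */
  if (residual != 0)
    {
      sp -= residual;
      touch_byte (sp + residual - sizeof (void *));
    }
  return sp;
}
#endif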
2071
2072
2073 /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
2074 while probing it. This pushes when SIZE is positive. SIZE need not
2075 be constant. If ADJUST_BACK is true, adjust back the stack pointer
2076 by plus SIZE at the end. */
2077
2078 void
2079 anti_adjust_stack_and_probe (rtx size, bool adjust_back)
2080 {
2081 /* We skip the probe for the first interval + a small dope of 4 words and
2082 probe that many bytes past the specified size to maintain a protection
2083 area at the bottom of the stack. */
2084 const int dope = 4 * UNITS_PER_WORD;
2085
2086 /* First ensure SIZE is Pmode. */
2087 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
2088 size = convert_to_mode (Pmode, size, 1);
2089
2090 /* If we have a constant small number of probes to generate, that's the
2091 easy case. */
2092 if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
2093 {
2094 HOST_WIDE_INT isize = INTVAL (size), i;
2095 bool first_probe = true;
2096
2097 /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
2098 values of N from 1 until it exceeds SIZE. If only one probe is
2099 needed, this will not generate any code. Then adjust and probe
2100 to PROBE_INTERVAL + SIZE. */
2101 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
2102 {
2103 if (first_probe)
2104 {
2105 anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
2106 first_probe = false;
2107 }
2108 else
2109 anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
2110 emit_stack_probe (stack_pointer_rtx);
2111 }
2112
2113 if (first_probe)
2114 anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
2115 else
2116 anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
2117 emit_stack_probe (stack_pointer_rtx);
2118 }
2119
2120 /* In the variable case, do the same as above, but in a loop. Note that we
2121 must be extra careful with variables wrapping around because we might be
2122 at the very top (or the very bottom) of the address space and we have to
2123 be able to handle this case properly; in particular, we use an equality
2124 test for the loop condition. */
2125 else
2126 {
2127 rtx rounded_size, rounded_size_op, last_addr, temp;
2128 rtx_code_label *loop_lab = gen_label_rtx ();
2129 rtx_code_label *end_lab = gen_label_rtx ();
2130
2131
2132 /* Step 1: round SIZE to the previous multiple of the interval. */
2133
2134 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
2135 rounded_size
2136 = simplify_gen_binary (AND, Pmode, size,
2137 gen_int_mode (-PROBE_INTERVAL, Pmode));
2138 rounded_size_op = force_operand (rounded_size, NULL_RTX);
2139
2140
2141 /* Step 2: compute initial and final value of the loop counter. */
2142
2143 /* SP = SP_0 + PROBE_INTERVAL. */
2144 anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
2145
2146 /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE. */
2147 last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
2148 stack_pointer_rtx,
2149 rounded_size_op), NULL_RTX);
2150
2151
2152 /* Step 3: the loop
2153
2154 while (SP != LAST_ADDR)
2155 {
2156 SP = SP + PROBE_INTERVAL
2157 probe at SP
2158 }
2159
2160 adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
2161 values of N from 1 until it is equal to ROUNDED_SIZE. */
2162
2163 emit_label (loop_lab);
2164
2165 /* Jump to END_LAB if SP == LAST_ADDR. */
2166 emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
2167 Pmode, 1, end_lab);
2168
2169 /* SP = SP + PROBE_INTERVAL and probe at SP. */
2170 anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
2171 emit_stack_probe (stack_pointer_rtx);
2172
2173 emit_jump (loop_lab);
2174
2175 emit_label (end_lab);
2176
2177
2178 /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
2179 assert at compile-time that SIZE is equal to ROUNDED_SIZE. */
2180
2181 /* TEMP = SIZE - ROUNDED_SIZE. */
2182 temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
2183 if (temp != const0_rtx)
2184 {
2185 /* Manual CSE if the difference is not known at compile-time. */
2186 if (GET_CODE (temp) != CONST_INT)
2187 temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
2188 anti_adjust_stack (temp);
2189 emit_stack_probe (stack_pointer_rtx);
2190 }
2191 }
2192
2193 /* Adjust back and account for the additional first interval. */
2194 if (adjust_back)
2195 adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
2196 else
2197 adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
2198 }
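
/* Illustrative sketch, not part of the compiler: a stand-alone C model of the
   adjust-and-probe sequence above for a downward-growing stack.  The first
   PROBE_INTERVAL plus the small dope is assumed to be protected already, so
   the region is temporarily over-allocated by that amount while probing and
   the excess (plus SIZE itself when ADJUST_BACK) is given back at the end.
   All names are hypothetical.  */
#if 0
#include <stdint.h>

static void
touch_byte (volatile char *p)
{
  *p = 0;
}

static char *
anti_adjust_and_probe_model (char *sp, uint64_t size, uint64_t interval,
			     uint64_t dope, int adjust_back)
{
  uint64_t rounded = size & -interval;

  /* Skip over the interval (and dope) assumed to be probed already.  */
  sp -= interval + dope;

  /* Allocate and probe one interval at a time.  */
  for (uint64_t done = 0; done < rounded; done += interval)
    {
      sp -= interval;
      touch_byte (sp);
    }

  /* Allocate and probe the residual, if any.  */
  if (size != rounded)
    {
      sp -= size - rounded;
      touch_byte (sp);
    }

  /* Give back the over-allocation, and SIZE too if requested.  */
  sp += interval + dope + (adjust_back ? size : 0);
  return sp;
}
#endif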
2199
2200 /* Return an rtx representing the register or memory location
2201 in which a scalar value of data type VALTYPE
2202 was returned by a function call to function FUNC.
2203 FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
2204 function is known, otherwise 0.
2205 OUTGOING is 1 if on a machine with register windows this function
2206 should return the register in which the function will put its result
2207 and 0 otherwise. */
2208
2209 rtx
2210 hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
2211 int outgoing ATTRIBUTE_UNUSED)
2212 {
2213 rtx val;
2214
2215 val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);
2216
2217 if (REG_P (val)
2218 && GET_MODE (val) == BLKmode)
2219 {
2220 unsigned HOST_WIDE_INT bytes = arg_int_size_in_bytes (valtype);
2221 opt_scalar_int_mode tmpmode;
2222
2223 /* int_size_in_bytes can return -1. We don't need a check here
2224 since the value of bytes will then be large enough that no
2225 mode will match anyway. */
2226
2227 FOR_EACH_MODE_IN_CLASS (tmpmode, MODE_INT)
2228 {
2229 /* Have we found a large enough mode? */
2230 if (GET_MODE_SIZE (tmpmode.require ()) >= bytes)
2231 break;
2232 }
2233
2234 PUT_MODE (val, tmpmode.require ());
2235 }
2236 return val;
2237 }
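
/* Illustrative sketch, not part of the compiler: the mode search above simply
   picks the narrowest integer mode whose size covers the BLKmode return
   value.  A plain C equivalent over a hypothetical list of candidate byte
   sizes.  */
#if 0
static unsigned
smallest_covering_size (unsigned long bytes)
{
  static const unsigned candidates[] = { 1, 2, 4, 8, 16 };
  for (unsigned i = 0; i < sizeof candidates / sizeof candidates[0]; i++)
    if (candidates[i] >= bytes)
      return candidates[i];
  /* The real code requires that some integer mode be wide enough.  */
  return 0;
}
#endif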
2238
2239 /* Return an rtx representing the register or memory location
2240 in which a scalar value of mode MODE was returned by a library call. */
2241
2242 rtx
2243 hard_libcall_value (machine_mode mode, rtx fun)
2244 {
2245 return targetm.calls.libcall_value (mode, fun);
2246 }
2247
2248 /* Look up the tree code for a given rtx code
2249 to provide the arithmetic operation for real_arithmetic.
2250 The function returns an int because the caller may not know
2251 what `enum tree_code' means. */
2252
2253 int
2254 rtx_to_tree_code (enum rtx_code code)
2255 {
2256 enum tree_code tcode;
2257
2258 switch (code)
2259 {
2260 case PLUS:
2261 tcode = PLUS_EXPR;
2262 break;
2263 case MINUS:
2264 tcode = MINUS_EXPR;
2265 break;
2266 case MULT:
2267 tcode = MULT_EXPR;
2268 break;
2269 case DIV:
2270 tcode = RDIV_EXPR;
2271 break;
2272 case SMIN:
2273 tcode = MIN_EXPR;
2274 break;
2275 case SMAX:
2276 tcode = MAX_EXPR;
2277 break;
2278 default:
2279 tcode = LAST_AND_UNUSED_TREE_CODE;
2280 break;
2281 }
2282 return ((int) tcode);
2283 }
2284
2285 #include "gt-explow.h"