/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"

#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif

static rtx break_out_memory_refs PARAMS ((rtx));
static void emit_stack_probe PARAMS ((rtx));


/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  int width = GET_MODE_BITSIZE (mode);

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
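
/* A worked example of the mask/xor/subtract trick above (a sketch,
   assuming QImode is 8 bits wide and HOST_BITS_PER_WIDE_INT is larger):
   for C == 0xff, the mask keeps the low 8 bits, XORing with SIGN (0x80)
   gives 0x7f, and subtracting SIGN yields -1, i.e. 0xff correctly
   sign-extended to a full HOST_WIDE_INT.  For C == 0x7f, the XOR gives
   0xff and the subtraction restores 0x7f, which was already in range.  */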

/* Return an rtx for the sum of X and the integer C.

   This function should be used via the `plus_constant' macro.  */

rtx
plus_constant_wide (x, c)
     register rtx x;
     register HOST_WIDE_INT c;
{
  register RTX_CODE code;
  rtx y;
  register enum machine_mode mode;
  register rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        unsigned HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
        unsigned HOST_WIDE_INT lv;
        HOST_WIDE_INT hv;

        add_double (l1, h1, l2, h2, &lv, &hv);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem
            = force_const_mem (GET_MODE (x),
                               plus_constant (get_pool_constant (XEXP (x, 0)),
                                              c));
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
         Look for constant term in the sum and combine
         with C.  For an integer constant term, we make a combined
         integer.  For a constant term that is not an explicit integer,
         we cannot really combine, but group them together anyway.

         Restart or use a recursive call in case the remaining operand is
         something that we handle specially, such as a SYMBOL_REF.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          c += INTVAL (XEXP (x, 1));

          if (GET_MODE (x) != VOIDmode)
            c = trunc_int_for_mode (c, GET_MODE (x));

          x = XEXP (x, 0);
          goto restart;
        }
      else if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
          c = 0;
        }
      else if (find_constant_term_loc (&y))
        {
          /* We need to be careful since X may be shared and we can't
             modify it in place.  */
          rtx copy = copy_rtx (x);
          rtx *const_loc = find_constant_term_loc (&copy);

          *const_loc = plus_constant (*const_loc, c);
          x = copy;
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
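
/* Usage sketch (hypothetical RTL): via the `plus_constant' macro,
   adding 8 to (plus (reg R) (const_int 4)) folds the constants into
   (plus (reg R) (const_int 12)), and adding 4 to (symbol_ref S) yields
   (const (plus (symbol_ref S) (const_int 4))), so the result remains a
   single constant expression.  */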
\f
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (x, constptr)
     rtx x;
     rtx *constptr;
{
  register rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (GET_CODE (XEXP (x, 1)) == CONST_INT
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
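
/* For example (a sketch): with X == (plus (reg R) (const_int 8)) and
   *CONSTPTR == const0_rtx, the result is (reg R) and *CONSTPTR becomes
   (const_int 8).  A sum containing no constant term is returned
   unchanged, with *CONSTPTR untouched.  */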

/* Return the insn that next references REG after INSN, or 0
   if REG is clobbered before it is next referenced or if we cannot find
   an insn that references REG in a straight-line piece of code.  */

rtx
find_next_ref (reg, insn)
     rtx reg;
     rtx insn;
{
  rtx next;

  for (insn = NEXT_INSN (insn); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
        continue;
      if (GET_CODE (insn) == CODE_LABEL
          || GET_CODE (insn) == BARRIER)
        return 0;
      if (GET_CODE (insn) == INSN
          || GET_CODE (insn) == JUMP_INSN
          || GET_CODE (insn) == CALL_INSN)
        {
          if (reg_set_p (reg, insn))
            return 0;
          if (reg_mentioned_p (reg, PATTERN (insn)))
            return insn;
          if (GET_CODE (insn) == JUMP_INSN)
            {
              if (any_uncondjump_p (insn))
                next = JUMP_LABEL (insn);
              else
                return 0;
            }
          if (GET_CODE (insn) == CALL_INSN
              && REGNO (reg) < FIRST_PSEUDO_REGISTER
              && call_used_regs[REGNO (reg)])
            return 0;
        }
      else
        abort ();
    }
  return 0;
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (exp)
     tree exp;
{
  tree size;

  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
      && DECL_SIZE_UNIT (exp) != 0)
    size = DECL_SIZE_UNIT (exp);
  else
    size = size_in_bytes (TREE_TYPE (exp));

  if (TREE_CODE (size) != INTEGER_CST
      && contains_placeholder_p (size))
    size = build (WITH_RECORD_EXPR, sizetype, size, exp);

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype),
                      EXPAND_MEMORY_USE_BAD);
}
\f
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and
   multiplication.  Values returned by expand_expr with 1 for sum_ok fit
   this constraint.  */

static rtx
break_out_memory_refs (x)
     register rtx x;
{
  if (GET_CODE (x) == MEM
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      register rtx op0 = break_out_memory_refs (XEXP (x, 0));
      register rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
    }

  return x;
}
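
/* Illustration (hypothetical RTL): for X == (plus (mem A) (symbol_ref S)),
   both operands are loaded into fresh pseudos and the result is
   (plus (reg P1) (reg P2)); an X containing no MEMs or symbolic
   constants is returned as-is, with no insns emitted.  */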

#ifdef POINTERS_EXTEND_UNSIGNED

/* Given X, a memory address in ptr_mode, convert it to an address
   in Pmode, or vice versa (TO_MODE says which way).  We take advantage of
   the fact that pointers are not allowed to overflow by commuting arithmetic
   operations over conversions so that address arithmetic insns can be
   used.  */

rtx
convert_memory_address (to_mode, x)
     enum machine_mode to_mode;
     rtx x;
{
  enum machine_mode from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;
  rtx temp;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      return x;

    case SUBREG:
      if (POINTERS_EXTEND_UNSIGNED >= 0
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      if (POINTERS_EXTEND_UNSIGNED >= 0)
        {
          temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
          LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
          return temp;
        }
      break;

    case SYMBOL_REF:
      if (POINTERS_EXTEND_UNSIGNED >= 0)
        {
          temp = gen_rtx_SYMBOL_REF (to_mode, XSTR (x, 0));
          SYMBOL_REF_FLAG (temp) = SYMBOL_REF_FLAG (x);
          CONSTANT_POOL_ADDRESS_P (temp) = CONSTANT_POOL_ADDRESS_P (x);
          STRING_POOL_ADDRESS_P (temp) = STRING_POOL_ADDRESS_P (x);
          return temp;
        }
      break;

    case CONST:
      if (POINTERS_EXTEND_UNSIGNED >= 0)
        return gen_rtx_CONST (to_mode,
                              convert_memory_address (to_mode, XEXP (x, 0)));
      break;

    case PLUS:
    case MULT:
      /* If the second operand of an addition is a small constant, we can
         safely permute the conversion and the addition.  We can always
         safely permute them if we are making the address narrower.  In
         addition, always permute the operations if the first operand is
         a constant.  */
      if (POINTERS_EXTEND_UNSIGNED >= 0
          && (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
              || (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT
                  && (INTVAL (XEXP (x, 1)) + 20000 < 40000
                      || CONSTANT_P (XEXP (x, 0))))))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address (to_mode, XEXP (x, 0)),
                               convert_memory_address (to_mode, XEXP (x, 1)));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
}
#endif
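
/* Illustration (assuming a hypothetical port where ptr_mode is SImode,
   Pmode is DImode and POINTERS_EXTEND_UNSIGNED is 1): converting
   (plus:SI (reg:SI R) (const_int 4)) to DImode permutes the operations,
   giving (plus:DI (zero_extend:DI (reg:SI R)) (const_int 4)), so the
   target's DImode address arithmetic patterns can be used directly.  */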

/* Given a memory address or facsimile X, construct a new address,
   currently equivalent, that is stable: future stores won't change it.

   X must be composed of constants, register and memory references
   combined with addition, subtraction and multiplication:
   in other words, just what you can get from expand_expr if sum_ok is 1.

   Works by making copies of all regs and memory locations used
   by X and combining them the same way X does.
   You could also stabilize the reference to this address
   by copying the address to a register with copy_to_reg;
   but then you wouldn't get indexed addressing in the reference.  */

rtx
copy_all_regs (x)
     register rtx x;
{
  if (GET_CODE (x) == REG)
    {
      if (REGNO (x) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && REGNO (x) != HARD_FRAME_POINTER_REGNUM
#endif
          )
        x = copy_to_reg (x);
    }
  else if (GET_CODE (x) == MEM)
    x = copy_to_reg (x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      register rtx op0 = copy_all_regs (XEXP (x, 0));
      register rtx op1 = copy_all_regs (XEXP (x, 1));
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
    }
  return x;
}
\f
/* Return something equivalent to X but valid as a memory address
   for something of mode MODE.  When X is not itself valid, this
   works by copying X or subexpressions of it into registers.  */

rtx
memory_address (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  register rtx oldx = x;

  if (GET_CODE (x) == ADDRESSOF)
    return x;

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) == ptr_mode)
    x = convert_memory_address (Pmode, x);
#endif

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (Pmode, x);

  /* Accept a QUEUED that refers to a REG
     even though that isn't a valid address.
     On attempting to put this in an insn we will call protect_from_queue
     which will turn it into a REG, which is valid.  */
  else if (GET_CODE (x) == QUEUED
           && GET_CODE (QUEUED_VAR (x)) == REG)
    ;

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && GET_CODE (x) != REG)
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      GO_IF_LEGITIMATE_ADDRESS (mode, x, win);

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_p (mode, oldx))
        goto win2;

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      LEGITIMIZE_ADDRESS (x, oldx, mode, win);

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_p (mode, y))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_p (mode, y))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (GET_CODE (x) == REG)
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (Pmode, x);

      goto done;

    win2:
      x = oldx;
    win:
      if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
          /* Don't copy an addr via a reg if it is one of our stack slots.  */
          && ! (GET_CODE (x) == PLUS
                && (XEXP (x, 0) == virtual_stack_vars_rtx
                    || XEXP (x, 0) == virtual_incoming_args_rtx)))
        {
          if (general_operand (x, Pmode))
            x = force_reg (Pmode, x);
          else
            x = force_operand (x, NULL_RTX);
        }
    }

 done:

  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (GET_CODE (x) == REG)
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && GET_CODE (XEXP (x, 0)) == REG
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
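
/* Sketch of the constant-splitting path above (hypothetical RTL): for
   X == (plus (plus (reg R1) (reg R2)) (const_int 4)), the constant is
   split off, the two-register sum is computed into a fresh pseudo P,
   and the address becomes (plus (reg P) (const_int 4)) when the target
   accepts that form; otherwise force_operand computes the whole sum.  */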

/* Like `memory_address' but pretend `flag_force_addr' is 0.  */

rtx
memory_address_noforce (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int ambient_force_addr = flag_force_addr;
  rtx val;

  flag_force_addr = 0;
  val = memory_address (mode, x);
  flag_force_addr = ambient_force_addr;
  return val;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (ref)
     rtx ref;
{
  if (GET_CODE (ref) != MEM)
    return ref;
  if (! (flag_force_addr && CONSTANT_ADDRESS_P (XEXP (ref, 0)))
      && memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}
\f
/* Given REF, either a MEM or a REG, and T, either the type of REF or
   the expression corresponding to REF, set RTX_UNCHANGING_P if
   appropriate.  */

void
maybe_set_unchanging (ref, t)
     rtx ref;
     tree t;
{
  /* We can set RTX_UNCHANGING_P from TREE_READONLY for decls whose
     initialization is only executed once, or whose initializer always
     has the same value.  Currently we simplify this to PARM_DECLs in the
     first case, and decls with TREE_CONSTANT initializers in the second.  */
  if ((TREE_READONLY (t) && DECL_P (t)
       && (TREE_CODE (t) == PARM_DECL
           || DECL_INITIAL (t) == NULL_TREE
           || TREE_CONSTANT (DECL_INITIAL (t))))
      || TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
    RTX_UNCHANGING_P (ref) = 1;
}

/* Given REF, a MEM, and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  */

void
set_mem_attributes (ref, t, objectp)
     rtx ref;
     tree t;
     int objectp;
{
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and then copy bits from the type.  */

  /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY (type)
     here, because, in C and C++, the fact that a location is accessed
     through a const expression does not mean that the value there can
     never change.  */
  set_mem_alias_set (ref, get_alias_set (t));
  MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);

  /* If we are making an object of this type, we know that it is a scalar if
     the type is not an aggregate.  */
  if (objectp && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* If T is a type, this is all we can do.  Otherwise, we may be able
     to deduce some more information about the expression.  */
  if (TYPE_P (t))
    return;

  maybe_set_unchanging (ref, t);
  if (TREE_THIS_VOLATILE (t))
    MEM_VOLATILE_P (ref) = 1;

  /* Now see if we can say more about whether it's an aggregate or
     scalar.  If we already know it's an aggregate, don't bother.  */
  if (MEM_IN_STRUCT_P (ref))
    return;

  /* Now remove any NOPs: they don't change what the underlying object is.
     Likewise for SAVE_EXPR.  */
  while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
         || TREE_CODE (t) == NON_LVALUE_EXPR || TREE_CODE (t) == SAVE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Since we already know the type isn't an aggregate, if this is a decl,
     it must be a scalar.  Or if it is a reference into an aggregate,
     this is part of an aggregate.  Otherwise we don't know.  */
  if (DECL_P (t))
    MEM_SCALAR_P (ref) = 1;
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
           || TREE_CODE (t) == ARRAY_RANGE_REF
           || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
\f
/* Return a modified copy of X with its memory address copied
   into a temporary register to protect it from side effects.
   If X is not a MEM, it is returned unchanged (and not copied).
   Perhaps even if it is a MEM, if there is no need to change it.  */

rtx
stabilize (x)
     rtx x;
{
  if (GET_CODE (x) != MEM
      || ! rtx_unstable_p (XEXP (x, 0)))
    return x;

  return
    replace_equiv_address (x, force_reg (Pmode, copy_all_regs (XEXP (x, 0))));
}
\f
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (x)
     rtx x;
{
  register rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (x)
     rtx x;
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (mode, x)
     enum machine_mode mode;
     rtx x;
{
  register rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
    abort ();
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (mode, x)
     enum machine_mode mode;
     rtx x;
{
  register rtx temp, insn, set;

  if (GET_CODE (x) == REG)
    return x;

  temp = gen_reg_rtx (mode);

  if (! general_operand (x, mode))
    x = force_operand (x, NULL_RTX);

  insn = emit_move_insn (temp, x);

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp)
    {
      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

      if (note)
        XEXP (note, 0) = x;
      else
        REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, x, REG_NOTES (insn));
    }
  return temp;
}
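
/* Sketch: force_reg (SImode, GEN_INT (42)) emits
   (set (reg:SI P) (const_int 42)) and records a REG_EQUAL note for 42,
   so later passes may treat P as a known constant; passing a REG
   returns it unchanged with no insns emitted.  */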

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (x)
     rtx x;
{
  register rtx temp;

  if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));
  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (x, target, mode)
     rtx x, target;
     enum machine_mode mode;
{
  register rtx temp;

  if (target && GET_CODE (target) == REG)
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}
\f
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_CALL is non-zero if this call is promoting args for a call.  */

enum machine_mode
promote_mode (type, mode, punsignedp, for_call)
     tree type;
     enum machine_mode mode;
     int *punsignedp;
     int for_call ATTRIBUTE_UNUSED;
{
  enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

#ifdef PROMOTE_FOR_CALL_ONLY
  if (! for_call)
    return mode;
#endif

  switch (code)
    {
#ifdef PROMOTE_MODE
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case CHAR_TYPE:      case REAL_TYPE:       case OFFSET_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      break;
#endif

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      mode = Pmode;
      unsignedp = POINTERS_EXTEND_UNSIGNED;
      break;
#endif

    default:
      break;
    }

  *punsignedp = unsignedp;
  return mode;
}
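
/* Example (assuming a hypothetical target whose PROMOTE_MODE widens
   sub-word integers to SImode): for a signed char variable, promote_mode
   returns SImode and leaves *PUNSIGNEDP zero, so callers emit
   sign-extending rather than QImode operations; for pointer types the
   result is Pmode with the signedness from POINTERS_EXTEND_UNSIGNED.  */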
\f
/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta -= INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       add_optab,
#else
                       sub_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta += INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       sub_optab,
#else
                       add_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}
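
/* Sketch (assuming STACK_GROWS_DOWNWARD is defined):
   anti_adjust_stack (GEN_INT (16)) emits
   (set (reg sp) (minus (reg sp) (const_int 16))), pushing 16 bytes and
   adding 16 to stack_pointer_delta; adjust_stack with the same argument
   emits the matching addition to pop them.  */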

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

rtx
round_push (size)
     rtx size;
{
#ifdef PREFERRED_STACK_BOUNDARY
  int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  if (align == 1)
    return size;
  if (GET_CODE (size) == CONST_INT)
    {
      int new = (INTVAL (size) + align - 1) / align * align;
      if (INTVAL (size) != new)
        size = GEN_INT (new);
    }
  else
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
      size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
                            NULL_RTX, 1);
      size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
    }
#endif /* PREFERRED_STACK_BOUNDARY */
  return size;
}
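
/* Worked example: with PREFERRED_STACK_BOUNDARY == 64, ALIGN is 8
   bytes, so a constant SIZE of 13 becomes (13 + 7) / 8 * 8 == 16, and
   a variable SIZE is rounded at run time by the same add, truncating
   divide and multiply sequence.  */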
\f
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */

void
emit_stack_save (save_level, psave, after)
     enum save_level save_level;
     rtx *psave;
     rtx after;
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }
  else
    {
      if (mode == VOIDmode || GET_MODE (sa) != mode)
        abort ();
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      /* We must validize inside the sequence, to ensure that any instructions
         created by the validize call also get moved to the right place.  */
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at
   current position.  */

void
emit_stack_restore (save_level, sa, after)
     enum save_level save_level;
     rtx sa;
     rtx after;
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    sa = validize_mem (sa);

  if (after)
    {
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}
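
/* Usage sketch (hypothetical caller): to save and later restore the
   stack pointer around a block,

       rtx sa = NULL_RTX;
       emit_stack_save (SAVE_BLOCK, &sa, NULL_RTX);
       ...
       emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);

   the first call allocates the save area in the mode given by
   STACK_SAVEAREA_MODE (SAVE_BLOCK).  */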
\f
#ifdef SETJMP_VIA_SAVE_AREA
/* Optimize RTL generated by allocate_dynamic_stack_space for targets
   where SETJMP_VIA_SAVE_AREA is true.  The problem is that on these
   platforms, the dynamic stack space used can corrupt the original
   frame, thus causing a crash if a longjmp unwinds to it.  */

void
optimize_save_area_alloca (insns)
     rtx insns;
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx note;

      if (GET_CODE (insn) != INSN)
        continue;

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
        {
          if (REG_NOTE_KIND (note) != REG_SAVE_AREA)
            continue;

          if (!current_function_calls_setjmp)
            {
              rtx pat = PATTERN (insn);

              /* If we do not see the note in a pattern matching
                 these precise characteristics, we did something
                 entirely wrong in allocate_dynamic_stack_space.

                 Note, one way this could happen is if SETJMP_VIA_SAVE_AREA
                 was defined on a machine where stacks grow towards higher
                 addresses.

                 Right now the only supported port whose stack grows
                 upward is the HPPA, and it does not define
                 SETJMP_VIA_SAVE_AREA.  */
              if (GET_CODE (pat) != SET
                  || SET_DEST (pat) != stack_pointer_rtx
                  || GET_CODE (SET_SRC (pat)) != MINUS
                  || XEXP (SET_SRC (pat), 0) != stack_pointer_rtx)
                abort ();

              /* This will now be transformed into a (set REG REG)
                 so we can just blow away all the other notes.  */
              XEXP (SET_SRC (pat), 1) = XEXP (note, 0);
              REG_NOTES (insn) = NULL_RTX;
            }
          else
            {
              /* setjmp was called, we must remove the REG_SAVE_AREA
                 note so that later passes do not get confused by its
                 presence.  */
              if (note == REG_NOTES (insn))
                {
                  REG_NOTES (insn) = XEXP (note, 1);
                }
              else
                {
                  rtx srch;

                  for (srch = REG_NOTES (insn); srch; srch = XEXP (srch, 1))
                    if (XEXP (srch, 1) == note)
                      break;

                  if (srch == NULL_RTX)
                    abort ();

                  XEXP (srch, 1) = XEXP (note, 1);
                }
            }
          /* Once we've seen the note of interest, we need not look at
             the rest of them.  */
          break;
        }
    }
}
#endif /* SETJMP_VIA_SAVE_AREA */

/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.  This region of memory is always aligned to
   a multiple of BIGGEST_ALIGNMENT.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.
   TARGET is a place in which the address can be placed.

   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */

rtx
allocate_dynamic_stack_space (size, target, known_align)
     rtx size;
     rtx target;
     int known_align;
{
#ifdef SETJMP_VIA_SAVE_AREA
  rtx setjmpless_size = NULL_RTX;
#endif

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  current_function_calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
#ifdef PREFERRED_STACK_BOUNDARY
  cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
#endif

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default non-zero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET) || ! defined (PREFERRED_STACK_BOUNDARY)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif
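
/* For instance (hypothetical target): with BIGGEST_ALIGNMENT == 64 and
   PREFERRED_STACK_BOUNDARY == 32, MUST_ALIGN is 1, so SIZE is padded
   below by 64/8 - 1 == 7 bytes and the returned address is rounded up
   to a 64-bit boundary at the end of this function.  */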

  if (MUST_ALIGN)
    size
      = force_operand (plus_constant (size,
                                      BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
                       NULL_RTX);

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.  */
  {
    rtx dynamic_offset
      = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
                      stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

    if (!current_function_calls_setjmp)
      {
        int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

        /* See optimize_save_area_alloca to understand what is being
           set up here.  */

#if !defined(PREFERRED_STACK_BOUNDARY) || !defined(MUST_ALIGN) || (PREFERRED_STACK_BOUNDARY != BIGGEST_ALIGNMENT)
        /* If anyone creates a target with these characteristics, let them
           know that our optimization cannot work correctly in such a case.  */
        abort ();
#endif

        if (GET_CODE (size) == CONST_INT)
          {
            HOST_WIDE_INT new = INTVAL (size) / align * align;

            if (INTVAL (size) != new)
              setjmpless_size = GEN_INT (new);
            else
              setjmpless_size = size;
          }
        else
          {
            /* Since we know overflow is not possible, we avoid using
               CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead.  */
            setjmpless_size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
                                             GEN_INT (align), NULL_RTX, 1);
            setjmpless_size = expand_mult (Pmode, setjmpless_size,
                                           GEN_INT (align), NULL_RTX, 1);
          }
        /* Our optimization works based upon being able to perform a simple
           transformation of this RTL into a (set REG REG) so make sure things
           did in fact end up in a REG.  */
        if (!register_operand (setjmpless_size, Pmode))
          setjmpless_size = force_reg (Pmode, setjmpless_size);
      }

    size = expand_binop (Pmode, add_optab, size, dynamic_offset,
                         NULL_RTX, 1, OPTAB_LIB_WIDEN);
  }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

#ifdef PREFERRED_STACK_BOUNDARY
  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
#endif
    size = round_push (size);
#endif

  do_pending_stack_adjust ();

  /* We ought always to be called at the top level, and the stack ought
     to be properly aligned.  */
#ifdef PREFERRED_STACK_BOUNDARY
  if (stack_pointer_delta % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT))
    abort ();
#endif

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);

  /* Don't use a TARGET that isn't a pseudo or is the wrong mode.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER
      || GET_MODE (target) != Pmode)
    target = gen_reg_rtx (Pmode);

  mark_reg_pointer (target, known_align);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;
      insn_operand_predicate_fn pred;

      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[0].predicate;
      if (pred && ! ((*pred) (target, Pmode)))
#ifdef POINTERS_EXTEND_UNSIGNED
        target = convert_memory_address (Pmode, target);
#else
        target = copy_to_mode_reg (Pmode, target);
#endif

      if (mode == VOIDmode)
        mode = Pmode;

      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
      if (pred && ! ((*pred) (size, mode)))
        size = copy_to_mode_reg (mode, size);

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (current_function_limit_stack)
        {
          rtx available;
          rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
          available = expand_binop (Pmode, sub_optab,
                                    stack_pointer_rtx, stack_limit_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#else
          available = expand_binop (Pmode, sub_optab,
                                    stack_limit_rtx, stack_pointer_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#endif
          emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
                                   0, space_available);
#ifdef HAVE_trap
          if (HAVE_trap)
            emit_insn (gen_trap ());
          else
#endif
            error ("stack limits not supported on this target");
          emit_barrier ();
          emit_label (space_available);
        }

      anti_adjust_stack (size);
#ifdef SETJMP_VIA_SAVE_AREA
      if (setjmpless_size != NULL_RTX)
        {
          rtx note_target = get_last_insn ();

          REG_NOTES (note_target)
            = gen_rtx_EXPR_LIST (REG_SAVE_AREA, setjmpless_size,
                                 REG_NOTES (note_target));
        }
#endif /* SETJMP_VIA_SAVE_AREA */

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
                             GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
                              NULL_RTX, 1);
      target = expand_mult (Pmode, target,
                            GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
                            NULL_RTX, 1);
    }

  /* Some systems require a particular insn to refer to the stack
     to make the pages exist.  */
#ifdef HAVE_probe
  if (HAVE_probe)
    emit_insn (gen_probe ());
#endif

  /* Record the new stack level for nonlocal gotos.  */
  if (nonlocal_goto_handler_slots != 0)
    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);

  return target;
}
\f
/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static rtx stack_check_libfunc;

void
set_stack_check_libfunc (libfunc)
     rtx libfunc;
{
  stack_check_libfunc = libfunc;
}
\f
/* Emit one stack probe at ADDRESS, an address within the stack.  */

static void
emit_stack_probe (address)
     rtx address;
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  if (STACK_CHECK_PROBE_LOAD)
    emit_move_insn (gen_reg_rtx (word_mode), memref);
  else
    emit_move_insn (memref, const0_rtx);
}

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from the
   current stack pointer.  STACK_GROWS_DOWNWARD says whether to add or
   subtract from the stack.  If SIZE is constant, this is done
   with a fixed number of probes.  Otherwise, we must make a loop.  */

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#else
#define STACK_GROW_OP PLUS
#endif
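
/* Example of the constant case below (assuming STACK_GROWS_DOWNWARD
   and STACK_CHECK_PROBE_INTERVAL == 4096):
   probe_stack_range (0, GEN_INT (12288)) probes sp - 4096 and
   sp - 8192 in the loop, then sp - 12288 as the final probe,
   touching one word in each page of the new region.  */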

void
probe_stack_range (first, size)
     HOST_WIDE_INT first;
     rtx size;
{
  /* First see if the front end has set up a function for us to call to
     check the stack.  */
  if (stack_check_libfunc != 0)
    {
      rtx addr = memory_address (QImode,
                                 gen_rtx (STACK_GROW_OP, Pmode,
                                          stack_pointer_rtx,
                                          plus_constant (size, first)));

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (addr) != ptr_mode)
        addr = convert_memory_address (ptr_mode, addr);
#endif

      emit_library_call (stack_check_libfunc, 0, VOIDmode, 1, addr,
                         ptr_mode);
    }

  /* Next see if we have an insn to check the stack.  Use it if so.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      insn_operand_predicate_fn pred;
      rtx last_addr
        = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                         stack_pointer_rtx,
                                         plus_constant (size, first)),
                         NULL_RTX);

      pred = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
      if (pred && ! ((*pred) (last_addr, Pmode)))
        last_addr = copy_to_mode_reg (Pmode, last_addr);

      emit_insn (gen_check_stack (last_addr));
    }
#endif

  /* If we have to generate explicit probes, see if we have a constant
     small number of them to generate.  If so, that's the easy case.  */
  else if (GET_CODE (size) == CONST_INT
           && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
    {
      HOST_WIDE_INT offset;

      /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
         for values of N from 1 until it exceeds FIRST + SIZE.  If only
         one probe is needed, this will not generate any code.  Then
         probe at FIRST + SIZE.  */
      for (offset = first + STACK_CHECK_PROBE_INTERVAL;
           offset < INTVAL (size);
           offset = offset + STACK_CHECK_PROBE_INTERVAL)
        emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                          stack_pointer_rtx,
                                          GEN_INT (offset)));

      emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                        stack_pointer_rtx,
                                        plus_constant (size, first)));
    }

  /* In the variable case, do the same as above, but in a loop.  We emit loop
     notes so that loop optimization can be done.  */
  else
    {
      rtx test_addr
        = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                         stack_pointer_rtx,
                                         GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
                         NULL_RTX);
      rtx last_addr
        = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                         stack_pointer_rtx,
                                         plus_constant (size, first)),
                         NULL_RTX);
      rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
      rtx loop_lab = gen_label_rtx ();
      rtx test_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();
      rtx temp;

      if (GET_CODE (test_addr) != REG
          || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
        test_addr = force_reg (Pmode, test_addr);

      emit_note (NULL, NOTE_INSN_LOOP_BEG);
      emit_jump (test_lab);

      emit_label (loop_lab);
      emit_stack_probe (test_addr);

      emit_note (NULL, NOTE_INSN_LOOP_CONT);

#ifdef STACK_GROWS_DOWNWARD
#define CMP_OPCODE GTU
      temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
                           1, OPTAB_WIDEN);
#else
#define CMP_OPCODE LTU
      temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
                           1, OPTAB_WIDEN);
#endif

      if (temp != test_addr)
        abort ();

      emit_label (test_lab);
      emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
                               NULL_RTX, Pmode, 1, 0, loop_lab);
      emit_jump (end_lab);
      emit_note (NULL, NOTE_INSN_LOOP_END);
      emit_label (end_lab);

      emit_stack_probe (last_addr);
    }
}
\f
/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL node if the precise function is known,
   otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (valtype, func, outgoing)
     tree valtype;
     tree func ATTRIBUTE_UNUSED;
     int outgoing ATTRIBUTE_UNUSED;
{
  rtx val;

#ifdef FUNCTION_OUTGOING_VALUE
  if (outgoing)
    val = FUNCTION_OUTGOING_VALUE (valtype, func);
  else
#endif
    val = FUNCTION_VALUE (valtype, func);

  if (GET_CODE (val) == REG
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != VOIDmode;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      if (tmpmode == VOIDmode)
        abort ();

      PUT_MODE (val, tmpmode);
    }
  return val;
}
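
/* Sketch: for a function returning a 6-byte aggregate in a register on
   a hypothetical 32-bit target, the loop above picks the narrowest
   MODE_INT mode of at least 6 bytes (DImode) and re-modes the BLKmode
   return register accordingly.  */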

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (mode)
     enum machine_mode mode;
{
  return LIBCALL_VALUE (mode);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (code)
     enum rtx_code code;
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}
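
/* Example: rtx_to_tree_code (MULT) returns (int) MULT_EXPR, while an
   unhandled code such as UDIV maps to LAST_AND_UNUSED_TREE_CODE, which
   callers treat as "no corresponding tree operation".  */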