/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "common/common-target.h"
#include "diagnostic.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "target-globals.h"
#include "tree-ssa-address.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static void move_by_pieces_1 (insn_gen_fn, machine_mode,
			      struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned,
					HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (insn_gen_fn, machine_mode,
			       struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
			unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
			enum machine_mode, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree,
							      const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, enum machine_mode);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
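
/* Illustrative sketch (not part of GCC): how these heuristics are
   consulted.  A block copy of SIZE bytes at a known alignment of
   ALIGN bits is expanded inline only when the estimated instruction
   count beats the target's MOVE_RATIO; the numbers below are
   hypothetical.  */
#if 0
static bool
example_should_inline_copy (void)
{
  unsigned HOST_WIDE_INT size = 16;	/* bytes to copy */
  unsigned int align = 32;		/* known alignment in bits */
  return MOVE_BY_PIECES_P (size, align);
}
#endif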

/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if (insn_operand_matches (ic, 1, mem))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_PRECISION (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (icode, to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

   /* Make sure both are fixed-point modes or both are not.  */
   gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	       ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
   if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_clobber (to);
	    }
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target so force it into an
	 isolated register when maybe so.  Likewise for any MEM input, since
	 the conversion sequence might require several references to it and
	 we must ensure we're getting the same value every time.  */

      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	fill_value = emit_store_flag (gen_reg_rtx (word_mode),
				      LT, lowfrom, const0_rtx,
				      VOIDmode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0),
					    MEM_ADDR_SPACE (from)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0),
					    MEM_ADDR_SPACE (from)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  int shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = (GET_MODE_PRECISION (to_mode)
			  - GET_MODE_PRECISION (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
			     from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
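
/* Illustrative sketch (not part of GCC): a typical call.  Widening a
   SImode pseudo into a DImode pseudo with zero-extension; both
   registers here are hypothetical.  */
#if 0
static void
example_convert_move (void)
{
  rtx src = gen_reg_rtx (SImode);
  rtx dst = gen_reg_rtx (DImode);
  convert_move (dst, src, 1);	/* unsignedp != 0 => ZERO_EXTEND.  */
}
#endif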

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x,
	       int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT
      && CONST_INT_P (x) && INTVAL (x) < 0)
    {
      double_int val = double_int::from_uhwi (INTVAL (x));

      /* We need to zero extend VAL.  */
      if (oldmode != VOIDmode)
	val = val.zext (GET_MODE_BITSIZE (oldmode));

      return immed_double_int_const (val, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((CONST_INT_P (x)
       && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (CONST_DOUBLE_AS_INT_P (x)
	      || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION_MODES_P (mode,
							    GET_MODE (x))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (CONST_INT_P (x) && oldmode != VOIDmode
	  && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= GET_MODE_MASK (oldmode);
	  if (! unsignedp
	      && val_signbit_known_set_p (oldmode, val))
	    val |= ~GET_MODE_MASK (oldmode);

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}

/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  enum machine_mode tmode;

  tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > max_pieces
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}

/* Return the widest integer mode no wider than SIZE.  If no such mode
   can be found, return VOIDmode.  */

static enum machine_mode
widest_int_mode_for_size (unsigned int size)
{
  enum machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < size)
      mode = tmode;

  return mode;
}

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  enum machine_mode to_addr_mode;
  enum machine_mode from_addr_mode = get_address_mode (from);
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr_mode = get_address_mode (to);
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr_mode = VOIDmode;
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...
	 MODE might not be used depending on the definitions of the
	 USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
	= widest_int_mode_for_size (max_size);

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_to_mode_reg (from_addr_mode,
					     plus_constant (from_addr_mode,
							    from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_to_mode_reg (to_addr_mode,
					   plus_constant (to_addr_mode,
							  to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1 && data.len > 0)
    {
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_to_mode_reg (to_addr_mode,
						 plus_constant (to_addr_mode,
								data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
    {
      enum machine_mode mode;
      enum insn_code icode;

      mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
		  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  gen_int_mode (-(HOST_WIDE_INT) size,
						GET_MODE (data->to_addr))));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  gen_int_mode (-(HOST_WIDE_INT) size,
						GET_MODE (data->from_addr))));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  gen_int_mode (size,
						GET_MODE (data->to_addr))));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  gen_int_mode (size,
						GET_MODE (data->from_addr))));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  gcc_assert (size);
  if (CONST_INT_P (size)
      && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, INTVAL (size));
      set_mem_size (y, INTVAL (size));
    }

  if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      /* Since x and y are passed to a libcall, mark the corresponding
	 tree EXPR as addressable.  */
      tree y_expr = MEM_EXPR (y);
      tree x_expr = MEM_EXPR (x);
      if (y_expr)
	mark_addressable (y_expr);
      if (x_expr)
	mark_addressable (x_expr);
      retval = emit_block_move_via_libcall (x, y, size,
					    method == BLOCK_OP_TAILCALL);
    }
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = targetm.calls.function_arg (args_so_far, mode,
					      NULL_TREE, true);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
	  return false;
	targetm.calls.function_arg_advance (args_so_far, mode,
					    NULL_TREE, true);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align,
			    HOST_WIDE_INT expected_size)
{
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  Since SIZE is within the Pmode address
	     space, we limit MODE to Pmode.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
	{
	  struct expand_operand ops[6];
	  unsigned int nops;

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */
	  nops = insn_data[(int) code].n_generator_args;
	  gcc_assert (nops == 4 || nops == 6);

	  create_fixed_operand (&ops[0], x);
	  create_fixed_operand (&ops[1], y);
	  /* The check above guarantees that this size conversion is valid.  */
	  create_convert_operand_to (&ops[2], size, mode, true);
	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
	  if (nops == 6)
	    {
	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
	      create_integer_operand (&ops[5], expected_size);
	    }
	  if (maybe_expand_insn (code, nops, ops))
	    {
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  src_addr = copy_addr_to_reg (XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn, attrs, attr_args;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
      attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);

      decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode x_addr_mode = get_address_mode (x);
  enum machine_mode y_addr_mode = get_address_mode (y);
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label, REG_BR_PROB_BASE * 90 / 100);
}

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}

/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

	  if (len == ssize)
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      gcc_assert (2 * len == ssize);
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				shift, tmps[i], 0);
    }
}
1780 where DST is non-consecutive registers represented by a PARALLEL.
1781 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1785 emit_group_load (rtx dst
, rtx src
, tree type
, int ssize
)
1790 tmps
= XALLOCAVEC (rtx
, XVECLEN (dst
, 0));
1791 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
1793 /* Copy the extracted pieces into the proper (probable) hard regs. */
1794 for (i
= 0; i
< XVECLEN (dst
, 0); i
++)
1796 rtx d
= XEXP (XVECEXP (dst
, 0, i
), 0);
1799 emit_move_insn (d
, tmps
[i
]);

/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}

/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}

/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}

/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
	{
	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
	  emit_move_insn (tmps[i], reg);
	}
      else
	tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
	dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
	 of the destination mode, use a paradoxical subreg to
	 initialize the destination.  */
      if (start < finish)
	{
	  inner = GET_MODE (tmps[start]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[start], inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  start++;
		}
	    }
	}

      /* If the first element wasn't the low part, try the last.  */
      if (!done
	  && start < finish - 1)
	{
	  inner = GET_MODE (tmps[finish - 1]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[finish - 1], inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  finish--;
		}
	    }
	}

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
	emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen = bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	adj_bytelen = ssize - bytepos;

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + adj_bytelen
	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      enum machine_mode dest_mode = GET_MODE (dest);
	      enum machine_mode tmp_mode = GET_MODE (tmps[i]);

	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));

	      if (GET_MODE_ALIGNMENT (dest_mode)
		  >= GET_MODE_ALIGNMENT (tmp_mode))
		{
		  dest = assign_stack_temp (dest_mode,
					    GET_MODE_SIZE (dest_mode));
		  emit_move_insn (adjust_address (dest,
						  tmp_mode,
						  bytepos),
				  tmps[i]);
		  dst = dest;
		}
	      else
		{
		  dest = assign_stack_temp (tmp_mode,
					    GET_MODE_SIZE (tmp_mode));
		  emit_move_insn (dest, tmps[i]);
		  dst = adjust_address (dest, dest_mode, bytepos);
		}
	      break;
	    }
	}

      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      shift, tmps[i], 0);
	    }
	  bytelen = adj_bytelen;
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 0, 0, mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Return a form of X that does not use a PARALLEL.  TYPE is the type
   of the value stored in X.  */

rtx
maybe_emit_group_store (rtx x, tree type)
{
  enum machine_mode mode = TYPE_MODE (type);
  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
  if (GET_CODE (x) == PARALLEL)
    {
      rtx result = gen_reg_rtx (mode);
      emit_group_store (result, x, type, int_size_in_bytes (type));
      return result;
    }
  return x;
}
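
/* An illustrative sketch, not part of the original file: flattening a
   possibly-PARALLEL call result before using it as an ordinary operand.
   The function and variable names below are hypothetical.  */
#if 0
static rtx
example_flatten_call_result (tree exp, rtx callee_result)
{
  /* On targets that return aggregates in several registers, the call
     expander hands back a PARALLEL; reduce it to a single pseudo.  */
  return maybe_emit_group_store (callee_result, TREE_TYPE (exp));
}
#endif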
/* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.

   This is used on targets that return BLKmode values in registers.  */

void
copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  enum machine_mode mode = GET_MODE (srcreg);
  enum machine_mode tmode = GET_MODE (target);
  enum machine_mode copy_mode;

  /* BLKmode registers created in the back-end shouldn't have survived.  */
  gcc_assert (mode != BLKmode);

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* We can use a single move if we have an exact mode for the size.  */
  else if (MEM_P (target)
           && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
               || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
           && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (adjust_address (target, mode, 0), srcreg);
      return;
    }

  /* And if we additionally have the same mode for a register.  */
  else if (REG_P (target)
           && GET_MODE (target) == mode
           && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (target, srcreg);
      return;
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */
  if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
    {
      srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
      mode = word_mode;
    }

  /* Copy the structure BITSIZE bits at a time.  If the target lives in
     memory, take care of not reading/writing past its end by selecting
     a copy mode suited to BITSIZE.  This should always be possible given
     how it is computed.

     If the target lives in register, make sure not to select a copy mode
     larger than the mode of the register.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */

  copy_mode = word_mode;
  if (MEM_P (target))
    {
      enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
      if (mem_mode != BLKmode)
        copy_mode = mem_mode;
    }
  else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
    copy_mode = tmode;

  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
        dst = target;
      else if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, copy_mode, copy_mode));
    }
}
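
/* An illustrative sketch, not part of the original file: spilling a
   BLKmode value that a callee returned in the hard register RETREG into
   a fresh stack slot.  Names are hypothetical.  */
#if 0
static rtx
example_spill_blkmode_return (rtx retreg, tree type)
{
  rtx slot = assign_stack_temp (BLKmode, int_size_in_bytes (type));
  copy_blkmode_from_reg (slot, retreg, type);
  return slot;
}
#endif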
/* Copy BLKmode value SRC into a register of mode MODE.  Return the
   register if it contains any data, otherwise return null.

   This is used on targets that return BLKmode values in registers.  */

rtx
copy_blkmode_to_reg (enum machine_mode mode, tree src)
{
  int i, n_regs;
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
  unsigned int bitsize;
  rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
  enum machine_mode dst_mode;

  gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);

  x = expand_normal (src);

  bytes = int_size_in_bytes (TREE_TYPE (src));
  if (bytes == 0)
    return NULL_RTX;

  /* If the structure doesn't take up a whole number of words, see
     whether the register value should be padded on the left or on
     the right.  Set PADDING_CORRECTION to the number of padding
     bits needed on the left side.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (TREE_TYPE (src))
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
                                           * BITS_PER_UNIT));

  n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  dst_words = XALLOCAVEC (rtx, n_regs);
  bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);

  /* Copy the structure BITSIZE bits at a time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new destination pseudo each time xbitpos is
         on a word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        {
          /* Generate an appropriate register.  */
          dst_word = gen_reg_rtx (word_mode);
          dst_words[xbitpos / BITS_PER_WORD] = dst_word;

          /* Clear the destination before we move anything into it.  */
          emit_move_insn (dst_word, CONST0_RTX (word_mode));
        }

      /* We need a new source operand each time bitpos is on a word
         boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);

      /* Use bitpos for the source extraction (left justified) and
         xbitpos for the destination store (right justified).  */
      store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
                       0, 0, word_mode,
                       extract_bit_field (src_word, bitsize,
                                          bitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode));
    }

  if (mode == BLKmode)
    {
      /* Find the smallest integer mode large enough to hold the
         entire structure.  */
      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        /* Have we found a large enough mode?  */
        if (GET_MODE_SIZE (mode) >= bytes)
          break;

      /* A suitable mode should have been found.  */
      gcc_assert (mode != VOIDmode);
    }

  if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
    dst_mode = word_mode;
  else
    dst_mode = mode;
  dst = gen_reg_rtx (dst_mode);

  for (i = 0; i < n_regs; i++)
    emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);

  if (mode != dst_mode)
    dst = gen_lowpart (mode, dst);

  return dst;
}
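
/* An illustrative sketch, not part of the original file: expanding
   "return <aggregate>" on a target that returns small BLKmode values
   in registers would funnel the value through copy_blkmode_to_reg.
   Names are hypothetical.  */
#if 0
static void
example_expand_blkmode_return (rtx result_reg, tree retexpr)
{
  rtx val = copy_blkmode_to_reg (GET_MODE (result_reg), retexpr);
  if (val)
    emit_move_insn (result_reg, val);
}
#endif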
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
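
/* An illustrative sketch, not part of the original file: recording the
   registers occupied by one outgoing argument on the CALL_FUSAGE list,
   whether the argument arrives as a single REG or as a PARALLEL.
   Names are hypothetical.  */
#if 0
static void
example_record_arg_use (rtx *call_fusage, rtx arg_reg)
{
  if (GET_CODE (arg_reg) == PARALLEL)
    use_group_regs (call_fusage, arg_reg);
  else if (REG_P (arg_reg))
    use_reg (call_fusage, arg_reg);
}
#endif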
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr (tree name, enum tree_code code)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}
#ifdef HAVE_conditional_move
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is CLASS.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
    return NULL;

  return def_stmt;
}
#endif
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
                     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode;
  enum insn_code icode;
  int reverse;
  /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
  rtx cst ATTRIBUTE_UNUSED;

  if (len == 0)
    return 1;

  if (! (memsetp
         ? SET_BY_PIECES_P (len, align)
         : STORE_BY_PIECES_P (len, align)))
    return 0;

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1 && l > 0)
        {
          mode = widest_int_mode_for_size (max_size);

          if (mode == VOIDmode)
            break;

          icode = optab_handler (mov_optab, mode);
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!targetm.legitimate_constant_p (mode, cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                 void *constfundata, unsigned int align, bool memsetp, int endp)
{
  enum machine_mode to_addr_mode = get_address_mode (to);
  struct store_by_pieces_d data;

  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (memsetp
              ? SET_BY_PIECES_P (len, align)
              : STORE_BY_PIECES_P (len, align));
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_to_mode_reg (to_addr_mode,
                                                 plus_constant (to_addr_mode,
                                                                data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
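
/* An illustrative sketch, not part of the original file: the CONSTFUN
   contract shared by can_store_by_pieces and store_by_pieces.  The
   callback returns the constant piece found at OFFSET as an rtx of mode
   MODE; c_readstr is the helper the string builtins use for this.  The
   wrappers below are hypothetical.  */
#if 0
static rtx
example_constfun (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
{
  return c_readstr ((const char *) data + offset, mode);
}

static rtx
example_store_string (rtx dest_mem, const char *str,
                      unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (can_store_by_pieces (len, example_constfun, (void *) str, align, false))
    return store_by_pieces (dest_mem, len, example_constfun, (void *) str,
                            align, false, 0);
  return NULL_RTX;
}
#endif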
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces_d data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
                   unsigned int align ATTRIBUTE_UNUSED)
{
  enum machine_mode to_addr_mode = get_address_mode (data->to);
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.
         MODE might not be used depending on the definitions of the
         USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
        = widest_int_mode_for_size (max_size);

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_to_mode_reg (to_addr_mode,
                                            plus_constant (to_addr_mode,
                                                           to_addr,
                                                           data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1 && data->len > 0)
    {
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
                   struct store_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->to_addr))));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (size,
                                                GET_MODE (data->to_addr))));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
                     unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  enum machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && CONST_INT_P (size)
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
        {
          emit_move_insn (object, zero);
          return NULL;
        }

      if (COMPLEX_MODE_P (mode))
        {
          zero = CONST0_RTX (GET_MODE_INNER (mode));
          if (zero != NULL)
            {
              write_complex_part (object, zero, 0);
              write_complex_part (object, zero, 1);
              return NULL;
            }
        }
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (CONST_INT_P (size)
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
                                   expected_align, expected_size))
    ;
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
    return set_storage_via_libcall (object, size, const0_rtx,
                                    method == BLOCK_OP_TAILCALL);
  else
    gcc_unreachable ();

  return NULL;
}
rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  return clear_storage_hints (object, size, method, 0, -1);
}
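
/* An illustrative sketch, not part of the original file: zeroing a
   fresh BLKmode stack temporary with clear_storage.  SIZE_IN_BYTES is
   a hypothetical compile-time constant.  */
#if 0
static rtx
example_zeroed_temp (HOST_WIDE_INT size_in_bytes)
{
  rtx slot = assign_stack_temp (BLKmode, size_in_bytes);
  clear_storage (slot, GEN_INT (size_in_bytes), BLOCK_OP_NORMAL);
  return slot;
}
#endif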
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those pseudos into a VAR_DECL and use them later.  */

  object = copy_addr_to_reg (XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  size_tree = make_tree (sizetype, size);
  val_tree = make_tree (integer_type_node, val);

  fn = clear_storage_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of set_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  */

tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       integer_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
    }

  return block_clear_fn;
}
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
                        unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (setmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  Since SIZE is within the Pmode address
             space, we limit MODE to Pmode.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
        {
          struct expand_operand ops[6];
          unsigned int nops;

          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6);

          create_fixed_operand (&ops[0], object);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[1], size, mode, true);
          create_convert_operand_from (&ops[2], val, byte_mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops == 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (maybe_expand_insn (code, nops, ops))
            return true;
        }
    }

  return false;
}
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  enum machine_mode cmode;
  enum machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
                                         imag_p ? GET_MODE_SIZE (imode) : 0),
                      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
        {
          emit_move_insn (part, val);
          return;
        }
      /* simplify_gen_subreg may fail for sub-word MEMs.  */
      gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
}
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  enum machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
        {
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (CONSTANT_CLASS_P (part))
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
        }
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
                              imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
        return ret;
      /* simplify_gen_subreg may fail for sub-word MEMs.  */
      gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
                            true, NULL_RTX, imode, imode);
}
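
/* An illustrative sketch, not part of the original file: computing a
   complex conjugate by pairing read_complex_part with
   write_complex_part and negating the imaginary half.  TARGET and OP0
   are hypothetical complex-mode rtxes.  */
#if 0
static void
example_emit_conjugate (rtx target, rtx op0)
{
  enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));

  write_complex_part (target, read_complex_part (op0, false), false);
  write_complex_part (target,
                      expand_unop (imode, neg_optab,
                                   read_complex_part (op0, true),
                                   NULL_RTX, 0),
                      true);
}
#endif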
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (enum machine_mode new_mode,
                       enum machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
         size in bytes is supposed to be the same.  */
      if (reload_in_progress)
        {
          /* Copy the MEM to change the mode and move any
             substitutions from the old MEM to the new one.  */
          ret = adjust_address_nv (x, new_mode, 0);
          copy_replacements (x, ret);
        }
      else
        ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
         that the new mode is ok for a hard register.  If we were to use
         simplify_gen_subreg, we would create the subreg, but would
         probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
         we want.  Which is needed for CCmodes on some targets.  */
      if (force)
        ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
        ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
{
  enum machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL_RTX;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL_RTX;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL_RTX;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL_RTX;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

rtx
emit_move_resolve_push (enum machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (CONST_INT_P (XEXP (expr, 1)));
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
        val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
                              gen_int_mode (adjust, Pmode), stack_pointer_rtx,
                              0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
{
  enum machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;
    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                         read_complex_part (y, !imag_first));
}
/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once, except
     for floating point where we favor moving as parts if this is easy.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
      && !(REG_P (x)
           && HARD_REGISTER_P (x)
           && hard_regno_nregs[REGNO (x)][mode] == 1)
      && !(REG_P (y)
           && HARD_REGISTER_P (y)
           && hard_regno_nregs[REGNO (y)][mode] == 1))
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
           && (!STRICT_ALIGNMENT
               || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx ret;

      /* For memory to memory moves, optimal behavior can be had with the
         existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
        {
          emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
                           BLOCK_OP_NO_LIBCALL);
          return get_last_insn ();
        }

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
        return ret;
    }

  return emit_move_complex_parts (x, y);
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
{
  rtx ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
        {
          x = emit_move_change_mode (CCmode, mode, x, true);
          y = emit_move_change_mode (CCmode, mode, y, true);
          return emit_insn (GEN_FCN (code) (x, y));
        }
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  enum machine_mode innermode, innermostmode;
  int offset;

  if (GET_CODE (op) != SUBREG)
    return false;
  innermode = GET_MODE (op);
  innermostmode = GET_MODE (SUBREG_REG (op));
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
  if (SUBREG_BYTE (op) == 0
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
    {
      int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  if (offset >= GET_MODE_SIZE (innermostmode)
      || offset <= -GET_MODE_SIZE (word_mode))
    return true;
  return false;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx
emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
{
  rtx last_insn = 0;
  rtx seq, inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart;

      /* Do not generate code for a move if it would come entirely
         from the undefined bits of a paradoxical subreg.  */
      if (undefined_operand_subword_p (y, i))
        continue;

      ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
         constant.  Otherwise, force it into a register.  Then we must
         be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
        {
          y = use_anchored_address (force_const_mem (mode, y));
          ypart = operand_subword (y, i, 1, mode);
        }
      else if (ypart == 0)
        ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_clobber (x);

  emit_insn (seq);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode);
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
        return result;
      else
        return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);

      if (ret)
        {
          if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
            return ret;
        }
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  gcc_assert (mode != BLKmode
              && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!targetm.legitimate_constant_p (mode, y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
          else
            y = use_anchored_address (y);
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
                                         MEM_ADDR_SPACE (x))
          && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
                                        MEM_ADDR_SPACE (y)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));

  return last_insn;
}
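
/* An illustrative sketch, not part of the original file: the common
   pattern of loading a floating-point constant through emit_move_insn,
   which attaches a REG_EQUAL note recording the original constant.  */
#if 0
static rtx
example_load_one (void)
{
  rtx tmp = gen_reg_rtx (DFmode);
  emit_move_insn (tmp, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
  return tmp;
}
#endif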
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;
  int oldcost, newcost;
  bool speed = optimize_insn_for_speed_p ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  if (targetm.legitimate_constant_p (dstmode, y))
    oldcost = set_src_cost (y, speed);
  else
    oldcost = set_src_cost (force_const_mem (dstmode, y), speed);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (targetm.legitimate_constant_p (srcmode, trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (!insn_operand_matches (ic, 1, trunc_y))
            continue;
          /* This is valid, but may not be cheaper than the original.  */
          newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
                                  speed);
          if (oldcost < newcost)
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        {
          trunc_y = force_const_mem (srcmode, trunc_y);
          /* This is valid, but may not be cheaper than the original.  */
          newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
                                  speed);
          if (oldcost < newcost)
            continue;
          trunc_y = validize_mem (trunc_y);
        }
      else
        continue;

      /* For CSE's benefit, force the compressed constant pool entry
         into a new pseudo.  This constant may be used in different modes,
         and if not, combine will put things back together for us.  */
      trunc_y = force_reg (srcmode, trunc_y);
      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (x))
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (Pmode, size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp,
                             gen_int_mode (extra, Pmode),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (Pmode, temp, extra);
    }
  else
    {
      if (CONST_INT_P (size))
        temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (Pmode, size,
                                                               extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
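
/* An illustrative sketch, not part of the original file: pushing a
   variable-sized block and copying an argument into it, much as the
   BLKmode path of emit_push_insn below does.  SIZE_RTX and ARG_MEM are
   hypothetical.  */
#if 0
static void
example_push_blk_arg (rtx arg_mem, rtx size_rtx)
{
  rtx addr = push_block (size_rtx, 0, 0);
  emit_block_move (gen_rtx_MEM (BLKmode, addr), arg_mem, size_rtx,
                   BLOCK_OP_CALL_PARM);
}
#endif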
/* A utility routine that returns the base of an auto-inc memory, or NULL.  */

static rtx
mem_autoinc_base (rtx mem)
{
  if (MEM_P (mem))
    {
      rtx addr = XEXP (mem, 0);
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
        return XEXP (addr, 0);
    }
  return NULL;
}
/* A utility routine used here, in reload, and in try_split.  The insns
   after PREV up to and including LAST are known to adjust the stack,
   with a final value of END_ARGS_SIZE.  Iterate backward from LAST
   placing notes as appropriate.  PREV may be NULL, indicating the
   entire insn sequence prior to LAST should be scanned.

   The set of allowed stack pointer modifications is small:
     (1) One or more auto-inc style memory references (aka pushes),
     (2) One or more addition/subtraction with the SP as destination,
     (3) A single move insn with the SP as destination,
     (4) A call_pop insn,
     (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.

   Insns in the sequence that do not modify the SP are ignored,
   except for noreturn calls.

   The return value is the amount of adjustment that can be trivially
   verified, via immediate operand or auto-inc.  If the adjustment
   cannot be trivially extracted, the return value is INT_MIN.  */

HOST_WIDE_INT
find_args_size_adjust (rtx insn)
{
  rtx dest, set, pat;
  int i;

  pat = PATTERN (insn);
  set = NULL;

  /* Look for a call_pop pattern.  */
  if (CALL_P (insn))
    {
      /* We have to allow non-call_pop patterns for the case
         of emit_single_push_insn of a TLS address.  */
      if (GET_CODE (pat) != PARALLEL)
        return 0;

      /* All call_pop have a stack pointer adjust in the parallel.
         The call itself is always first, and the stack adjust is
         usually last, so search from the end.  */
      for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
        {
          set = XVECEXP (pat, 0, i);
          if (GET_CODE (set) != SET)
            continue;
          dest = SET_DEST (set);
          if (dest == stack_pointer_rtx)
            break;
        }

      /* We'd better have found the stack pointer adjust.  */
      if (i == 0)
        return 0;
      /* Fall through to process the extracted SET and DEST
         as if it was a standalone insn.  */
    }
  else if (GET_CODE (pat) == SET)
    set = pat;
  else if ((set = single_set (insn)) != NULL)
    ;
  else if (GET_CODE (pat) == PARALLEL)
    {
      /* ??? Some older ports use a parallel with a stack adjust
         and a store for a PUSH_ROUNDING pattern, rather than a
         PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
      /* ??? See h8300 and m68k, pushqi1.  */
      for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
        {
          set = XVECEXP (pat, 0, i);
          if (GET_CODE (set) != SET)
            continue;
          dest = SET_DEST (set);
          if (dest == stack_pointer_rtx)
            break;

          /* We do not expect an auto-inc of the sp in the parallel.  */
          gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
          gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
                               != stack_pointer_rtx);
        }
      if (i < 0)
        return 0;
    }
  else
    return 0;

  dest = SET_DEST (set);

  /* Look for direct modifications of the stack pointer.  */
  if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
    {
      /* Look for a trivial adjustment, otherwise assume nothing.  */
      /* Note that the SPU restore_stack_block pattern refers to
         the stack pointer in V4SImode.  Consider that non-trivial.  */
      if (SCALAR_INT_MODE_P (GET_MODE (dest))
          && GET_CODE (SET_SRC (set)) == PLUS
          && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
          && CONST_INT_P (XEXP (SET_SRC (set), 1)))
        return INTVAL (XEXP (SET_SRC (set), 1));
      /* ??? Reload can generate no-op moves, which will be cleaned
         up later.  Recognize it and continue searching.  */
      else if (rtx_equal_p (dest, SET_SRC (set)))
        return 0;
      else
        return HOST_WIDE_INT_MIN;
    }
  else
    {
      rtx mem, addr;

      /* Otherwise only think about autoinc patterns.  */
      if (mem_autoinc_base (dest) == stack_pointer_rtx)
        {
          mem = dest;
          gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
                               != stack_pointer_rtx);
        }
      else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
        mem = SET_SRC (set);
      else
        return 0;

      addr = XEXP (mem, 0);
      switch (GET_CODE (addr))
        {
        case PRE_INC:
        case POST_INC:
          return GET_MODE_SIZE (GET_MODE (mem));
        case PRE_DEC:
        case POST_DEC:
          return -GET_MODE_SIZE (GET_MODE (mem));
        case PRE_MODIFY:
        case POST_MODIFY:
          addr = XEXP (addr, 1);
          gcc_assert (GET_CODE (addr) == PLUS);
          gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
          gcc_assert (CONST_INT_P (XEXP (addr, 1)));
          return INTVAL (XEXP (addr, 1));
        default:
          gcc_unreachable ();
        }
    }
}
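
/* An illustrative sketch, not part of the original file: consuming
   find_args_size_adjust for a single insn, mirroring the loop in
   fixup_args_size_notes below.  ARGS_SIZE is a hypothetical running
   total.  */
#if 0
  HOST_WIDE_INT adj = find_args_size_adjust (get_last_insn ());
  if (adj != HOST_WIDE_INT_MIN)
    /* The insn's effect on the stack pointer is known exactly; an
       unknown adjustment would instead need a full REG_ARGS_SIZE
       audit of the sequence.  */
    args_size -= adj;
#endif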
int
fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
{
  int args_size = end_args_size;
  bool saw_unknown = false;
  rtx insn;

  for (insn = last; insn != prev; insn = PREV_INSN (insn))
    {
      HOST_WIDE_INT this_delta;

      if (!NONDEBUG_INSN_P (insn))
        continue;

      this_delta = find_args_size_adjust (insn);
      if (this_delta == 0)
        {
          if (!CALL_P (insn)
              || ACCUMULATE_OUTGOING_ARGS
              || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
            continue;
        }

      gcc_assert (!saw_unknown);
      if (this_delta == HOST_WIDE_INT_MIN)
        saw_unknown = true;

      add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
#ifdef STACK_GROWS_DOWNWARD
      this_delta = -(unsigned HOST_WIDE_INT) this_delta;
#endif
      args_size -= this_delta;
    }

  return saw_unknown ? INT_MIN : args_size;
}
#ifdef PUSH_ROUNDING
/* Emit single push insn.  */

static void
emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = optab_handler (push_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];

      create_input_operand (&ops[0], x, mode);
      if (maybe_expand_insn (icode, 1, ops))
        return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     such an argument.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
                      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                                    sub_optab,
#else
                                    add_optab,
#endif
                                    stack_pointer_rtx,
                                    gen_int_mode (rounded_size, Pmode),
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
        /* We have already decremented the stack pointer, so get the
           previous value.  */
        offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
        /* We have already incremented the stack pointer, so get the
           previous value.  */
        offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                gen_int_mode (offset, Pmode));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                gen_int_mode (-(HOST_WIDE_INT) rounded_size,
                                              Pmode));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                gen_int_mode (rounded_size, Pmode));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
/* Emit and annotate a single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  int delta, old_delta = stack_pointer_delta;
  rtx prev = get_last_insn ();
  rtx last;

  emit_single_push_insn_1 (mode, x, type);

  last = get_last_insn ();

  /* Notice the common case where we emitted exactly one insn.  */
  if (PREV_INSN (last) == prev)
    {
      add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
      return;
    }

  delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
  gcc_assert (delta == INT_MIN || delta == old_delta);
}
#endif
4005 /* Generate code to push X onto the stack, assuming it has mode MODE and
4007 MODE is redundant except when X is a CONST_INT (since they don't
4009 SIZE is an rtx for the size of data to be copied (in bytes),
4010 needed only if X is BLKmode.
4012 ALIGN (in bits) is maximum alignment we can assume.
4014 If PARTIAL and REG are both nonzero, then copy that many of the first
4015 bytes of X into registers starting with REG, and push the rest of X.
4016 The amount of space pushed is decreased by PARTIAL bytes.
4017 REG must be a hard register in this case.
4018 If REG is zero but PARTIAL is not, take any all others actions for an
4019 argument partially in registers, but do not actually load any
4022 EXTRA is the amount in bytes of extra space to leave next to this arg.
4023 This is ignored if an argument block has already been allocated.
4025 On a machine that lacks real push insns, ARGS_ADDR is the address of
4026 the bottom of the argument block for this call. We use indexing off there
4027 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
4028 argument block has not been preallocated.
4030 ARGS_SO_FAR is the size of args previously pushed for this call.
4032 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4033 for arguments passed in registers. If nonzero, it will be the number
4034 of bytes required. */
4037 emit_push_insn (rtx x
, enum machine_mode mode
, tree type
, rtx size
,
4038 unsigned int align
, int partial
, rtx reg
, int extra
,
4039 rtx args_addr
, rtx args_so_far
, int reg_parm_stack_space
,
4043 enum direction stack_direction
4044 #ifdef STACK_GROWS_DOWNWARD
4050 /* Decide where to pad the argument: `downward' for below,
4051 `upward' for above, or `none' for don't pad it.
4052 Default is below for small data on big-endian machines; else above. */
4053 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
4055 /* Invert direction if stack is post-decrement.
4057 if (STACK_PUSH_CODE
== POST_DEC
)
4058 if (where_pad
!= none
)
4059 where_pad
= (where_pad
== downward
? upward
: downward
);
4064 || (STRICT_ALIGNMENT
&& align
< GET_MODE_ALIGNMENT (mode
)))
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      if (mode != BLKmode)
	{
	  /* A value is to be stored in an insufficiently aligned
	     stack slot; copy via a suitably aligned slot if
	     necessary.  */
	  size = GEN_INT (GET_MODE_SIZE (mode));
	  if (!MEM_P (xinner))
	    {
	      temp = assign_temp (type, 1, 1);
	      emit_move_insn (temp, xinner);
	      xinner = temp;
	    }
	}

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;
#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && CONST_INT_P (size)
	  && skip == 0
	  && MEM_ALIGN (xinner) >= align
	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
		  == (align / BITS_PER_UNIT)))
	  && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (CONST_INT_P (size))
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     gen_int_mode (used, GET_MODE (size)),
				     NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (CONST_INT_P (args_so_far))
	    temp = memory_address (BLKmode,
				   plus_constant (Pmode, args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (Pmode,
						  gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (target, align);

	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
      if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  if (CONST_INT_P (args_so_far))
	    addr
	      = memory_address (mode,
				plus_constant (Pmode, args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  dest = gen_rtx_MEM (mode, addr);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (dest, align);

	  emit_move_insn (dest, x);
	}
    }
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, type, -1);
      else
	{
	  gcc_assert (partial % UNITS_PER_WORD == 0);
	  move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
	}
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	   /* Only registers can be subtargets.  */
	   || !REG_P (x)
	   /* Don't use hard regs to avoid extending their life.  */
	   || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */
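/* For instance, given a C fragment such as

       struct S { unsigned int f : 1; } *p;
       ...
       p->f ^= 1;

   the read-modify-write of the word containing the bitfield can be done
   with a single XOR of a one-bit mask, with no separate extract/insert
   pair.  (A sketch of the idea only; the exact insns depend on the
   target.)  */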
static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
				 unsigned HOST_WIDE_INT bitpos,
				 unsigned HOST_WIDE_INT bitregion_start,
				 unsigned HOST_WIDE_INT bitregion_end,
				 enum machine_mode mode1, rtx str_rtx,
				 tree to, tree src)
{
  enum machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;
  gimple srcstmt;
  enum tree_code code;
  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  STRIP_NOPS (src);
  if (TREE_CODE (src) != SSA_NAME)
    return false;
  if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  srcstmt = get_gimple_for_ssa_name (src);
  if (!srcstmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
    return false;

  code = gimple_assign_rhs_code (srcstmt);

  op0 = gimple_assign_rhs1 (srcstmt);

  /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
     to find its initialization.  Hopefully the initialization will
     be from a bitfield load.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple op0stmt = get_gimple_for_ssa_name (op0);

      /* We want to eventually have OP0 be the same as TO, which
	 should be a bitfield.  */
      if (!op0stmt
	  || !is_gimple_assign (op0stmt)
	  || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
	return false;
      op0 = gimple_assign_rhs1 (op0stmt);
    }

  op1 = gimple_assign_rhs2 (srcstmt);

  if (!operand_equal_p (to, op0, 0))
    return false;
  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
	str_mode = word_mode;
      str_mode = get_best_mode (bitsize, bitpos,
				bitregion_start, bitregion_end,
				MEM_ALIGN (str_rtx), str_mode, 0);
      if (str_mode == VOIDmode)
	return false;
      str_bitsize = GET_MODE_BITSIZE (str_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
	 where we don't need to do any masking and also
	 1 bit bitfields where xor can be used.
	 We might win by one instruction for the other bitfields
	 too if insv/extv instructions aren't used, so that
	 can be added later.  */
      if (bitpos + bitsize != str_bitsize
	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
	break;

      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = code == PLUS_EXPR ? add_optab : sub_optab;
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
	{
	  value = expand_and (str_mode, value, const1_rtx, NULL);
	  binop = xor_optab;
	}
      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
	break;
      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != str_bitsize)
	{
	  rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
				   str_mode);
	  value = expand_and (str_mode, value, mask, NULL_RTX);
	}
      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
/* In the C++ memory model, consecutive bit fields in a structure are
   considered one memory location.

   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
   returns the bit range of consecutive bits in which this COMPONENT_REF
   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
   and *OFFSET may be adjusted in the process.

   If the access does not need to be restricted, 0 is returned in both
   *BITSTART and *BITEND.  */
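/* For example, given

       struct S { int a : 3; int b : 5; int c : 24; char d; };

   the bit fields A, B and C typically share one 32-bit memory location,
   so a store to B may touch only bits of that location, while D is a
   separate location that must not be written.  The range computed here
   is what keeps the eventual store_field/store_bit_field inside the
   location.  (Illustrative; the exact layout is target-dependent.)  */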
static void
get_bit_range (unsigned HOST_WIDE_INT *bitstart,
	       unsigned HOST_WIDE_INT *bitend,
	       tree exp,
	       HOST_WIDE_INT *bitpos,
	       tree *offset)
{
  HOST_WIDE_INT bitoffset;
  tree field, repr;

  gcc_assert (TREE_CODE (exp) == COMPONENT_REF);

  field = TREE_OPERAND (exp, 1);
  repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
  /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
     need to limit the range we can access.  */
  if (!repr)
    {
      *bitstart = *bitend = 0;
      return;
    }

  /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
     part of a larger bit field, then the representative does not serve any
     useful purpose.  This can occur in Ada.  */
  if (handled_component_p (TREE_OPERAND (exp, 0)))
    {
      enum machine_mode rmode;
      HOST_WIDE_INT rbitsize, rbitpos;
      tree roffset;
      int unsignedp;
      int volatilep = 0;
      get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
			   &roffset, &rmode, &unsignedp, &volatilep, false);
      if ((rbitpos % BITS_PER_UNIT) != 0)
	{
	  *bitstart = *bitend = 0;
	  return;
	}
    }

  /* Compute the adjustment to bitpos from the offset of the field
     relative to the representative.  DECL_FIELD_OFFSET of field and
     repr are the same by construction if they are not constants,
     see finish_bitfield_layout.  */
  if (host_integerp (DECL_FIELD_OFFSET (field), 1)
      && host_integerp (DECL_FIELD_OFFSET (repr), 1))
    bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
		 - tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
  else
    bitoffset = 0;
  bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
		- tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));

  /* If the adjustment is larger than bitpos, we would have a negative bit
     position for the lower bound and this may wreak havoc later.  This can
     occur only if we have a non-null offset, so adjust offset and bitpos
     to make the lower bound non-negative.  */
  if (bitoffset > *bitpos)
    {
      HOST_WIDE_INT adjust = bitoffset - *bitpos;

      gcc_assert ((adjust % BITS_PER_UNIT) == 0);
      gcc_assert (*offset != NULL_TREE);

      *bitpos += adjust;
      *offset
	= size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
      *bitstart = 0;
    }
  else
    *bitstart = *bitpos - bitoffset;

  *bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
}
/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
   in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
   DECL_RTL was not set yet, return NORTL.  */

static inline bool
addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
{
  if (TREE_CODE (addr) != ADDR_EXPR)
    return false;

  tree base = TREE_OPERAND (addr, 0);

  if (!DECL_P (base)
      || TREE_ADDRESSABLE (base)
      || DECL_MODE (base) == BLKmode)
    return false;

  if (!DECL_RTL_SET_P (base))
    return nortl;

  return (!MEM_P (DECL_RTL (base)));
}

/* Returns true if the MEM_REF REF refers to an object that does not
   reside in memory and has non-BLKmode.  */

static inline bool
mem_ref_refers_to_non_mem_p (tree ref)
{
  tree base = TREE_OPERAND (ref, 0);
  return addr_expr_of_non_mem_decl_p_1 (base, false);
}

/* Return TRUE iff OP is an ADDR_EXPR of a DECL that's not
   addressable.  This is very much like mem_ref_refers_to_non_mem_p,
   but instead of the MEM_REF, it takes its base, and it doesn't
   assume a DECL is in memory just because its RTL is not set yet.  */

bool
addr_expr_of_non_mem_decl_p (tree op)
{
  return addr_expr_of_non_mem_decl_p_1 (op, true);
}
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */

void
expand_assignment (tree to, tree from, bool nontemporal)
{
  rtx to_rtx = 0;
  rtx result;
  enum machine_mode mode;
  unsigned int align;
  enum insn_code icode;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;

  /* Handle misaligned stores.  */
  mode = TYPE_MODE (TREE_TYPE (to));
  if ((TREE_CODE (to) == MEM_REF
       || TREE_CODE (to) == TARGET_MEM_REF)
      && mode != BLKmode
      && !mem_ref_refers_to_non_mem_p (to)
      && ((align = get_object_alignment (to))
	  < GET_MODE_ALIGNMENT (mode))
      && (((icode = optab_handler (movmisalign_optab, mode))
	   != CODE_FOR_nothing)
	  || SLOW_UNALIGNED_ACCESS (mode, align)))
    {
      rtx reg, mem;

      reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      reg = force_not_mem (reg);
      mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      if (icode != CODE_FOR_nothing)
	{
	  struct expand_operand ops[2];

	  create_fixed_operand (&ops[0], mem);
	  create_input_operand (&ops[1], reg, mode);
	  /* The movmisalign<mode> pattern cannot fail, else the assignment
	     would silently be omitted.  */
	  expand_insn (icode, 2, ops);
	}
      else
	store_bit_field (mem, GET_MODE_BITSIZE (mode),
			 0, 0, 0, mode, reg);
      return;
    }
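/* A typical source of such a misaligned store is a packed struct:

       struct __attribute__ ((packed)) P { char c; int i; };
       void f (struct P *p, int v) { p->i = v; }

   Here the stored field has SImode but only byte alignment, so we use
   the movmisalign optab when the target provides it, or fall back to
   store_bit_field.  (A sketch; the attribute syntax is GCC-specific.)  */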
  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  Same for (partially) storing into a non-memory object.  */
  if (handled_component_p (to)
      || (TREE_CODE (to) == MEM_REF
	  && mem_ref_refers_to_non_mem_p (to))
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      unsigned HOST_WIDE_INT bitregion_start = 0;
      unsigned HOST_WIDE_INT bitregion_end = 0;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep, true);

      if (TREE_CODE (to) == COMPONENT_REF
	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* If the bitfield is volatile, we want to access it in the
	 field's mode, not the computed mode.
	 If a MEM has VOIDmode (external with incomplete type),
	 use BLKmode for it instead.  */
      if (MEM_P (to_rtx))
	{
	  if (volatilep && flag_strict_volatile_bitfields > 0)
	    to_rtx = adjust_address (to_rtx, mode1, 0);
	  else if (GET_MODE (to_rtx) == VOIDmode)
	    to_rtx = adjust_address (to_rtx, BLKmode, 0);
	}
      if (offset != 0)
	{
	  enum machine_mode address_mode;
	  rtx offset_rtx;

	  if (!MEM_P (to_rtx))
	    {
	      /* We can get constant negative offsets into arrays with broken
		 user code.  Translate this to a trap instead of ICEing.  */
	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
	      expand_builtin_trap ();
	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
	    }

	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
	  address_mode = get_address_mode (to_rtx);
	  if (GET_MODE (offset_rtx) != address_mode)
	    offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);

	  /* A constant address in TO_RTX can have VOIDmode, we must not try
	     to call force_reg for that case.  Avoid that case.  */
	  if (MEM_P (to_rtx)
	      && GET_MODE (to_rtx) == BLKmode
	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
	      && bitsize > 0
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
	    {
	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
	      bitpos = 0;
	    }

	  to_rtx = offset_address (to_rtx, offset_rtx,
				   highest_pow2_factor_for_target (to,
								   offset));
	}
      /* No action is needed if the target is not a memory and the field
	 lies completely outside that target.  This can occur if the source
	 code contains an out-of-bounds access to a small array.  */
      if (!MEM_P (to_rtx)
	  && GET_MODE (to_rtx) != BLKmode
	  && (unsigned HOST_WIDE_INT) bitpos
	     >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
	{
	  expand_normal (from);
	  result = NULL;
	}
      /* Handle expand_expr of a complex value returning a CONCAT.  */
      else if (GET_CODE (to_rtx) == CONCAT)
	{
	  unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
	  if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
	      && bitpos == 0
	      && bitsize == mode_bitsize)
	    result = store_expr (from, to_rtx, false, nontemporal);
	  else if (bitsize == mode_bitsize / 2
		   && (bitpos == 0 || bitpos == mode_bitsize / 2))
	    result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
				 nontemporal);
	  else if (bitpos + bitsize <= mode_bitsize / 2)
	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
				  bitregion_start, bitregion_end,
				  mode1, from,
				  get_alias_set (to), nontemporal);
	  else if (bitpos >= mode_bitsize / 2)
	    result = store_field (XEXP (to_rtx, 1), bitsize,
				  bitpos - mode_bitsize / 2,
				  bitregion_start, bitregion_end,
				  mode1, from,
				  get_alias_set (to), nontemporal);
	  else if (bitpos == 0 && bitsize == mode_bitsize)
	    {
	      rtx from_rtx;
	      result = expand_normal (from);
	      from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
					      TYPE_MODE (TREE_TYPE (from)), 0);
	      emit_move_insn (XEXP (to_rtx, 0),
			      read_complex_part (from_rtx, false));
	      emit_move_insn (XEXP (to_rtx, 1),
			      read_complex_part (from_rtx, true));
	    }
	  else
	    {
	      rtx temp = assign_stack_temp (GET_MODE (to_rtx),
					    GET_MODE_SIZE (GET_MODE (to_rtx)));
	      write_complex_part (temp, XEXP (to_rtx, 0), false);
	      write_complex_part (temp, XEXP (to_rtx, 1), true);
	      result = store_field (temp, bitsize, bitpos,
				    bitregion_start, bitregion_end,
				    mode1, from,
				    get_alias_set (to), nontemporal);
	      emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
	      emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
	    }
	}
      else
	{
	  if (MEM_P (to_rtx))
	    {
	      /* If the field is at offset zero, we could have been given the
		 DECL_RTX of the parent struct.  Don't munge it.  */
	      to_rtx = shallow_copy_rtx (to_rtx);
	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
	      if (volatilep)
		MEM_VOLATILE_P (to_rtx) = 1;
	    }

	  if (optimize_bitfield_assignment_op (bitsize, bitpos,
					       bitregion_start, bitregion_end,
					       mode1, to_rtx, to, from))
	    result = NULL;
	  else
	    result = store_field (to_rtx, bitsize, bitpos,
				  bitregion_start, bitregion_end,
				  mode1, from,
				  get_alias_set (to), nontemporal);
	}

      if (result)
	preserve_temp_slots (result);
      pop_temp_slots ();
      return;
    }
  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  The same is true for SSA names.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && COMPLETE_TYPE_P (TREE_TYPE (from))
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (((TREE_CODE (to) == VAR_DECL
	      || TREE_CODE (to) == PARM_DECL
	      || TREE_CODE (to) == RESULT_DECL)
	     && REG_P (DECL_RTL (to)))
	    || TREE_CODE (to) == SSA_NAME))
    {
      rtx value;

      push_temp_slots ();
      value = expand_normal (from);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	{
	  if (GET_CODE (value) == PARALLEL)
	    emit_group_move (to_rtx, value);
	  else
	    emit_group_load (to_rtx, value, TREE_TYPE (from),
			     int_size_in_bytes (TREE_TYPE (from)));
	}
      else if (GET_CODE (value) == PARALLEL)
	emit_group_store (to_rtx, value, TREE_TYPE (from),
			  int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	{
	  /* Handle calls that return BLKmode values in registers.  */
	  if (REG_P (value))
	    copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
	  else
	    emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
	}
      else
	{
	  if (POINTER_TYPE_P (TREE_TYPE (to)))
	    value = convert_memory_address_addr_space
		      (GET_MODE (to_rtx), value,
		       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));

	  emit_move_insn (to_rtx, value);
	}
      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
  to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();

      /* If the source is itself a return value, it still is in a pseudo at
	 this point so we can move it back to the return register directly.  */
      if (REG_P (to_rtx)
	  && TYPE_MODE (TREE_TYPE (from)) == BLKmode
	  && TREE_CODE (from) != CALL_EXPR)
	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
      else
	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	{
	  if (GET_CODE (temp) == PARALLEL)
	    emit_group_move (to_rtx, temp);
	  else
	    emit_group_load (to_rtx, temp, TREE_TYPE (from),
			     int_size_in_bytes (TREE_TYPE (from)));
	}
      else if (temp)
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }
  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL
      && TREE_CODE (from) == INDIRECT_REF
      && ADDR_SPACE_GENERIC_P
	   (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
      && refs_may_alias_p (to, from)
      && cfun->returns_struct
      && !cfun->returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      emit_library_call (memmove_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TYPE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, 0, nontemporal);
  preserve_temp_slots (result);
  pop_temp_slots ();
  return;
}
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

bool
emit_storent_insn (rtx to, rtx from)
{
  struct expand_operand ops[2];
  enum machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
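/* The storent optab typically maps to a streaming store (e.g. movnti on
   x86-64) that bypasses the cache hierarchy, which is useful for large
   write-only data.  A minimal sketch of how it is driven by a caller:

       if (nontemporal && emit_storent_insn (target, temp))
	 ;  /* done -- nothing left to do *​/

   which is exactly the pattern used in store_expr below; if the target
   has no such pattern the call simply returns false and the ordinary
   move is emitted instead.  */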
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.  */

rtx
store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  location_t loc = curr_insn_location ();

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
	 branch and an rvalue in the other.  Here, we resolve attempts to
	 store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
			 nontemporal);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
		  nontemporal);
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
		  nontemporal);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* We can do the conversion inside EXP, which will often result
	 in some optimizations.  Do the conversion in two steps: first
	 change the signedness, if needed, then the extend.  But don't
	 do this if the type of EXP is a subtype of something else
	 since then the conversion might involve more than just
	 converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0
	  && GET_MODE_PRECISION (GET_MODE (target))
	     == TYPE_PRECISION (TREE_TYPE (exp)))
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    {
	      /* Some types, e.g. Fortran's logical*4, won't have a signed
		 version, so use the mode instead.  */
	      tree ntype
		= (signed_or_unsigned_type_for
		   (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
	      if (ntype == NULL)
		ntype = lang_hooks.types.type_for_mode
		  (TYPE_MODE (TREE_TYPE (exp)),
		   SUBREG_PROMOTED_UNSIGNED_P (target));

	      exp = fold_convert_loc (loc, ntype, exp);
	    }

	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
				  (GET_MODE (SUBREG_REG (target)),
				   SUBREG_PROMOTED_UNSIGNED_P (target)),
				  exp);

	  inner_target = SUBREG_REG (target);
	}

      temp = expand_expr (exp, inner_target, VOIDmode,
			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
				GET_MODE (target), temp,
				SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));

      return NULL_RTX;
    }
  else if ((TREE_CODE (exp) == STRING_CST
	    || (TREE_CODE (exp) == MEM_REF
		&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == STRING_CST
		&& integer_zerop (TREE_OPERAND (exp, 1))))
	   && !nontemporal && !call_param_p
	   && MEM_P (target))
    {
      /* Optimize initialization of an array with a STRING_CST.  */
      HOST_WIDE_INT exp_len, str_copy_len;
      rtx dest_mem;
      tree str = TREE_CODE (exp) == STRING_CST
		 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

      exp_len = int_expr_size (exp);
      if (exp_len <= 0)
	goto normal_expr;

      if (TREE_STRING_LENGTH (str) <= 0)
	goto normal_expr;

      str_copy_len = strlen (TREE_STRING_POINTER (str));
      if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
	goto normal_expr;

      str_copy_len = TREE_STRING_LENGTH (str);
      if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
	  && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
	{
	  str_copy_len += STORE_MAX_PIECES - 1;
	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
	}
      str_copy_len = MIN (str_copy_len, exp_len);
      if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
				CONST_CAST (char *, TREE_STRING_POINTER (str)),
				MEM_ALIGN (target), false))
	goto normal_expr;

      dest_mem = target;

      dest_mem = store_by_pieces (dest_mem,
				  str_copy_len, builtin_strncpy_read_str,
				  CONST_CAST (char *,
					      TREE_STRING_POINTER (str)),
				  MEM_ALIGN (target), false,
				  exp_len > str_copy_len ? 1 : 0);
      if (exp_len > str_copy_len)
	clear_storage (adjust_address (dest_mem, BLKmode, 0),
		       GEN_INT (exp_len - str_copy_len),
		       BLOCK_OP_NORMAL);
      return NULL_RTX;
    }
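/* For example, for

       char buf[16] = "hi";

   EXP_LEN is 16 and STR_COPY_LEN starts at 3 ("hi" plus the trailing
   NUL, possibly rounded up when STORE_MAX_PIECES permits, as computed
   above); store_by_pieces writes the string bytes and clear_storage
   zeroes the remaining tail of the array.  */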
  else
    {
      rtx tmp_target;

  normal_expr:
      /* If we want to use a nontemporal store, force the value to
	 register first.  */
      tmp_target = nontemporal ? NULL_RTX : target;
      temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL),
			       &alt_rtl);
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
      && !(alt_rtl
	   && rtx_equal_p (alt_rtl, target)
	   && !side_effects_p (alt_rtl)
	   && !side_effects_p (target))
      /* If there's nothing to copy, don't bother.  Don't call
	 expr_size unless necessary, because some front-ends (C++)
	 expr_size-hook must not be given objects that are not
	 supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
	{
	  if (GET_MODE (target) == BLKmode)
	    {
	      /* Handle calls that return BLKmode values in registers.  */
	      if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
		copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
	      else
		store_bit_field (target,
				 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
				 0, 0, 0, GET_MODE (temp), temp);
	    }
	  else
	    convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
	}
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
5280 /* Handle copying a string constant into an array. The string
5281 constant may be shorter than the array. So copy just the string's
5282 actual length, and clear the rest. First get the size of the data
5283 type of the string, which is actually the size of the target. */
5284 rtx size
= expr_size (exp
);
5286 if (CONST_INT_P (size
)
5287 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
5288 emit_block_move (target
, temp
, size
,
5290 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5293 enum machine_mode pointer_mode
5294 = targetm
.addr_space
.pointer_mode (MEM_ADDR_SPACE (target
));
5295 enum machine_mode address_mode
= get_address_mode (target
);
5297 /* Compute the size of the data to copy from the string. */
5299 = size_binop_loc (loc
, MIN_EXPR
,
5300 make_tree (sizetype
, size
),
5301 size_int (TREE_STRING_LENGTH (exp
)));
5303 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
5305 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
5308 /* Copy that much. */
5309 copy_size_rtx
= convert_to_mode (pointer_mode
, copy_size_rtx
,
5310 TYPE_UNSIGNED (sizetype
));
5311 emit_block_move (target
, temp
, copy_size_rtx
,
5313 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5315 /* Figure out how much is left in TARGET that we have to clear.
5316 Do all calculations in pointer_mode. */
5317 if (CONST_INT_P (copy_size_rtx
))
5319 size
= plus_constant (address_mode
, size
,
5320 -INTVAL (copy_size_rtx
));
5321 target
= adjust_address (target
, BLKmode
,
5322 INTVAL (copy_size_rtx
));
5326 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
5327 copy_size_rtx
, NULL_RTX
, 0,
5330 if (GET_MODE (copy_size_rtx
) != address_mode
)
5331 copy_size_rtx
= convert_to_mode (address_mode
,
5333 TYPE_UNSIGNED (sizetype
));
5335 target
= offset_address (target
, copy_size_rtx
,
5336 highest_pow2_factor (copy_size
));
5337 label
= gen_label_rtx ();
5338 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
5339 GET_MODE (size
), 0, label
);
5342 if (size
!= const0_rtx
)
5343 clear_storage (target
, size
, BLOCK_OP_NORMAL
);
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	{
	  if (GET_CODE (temp) == PARALLEL)
	    emit_group_move (target, temp);
	  else
	    emit_group_load (target, temp, TREE_TYPE (exp),
			     int_size_in_bytes (TREE_TYPE (exp)));
	}
      else if (GET_CODE (temp) == PARALLEL)
	emit_group_store (target, temp, TREE_TYPE (exp),
			  int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (call_param_p
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      /* If we emit a nontemporal store, there is nothing else to do.  */
      else if (nontemporal && emit_storent_insn (target, temp))
	;
      else
	{
	  temp = force_operand (temp, target);
	  if (temp != target)
	    emit_move_insn (target, temp);
	}
    }

  return NULL_RTX;
}
/* Return true if field F of structure TYPE is a flexible array.  */

static bool
flexible_array_member_p (const_tree f, const_tree type)
{
  const_tree tf;

  tf = TREE_TYPE (f);
  return (DECL_CHAIN (f) == NULL
	  && TREE_CODE (tf) == ARRAY_TYPE
	  && TYPE_DOMAIN (tf)
	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
	  && int_size_in_bytes (type) >= 0);
}
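/* The canonical C99 form this recognizes is

       struct msg { int len; char data[]; };

   where DATA is the last field and has an array type with a zero lower
   bound and no upper bound, while the enclosing struct still has a
   computable size.  */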
/* If FOR_CTOR_P, return the number of top-level elements that a constructor
   must have in order for it to completely initialize a value of type TYPE.
   Return -1 if the number isn't known.

   If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */

static HOST_WIDE_INT
count_type_elements (const_tree type, bool for_ctor_p)
{
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree nelts;

	nelts = array_type_nelts (type);
	if (nelts && host_integerp (nelts, 1))
	  {
	    unsigned HOST_WIDE_INT n;

	    n = tree_low_cst (nelts, 1) + 1;
	    if (n == 0 || for_ctor_p)
	      return n;
	    else
	      return n * count_type_elements (TREE_TYPE (type), false);
	  }
	return for_ctor_p ? -1 : 1;
      }

    case RECORD_TYPE:
      {
	unsigned HOST_WIDE_INT n;
	tree f;

	n = 0;
	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!for_ctor_p)
		n += count_type_elements (TREE_TYPE (f), false);
	      else if (!flexible_array_member_p (f, type))
		/* Don't count flexible arrays, which are not supposed
		   to be initialized.  */
		n += 1;
	    }

	return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f;
	HOST_WIDE_INT n, m;

	gcc_assert (!for_ctor_p);
	/* Estimate the number of scalars in each field and pick the
	   maximum.  Other estimates would do instead; the idea is simply
	   to make sure that the estimate is not sensitive to the ordering
	   of the fields.  */
	n = 1;
	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      m = count_type_elements (TREE_TYPE (f), false);
	      /* If the field doesn't span the whole union, add an extra
		 scalar for the rest.  */
	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
				    TYPE_SIZE (type)) != 1)
		m++;
	      if (n < m)
		n = m;
	    }
	return n;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
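      return 1;

    default:
      gcc_unreachable ();
    }
}

/* As a worked example (editorially illustrative, not from the original
   sources): for

       struct pair { int a; int b[2]; };

   count_type_elements returns 3 scalars when !FOR_CTOR_P (A plus two
   array elements), but 2 when FOR_CTOR_P, since a complete constructor
   has one top-level element per field, e.g. { 1, { 2, 3 } }.  */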
/* Helper for categorize_ctor_elements.  Identical interface.  */

static bool
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, init_elts, num_fields;
  tree value, purpose, elt_type;

  /* Whether CTOR is a valid constant initializer, in accordance with what
     initializer_constant_valid_p does.  If inferred from the constructor
     elements, true until proven otherwise.  */
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);

  nz_elts = 0;
  init_elts = 0;
  num_fields = 0;
  elt_type = NULL_TREE;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
	    mult = (tree_low_cst (hi_index, 1)
		    - tree_low_cst (lo_index, 1) + 1);
	}
      num_fields += mult;
      elt_type = TREE_TYPE (value);

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, ic = 0;

	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
							   p_complete);

	    nz_elts += mult * nz;
	    init_elts += mult * ic;

	    if (const_from_elts_p && const_p)
	      const_p = const_elt_p;
	  }
	  break;

	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	  if (!initializer_zerop (value))
	    nz_elts += mult;
	  init_elts += mult;
	  break;

	case STRING_CST:
	  nz_elts += mult * TREE_STRING_LENGTH (value);
	  init_elts += mult * TREE_STRING_LENGTH (value);
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    nz_elts += mult;
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    nz_elts += mult;
	  init_elts += mult;
	  break;

	case VECTOR_CST:
	  {
	    unsigned i;
	    for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
	      {
		tree v = VECTOR_CST_ELT (value, i);
		if (!initializer_zerop (v))
		  nz_elts += mult;
		init_elts += mult;
	      }
	  }
	  break;

	default:
	  {
	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
	    nz_elts += mult * tc;
	    init_elts += mult * tc;

	    if (const_from_elts_p && const_p)
	      const_p = initializer_constant_valid_p (value, elt_type)
			!= NULL_TREE;
	  }
	  break;
	}
    }

  if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
						num_fields, elt_type))
    *p_complete = false;

  *p_nz_elts += nz_elts;
  *p_init_elts += init_elts;

  return const_p;
}
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * whether the constructor is complete -- in the sense that every
     meaningful byte is explicitly given a value --
     and place it in *P_COMPLETE.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */

bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  *p_nz_elts = 0;
  *p_init_elts = 0;
  *p_complete = true;

  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
}
/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
   of which had type LAST_TYPE.  Each element was itself a complete
   initializer, in the sense that every meaningful byte was explicitly
   given a value.  Return true if the same is true for the constructor
   as a whole.  */

bool
complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
			  const_tree last_type)
{
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      if (num_elts == 0)
	return false;

      gcc_assert (num_elts == 1 && last_type);

      /* ??? We could look at each element of the union, and find the
	 largest element.  Which would avoid comparing the size of the
	 initialized element against any tail padding in the union.
	 Doesn't seem worth the effort...  */
      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
    }

  return count_type_elements (type, true) == num_elts;
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return !complete_p || nz_elts < init_elts / 4;
    }

  return initializer_zerop (exp);
}

/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
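/* As a worked example:

       int v[8] = { 1, 0, 0, 0, 0, 0, 0, 0 };

   gives init_elts == 8 and nz_elts == 1, so mostly_zeros_p returns true
   (1 < 8/4) while all_zeros_p returns false; store_constructor will then
   prefer to clear the whole object and store only the nonzero element.  */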
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
			 HOST_WIDE_INT bitpos, enum machine_mode mode,
			 tree exp, int cleared, alias_set_type alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;
	/* If size is zero or the target is already cleared, do nothing.  */
	if (size == 0 || cleared)
	  cleared = 1;
	/* We either clear the aggregate or indicate the value is dead.  */
	else if ((TREE_CODE (type) == UNION_TYPE
		  || TREE_CODE (type) == QUAL_UNION_TYPE)
		 && ! CONSTRUCTOR_ELTS (exp))
	  /* If the constructor is empty, clear the union.  */
	  {
	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* If we are building a static constructor into a register,
	   set the initial value as zero so we can fold the value into
	   a constant.  But if more than one register is involved,
	   this probably loses.  */
	else if (REG_P (target) && TREE_STATIC (exp)
		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	  {
	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    cleared = 1;
	  }

	/* If the constructor has fewer fields than the structure or
	   if we are initializing the structure to mostly zeros, clear
	   the whole structure first.  Don't do this if TARGET is a
	   register whose mode size isn't equal to SIZE since
	   clear_storage can't handle this case.  */
	else if (size > 0
		 && (((int) vec_safe_length (CONSTRUCTOR_ELTS (exp))
		      != fields_length (type))
		     || mostly_zeros_p (exp))
		 && (!REG_P (target)
		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
			 == size)))
	  {
	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (REG_P (target) && !cleared)
	  emit_clobber (target);
	/* Store each element of the constructor into the
	   corresponding field of TARGET.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
	  {
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos = 0;
	    tree offset;
	    rtx to_rtx = target;

	    /* Just ignore missing fields.  We cleared the whole
	       structure, above, if any fields are missing.  */
	    if (field == 0)
	      continue;

	    if (cleared && initializer_zerop (value))
	      continue;

	    if (host_integerp (DECL_SIZE (field), 1))
	      bitsize = tree_low_cst (DECL_SIZE (field), 1);
	    else
	      bitsize = -1;

	    mode = DECL_MODE (field);
	    if (DECL_BIT_FIELD (field))
	      mode = VOIDmode;

	    offset = DECL_FIELD_OFFSET (field);
	    if (host_integerp (offset, 0)
		&& host_integerp (bit_position (field), 0))
	      {
		bitpos = int_bit_position (field);
		offset = 0;
	      }
	    else
	      bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

	    if (offset)
	      {
		enum machine_mode address_mode;
		rtx offset_rtx;

		offset
		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
						    make_tree (TREE_TYPE (exp),
							       target));

		offset_rtx = expand_normal (offset);
		gcc_assert (MEM_P (to_rtx));

		address_mode = get_address_mode (to_rtx);
		if (GET_MODE (offset_rtx) != address_mode)
		  offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);

		to_rtx = offset_address (to_rtx, offset_rtx,
					 highest_pow2_factor (offset));
	      }

#ifdef WORD_REGISTER_OPERATIONS
	    /* If this initializes a field that is smaller than a
	       word, at the start of a word, try to widen it to a full
	       word.  This special case allows us to output C++ member
	       function initializations in a form that the optimizers
	       can understand.  */
	    if (REG_P (target)
		&& bitsize < BITS_PER_WORD
		&& bitpos % BITS_PER_WORD == 0
		&& GET_MODE_CLASS (mode) == MODE_INT
		&& TREE_CODE (value) == INTEGER_CST
		&& exp_size >= 0
		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	      {
		tree type = TREE_TYPE (value);

		if (TYPE_PRECISION (type) < BITS_PER_WORD)
		  {
		    type = lang_hooks.types.type_for_mode
		      (word_mode, TYPE_UNSIGNED (type));
		    value = fold_convert (type, value);
		  }

		if (BYTES_BIG_ENDIAN)
		  value
		    = fold_build2 (LSHIFT_EXPR, type, value,
				   build_int_cst (type,
						  BITS_PER_WORD - bitsize));
		bitsize = BITS_PER_WORD;
		mode = word_mode;
	      }
#endif

	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
		&& DECL_NONADDRESSABLE_P (field))
	      {
		to_rtx = copy_rtx (to_rtx);
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	      }

	    store_constructor_field (to_rtx, bitsize, bitpos, mode,
				     value, cleared,
				     get_alias_set (TREE_TYPE (field)));
	  }
	break;
      }
    case ARRAY_TYPE:
      {
	tree value, index;
	unsigned HOST_WIDE_INT i;
	int need_to_clear;
	tree domain;
	tree elttype = TREE_TYPE (type);
	int const_bounds_p;
	HOST_WIDE_INT minelt = 0;
	HOST_WIDE_INT maxelt = 0;

	domain = TYPE_DOMAIN (type);
	const_bounds_p = (TYPE_MIN_VALUE (domain)
			  && TYPE_MAX_VALUE (domain)
			  && host_integerp (TYPE_MIN_VALUE (domain), 0)
			  && host_integerp (TYPE_MAX_VALUE (domain), 0));

	/* If we have constant bounds for the range of the type, get them.  */
	if (const_bounds_p)
	  {
	    minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
	    maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
	  }

	/* If the constructor has fewer elements than the array, clear
	   the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT idx;
	    tree index, value;
	    HOST_WIDE_INT count = 0, zero_count = 0;
	    need_to_clear = ! const_bounds_p;

	    /* This loop is a more accurate version of the loop in
	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
	       is also needed to check for missing elements.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
	      {
		HOST_WIDE_INT this_node_count;

		if (need_to_clear)
		  break;

		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		  {
		    tree lo_index = TREE_OPERAND (index, 0);
		    tree hi_index = TREE_OPERAND (index, 1);

		    if (! host_integerp (lo_index, 1)
			|| ! host_integerp (hi_index, 1))
		      {
			need_to_clear = 1;
			break;
		      }

		    this_node_count = (tree_low_cst (hi_index, 1)
				       - tree_low_cst (lo_index, 1) + 1);
		  }
		else
		  this_node_count = 1;

		count += this_node_count;
		if (mostly_zeros_p (value))
		  zero_count += this_node_count;
	      }

	    /* Clear the entire array first if there are any missing
	       elements, or if the incidence of zero elements is >=
	       75%.  */
	    if (! need_to_clear
		&& (count < maxelt - minelt + 1
		    || 4 * zero_count >= 3 * count))
	      need_to_clear = 1;
	  }
&& size
> 0)
6003 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6005 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
6009 if (!cleared
&& REG_P (target
))
6010 /* Inform later passes that the old value is dead. */
6011 emit_clobber (target
);
6013 /* Store each element of the constructor into the
6014 corresponding element of TARGET, determined by counting the
6016 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), i
, index
, value
)
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos;
	    rtx xtarget = target;

	    if (cleared && initializer_zerop (value))
	      continue;

	    mode = TYPE_MODE (elttype);
	    if (mode == BLKmode)
	      bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
			 ? tree_low_cst (TYPE_SIZE (elttype), 1)
			 : -1);
	    else
	      bitsize = GET_MODE_BITSIZE (mode);

	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	      {
		tree lo_index = TREE_OPERAND (index, 0);
		tree hi_index = TREE_OPERAND (index, 1);
		rtx index_r, pos_rtx;
		HOST_WIDE_INT lo, hi, count;
		tree position;
		/* If the range is constant and "small", unroll the loop.  */
		if (const_bounds_p
		    && host_integerp (lo_index, 0)
		    && host_integerp (hi_index, 0)
		    && (lo = tree_low_cst (lo_index, 0),
			hi = tree_low_cst (hi_index, 0),
			count = hi - lo + 1,
			(!MEM_P (target)
			 || count <= 2
			 || (host_integerp (TYPE_SIZE (elttype), 1)
			     && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
				 <= 40 * 8)))))
		  {
		    lo -= minelt;  hi -= minelt;
		    for (; lo <= hi; lo++)
		      {
			bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

			if (MEM_P (target)
			    && !MEM_KEEP_ALIAS_SET_P (target)
			    && TREE_CODE (type) == ARRAY_TYPE
			    && TYPE_NONALIASED_COMPONENT (type))
			  {
			    target = copy_rtx (target);
			    MEM_KEEP_ALIAS_SET_P (target) = 1;
			  }

			store_constructor_field
			  (target, bitsize, bitpos, mode, value, cleared,
			   get_alias_set (elttype));
		      }
		  }
		else
		  {
= gen_label_rtx ();
6077 rtx loop_end
= gen_label_rtx ();
6080 expand_normal (hi_index
);
6082 index
= build_decl (EXPR_LOCATION (exp
),
6083 VAR_DECL
, NULL_TREE
, domain
);
6084 index_r
= gen_reg_rtx (promote_decl_mode (index
, NULL
));
6085 SET_DECL_RTL (index
, index_r
);
6086 store_expr (lo_index
, index_r
, 0, false);
6088 /* Build the head of the loop. */
6089 do_pending_stack_adjust ();
6090 emit_label (loop_start
);
6092 /* Assign value to element index. */
6094 fold_convert (ssizetype
,
6095 fold_build2 (MINUS_EXPR
,
6098 TYPE_MIN_VALUE (domain
)));
6101 size_binop (MULT_EXPR
, position
,
6102 fold_convert (ssizetype
,
6103 TYPE_SIZE_UNIT (elttype
)));
6105 pos_rtx
= expand_normal (position
);
6106 xtarget
= offset_address (target
, pos_rtx
,
6107 highest_pow2_factor (position
));
6108 xtarget
= adjust_address (xtarget
, mode
, 0);
6109 if (TREE_CODE (value
) == CONSTRUCTOR
)
6110 store_constructor (value
, xtarget
, cleared
,
6111 bitsize
/ BITS_PER_UNIT
);
6113 store_expr (value
, xtarget
, 0, false);
6115 /* Generate a conditional jump to exit the loop. */
6116 exit_cond
= build2 (LT_EXPR
, integer_type_node
,
6118 jumpif (exit_cond
, loop_end
, -1);
6120 /* Update the loop counter, and jump to the head of
6122 expand_assignment (index
,
6123 build2 (PLUS_EXPR
, TREE_TYPE (index
),
6124 index
, integer_one_node
),
6127 emit_jump (loop_start
);
6129 /* Build the end of the loop. */
6130 emit_label (loop_end
);
	    else if ((index != 0 && ! host_integerp (index, 0))
		     || ! host_integerp (TYPE_SIZE (elttype), 1))
	      {
		tree position;

		if (index == 0)
		  index = ssize_int (1);

		if (minelt)
		  index = fold_convert (ssizetype,
					fold_build2 (MINUS_EXPR,
						     TREE_TYPE (index),
						     index,
						     TYPE_MIN_VALUE (domain)));

		position =
		  size_binop (MULT_EXPR, index,
			      fold_convert (ssizetype,
					    TYPE_SIZE_UNIT (elttype)));
		xtarget = offset_address (target,
					  expand_normal (position),
					  highest_pow2_factor (position));
		xtarget = adjust_address (xtarget, mode, 0);
		store_expr (value, xtarget, 0, false);
	      }
	    else
	      {
		if (index != 0)
		  bitpos = ((tree_low_cst (index, 0) - minelt)
			    * tree_low_cst (TYPE_SIZE (elttype), 1));
		else
		  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		    && TREE_CODE (type) == ARRAY_TYPE
		    && TYPE_NONALIASED_COMPONENT (type))
		  {
		    target = copy_rtx (target);
		    MEM_KEEP_ALIAS_SET_P (target) = 1;
		  }
		store_constructor_field (target, bitsize, bitpos, mode, value,
					 cleared, get_alias_set (elttype));
	      }
	  }
	break;
      }

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;
	int i;
	int need_to_clear;
	int icode = CODE_FOR_nothing;
	tree elttype = TREE_TYPE (type);
	int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
	enum machine_mode eltmode = TYPE_MODE (elttype);
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	rtvec vector = NULL;
	unsigned n_elts;
	alias_set_type alias;

	gcc_assert (eltmode != BLKmode);

	n_elts = TYPE_VECTOR_SUBPARTS (type);
	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
	  {
	    enum machine_mode mode = GET_MODE (target);

	    icode = (int) optab_handler (vec_init_optab, mode);
	    if (icode != CODE_FOR_nothing)
	      {
		unsigned int i;

		vector = rtvec_alloc (n_elts);
		for (i = 0; i < n_elts; i++)
		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
	      }
	  }
	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is a static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
	    tree value;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	      {
		int n_elts_here = tree_low_cst
		  (int_const_binop (TRUNC_DIV_EXPR,
				    TYPE_SIZE (TREE_TYPE (value)),
				    TYPE_SIZE (elttype)), 1);

		count += n_elts_here;
		if (mostly_zeros_p (value))
		  zero_count += n_elts_here;
	      }

	    /* Clear the entire vector first if there are any missing elements,
	       or if the incidence of zero elements is >= 75%.  */
	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
	  }
	if (need_to_clear && size > 0 && !vector)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* Inform later passes that the old value is dead.  */
	if (!cleared && !vector && REG_P (target))
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

	if (MEM_P (target))
	  alias = MEM_ALIAS_SET (target);
	else
	  alias = get_alias_set (elttype);
	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */
	for (idx = 0, i = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
	     idx++, i += bitsize / elt_size)
	  {
	    HOST_WIDE_INT eltpos;
	    tree value = ce->value;

	    bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (ce->index)
	      eltpos = tree_low_cst (ce->index, 1);
	    else
	      eltpos = i;

	    if (vector)
	      {
		/* Vector CONSTRUCTORs should only be built from smaller
		   vectors in the case of BLKmode vectors.  */
		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
		RTVEC_ELT (vector, eltpos)
		  = expand_normal (value);
	      }
	    else
	      {
		enum machine_mode value_mode =
		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
		  ? TYPE_MODE (TREE_TYPE (value))
		  : eltmode;
		bitpos = eltpos * elt_size;
		store_constructor_field (target, bitsize, bitpos, value_mode,
					 value, cleared, alias);
	      }
	  }

	if (vector)
	  emit_insn (GEN_FCN (icode)
		     (target,
		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
	break;
      }

    default:
      gcc_unreachable ();
    }
}
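
/* Illustrative example (added commentary, not from the original sources):
   for a local initialization such as

       int a[100] = { [2] = 7 };

   the counting loop in the ARRAY_TYPE case above sees count == 1, which
   is fewer than the 100 elements of the domain, so NEED_TO_CLEAR is set:
   the whole array is cleared first and only the nonzero element is
   stored afterwards.  The same 4 * zero_count >= 3 * count test prefers
   clear-then-store whenever at least 75% of the explicit elements are
   mostly zero.  The declaration shown is hypothetical.  */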
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   BITREGION_START is bitpos of the first bitfield in this region.
   BITREGION_END is the bitpos of the ending bitfield in this region.
   These two fields are 0, if the C++ memory model does not apply,
   or we are not interested in keeping track of bitfield regions.

   Always return const0_rtx unless we have something particular to
   return.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.  */
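
/* Illustrative example (added commentary, not from the original sources):
   for a C assignment such as

       struct s { unsigned a : 3; unsigned b : 5; } x;
       x.b = v;

   store_field would typically be entered with BITSIZE == 5, BITPOS == 3
   and MODE == VOIDmode on a target that packs these bits into one unit,
   so the store goes through store_bit_field rather than an ordinary
   memory reference.  The type and numbers are hypothetical and depend
   on the target's bit-field layout.  */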
static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     unsigned HOST_WIDE_INT bitregion_start,
	     unsigned HOST_WIDE_INT bitregion_end,
	     enum machine_mode mode, tree exp,
	     alias_set_type alias_set, bool nontemporal)
{
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */
      gcc_assert (!bitpos);
      return store_expr (exp, target, 0, nontemporal);
    }
  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| bitpos % GET_MODE_ALIGNMENT (mode))
	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	      || (bitpos % BITS_PER_UNIT != 0)))
      || (bitsize >= 0 && mode != BLKmode
	  && GET_MODE_BITSIZE (mode) > bitsize)
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
      /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
	 decl we must use bitfield operations.  */
      || (bitsize >= 0
	  && TREE_CODE (exp) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
    {
      rtx temp;
      gimple nop_def;
      /* If EXP is a NOP_EXPR of precision less than its mode, then that
	 implies a mask operation.  If the precision is the same size as
	 the field we're storing into, that mask is redundant.  This is
	 particularly common with bit field assignments generated by the
	 C front end.  */
      nop_def = get_def_for_expr (exp, NOP_EXPR);
      if (nop_def)
	{
	  tree type = TREE_TYPE (exp);
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
	      && bitsize == TYPE_PRECISION (type))
	    {
	      tree op = gimple_assign_rhs1 (nop_def);
	      type = TREE_TYPE (op);
	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
		exp = op;
	    }
	}

      temp = expand_normal (exp);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
			     NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
      /* If the modes of TEMP and TARGET are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  Likewise
	 for a BLKmode-like TARGET.  */
      if (GET_MODE (temp) == BLKmode
	  && (GET_MODE (target) == BLKmode
	      || (MEM_P (target)
		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
		  && (bitpos % BITS_PER_UNIT) == 0
		  && (bitsize % BITS_PER_UNIT) == 0)))
	{
	  gcc_assert (MEM_P (target) && MEM_P (temp)
		      && (bitpos % BITS_PER_UNIT) == 0);

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);

	  return const0_rtx;
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (temp) == PARALLEL)
	{
	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
	  rtx temp_target;
	  if (mode == BLKmode)
	    mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
	  temp_target = gen_reg_rtx (mode);
	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
	  temp = temp_target;
	}
      else if (mode == BLKmode)
	{
	  /* Handle calls that return BLKmode values in registers.  */
	  if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
	    {
	      rtx temp_target = gen_reg_rtx (GET_MODE (temp));
	      copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
	      temp = temp_target;
	    }
	  else
	    {
	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
	      rtx temp_target;
	      mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
	      temp_target = gen_reg_rtx (mode);
	      temp_target
		= extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
				     temp_target, mode, mode);
	      temp = temp_target;
	    }
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos,
		       bitregion_start, bitregion_end,
		       mode, temp);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, 0, nontemporal);
    }
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
   Otherwise, it is a mode that can be used to access the field.

   If the field describes a variable-sized object, *PMODE is set to
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.

   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
   look through nodes that serve as markers of a greater alignment than
   the one that can be deduced from the expression.  These nodes make it
   possible for front-ends to prevent temporaries from being created by
   the middle-end on alignment considerations.  For that purpose, the
   normal operating mode at high-level is to always pass FALSE so that
   the ultimate containing object is really returned; moreover, the
   associated predicate handled_component_p will always return TRUE
   on these nodes, thus indicating that they are essentially handled
   by get_inner_reference.  TRUE should only be passed when the caller
   is scanning the expression in order to build another representation
   and specifically knows how to handle these nodes; as such, this is
   the normal operating mode in the RTL expanders.  */
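
/* Illustrative example (added commentary, not from the original sources):
   for a reference like p->v[3] with

       struct s { int v[10]; } *p;

   get_inner_reference peels off the ARRAY_REF and returns the innermost
   containing object (here the dereference of P), and on a target with
   32-bit int would set *PBITSIZE == 32, *PBITPOS == 3 * 32 and
   *POFFSET == NULL_TREE; had the index been a variable I, *POFFSET
   would instead hold the byte-offset tree I * 4 and *PBITPOS only the
   constant part.  All numbers are hypothetical and target-dependent.  */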
tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
		     HOST_WIDE_INT *pbitpos, tree *poffset,
		     enum machine_mode *pmode, int *punsignedp,
		     int *pvolatilep, bool keep_aligning)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  bool blkmode_bitfield = false;
  tree offset = size_zero_node;
  double_int bit_offset = double_int_zero;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  *pbitsize = -1;
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (exp, 1);
      size_tree = DECL_SIZE (field);
      if (flag_strict_volatile_bitfields > 0
	  && TREE_THIS_VOLATILE (exp)
	  && DECL_BIT_FIELD_TYPE (field)
	  && DECL_MODE (field) != BLKmode)
	/* Volatile bitfields should be accessed in the mode of the
	   field's type, not the mode computed based on the bit
	   size.  */
	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
      else if (!DECL_BIT_FIELD (field))
	mode = DECL_MODE (field);
      else if (DECL_MODE (field) == BLKmode)
	blkmode_bitfield = true;

      *punsignedp = DECL_UNSIGNED (field);
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
		     || TYPE_UNSIGNED (TREE_TYPE (exp)));

      /* For vector types, with the correct size of access, use the mode of
	 the inner type.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
	mode = TYPE_MODE (TREE_TYPE (exp));
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
    }
  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset += tree_to_double_int (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset += tree_to_double_int (DECL_FIELD_BIT_OFFSET (field));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     fold_convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset += double_int::from_uhwi (*pbitsize);
	  break;

	case VIEW_CONVERT_EXPR:
	  if (keep_aligning && STRICT_ALIGNMENT
	      && (TYPE_ALIGN (TREE_TYPE (exp))
		  > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  < BIGGEST_ALIGNMENT)
	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	    goto done;
	  break;

	case MEM_REF:
	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      tree off = TREE_OPERAND (exp, 1);
	      if (!integer_zerop (off))
		{
		  double_int boff, coff = mem_ref_offset (exp);
		  boff = coff.lshift (BITS_PER_UNIT == 8
				      ? 3 : exact_log2 (BITS_PER_UNIT));
		  bit_offset += boff;
		}
	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	    }
	  goto done;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (TREE_CODE (offset) == INTEGER_CST)
    {
      double_int tem = tree_to_double_int (offset);
      tem = tem.sext (TYPE_PRECISION (sizetype));
      tem = tem.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
      tem += bit_offset;
      if (tem.fits_shwi ())
	{
	  *pbitpos = tem.to_shwi ();
	  *poffset = offset = NULL_TREE;
	}
    }

  /* Otherwise, split it up.  */
  if (offset)
    {
      /* Avoid returning a negative bitpos as this may wreak havoc later.  */
      if (bit_offset.is_negative ())
	{
	  double_int mask
	    = double_int::mask (BITS_PER_UNIT == 8
				? 3 : exact_log2 (BITS_PER_UNIT));
	  double_int tem = bit_offset.and_not (mask);
	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
	     Subtract it from BIT_OFFSET and add it (scaled) to OFFSET.  */
	  bit_offset -= tem;
	  tem = tem.arshift (BITS_PER_UNIT == 8
			     ? 3 : exact_log2 (BITS_PER_UNIT),
			     HOST_BITS_PER_DOUBLE_INT);
	  offset = size_binop (PLUS_EXPR, offset,
			       double_int_to_tree (sizetype, tem));
	}

      *pbitpos = bit_offset.to_shwi ();
      *poffset = offset;
    }

  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
  if (mode == VOIDmode
      && blkmode_bitfield
      && (*pbitpos % BITS_PER_UNIT) == 0
      && (*pbitsize % BITS_PER_UNIT) == 0)
    *pmode = BLKmode;
  else
    *pmode = mode;

  return exp;
}
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
/* Returns true if REF is an array reference to an array at the end of
   a structure.  If this is the case, the array may be allocated larger
   than its upper bound implies.  */

bool
array_at_struct_end_p (tree ref)
{
  if (TREE_CODE (ref) != ARRAY_REF
      && TREE_CODE (ref) != ARRAY_RANGE_REF)
    return false;

  while (handled_component_p (ref))
    {
      /* If the reference chain contains a component reference to a
	 non-union type and there follows another field the reference
	 is not at the end of a structure.  */
      if (TREE_CODE (ref) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
	{
	  tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
	  while (nextf && TREE_CODE (nextf) != FIELD_DECL)
	    nextf = DECL_CHAIN (nextf);
	  if (nextf)
	    return false;
	}

      ref = TREE_OPERAND (ref, 0);
    }

  /* If the reference is based on a declared entity, the size of the array
     is constrained by its given domain.  */
  if (DECL_P (ref))
    return false;

  return true;
}
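
/* Illustrative example (added commentary, not from the original sources):
   given the classic "struct hack"

       struct s { int n; int tail[1]; };

   an access like q->tail[i] through a pointer Q reaches the DECL_P test
   above with REF based on a dereference rather than a declared object,
   so array_at_struct_end_p returns true and the array may legitimately
   extend past its declared bound; for a declared `struct s x;' it
   returns false because the domain of X constrains the size.  The
   declarations shown are hypothetical.  */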
/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);
  location_t loc = EXPR_LOCATION (exp);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
      return size_binop_loc (loc, MULT_EXPR, aligned_offset,
			     size_int (DECL_OFFSET_ALIGN (field)
				       / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate misaligning
     bitfields components, so need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case BIT_FIELD_REF:
      return 1;

    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}
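
/* Illustrative example (added commentary, not from the original sources):
   for an assignment target such as x.c with

       struct __attribute__ ((packed)) s { char pad; int c; } x;

   target_align recurses through the COMPONENT_REF and returns
   DECL_ALIGN of the packed field C (8 bits here), which is smaller
   than TYPE_ALIGN of int; the MIN/MAX combinations above merge the
   member and container alignments.  The layout shown is hypothetical
   and target-dependent.  */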
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value
	= simplify_gen_subreg (GET_MODE (value),
			       force_reg (GET_MODE (SUBREG_REG (value)),
					  force_operand (SUBREG_REG (value),
							 NULL_RTX)),
			       GET_MODE (SUBREG_REG (value)),
			       SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && CONST_INT_P (op2))
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && CONST_INT_P (op2)
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

static int
safe_from_p (const_rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
	      != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == CONSTRUCTOR)
	{
	  constructor_elt *ce;
	  unsigned HOST_WIDE_INT idx;

	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
		|| !safe_from_p (x, ce->value, 0))
	      return 0;
	  return 1;
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;
      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MEM_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_OPERAND_LENGTH (exp);
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT ret;
  int trailing_zeros = tree_ctz (exp);
  if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
    return BIGGEST_ALIGNMENT;
  ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
  if (ret > BIGGEST_ALIGNMENT)
    return BIGGEST_ALIGNMENT;
  return ret;
}
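
/* Illustrative example (added commentary, not from the original sources):
   if EXP is the tree for i * 12, tree_ctz can prove two trailing zero
   bits (12 is divisible by 4 but not by 8), so highest_pow2_factor
   returns 4; for a literal 0, where every bit is a known zero, the
   trailing-zero count saturates and BIGGEST_ALIGNMENT is returned by
   the first test above.  */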
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}
#ifdef HAVE_conditional_move
/* Convert the tree comparison code TCODE to the rtl one where the
   signedness is UNSIGNEDP.  */

static enum rtx_code
convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
{
  enum rtx_code code;
  switch (tcode)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;
    default:
      gcc_unreachable ();
    }
  return code;
}
#endif
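
/* Illustrative example (added commentary, not from the original sources):
   the mapping above depends on signedness only for the ordered integer
   comparisons: LT_EXPR becomes LTU when UNSIGNEDP is nonzero and LT
   otherwise, and likewise for LE/GT/GE, while codes such as
   UNORDERED_EXPR translate to their RTL counterparts unconditionally.  */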
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
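
/* Illustrative example (added commentary, not from the original sources):
   when expanding the two operands of a - b where B contains side
   effects and the language requires left-to-right evaluation
   (flag_evaluation_order), the code above wraps A in a SAVE_EXPR first
   so that expanding B cannot clobber A's value; identical operand
   trees are expanded once and the resulting rtx is simply copied.  */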
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);

  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
			 enum expand_modifier modifier, addr_space_t as)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int volatilep, unsignedp;
  enum machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     yet.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (CONSTANT_CLASS_P (exp))
    {
      result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;
    }

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case MEM_REF:
      {
	tree tem = TREE_OPERAND (exp, 0);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
	return expand_expr (tem, target, tmode, modifier);
      }

    case CONST_DECL:
      /* Expand the initializer like constants above.  */
      result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
					   0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    case COMPOUND_LITERAL_EXPR:
      /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
	 rtl_for_decl_init is called on DECL_INITIAL with
	 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified.  */
      if (modifier == EXPAND_INITIALIZER
	  && COMPOUND_LITERAL_EXPR_DECL (exp))
	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
					target, tmode, modifier, as);
      /* FALLTHRU */
    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Expand the rtl of
	 CONSTRUCTORs too, which should yield a memory reference for the
	 constructor's contents.  Assume language specific tree nodes can
	 be expanded in some interesting way.  */
      gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
      if (DECL_P (exp)
	  || TREE_CODE (exp) == CONSTRUCTOR
	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */

	  if (TREE_ADDRESSABLE (exp)
	      && ! MEM_P (result)
	      && ! targetm.calls.allocate_stack_slots_for_args ())
	    {
	      error ("local frame unavailable (naked function?)");
	      return result;
	    }
	  else
	    gcc_assert (MEM_P (result));
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp))
	    TREE_USED (exp) = 1;

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM)
	    result = force_operand (result, target);
	  return result;
	}

      /* Pass FALSE as the last argument to get_inner_reference although
	 we are expanding to RTL.  The rationale is that we know how to
	 handle "aligning nodes" here: we can just bypass them because
	 they won't change the final object whose address will be returned
	 (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep, false);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
     inner alignment, force the inner to be sufficiently aligned.  */
  if (CONSTANT_CLASS_P (inner)
      && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
    {
      inner = copy_node (inner);
      TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
      TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
      TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
    }
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL_RTX, tmode,
			 modifier == EXPAND_INITIALIZER
			 ? EXPAND_INITIALIZER : EXPAND_NORMAL);

      result = convert_memory_address_addr_space (tmode, result, as);
      tmp = convert_memory_address_addr_space (tmode, tmp, as);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	result = simplify_gen_binary (PLUS, tmode, result, tmp);
      else
	{
	  subtarget = bitpos ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
	 of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = convert_memory_address_addr_space (tmode, result, as);
      result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
		       enum expand_modifier modifier)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  enum machine_mode address_mode = Pmode;
  enum machine_mode pointer_mode = ptr_mode;
  enum machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      address_mode = targetm.addr_space.address_mode (as);
      pointer_mode = targetm.addr_space.pointer_mode (as);
    }

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != address_mode && tmode != pointer_mode)
    tmode = address_mode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    tmode, modifier, as);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address_addr_space (tmode, result, as);

  return result;
}
/* Generate code for computing CONSTRUCTOR EXP.
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
   is TRUE, instead of creating a temporary variable in memory
   NULL is returned and the caller needs to handle it differently.  */

static rtx
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
		    bool avoid_temp_mem)
{
  tree type = TREE_TYPE (exp);
  enum machine_mode mode = TYPE_MODE (type);

  /* Try to avoid creating a temporary at all.  This is possible
     if all of the initializer is zero.
     FIXME: try to handle all [0..255] initializers we can handle
     with memset.  */
  if (TREE_STATIC (exp)
      && !TREE_ADDRESSABLE (exp)
      && target != 0 && mode == BLKmode
      && all_zeros_p (exp))
    {
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
      return target;
    }

  /* All elts simple constants => refer to a constant in memory.  But
     if this is a non-BLKmode mode, let it store a field at a time
     since that should make a CONST_INT or CONST_DOUBLE when we
     fold.  Likewise, if we have a target we can use, it is best to
     store directly into the target unless the type is large enough
     that memcpy will be used.  If we are making an initializer and
     all operands are constant, put it in memory as well.

     FIXME: Avoid trying to fill vector constructors piece-meal.
     Output them with output_constant_def below unless we're sure
     they're zeros.  This should go away when vector initializers
     are treated like VECTOR_CST instead of arrays.  */
  if ((TREE_STATIC (exp)
       && ((mode == BLKmode
	    && ! (target != 0 && safe_from_p (target, exp, 1)))
	   || TREE_ADDRESSABLE (exp)
	   || (host_integerp (TYPE_SIZE_UNIT (type), 1)
	       && (! MOVE_BY_PIECES_P
		   (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
		    TYPE_ALIGN (type)))
	       && ! mostly_zeros_p (exp))))
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
	  && TREE_CONSTANT (exp)))
    {
      rtx constructor;

      if (avoid_temp_mem)
	return NULL_RTX;

      constructor = expand_expr_constant (exp, 1, modifier);

      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	constructor = validize_mem (constructor);

      return constructor;
    }

  /* Handle calls that pass values in multiple non-contiguous
     locations.  The Irix 6 ABI has examples of this.  */
  if (target == 0 || ! safe_from_p (target, exp, 1)
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
    {
      if (avoid_temp_mem)
	return NULL_RTX;

      target
	= assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
		       TREE_ADDRESSABLE (exp), 1);
    }

  store_constructor (exp, target, 0, int_expr_size (exp));
  return target;
}
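
/* Illustrative example (added commentary, not from the original sources):
   for a local aggregate whose initializer is entirely zero, such as

       struct { int v[256]; } x = { 0 };

   the constructor is TREE_STATIC and all_zeros_p, so with a BLKmode
   target the whole object is initialized by one clear_storage call
   instead of elementwise stores.  The declaration is hypothetical.  */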
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */

rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx ret;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
  return ret;
}
/* Try to expand the conditional expression which is represented by
   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds,
   return the rtl reg which represents the result.  Otherwise return
   NULL_RTX.  */

static rtx
expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
			      tree treeop1 ATTRIBUTE_UNUSED,
			      tree treeop2 ATTRIBUTE_UNUSED)
{
#ifdef HAVE_conditional_move
  rtx insn;
  rtx op00, op01, op1, op2;
  enum rtx_code comparison_code;
  enum machine_mode comparison_mode;
  gimple srcstmt;
  rtx temp;
  tree type = TREE_TYPE (treeop1);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode mode = TYPE_MODE (type);
  enum machine_mode orig_mode = mode;

  /* If we cannot do a conditional move on the mode, try doing it
     with the promoted mode.  */
  if (!can_conditionally_move_p (mode))
    {
      mode = promote_mode (type, mode, &unsignedp);
      if (!can_conditionally_move_p (mode))
	return NULL_RTX;
      temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
    }
  else
    temp = assign_temp (type, 0, 1);

  start_sequence ();
  expand_operands (treeop1, treeop2,
		   temp, &op1, &op2, EXPAND_NORMAL);

  if (TREE_CODE (treeop0) == SSA_NAME
      && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
      enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
      op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
      op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
      comparison_mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
    {
      tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
      enum tree_code cmpcode = TREE_CODE (treeop0);
      op00 = expand_normal (TREE_OPERAND (treeop0, 0));
      op01 = expand_normal (TREE_OPERAND (treeop0, 1));
      unsignedp = TYPE_UNSIGNED (type);
      comparison_mode = TYPE_MODE (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else
    {
      op00 = expand_normal (treeop0);
      op01 = const0_rtx;
      comparison_code = NE;
      comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
    }

  if (GET_MODE (op1) != mode)
    op1 = gen_lowpart (mode, op1);

  if (GET_MODE (op2) != mode)
    op2 = gen_lowpart (mode, op2);

  /* Try to emit the conditional move.  */
  insn = emit_conditional_move (temp, comparison_code,
				op00, op01, comparison_mode,
				op1, op2, mode,
				unsignedp);

  /* If we could do the conditional move, emit the sequence,
     and return.  */
  if (insn)
    {
      rtx seq = get_insns ();
      end_sequence ();
      emit_insn (seq);
      return convert_modes (orig_mode, mode, temp, 0);
    }

  /* Otherwise discard the sequence and fall back to code with
     branches.  */
  end_sequence ();
#endif
  return NULL_RTX;
}
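
/* Illustrative example (added commentary, not from the original sources):
   on a target providing conditional moves (e.g. x86 cmov), a statement
   like x = a < b ? c : d can be expanded by the routine above into one
   comparison plus a single conditional-move instruction, with no
   branch; if emit_conditional_move cannot handle the mode, the
   tentative insn sequence is discarded and the usual branching
   expansion is used instead.  */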
7888 expand_expr_real_2 (sepops ops
, rtx target
, enum machine_mode tmode
,
7889 enum expand_modifier modifier
)
7891 rtx op0
, op1
, op2
, temp
;
7894 enum machine_mode mode
;
7895 enum tree_code code
= ops
->code
;
7897 rtx subtarget
, original_target
;
7899 bool reduce_bit_field
;
7900 location_t loc
= ops
->location
;
7901 tree treeop0
, treeop1
, treeop2
;
7902 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7903 ? reduce_to_bit_field_precision ((expr), \
7909 mode
= TYPE_MODE (type
);
7910 unsignedp
= TYPE_UNSIGNED (type
);
7916 /* We should be called only on simple (binary or unary) expressions,
7917 exactly those that are valid in gimple expressions that aren't
7918 GIMPLE_SINGLE_RHS (or invalid). */
7919 gcc_assert (get_gimple_rhs_class (code
) == GIMPLE_UNARY_RHS
7920 || get_gimple_rhs_class (code
) == GIMPLE_BINARY_RHS
7921 || get_gimple_rhs_class (code
) == GIMPLE_TERNARY_RHS
);
7923 ignore
= (target
== const0_rtx
7924 || ((CONVERT_EXPR_CODE_P (code
)
7925 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
7926 && TREE_CODE (type
) == VOID_TYPE
));
7928 /* We should be called only if we need the result. */
7929 gcc_assert (!ignore
);
7931 /* An operation in what may be a bit-field type needs the
7932 result to be reduced to the precision of the bit-field type,
7933 which is narrower than that of the type's mode. */
7934 reduce_bit_field
= (INTEGRAL_TYPE_P (type
)
7935 && GET_MODE_PRECISION (mode
) > TYPE_PRECISION (type
));
7937 if (reduce_bit_field
&& modifier
== EXPAND_STACK_PARM
)
7940 /* Use subtarget as the target for operand 0 of a binary operation. */
7941 subtarget
= get_subtarget (target
);
7942 original_target
= target
;
7946 case NON_LVALUE_EXPR
:
7949 if (treeop0
== error_mark_node
)
7952 if (TREE_CODE (type
) == UNION_TYPE
)
7954 tree valtype
= TREE_TYPE (treeop0
);
7956 /* If both input and output are BLKmode, this conversion isn't doing
7957 anything except possibly changing memory attribute. */
7958 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
7960 rtx result
= expand_expr (treeop0
, target
, tmode
,
7963 result
= copy_rtx (result
);
7964 set_mem_attributes (result
, type
, 0);
7970 if (TYPE_MODE (type
) != BLKmode
)
7971 target
= gen_reg_rtx (TYPE_MODE (type
));
7973 target
= assign_temp (type
, 1, 1);
7977 /* Store data into beginning of memory target. */
7978 store_expr (treeop0
,
7979 adjust_address (target
, TYPE_MODE (valtype
), 0),
7980 modifier
== EXPAND_STACK_PARM
,
7985 gcc_assert (REG_P (target
));
7987 /* Store this field into a union of the proper type. */
7988 store_field (target
,
7989 MIN ((int_size_in_bytes (TREE_TYPE
7992 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
7993 0, 0, 0, TYPE_MODE (valtype
), treeop0
, 0, false);
7996 /* Return the entire union. */
8000 if (mode
== TYPE_MODE (TREE_TYPE (treeop0
)))
8002 op0
= expand_expr (treeop0
, target
, VOIDmode
,
8005 /* If the signedness of the conversion differs and OP0 is
8006 a promoted SUBREG, clear that indication since we now
8007 have to do the proper extension. */
8008 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)) != unsignedp
8009 && GET_CODE (op0
) == SUBREG
)
8010 SUBREG_PROMOTED_VAR_P (op0
) = 0;
8012 return REDUCE_BIT_FIELD (op0
);
8015 op0
= expand_expr (treeop0
, NULL_RTX
, mode
,
8016 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
);
8017 if (GET_MODE (op0
) == mode
)
8020 /* If OP0 is a constant, just convert it into the proper mode. */
8021 else if (CONSTANT_P (op0
))
8023 tree inner_type
= TREE_TYPE (treeop0
);
8024 enum machine_mode inner_mode
= GET_MODE (op0
);
8026 if (inner_mode
== VOIDmode
)
8027 inner_mode
= TYPE_MODE (inner_type
);
8029 if (modifier
== EXPAND_INITIALIZER
)
8030 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
,
8031 subreg_lowpart_offset (mode
,
8034 op0
= convert_modes (mode
, inner_mode
, op0
,
8035 TYPE_UNSIGNED (inner_type
));
8038 else if (modifier
== EXPAND_INITIALIZER
)
8039 op0
= gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
8041 else if (target
== 0)
8042 op0
= convert_to_mode (mode
, op0
,
8043 TYPE_UNSIGNED (TREE_TYPE
8047 convert_move (target
, op0
,
8048 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
8052 return REDUCE_BIT_FIELD (op0
);
8054 case ADDR_SPACE_CONVERT_EXPR
:
8056 tree treeop0_type
= TREE_TYPE (treeop0
);
8058 addr_space_t as_from
;
8060 gcc_assert (POINTER_TYPE_P (type
));
8061 gcc_assert (POINTER_TYPE_P (treeop0_type
));
8063 as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type
));
8064 as_from
= TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type
));
8066 /* Conversions between pointers to the same address space should
8067 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8068 gcc_assert (as_to
!= as_from
);
8070 /* Ask target code to handle conversion between pointers
8071 to overlapping address spaces. */
8072 if (targetm
.addr_space
.subset_p (as_to
, as_from
)
8073 || targetm
.addr_space
.subset_p (as_from
, as_to
))
8075 op0
= expand_expr (treeop0
, NULL_RTX
, VOIDmode
, modifier
);
8076 op0
= targetm
.addr_space
.convert (op0
, treeop0_type
, type
);
8081 /* For disjoint address spaces, converting anything but
8082 a null pointer invokes undefined behaviour. We simply
8083 always return a null pointer here. */
8084 return CONST0_RTX (mode
);
    case POINTER_PLUS_EXPR:
      /* Even though the sizetype mode and the pointer's mode can be different
	 expand is able to handle this correctly and get the correct result out
	 of the PLUS_EXPR code.  */
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
	 if sizetype precision is smaller than pointer precision.  */
      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type,
				    fold_convert_loc (loc, ssizetype,
						      treeop1));
      /* If sizetype precision is larger than pointer precision, truncate the
	 offset to have matching modes.  */
      else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type, treeop1);
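
      /* E.g. with 32-bit sizetype but 64-bit pointers, an offset of
	 (sizetype) -4 must become the 64-bit value -4 rather than
	 0xfffffffc; hence the detour through ssizetype above.  */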
    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (treeop0) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
	  && TREE_CODE (treeop1) == VAR_DECL
	  && (DECL_RTL (treeop1) == frame_pointer_rtx
	      || DECL_RTL (treeop1) == stack_pointer_rtx
	      || DECL_RTL (treeop1) == arg_pointer_rtx))
	{
	  gcc_unreachable ();
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (treeop0) == INTEGER_CST
	      && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (treeop1))
	    {
	      rtx constant_part;

	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (treeop0),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (treeop1)));
	      op1 = plus_constant (mode, op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return REDUCE_BIT_FIELD (op1);
	    }

	  else if (TREE_CODE (treeop1) == INTEGER_CST
		   && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
		   && TREE_CONSTANT (treeop0))
	    {
	      rtx constant_part;

	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (treeop1, NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (treeop1),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (treeop0)));
	      op0 = plus_constant (mode, op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return REDUCE_BIT_FIELD (op0);
	    }
	}
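
      /* E.g. "p = &arr[3]" with a file-scope ARR can come out as a single
	 (symbol_ref arr) folded with plus_constant instead of a run-time
	 addition, whenever the modifier permits returning a bare sum.  */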
      /* Use TER to expand pointer addition of a negated value
	 as pointer subtraction.  */
      if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
	  && TREE_CODE (treeop1) == SSA_NAME
	  && TYPE_MODE (TREE_TYPE (treeop0))
	     == TYPE_MODE (TREE_TYPE (treeop1)))
	{
	  gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
	  if (def)
	    {
	      treeop1 = gimple_assign_rhs1 (def);
	      code = MINUS_EXPR;
	      goto do_minus;
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  expand_operands (treeop0, treeop1,
			   subtarget, &op0, &op1, EXPAND_NORMAL);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
    case MINUS_EXPR:
    do_minus:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (treeop0)
	  && really_constant_p (treeop1))
	{
	  expand_operands (treeop0, treeop1,
			   NULL_RTX, &op0, &op1, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (CONST_INT_P (op1))
	    return REDUCE_BIT_FIELD (plus_constant (mode, op0,
						    -INTVAL (op1)));
	  else
	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (CONST_INT_P (op1))
	{
	  op1 = negate_rtx (mode, op1);
	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
	}

      goto binop2;
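
      /* E.g. "a - 5" is rewritten here as "a + (-5)", so later address
	 arithmetic only ever has to recognize PLUS.  */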
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      op2 = expand_normal (treeop2);
      target = expand_widen_pattern_expr (ops, op0, op1, op2,
					  target, unsignedp);
      return target;
    case WIDEN_MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
	{
	  tree t1 = treeop0;
	  treeop0 = treeop1;
	  treeop1 = t1;
	}

      /* First, check if we have a multiplication of one signed and one
	 unsigned operand.  */
      if (TREE_CODE (treeop1) != INTEGER_CST
	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
	{
	  enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
	  this_optab = usmul_widen_optab;
	  if (find_widening_optab_handler (this_optab, mode, innermode, 0)
	      != CODE_FOR_nothing)
	    {
	      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
		expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
				 EXPAND_NORMAL);
	      else
		expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
				 EXPAND_NORMAL);
	      /* op0 and op1 might still be constant, despite the above
		 != INTEGER_CST check.  Handle it.  */
	      if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		{
		  op0 = convert_modes (innermode, mode, op0, true);
		  op1 = convert_modes (innermode, mode, op1, false);
		  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
							target, unsignedp));
		}
	      temp = expand_widening_mult (mode, op0, op1, target,
					   unsignedp, this_optab);
	      return REDUCE_BIT_FIELD (temp);
	    }
	}
      /* Check for a multiplication with matching signedness.  */
      else if ((TREE_CODE (treeop1) == INTEGER_CST
		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
	{
	  tree op0type = TREE_TYPE (treeop0);
	  enum machine_mode innermode = TYPE_MODE (op0type);
	  bool zextend_p = TYPE_UNSIGNED (op0type);
	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

	  if (TREE_CODE (treeop0) != INTEGER_CST)
	    {
	      if (find_widening_optab_handler (this_optab, mode, innermode, 0)
		  != CODE_FOR_nothing)
		{
		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
				   EXPAND_NORMAL);
		  /* op0 and op1 might still be constant, despite the above
		     != INTEGER_CST check.  Handle it.  */
		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		    {
		     widen_mult_const:
		      op0 = convert_modes (innermode, mode, op0, zextend_p);
		      op1
			= convert_modes (innermode, mode, op1,
					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
		      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
							    target,
							    unsignedp));
		    }
		  temp = expand_widening_mult (mode, op0, op1, target,
					       unsignedp, this_optab);
		  return REDUCE_BIT_FIELD (temp);
		}
	      if (find_widening_optab_handler (other_optab, mode, innermode, 0)
		  != CODE_FOR_nothing
		  && innermode == word_mode)
		{
		  rtx htem, hipart;
		  op0 = expand_normal (treeop0);
		  if (TREE_CODE (treeop1) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_normal (treeop1),
					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
		  else
		    op1 = expand_normal (treeop1);
		  /* op0 and op1 might still be constant, despite the above
		     != INTEGER_CST check.  Handle it.  */
		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		    goto widen_mult_const;
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (innermode, temp);
		  htem = expand_mult_highpart_adjust (innermode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (htem != hipart)
		    emit_move_insn (hipart, htem);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}
      treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
      treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
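
      /* E.g. a 32x32->64 bit widening multiply uses [us]mul_widen_optab
	 directly when the target provides the pattern; failing that, the
	 operands are converted to the wide type and a full multiply is
	 emitted, with expand_mult_highpart_adjust fixing up the high part
	 when only a word_mode x word_mode -> double-word insn exists.  */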
    case FMA_EXPR:
      {
	optab opt = fma_optab;
	gimple def0, def2;

	/* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
	   call.  */
	if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
	  {
	    tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
	    tree call_expr;

	    gcc_assert (fn != NULL_TREE);
	    call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
	    return expand_builtin (call_expr, target, subtarget, mode, false);
	  }

	def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
	def2 = get_def_for_expr (treeop2, NEGATE_EXPR);

	op0 = op2 = NULL;

	if (def0 && def2
	    && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fnms_optab;
	    op0 = expand_normal (gimple_assign_rhs1 (def0));
	    op2 = expand_normal (gimple_assign_rhs1 (def2));
	  }
	else if (def0
		 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fnma_optab;
	    op0 = expand_normal (gimple_assign_rhs1 (def0));
	  }
	else if (def2
		 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fms_optab;
	    op2 = expand_normal (gimple_assign_rhs1 (def2));
	  }

	if (op0 == NULL)
	  op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
	if (op2 == NULL)
	  op2 = expand_normal (treeop2);
	op1 = expand_normal (treeop1);

	return expand_ternary_op (TYPE_MODE (type), opt,
				  op0, op1, op2, target, 0);
      }
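
      /* The four optabs cover the sign combinations of the contracted
	 operation: fma = a*b + c, fms = a*b - c, fnma = -(a*b) + c and
	 fnms = -(a*b) - c; the NEGATE_EXPR definitions looked up above
	 select among them so the negations are absorbed into the insn.  */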
    case MULT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_mult" doesn't support sat/no-sat fixed-point
	 multiplications.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
	{
	  tree t1 = treeop0;
	  treeop0 = treeop1;
	  treeop1 = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (treeop1, 0))
	{
	  tree exp1 = treeop1;

	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);

	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_low_cst (exp1, 0),
					     TYPE_MODE (TREE_TYPE (exp1)))));
	}

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
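
      /* Under EXPAND_SUM with a constant multiplier this can hand back a
	 bare (mult reg const) that the caller may fold into an indexed
	 addressing mode, e.g. base + index*4.  */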
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
	 divisions.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, EXPAND_NORMAL);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
    case MULT_HIGHPART_EXPR:
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
      gcc_assert (temp);
      return temp;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, EXPAND_NORMAL);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIXED_CONVERT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);

      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
	  || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
      else
	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
      return target;

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;
    case FLOAT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type,
					       optab_default),
			  op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);
    case ABS_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, treeop0, 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (treeop0, treeop1,
		       target, &op0, &op1, EXPAND_NORMAL);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = optab_for_tree_code (code, type, optab_default);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (! REG_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}

      /* We generate better code and avoid problems with op1 mentioning
	 target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
	op1 = force_reg (mode, op1);

      {
	enum rtx_code comparison_code;
	rtx cmpop1 = op1;

	if (code == MAX_EXPR)
	  comparison_code = unsignedp ? GEU : GE;
	else
	  comparison_code = unsignedp ? LEU : LE;

	/* Canonicalize to comparisons against 0.  */
	if (op1 == const1_rtx)
	  {
	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
	       or (a != 0 ? a : 1) for unsigned.
	       For MIN we are safe converting (a <= 1 ? a : 1)
	       into (a <= 0 ? a : 1)  */
	    cmpop1 = const0_rtx;
	    if (code == MAX_EXPR)
	      comparison_code = unsignedp ? NE : GT;
	  }
	if (op1 == constm1_rtx && !unsignedp)
	  {
	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
	    cmpop1 = const0_rtx;
	    if (code == MIN_EXPR)
	      comparison_code = LT;
	  }
#ifdef HAVE_conditional_move
	/* Use a conditional move if possible.  */
	if (can_conditionally_move_p (mode))
	  {
	    rtx insn;

	    /* ??? Same problem as in expmed.c: emit_conditional_move
	       forces a stack adjustment via compare_from_rtx, and we
	       lose the stack adjustment if the sequence we are about
	       to create is discarded.  */
	    do_pending_stack_adjust ();

	    start_sequence ();

	    /* Try to emit the conditional move.  */
	    insn = emit_conditional_move (target, comparison_code,
					  op0, cmpop1, mode,
					  op0, op1, mode,
					  unsignedp);

	    /* If we could do the conditional move, emit the sequence,
	       and return.  */
	    if (insn != NULL_RTX)
	      {
		rtx seq = get_insns ();
		end_sequence ();
		emit_insn (seq);
		return target;
	      }

	    /* Otherwise discard the sequence and fall back to code with
	       branches.  */
	    end_sequence ();
	  }
#endif
	if (target != op0)
	  emit_move_insn (target, op0);

	temp = gen_label_rtx ();
	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
				 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
				 -1);
      }
      emit_move_insn (target, op1);
      emit_label (temp);
      return target;
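
      /* On targets with conditional moves the whole MIN/MAX collapses to a
	 compare plus one cmov; the label-based fallback above is only used
	 when neither a min/max insn nor a conditional move is available.  */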
    case BIT_NOT_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* In case we have to reduce the result to bitfield precision
	 for unsigned bitfield expand this as XOR with a proper constant
	 instead.  */
      if (reduce_bit_field && TYPE_UNSIGNED (type))
	temp = expand_binop (mode, xor_optab, op0,
			     immed_double_int_const
			       (double_int::mask (TYPE_PRECISION (type)), mode),
			     target, 1, OPTAB_LIB_WIDEN);
      else
	temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;
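
      /* E.g. for a 3-bit unsigned bit-field, ~x is emitted as x ^ 7, so
	 the result already lies within the field's precision and needs no
	 further reduction.  */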
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      goto binop;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
		  || (GET_MODE_PRECISION (TYPE_MODE (type))
		      == TYPE_PRECISION (type)));
      /* fall through */

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_shift" doesn't support sat/no-sat fixed-point
	 shifts.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      if (! safe_from_p (subtarget, treeop1, 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      temp = expand_variable_shift (code, mode, op0, treeop1, target,
				    unsignedp);
      if (code == LSHIFT_EXPR)
	temp = REDUCE_BIT_FIELD (temp);
      return temp;

      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (ops,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode);
      if (temp)
	return temp;
      /* Use a compare and a jump for BLKmode comparisons, or for function
	 type comparisons is HAVE_canonicalize_funcptr_for_compare.  */

      if ((target == 0
	   || modifier == EXPAND_STACK_PARM
	   || ! safe_from_p (target, treeop0, 1)
	   || ! safe_from_p (target, treeop1, 1)
	   /* Make sure we don't have a hard reg (such as function's return
	      value) live across basic blocks, if not optimizing.  */
	   || (!optimize && REG_P (target)
	       && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot_1 (code, treeop0, treeop1, op1, -1);

      if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
	emit_move_insn (target, constm1_rtx);
      else
	emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return target;
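
      /* The fallback materializes the flag with a branch: load 0, jump
	 over the next move if the comparison fails, else load 1 (or -1 for
	 a signed 1-bit type, whose only values are 0 and -1).  */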
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (treeop0);
      op1 = expand_normal (treeop1);

      if (!target)
	target = gen_reg_rtx (TYPE_MODE (type));
      else
	/* If target overlaps with op1, then either we need to force
	   op1 into a pseudo (if target also overlaps with op0),
	   or write the complex parts in reverse order.  */
	switch (GET_CODE (target))
	  {
	  case CONCAT:
	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
	      {
		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
		  {
		  complex_expr_force_op1:
		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
		    emit_move_insn (temp, op1);
		    op1 = temp;
		    break;
		  }
	      complex_expr_swap_order:
		/* Move the imaginary (op1) and real (op0) parts to their
		   location.  */
		write_complex_part (target, op1, true);
		write_complex_part (target, op0, false);

		return target;
	      }
	    break;
	  case MEM:
	    temp = adjust_address_nv (target,
				      GET_MODE_INNER (GET_MODE (target)), 0);
	    if (reg_overlap_mentioned_p (temp, op1))
	      {
		enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
		temp = adjust_address_nv (target, imode,
					  GET_MODE_SIZE (imode));
		if (reg_overlap_mentioned_p (temp, op0))
		  goto complex_expr_force_op1;
		goto complex_expr_swap_order;
	      }
	    break;
	  default:
	    if (reg_overlap_mentioned_p (target, op1))
	      {
		if (reg_overlap_mentioned_p (target, op0))
		  goto complex_expr_force_op1;
		goto complex_expr_swap_order;
	      }
	    break;
	  }

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;
    case WIDEN_SUM_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
					    target, unsignedp);
	return target;
      }

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
	op0 = expand_normal (treeop0);
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_unop (mode, this_optab, op0, target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	target = expand_vec_shift_expr (ops, target);
	return target;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
      {
	op0 = expand_normal (treeop0);
	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
					  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      {
	op0 = expand_normal (treeop0);
	/* The signedness is determined from input operand.  */
	temp = expand_widen_pattern_expr
	  (ops, op0, NULL_RTX, NULL_RTX,
	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));

	gcc_assert (temp);
	return temp;
      }

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
					  target, unsignedp);
      gcc_assert (target);
      return target;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      mode = TYPE_MODE (TREE_TYPE (treeop0));
      goto binop;

    case VEC_PERM_EXPR:
      expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
      op2 = expand_normal (treeop2);

      /* Careful here: if the target doesn't support integral vector modes,
	 a constant selection vector could wind up smooshed into a normal
	 integral constant.  */
      if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
	{
	  tree sel_type = TREE_TYPE (treeop2);
	  enum machine_mode vmode
	    = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
			       TYPE_VECTOR_SUBPARTS (sel_type));
	  gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
	  op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
	  gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
	}
      else
	gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);

      temp = expand_vec_perm (mode, op0, op1, op2, target);
      gcc_assert (temp);
      return temp;
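
      /* A constant selector normally arrives as a VECTOR_CST; the subreg
	 dance above only rebuilds a CONST_VECTOR when the target lacks
	 integral vector modes and the constant was flattened into a plain
	 integer.  */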
    case DOT_PROD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (ops, op0, op1, op2,
					    target, unsignedp);
	return target;
      }

    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;

	this_optab = optab_for_tree_code (code, type, optab_default);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }
    case COND_EXPR:
      /* A COND_EXPR with its type being VOID_TYPE represents a
	 conditional jump and is handled in
	 expand_gimple_cond_expr.  */
      gcc_assert (!VOID_TYPE_P (type));

      /* Note that COND_EXPRs whose type is a structure or union
	 are required to be constructed to contain assignments of
	 a temporary variable, so that we can evaluate them here
	 for side effect only.  If type is void, we must do likewise.  */

      gcc_assert (!TREE_ADDRESSABLE (type)
		  && !ignore
		  && TREE_TYPE (treeop1) != void_type_node
		  && TREE_TYPE (treeop2) != void_type_node);

      temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
      if (temp)
	return temp;

      /* If we are not to produce a result, we have no target.  Otherwise,
	 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary.  */

      if (modifier != EXPAND_STACK_PARM
	  && original_target
	  && safe_from_p (original_target, treeop0, 1)
	  && GET_MODE (original_target) == mode
	  && !MEM_P (original_target))
	temp = original_target;
      else
	temp = assign_temp (type, 0, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (treeop0, op0, -1);
      store_expr (treeop1, temp,
		  modifier == EXPAND_STACK_PARM,
		  false);

      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
      store_expr (treeop2, temp,
		  modifier == EXPAND_STACK_PARM,
		  false);

      emit_label (op1);
      OK_DEFER_POP;
      return temp;

    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
      return target;
    default:
      gcc_unreachable ();
    }

  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (treeop0, treeop1,
		   subtarget, &op0, &op1, EXPAND_NORMAL);
 binop2:
  this_optab = optab_for_tree_code (code, type, optab_default);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  /* Bitwise operations do not need bitfield reduction as we expect their
     operands being properly truncated.  */
  if (code == BIT_XOR_EXPR
      || code == BIT_AND_EXPR
      || code == BIT_IOR_EXPR)
    return temp;
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD

/* Return TRUE if expression STMT is suitable for replacement.
   Never consider memory loads as replaceable, because those don't ever lead
   into constant expressions.  */

static bool
stmt_is_replaceable_p (gimple stmt)
{
  if (ssa_is_replaceable_p (stmt))
    {
      /* Don't move around loads.  */
      if (!gimple_assign_single_p (stmt)
	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
	return true;
    }

  return false;
}

rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx op0, op1, temp, decl_rtl;
  tree type;
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field;
  location_t loc = EXPR_LOCATION (exp);
  struct separate_ops ops;
  tree treeop0, treeop1, treeop2;
  tree ssa_name = NULL_TREE;
  gimple g;

  type = TREE_TYPE (exp);
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = treeop1 = treeop2 = NULL_TREE;
  if (!VL_EXP_CLASS_P (exp))
    switch (TREE_CODE_LENGTH (code))
      {
      default:
      case 3: treeop2 = TREE_OPERAND (exp, 2);
      case 2: treeop1 = TREE_OPERAND (exp, 1);
      case 1: treeop0 = TREE_OPERAND (exp, 0);
      case 0: break;
      }
  ops.code = code;
  ops.type = type;
  ops.op0 = treeop0;
  ops.op1 = treeop1;
  ops.op2 = treeop2;
  ops.location = loc;

  ignore = (target == const0_rtx
	    || ((CONVERT_EXPR_CODE_P (code)
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (!ignore
		      && INTEGRAL_TYPE_P (type)
		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
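
  /* E.g. arithmetic on a C bit-field "unsigned f : 3" is carried out in a
     full integer mode but must behave as 3-bit arithmetic, so qualifying
     results get masked back to TYPE_PRECISION via REDUCE_BIT_FIELD.  */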
  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (MEM_P (temp))
	    copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == tcc_unary
	  || code == BIT_FIELD_REF
	  || code == COMPONENT_REF
	  || code == INDIRECT_REF)
	return expand_expr (treeop0, const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
	       || TREE_CODE_CLASS (code) == tcc_comparison
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);

	temp = label_rtx (exp);
	temp = gen_rtx_LABEL_REF (Pmode, temp);

	if (function != current_function_decl
	    && function != 0)
	  LABEL_REF_NONLOCAL_P (temp) = 1;

	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
	return temp;
      }

    case SSA_NAME:
      /* ??? ivopts calls expander, without any preparation from
	 out-of-ssa.  So fake instructions as if this was an access to the
	 base variable.  This unnecessarily allocates a pseudo, see how we can
	 reuse it, if partition base vars have it set already.  */
      if (!currently_expanding_to_rtl)
	{
	  tree var = SSA_NAME_VAR (exp);
	  if (var && DECL_RTL_SET_P (var))
	    return DECL_RTL (var);
	  return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
			      LAST_VIRTUAL_REGISTER + 1);
	}

      g = get_gimple_for_ssa_name (exp);
      /* For EXPAND_INITIALIZER try harder to get something simpler.  */
      if (g == NULL
	  && modifier == EXPAND_INITIALIZER
	  && !SSA_NAME_IS_DEFAULT_DEF (exp)
	  && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
	  && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
	g = SSA_NAME_DEF_STMT (exp);
      if (g)
	{
	  rtx r;
	  location_t saved_loc = curr_insn_location ();

	  set_curr_insn_location (gimple_location (g));
	  r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
				tmode, modifier, NULL);
	  set_curr_insn_location (saved_loc);
	  if (REG_P (r) && !REG_EXPR (r))
	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
	  return r;
	}

      ssa_name = exp;
      decl_rtl = get_rtx_for_ssa_name (ssa_name);
      exp = SSA_NAME_VAR (ssa_name);
      goto expand_decl_rtl;

    case PARM_DECL:
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	layout_decl (exp, 0);

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      decl_rtl = DECL_RTL (exp);
    expand_decl_rtl:
      gcc_assert (decl_rtl);
      decl_rtl = copy_rtx (decl_rtl);
      /* Record writes to register variables.  */
      if (modifier == EXPAND_WRITE
	  && REG_P (decl_rtl)
	  && HARD_REGISTER_P (decl_rtl))
	add_to_hard_reg_set (&crtl->asm_clobbers,
			     GET_MODE (decl_rtl), REGNO (decl_rtl));

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't be used yet, write out an external
	 definition.  */
      TREE_USED (exp) = 1;

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Variables inherited from containing functions should have
	 been lowered by this point.  */
      context = decl_function_context (exp);
      gcc_assert (SCOPE_FILE_SCOPE_P (context)
		  || context == current_function_decl
		  || TREE_STATIC (exp)
		  || DECL_EXTERNAL (exp)
		  /* ??? C++ creates functions that are not TREE_STATIC.  */
		  || TREE_CODE (exp) == FUNCTION_DECL);
      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 ??? We aren't parsing while expanding anymore.  */

      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
	temp = validize_mem (decl_rtl);

      /* If DECL_RTL is memory, we are in the normal case and the
	 address is not valid, get the address into a register.  */

      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
	{
	  if (alt_rtl)
	    *alt_rtl = decl_rtl;
	  decl_rtl = use_anchored_address (decl_rtl);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM
	      && !memory_address_addr_space_p (DECL_MODE (exp),
					       XEXP (decl_rtl, 0),
					       MEM_ADDR_SPACE (decl_rtl)))
	    temp = replace_equiv_address (decl_rtl,
					  copy_rtx (XEXP (decl_rtl, 0)));
	}

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}
      /* If the mode of DECL_RTL does not match that of the decl,
	 there are two cases: we are dealing with a BLKmode value
	 that is returned in a register, or we are dealing with
	 a promoted value.  In the latter case, return a SUBREG
	 of the wanted mode, but mark it so that we know that it
	 was already extended.  */
      if (REG_P (decl_rtl)
	  && DECL_MODE (exp) != BLKmode
	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
	{
	  enum machine_mode pmode;

	  /* Get the signedness to be used for this variable.  Ensure we get
	     the same mode we got when the variable was declared.  */
	  if (code == SSA_NAME
	      && (g = SSA_NAME_DEF_STMT (ssa_name))
	      && gimple_code (g) == GIMPLE_CALL)
	    {
	      gcc_assert (!gimple_call_internal_p (g));
	      pmode = promote_function_mode (type, mode, &unsignedp,
					     gimple_call_fntype (g),
					     2);
	    }
	  else
	    pmode = promote_decl_mode (exp, &unsignedp);
	  gcc_assert (GET_MODE (decl_rtl) == pmode);

	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return decl_rtl;

    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);

      return temp;

    case VECTOR_CST:
      {
	tree tmp = NULL_TREE;
	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
	  return const_vector_from_tree (exp);
	if (GET_MODE_CLASS (mode) == MODE_INT)
	  {
	    tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
	    if (type_for_mode)
	      tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
	  }
	if (!tmp)
	  {
	    vec<constructor_elt, va_gc> *v;
	    unsigned i;
	    vec_alloc (v, VECTOR_CST_NELTS (exp));
	    for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
	      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
	    tmp = build_constructor (type, v);
	  }
	return expand_expr (tmp, ignore ? const0_rtx : target,
			    tmode, modifier);
      }

    case CONST_DECL:
), target
, VOIDmode
, modifier
);
9406 /* If optimized, generate immediate CONST_DOUBLE
9407 which will be turned into memory by reload if necessary.
9409 We used to force a register so that loop.c could see it. But
9410 this does not allow gen_* patterns to perform optimizations with
9411 the constants. It also produces two insns in cases like "x = 1.0;".
9412 On most machines, floating-point constants are not permitted in
9413 many insns, so we'd end up copying it to a register in any case.
9415 Now, we do the copying in expand_binop, if appropriate. */
9416 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
9417 TYPE_MODE (TREE_TYPE (exp
)));
9420 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp
),
9421 TYPE_MODE (TREE_TYPE (exp
)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
	{
	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);

	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}

      /* ... fall through ...  */

    case STRING_CST:
      temp = expand_expr_constant (exp, 1, modifier);

      /* temp contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
					    MEM_ADDR_SPACE (temp)))
	return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;

    case SAVE_EXPR:
      {
	tree val = treeop0;
	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);

	if (!SAVE_EXPR_RESOLVED_P (exp))
	  {
	    /* We can indeed still hit this case, typically via builtin
	       expanders calling save_expr immediately before expanding
	       something.  Assume this means that we only have to deal
	       with non-BLKmode values.  */
	    gcc_assert (GET_MODE (ret) != BLKmode);

	    val = build_decl (curr_insn_location (),
			      VAR_DECL, NULL, TREE_TYPE (exp));
	    DECL_ARTIFICIAL (val) = 1;
	    DECL_IGNORED_P (val) = 1;
	    treeop0 = val;
	    TREE_OPERAND (exp, 0) = treeop0;
	    SAVE_EXPR_RESOLVED_P (exp) = 1;

	    if (!CONSTANT_P (ret))
	      ret = copy_to_reg (ret);
	    SET_DECL_RTL (val, ret);
	  }

	return ret;
      }

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree value;

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);

	  return const0_rtx;
	}

      return expand_constructor (exp, target, modifier, false);
    case TARGET_MEM_REF:
      {
	addr_space_t as
	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	enum insn_code icode;
	unsigned int align;

	op0 = addr_for_mem_ref (exp, as, true);
	op0 = memory_address_addr_space (mode, op0, as);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	align = get_object_alignment (exp);
	if (modifier != EXPAND_WRITE
	    && modifier != EXPAND_MEMORY
	    && mode != BLKmode
	    && align < GET_MODE_ALIGNMENT (mode)
	    /* If the target does not have special handling for unaligned
	       loads of mode then it can use regular moves for them.  */
	    && ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing))
	  {
	    struct expand_operand ops[2];

	    /* We've already validated the memory, and we're creating a
	       new pseudo destination.  The predicates really can't fail,
	       nor can the generator.  */
	    create_output_operand (&ops[0], NULL_RTX, mode);
	    create_fixed_operand (&ops[1], temp);
	    expand_insn (icode, 2, ops);
	    temp = ops[0].value;
	  }
	return temp;
      }

    case MEM_REF:
      {
	addr_space_t as
	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	enum machine_mode address_mode;
	tree base = TREE_OPERAND (exp, 0);
	gimple def_stmt;
	enum insn_code icode;
	unsigned align;

	/* Handle expansion of non-aliased memory with non-BLKmode.  That
	   might end up in a register.  */
	if (mem_ref_refers_to_non_mem_p (exp))
	  {
	    HOST_WIDE_INT offset = mem_ref_offset (exp).low;
	    base = TREE_OPERAND (base, 0);
	    if (offset == 0
		&& host_integerp (TYPE_SIZE (type), 1)
		&& (GET_MODE_BITSIZE (DECL_MODE (base))
		    == TREE_INT_CST_LOW (TYPE_SIZE (type))))
	      return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
				  target, tmode, modifier);
	    if (TYPE_MODE (type) == BLKmode)
	      {
		temp = assign_stack_temp (DECL_MODE (base),
					  GET_MODE_SIZE (DECL_MODE (base)));
		store_expr (base, temp, 0, false);
		temp = adjust_address (temp, BLKmode, offset);
		set_mem_size (temp, int_size_in_bytes (type));
		return temp;
	      }
	    exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
			  bitsize_int (offset * BITS_PER_UNIT));
	    return expand_expr (exp, target, tmode, modifier);
	  }
	address_mode = targetm.addr_space.address_mode (as);
	base = TREE_OPERAND (exp, 0);
	if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
	  {
	    tree mask = gimple_assign_rhs2 (def_stmt);
	    base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
			   gimple_assign_rhs1 (def_stmt), mask);
	    TREE_OPERAND (exp, 0) = base;
	  }
	align = get_object_alignment (exp);
	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address_addr_space (mode, op0, as);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  {
	    rtx off
	      = immed_double_int_const (mem_ref_offset (exp), address_mode);
	    op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
	    op0 = memory_address_addr_space (mode, op0, as);
	  }
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	if (TREE_THIS_VOLATILE (exp))
	  MEM_VOLATILE_P (temp) = 1;
	if (modifier != EXPAND_WRITE
	    && modifier != EXPAND_MEMORY
	    && mode != BLKmode
	    && align < GET_MODE_ALIGNMENT (mode))
	  {
	    if ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing)
	      {
		struct expand_operand ops[2];

		/* We've already validated the memory, and we're creating a
		   new pseudo destination.  The predicates really can't fail,
		   nor can the generator.  */
		create_output_operand (&ops[0], NULL_RTX, mode);
		create_fixed_operand (&ops[1], temp);
		expand_insn (icode, 2, ops);
		temp = ops[0].value;
	      }
	    else if (SLOW_UNALIGNED_ACCESS (mode, align))
	      temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
					0, TYPE_UNSIGNED (TREE_TYPE (exp)),
					(modifier == EXPAND_STACK_PARM
					 ? NULL_RTX : target),
					mode, mode);
	  }
	return temp;
      }
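
    /* Three strategies for a misaligned non-BLKmode read, in order of
       preference: a movmisalign pattern when the target has one, an
       extract_bit_field sequence when unaligned accesses are slow, or a
       plain move when the target handles unaligned loads natively.  */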
    case ARRAY_REF:

      {
	tree array = treeop0;
	tree index = treeop1;
	tree init;

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY)
	  {
	    tree t = fold_read_from_constant_string (exp);

	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == CONSTRUCTOR
	    && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST)
	  {
	    unsigned HOST_WIDE_INT ix;
	    tree field, value;

	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
				      field, value)
	      if (tree_int_cst_equal (field, index))
		{
		  if (!TREE_SIDE_EFFECTS (value))
		    return expand_expr (fold (value), target, tmode, modifier);
		  break;
		}
	  }

	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && modifier != EXPAND_MEMORY
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (index) == INTEGER_CST
		 && (TREE_CODE (array) == VAR_DECL
		     || TREE_CODE (array) == CONST_DECL)
		 && (init = ctor_for_folding (array)) != error_mark_node)
	  {
	    if (TREE_CODE (init) == CONSTRUCTOR)
	      {
		unsigned HOST_WIDE_INT ix;
		tree field, value;

		FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
					  field, value)
		  if (tree_int_cst_equal (field, index))
		    {
		      if (TREE_SIDE_EFFECTS (value))
			break;

		      if (TREE_CODE (value) == CONSTRUCTOR)
			{
			  /* If VALUE is a CONSTRUCTOR, this
			     optimization is only useful if
			     this doesn't store the CONSTRUCTOR
			     into memory.  If it does, it is more
			     efficient to just load the data from
			     the array directly.  */
			  rtx ret = expand_constructor (value, target,
							modifier, true);
			  if (ret == NULL_RTX)
			    break;
			}

		      return
			expand_expr (fold (value), target, tmode, modifier);
		    }
	      }
	    else if (TREE_CODE (init) == STRING_CST)
	      {
		tree low_bound = array_ref_low_bound (exp);
		tree index1 = fold_convert_loc (loc, sizetype, treeop1);

		/* Optimize the special case of a zero lower bound.

		   We convert the lower bound to sizetype to avoid problems
		   with constant folding.  E.g. suppose the lower bound is
		   1 and its mode is QI.  Without the conversion
		      (ARRAY + (INDEX - (unsigned char)1))
		   becomes
		      (ARRAY + (-(unsigned char)1) + INDEX)
		   which becomes
		      (ARRAY + 255 + INDEX).  Oops!  */
		if (!integer_zerop (low_bound))
		  index1 = size_diffop_loc (loc, index1,
					    fold_convert_loc (loc, sizetype,
							      low_bound));

		if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    enum machine_mode mode = TYPE_MODE (type);

		    if (GET_MODE_CLASS (mode) == MODE_INT
			&& GET_MODE_SIZE (mode) == 1)
		      return gen_int_mode (TREE_STRING_POINTER (init)
					   [TREE_INT_CST_LOW (index1)],
					   mode);
		  }
	      }
	  }
      }
      goto normal_inner_ref;
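
      /* E.g. "foo"[2] with a constant index reduces at expand time to the
	 character constant 'o'; the checks above merely ensure the index
	 lies within TREE_STRING_LENGTH and the element is byte-sized.  */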
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  */
      if (TREE_CODE (treeop0) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
				    idx, field, value)
	    if (field == treeop1
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (field)
		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
			&& (GET_MODE_PRECISION (DECL_MODE (field))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (field)
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (value, target, tmode, modifier);
		if (DECL_BIT_FIELD (field))
		  {
		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
		    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));

		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
		      {
			op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
					    imode);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			int count = GET_MODE_PRECISION (imode) - bitsize;

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      goto normal_inner_ref;
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
	enum machine_mode mode1, mode2;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0, must_force_mem;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep, true);
	rtx orig_op0, memloc;
	bool mem_attrs_from_type = false;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	gcc_assert (tem != exp);

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to have to do.  This occurs in unchecked conversion in Ada.  */
	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && COMPLETE_TYPE_P (TREE_TYPE (tem))
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  && modifier != EXPAND_STACK_PARM
			  ? target : NULL_RTX),
			 VOIDmode,
			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
	/* If the bitfield is volatile, we want to access it in the
	   field's mode, not the computed mode.
	   If a MEM has VOIDmode (external with incomplete type),
	   use BLKmode for it instead.  */
	if (MEM_P (op0))
	  {
	    if (volatilep && flag_strict_volatile_bitfields > 0)
	      op0 = adjust_address (op0, mode1, 0);
	    else if (GET_MODE (op0) == VOIDmode)
	      op0 = adjust_address (op0, BLKmode, 0);
	  }

	mode2
	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);

	/* If we have either an offset, a BLKmode result, or a reference
	   outside the underlying object, we must force it to memory.
	   Such a case can occur in Ada if we have unchecked conversion
	   of an expression from a scalar type to an aggregate type or
	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
	   passed a partially uninitialized object or a view-conversion
	   to a larger size.  */
	must_force_mem = (offset
			  || mode1 == BLKmode
			  || bitpos + bitsize > GET_MODE_BITSIZE (mode2));

	/* Handle CONCAT first.  */
	if (GET_CODE (op0) == CONCAT && !must_force_mem)
	  {
	    if (bitpos == 0
		&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
	      return op0;
	    if (bitpos == 0
		&& bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
		&& bitsize)
	      {
		op0 = XEXP (op0, 0);
		mode2 = GET_MODE (op0);
	      }
	    else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
		     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
		     && bitpos
		     && bitsize)
	      {
		op0 = XEXP (op0, 1);
		bitpos = 0;
		mode2 = GET_MODE (op0);
	      }
	    else
	      /* Otherwise force into memory.  */
	      must_force_mem = 1;
	  }
9887 /* Otherwise force into memory. */
9891 /* If this is a constant, put it in a register if it is a legitimate
9892 constant and we don't need a memory reference. */
9893 if (CONSTANT_P (op0
)
9895 && targetm
.legitimate_constant_p (mode2
, op0
)
9897 op0
= force_reg (mode2
, op0
);
9899 /* Otherwise, if this is a constant, try to force it to the constant
9900 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9901 is a legitimate constant. */
9902 else if (CONSTANT_P (op0
) && (memloc
= force_const_mem (mode2
, op0
)))
9903 op0
= validize_mem (memloc
);
9905 /* Otherwise, if this is a constant or the object is not in memory
9906 and need be, put it there. */
9907 else if (CONSTANT_P (op0
) || (!MEM_P (op0
) && must_force_mem
))
9909 tree nt
= build_qualified_type (TREE_TYPE (tem
),
9910 (TYPE_QUALS (TREE_TYPE (tem
))
9911 | TYPE_QUAL_CONST
));
9912 memloc
= assign_temp (nt
, 1, 1);
9913 emit_move_insn (memloc
, op0
);
9915 mem_attrs_from_type
= true;
	if (offset)
	  {
	    enum machine_mode address_mode;
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    gcc_assert (MEM_P (op0));

	    address_mode = get_address_mode (op0);
	    if (GET_MODE (offset_rtx) != address_mode)
	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);

	    if (GET_MODE (op0) == BLKmode
		/* A constant address in OP0 can have VOIDmode, we must
		   not try to call force_reg in that case.  */
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }

	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (MEM_P (op0) && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);
	/* Don't forget about volatility even if this is a bitfield.  */
	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }
	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || REG_P (op0) || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& modifier != EXPAND_MEMORY)
	    /* If the field is volatile, we always want an aligned
	       access.  Do this in following two situations:
	       1. the access is not already naturally
	       aligned, otherwise "normal" (non-bitfield) volatile fields
	       become non-addressable.
	       2. the bitsize is narrower than the access size. Need
	       to extract bitfields from the access.  */
	    || (volatilep && flag_strict_volatile_bitfields > 0
		&& (bitpos % GET_MODE_ALIGNMENT (mode) != 0
		    || (mode1 != BLKmode
			&& bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
		      || (MEM_P (op0)
			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
		     && modifier != EXPAND_MEMORY
		     && ((modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_INITIALIZER)
			 ? STRICT_ALIGNMENT
			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
		    || (bitpos % BITS_PER_UNIT != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& TYPE_SIZE (TREE_TYPE (exp))
		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && MEM_P (op0)
		      && MEM_P (target)
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		if (target == 0)
		  target = assign_temp (type, 1, 1);

		if (bitsize == 0)
		  return target;

		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		gcc_assert (MEM_P (op0)
			    && (!target || MEM_P (target))
			    && !(bitpos % BITS_PER_UNIT));

		emit_block_move (target,
				 adjust_address (op0, VOIDmode,
						 bitpos / BITS_PER_UNIT),
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode);

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  GET_MODE_BITSIZE (GET_MODE (op0))
				  - bitsize, op0, 1);

	    /* If the result type is BLKmode, store the data into a temporary
	       of the appropriate type, but with the mode corresponding to the
	       mode for the data we have (op0's mode).  It's tempting to make
	       this a constant type, since we know it's only being stored once,
	       but that can cause problems if we are taking the address of this
	       COMPONENT_REF because the MEM of any reference via that address
	       will have flags corresponding to the type, which will not
	       necessarily be constant.  */
	    if (mode == BLKmode)
	      {
		rtx new_rtx;

		new_rtx = assign_stack_temp_for_type (ext_mode,
						      GET_MODE_BITSIZE (ext_mode),
						      type);
		emit_move_insn (new_rtx, op0);
		op0 = copy_rtx (new_rtx);
		PUT_MODE (op0, BLKmode);
	      }

	    return op0;
	  }
      /* If the result is BLKmode, use that to access the object
	 now as well.  */
      if (mode == BLKmode)
	mode1 = BLKmode;

      /* Get a reference to just this component.  */
      if (modifier == EXPAND_CONST_ADDRESS
	  || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
      else
	op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

      if (op0 == orig_op0)
	op0 = copy_rtx (op0);

      /* If op0 is a temporary because of forcing to memory, pass only the
	 type to set_mem_attributes so that the original expression is never
	 marked as ADDRESSABLE through MEM_EXPR of the temporary.  */
      if (mem_attrs_from_type)
	set_mem_attributes (op0, type, 0);
      else
	set_mem_attributes (op0, exp, 0);

      if (REG_P (XEXP (op0, 0)))
	mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

      MEM_VOLATILE_P (op0) |= volatilep;
      if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	  || modifier == EXPAND_CONST_ADDRESS
	  || modifier == EXPAND_INITIALIZER)
	return op0;

      if (target == 0)
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      convert_move (target, op0, unsignedp);
      return target;
    }

    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
    case CALL_EXPR:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      if (CALL_EXPR_VA_ARG_PACK (exp))
	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      {
	tree fndecl = get_callee_fndecl (exp), attr;

	if (fndecl
	    && (attr = lookup_attribute ("error",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  error ("%Kcall to %qs declared with attribute error: %s",
		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
	if (fndecl
	    && (attr = lookup_attribute ("warning",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %qs declared with attribute warning: %s",
		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

	/* Check for a built-in function.  */
	if (fndecl && DECL_BUILT_IN (fndecl))
	  {
	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	  }
      }
      return expand_call (exp, target, ignore);
    case VIEW_CONVERT_EXPR:
      op0 = NULL_RTX;

      /* If we are converting to BLKmode, try to avoid an intermediate
	 temporary by fetching an inner memory reference.  */
      if (mode == BLKmode
	  && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
	  && handled_component_p (treeop0))
	{
	  enum machine_mode mode1;
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  int unsignedp;
	  int volatilep = 0;
	  tree tem
	    = get_inner_reference (treeop0, &bitsize, &bitpos,
				   &offset, &mode1, &unsignedp, &volatilep,
				   true);
	  rtx orig_op0;

	  /* ??? We should work harder and deal with non-zero offsets.  */
	  if (!offset
	      && (bitpos % BITS_PER_UNIT) == 0
	      && bitsize >= 0
	      && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
	    {
	      /* See the normal_inner_ref case for the rationale.  */
	      orig_op0
		= expand_expr (tem,
			       (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
				&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				    != INTEGER_CST)
				&& modifier != EXPAND_STACK_PARM
				? target : NULL_RTX),
			       VOIDmode,
			       modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);

	      if (MEM_P (orig_op0))
		{
		  op0 = orig_op0;

		  /* Get a reference to just this component.  */
		  if (modifier == EXPAND_CONST_ADDRESS
		      || modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
		  else
		    op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);

		  if (op0 == orig_op0)
		    op0 = copy_rtx (op0);

		  set_mem_attributes (op0, treeop0, 0);
		  if (REG_P (XEXP (op0, 0)))
		    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

		  MEM_VOLATILE_P (op0) |= volatilep;
		}
	    }
	}

      if (!op0)
	op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
	;
      /* If neither mode is BLKmode, and both modes are the same size
	 then we can use gen_lowpart.  */
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
	       && (GET_MODE_PRECISION (mode)
		   == GET_MODE_PRECISION (GET_MODE (op0)))
	       && !COMPLEX_MODE_P (GET_MODE (op0)))
	{
	  if (GET_CODE (op0) == SUBREG)
	    op0 = force_reg (GET_MODE (op0), op0);
	  temp = gen_lowpart_common (mode, op0);
	  if (temp)
	    op0 = temp;
	  else
	    {
	      if (!REG_P (op0) && !MEM_P (op0))
		op0 = force_reg (GET_MODE (op0), op0);
	      op0 = gen_lowpart (mode, op0);
	    }
	}
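      /* Illustration (a sketch, not taken from the surrounding sources):
	 the branch above handles same-size reinterpretations such as

	   float f = ...;
	   int i = VIEW_CONVERT_EXPR<int>(f);

	 where SFmode and SImode have equal precision, so gen_lowpart can
	 re-type the value without emitting any conversion code.  */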
      /* If both types are integral, convert from one mode to the other.  */
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
	op0 = convert_modes (mode, GET_MODE (op0), op0,
			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (treeop0);

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}
      /* At this point, OP0 is in the correct mode.  If the output type is
	 such that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (MEM_P (op0))
	{
	  enum insn_code icode;

	  if (TYPE_ALIGN_OK (type))
	    {
	      /* ??? Copying the MEM without substantially changing it might
		 run afoul of the code handling volatile memory references in
		 store_expr, which assumes that TARGET is returned unmodified
		 if it has been used.  */
	      op0 = copy_rtx (op0);
	      set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	    }
	  else if (mode != BLKmode
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
		   /* If the target does have special handling for unaligned
		      loads of mode then use them.  */
		   && ((icode = optab_handler (movmisalign_optab, mode))
		       != CODE_FOR_nothing))
	    {
	      rtx reg, insn;

	      op0 = adjust_address (op0, mode, 0);
	      /* We've already validated the memory, and we're creating a
		 new pseudo destination.  The predicates really can't
		 fail.  */
	      reg = gen_reg_rtx (mode);

	      /* Nor can the insn generator.  */
	      insn = GEN_FCN (icode) (reg, op0);
	      emit_insn (insn);
	      return reg;
	    }
	  else if (STRICT_ALIGNMENT
		   && mode != BLKmode
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
	    {
	      tree inner_type = TREE_TYPE (treeop0);
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (mode));
	      rtx new_rtx
		= assign_stack_temp_for_type (mode, temp_size, type);
	      rtx new_with_op0_mode
		= adjust_address (new_rtx, GET_MODE (op0), 0);

	      gcc_assert (!TREE_ADDRESSABLE (exp));

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (mode)),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new_rtx;
	    }

	  op0 = adjust_address (op0, mode, 0);
	}

      return op0;
    case MODIFY_EXPR:
      {
	tree lhs = treeop0;
	tree rhs = treeop1;
	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
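	/* Illustration (a sketch, not taken from the surrounding
	   sources): for one-bit fields the transformation below turns

	     s.a |= t.b;   into   if (t.b) s.a = 1;
	     s.a &= t.b;   into   if (!t.b) s.a = 0;

	   so only the taken case pays for a read-modify-write of the
	   destination bitfield.  */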
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label, -1);
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
			       false);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs, false);
	return const0_rtx;
      }

    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);
    case REALPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);
    case RETURN_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case SWITCH_EXPR:
    case ASM_EXPR:
      /* Expanded in cfgexpand.c.  */
      gcc_unreachable ();

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case COMPOUND_LITERAL_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
			       modifier, alt_rtl);

    default:
      return expand_expr_real_2 (&ops, target, tmode, modifier);
    }
}
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      rtx mask = immed_double_int_const (double_int::mask (prec),
					 GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
			  exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
			   exp, count, target, 0);
    }
}
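/* Worked example (a sketch, not taken from the surrounding sources):
   reducing an SImode value to a 3-bit unsigned type masks with
   (1 << 3) - 1:

     0b101101 & 0b111  ==  0b101

   while the signed case uses the shift pair, which also sign-extends:

     (0b101 << 29) >> 29  ==  -3

   given a 32-bit mode and an arithmetic right shift.  */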
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
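/* Illustration (a sketch, not taken from the surrounding sources): the
   shape recognized above is what alignment idioms such as

     buf + (-(intptr_t) buf & (ALIGN - 1))

   lower to: a BIT_AND_EXPR whose constant is ALIGN - 1 (one less than a
   power of two) applied to a NEGATE_EXPR of the address of EXP, which
   rounds that address up to the next ALIGN-byte boundary.  */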
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;

	  /* Check if the array has a nonzero lower bound.  */
	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
	  if (!integer_zerop (lower_bound))
	    {
	      /* If the offset and base aren't both constants, return 0.  */
	      if (TREE_CODE (lower_bound) != INTEGER_CST)
		return 0;
	      if (TREE_CODE (offset) != INTEGER_CST)
		return 0;
	      /* Adjust offset by the lower bound.  */
	      offset = size_diffop (fold_convert (sizetype, offset),
				    fold_convert (sizetype, lower_bound));
	    }
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != ADDR_EXPR)
	    return 0;
	  array = TREE_OPERAND (array, 0);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL
	   || TREE_CODE (array) == CONST_DECL)
    {
      int length;
      tree init = ctor_for_folding (array);

      /* Variables initialized to string literals can be handled too.  */
      if (init == error_mark_node
	  || !init
	  || TREE_CODE (init) != STRING_CST)
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (init)) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
	 and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return init;
    }

  return 0;
}
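/* Usage sketch (not taken from the surrounding sources): for the
   argument of strlen ("hello" + 2), string_constant returns the
   STRING_CST "hello" and sets *ptr_offset to the sizetype constant 2;
   for a variable such as  static const char msg[] = "hi";  it returns
   the initializer STRING_CST instead; and it returns 0 for anything it
   cannot prove to be based on a string literal.  */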
/* Generate code to calculate the exploded comparison expression OPS
   using a store-flag instruction, and return an rtx for the result.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
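/* Illustration (a sketch, not taken from the surrounding sources):
   given

     int flag = (a < b);

   a successful store-flag expansion produces a single insn of roughly
   the form

     (set (reg:SI flag) (lt:SI (reg:SI a) (reg:SI b)))

   rather than a compare, a conditional branch and two constant
   moves.  */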
static rtx
do_store_flag (sepops ops, rtx target, enum machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      tree if_true = constant_boolean_node (true, ops->type);
      tree if_false = constant_boolean_node (false, ops->type);
      return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
    }

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
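  /* Illustration (a sketch, not taken from the surrounding sources):
     for

       flag = ((x & 8) != 0);

     fold_single_bit_test produces the equivalent of ((x >> 3) & 1),
     and for the EQ form additionally xors that low bit with 1, so no
     scc instruction is needed.  */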
  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);

      if (srcstmt
	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
	{
	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
				       gimple_assign_rhs1 (srcstmt),
				       gimple_assign_rhs2 (srcstmt));
	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
	  if (temp)
	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
	}
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
				operand_mode, unsignedp,
				(TYPE_PRECISION (ops->type) == 1
				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label, rtx default_label, rtx fallback_label,
	    int default_probability)
{
  struct expand_operand ops[5];
  enum machine_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label,
				 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
				  ? default_label
				  : fallback_label));
  expand_jump_insn (CODE_FOR_casesi, 5, ops);
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label, int default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
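  /* Worked example (a sketch, not taken from the surrounding sources):
     for a switch whose cases span 5..10, INDEX arrives here as i - 5
     and RANGE is 10 - 5 == 5, so the single unsigned test

       (unsigned) (i - 5) > 5

     branches to the default label exactly when i < 5 || i > 10,
     because an underflowed subtraction wraps to a huge unsigned
     value.  */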
  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS
    (Pmode,
     gen_rtx_MULT (Pmode, index,
		   gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE), Pmode)),
     gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label, int default_probability)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label, default_probability);
  return 1;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i;
  int units;
  tree elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else if (TREE_CODE (elt) == FIXED_CST)
	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
						   inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
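/* Example (a sketch, not taken from the surrounding sources): for a
   V4SImode VECTOR_CST {1, 2, 3, 4} this builds

     (const_vector:V4SI [(const_int 1) (const_int 2)
			 (const_int 3) (const_int 4)])

   while an all-zeros constructor short-circuits to the shared
   CONST0_RTX (V4SImode).  */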
/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
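  /* For illustration: with DWARF2 unwind info this produces names such
     as "__gcc_personality_v0" (C) or "__gxx_personality_v0" (C++), and
     "__gxx_personality_sj0" when setjmp/longjmp exceptions are in
     use.  */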
  type = build_function_type_list (integer_type_node, integer_type_node,
				   long_long_unsigned_type_node,
				   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}

#include "gt-expr.h"