/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "diagnostic.h"
#include "ssaexpand.h"
#include "target-globals.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
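/* For example (illustrative only; the exact settings are chosen by
   move_by_pieces below): a forward copy of LEN bytes using
   post-increment addressing would be described by a move_by_pieces_d
   whose TO/FROM are the BLKmode MEMs, with AUTINC_TO, AUTINC_FROM,
   EXPLICIT_INC_TO and EXPLICIT_INC_FROM all 1, REVERSE 0 and OFFSET
   starting at 0; move_by_pieces_1 then consumes LEN one mode-sized
   piece at a time.  */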
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned,
					HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, alias_set_type, bool);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree,
							      const_tree);
static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, enum machine_mode);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
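/* For example (illustrative only; MOVE_RATIO is target-dependent):
   with a MOVE_RATIO of 4, a 16-byte copy at 32-bit alignment is
   expanded inline only if

     move_by_pieces_ninsns (16, 32, MOVE_MAX_PIECES + 1) < 4

   holds; this is exactly the test emit_block_move_hints below applies
   before falling back to a movmem pattern or a memcpy libcall.  */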
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat, mem, mem1, reg;
  int num_clobbers;
  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;
  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;

      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if (insn_operand_matches (ic, 1, mem))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
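/* For example (illustrative only; the result depends entirely on the
   target's move patterns), after init_expr_target runs on a typical
   32-bit target, direct_load[(int) SImode] and
   direct_store[(int) SImode] are both 1 because a
   (set (reg:SI ...) (mem:SI ...)) pattern is recognized; convert_move
   and convert_modes below consult these arrays before referring to a
   MEM in a narrower mode.  */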
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (convert_optab_handler (sext_optab, full_mode,
						 from_mode),
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode),
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }
  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	      ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (reg_overlap_mentioned_p (to, from))
	    from = force_reg (from_mode, from);
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}
      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	fill_value = emit_store_flag (gen_reg_rtx (word_mode),
				      LT, lowfrom, const0_rtx,
				      VOIDmode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
			     from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
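/* A typical use of convert_move (illustrative only; the registers are
   hypothetical):

     rtx to = gen_reg_rtx (DImode);
     rtx from = gen_reg_rtx (SImode);
     convert_move (to, from, 1);

   emits a zero-extension from SImode to DImode, either as a single
   zero_extend insn when can_extend_p reports one, or via the
   intermediate-mode, multiword or shift-based fallbacks above.  */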
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode,
	       rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && CONST_INT_P (x) && INTVAL (x) < 0)
    {
      double_int val = uhwi_to_double_int (INTVAL (x));

      /* We need to zero extend VAL.  */
      if (oldmode != VOIDmode)
	val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));

      return immed_double_int_const (val, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((CONST_INT_P (x)
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (CONST_INT_P (x) && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }
  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
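/* Worked example (illustrative only): on a host with 8-bit QImode,

     convert_modes (SImode, QImode, GEN_INT (-1), 1)

   takes the CONST_INT path above: VAL is masked to the low 8 bits,
   no sign extension is applied because UNSIGNEDP is nonzero, and
   gen_int_mode returns (const_int 255) in SImode.  */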
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  enum machine_mode tmode;

  tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > max_pieces
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}
/* Return the widest integer mode narrower than SIZE.  If no such mode
   can be found, return VOIDmode.  */

static enum machine_mode
widest_int_mode_for_size (unsigned int size)
{
  enum machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < size)
      mode = tmode;

  return mode;
}
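/* Worked example (illustrative only): with integer modes of 1, 2, 4
   and 8 bytes, widest_int_mode_for_size (7) scans QImode through
   DImode and returns SImode, the widest mode strictly narrower than
   7 bytes; widest_int_mode_for_size (1) finds none and returns
   VOIDmode.  Callers pass a bound plus one, so the strict comparison
   means "no wider than the bound".  */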
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  enum machine_mode to_addr_mode, from_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr_mode = VOIDmode;
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);
  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...
	 MODE might not be used depending on the definitions of the
	 USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
	= widest_int_mode_for_size (max_size);

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_to_mode_reg (from_addr_mode,
					     plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_to_mode_reg (to_addr_mode,
					   plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }
  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */
  while (max_size > 1)
    {
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_to_mode_reg (to_addr_mode,
						 plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
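/* Usage sketch (illustrative only): a caller copying LEN constant
   bytes ala mempcpy can write

     rtx end = move_by_pieces (to, from, len, align, 1);

   and receives a QImode MEM just past the last byte stored, matching
   the ENDP == 1 case handled above; ENDP == 2 backs up one byte ala
   stpcpy.  */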
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1)
    {
      enum machine_mode mode;
      enum insn_code icode;

      mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
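/* Worked example (illustrative only): with 1/2/4/8-byte integer modes
   all usable at the given alignment, move_by_pieces_ninsns (10, align,
   8 + 1) counts one DImode move (10 / 8, leaving l == 2) and then one
   HImode move, returning 2.  */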
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  if (CONST_INT_P (size)
      && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }
  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }
  if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
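/* Usage sketch (illustrative only): most callers simply write

     emit_block_move (x, y, GEN_INT (bytes), BLOCK_OP_NORMAL);

   and let emit_block_move_hints choose between move_by_pieces, a
   movmem pattern, a memcpy libcall, or the fallback byte loop.  */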
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = targetm.calls.function_arg (&args_so_far, mode,
					      NULL_TREE, true);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	targetm.calls.function_arg_advance (&args_so_far, mode,
					    NULL_TREE, true);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align,
			    HOST_WIDE_INT expected_size)
{
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
	{
	  struct expand_operand ops[6];
	  unsigned int nops;

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */
	  nops = insn_data[(int) code].n_operands;
	  create_fixed_operand (&ops[0], x);
	  create_fixed_operand (&ops[1], y);
	  /* The check above guarantees that this size conversion is valid.  */
	  create_convert_operand_to (&ops[2], size, mode, true);
	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
	  if (nops == 6)
	    {
	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
	      create_integer_operand (&ops[5], expected_size);
	    }
	  if (maybe_expand_insn (code, nops, ops))
	    {
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}
static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode x_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
  enum machine_mode y_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
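/* The generated RTL is a simple byte loop (illustrative shape only):

       iter = 0;
       goto cmp;
     top:
       x[iter] = y[iter];
       iter += 1;
     cmp:
       if (iter < size) goto top;

   one QImode move per iteration, which is why the ??? comment above
   suggests copying in larger hunks.  */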
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
				   GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
1551 ORIG, where ORIG is a non-consecutive group of registers represented by
1552 a PARALLEL. The clone is identical to the original except in that the
1553 original set of registers is replaced by a new set of pseudo registers.
1554 The new set has the same modes as the original set. */
1557 gen_group_rtx (rtx orig
)
1562 gcc_assert (GET_CODE (orig
) == PARALLEL
);
1564 length
= XVECLEN (orig
, 0);
1565 tmps
= XALLOCAVEC (rtx
, length
);
1567 /* Skip a NULL entry in first slot. */
1568 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
1573 for (; i
< length
; i
++)
1575 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
1576 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
1578 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
1581 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
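/* For example (illustrative only; the registers are hypothetical),
   given

     (parallel [(expr_list (reg:DI d0) (const_int 0))
		(expr_list (reg:DI d1) (const_int 8))])

   gen_group_rtx returns a PARALLEL of the same shape in which d0 and
   d1 are replaced by fresh DImode pseudos, with the byte offsets
   unchanged.  */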
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }
  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}
      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, false, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, false, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

	  if (len == ssize)
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      gcc_assert (2 * len == ssize);
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, false,
				     NULL_RTX, mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}

/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);
  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
	{
	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
	  emit_move_insn (tmps[i], reg);
	}
      else
	tmps[i] = reg;
    }
  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
	dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
	 of the destination mode, use a paradoxical subreg to
	 initialize the destination.  */
      if (start < finish)
	{
	  inner = GET_MODE (tmps[start]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[start], inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  start++;
		}
	    }
	}

      /* If the first element wasn't the low part, try the last.  */
      if (!done
	  && start < finish - 1)
	{
	  inner = GET_MODE (tmps[finish - 1]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[finish - 1], inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  finish--;
		}
	    }
	}

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
	emit_move_insn (dst, CONST0_RTX (outer));
    }
  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen = bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	adj_bytelen = ssize - bytepos;
      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + adj_bytelen
	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      enum machine_mode dest_mode = GET_MODE (dest);
	      enum machine_mode tmp_mode = GET_MODE (tmps[i]);

	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));

	      if (GET_MODE_ALIGNMENT (dest_mode)
		  >= GET_MODE_ALIGNMENT (tmp_mode))
		{
		  dest = assign_stack_temp (dest_mode,
					    GET_MODE_SIZE (dest_mode),
					    0);
		  emit_move_insn (adjust_address (dest,
						  tmp_mode,
						  bytepos),
				  tmps[i]);
		  dst = dest;
		}
	      else
		{
		  dest = assign_stack_temp (tmp_mode,
					    GET_MODE_SIZE (tmp_mode),
					    0);
		  emit_move_insn (dest, tmps[i]);
		  dst = adjust_address (dest, dest_mode, bytepos);
		}
	      break;
	    }
	}
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_cst (NULL_TREE, shift),
				      tmps[i], 0);
	    }
	  bytelen = adj_bytelen;
	}
2058 /* Optimize the access just a bit. */
2060 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (dest
))
2061 || MEM_ALIGN (dest
) >= GET_MODE_ALIGNMENT (mode
))
2062 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2063 && bytelen
== GET_MODE_SIZE (mode
))
2064 emit_move_insn (adjust_address (dest
, mode
, bytepos
), tmps
[i
]);
2066 store_bit_field (dest
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2070 /* Copy from the pseudo into the (probable) hard reg. */
2071 if (orig_dst
!= dst
)
2072 emit_move_insn (orig_dst
, dst
);
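
/* Illustrative example (hypothetical register numbers): a PARALLEL
   source such as

     (parallel [(expr_list (reg:SI 3) (const_int 0))
		(expr_list (reg:SI 4) (const_int 4))])

   describes an 8-byte value split across two SImode registers at byte
   offsets 0 and 4; the loop above stores each piece at its offset in
   the destination.  */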
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  enum machine_mode copy_mode;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
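
  /* Worked example (hypothetical target): with 32-bit words, a 5-byte
     structure occupies one full word plus one byte.  On a big-endian
     target returning the structure at the least significant end, the
     partial word is left-padded, so
     padding_correction = 32 - (5 % 4) * 8 = 24 bits are skipped.  */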
  /* Copy the structure BITSIZE bits at a time.  If the target lives in
     memory, take care of not reading/writing past its end by selecting
     a copy mode suited to BITSIZE.  This should always be possible given
     how it is computed.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */

  copy_mode = word_mode;
  if (MEM_P (tgtblk))
    {
      enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
      if (mem_mode != BLKmode)
	copy_mode = mem_mode;
    }

  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == padding_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1, false,
					  NULL_RTX, copy_mode, copy_mode));
    }

  return tgtblk;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}
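
/* For instance (hypothetical shape), an argument passed half on the
   stack and half in a register might be described as

     (parallel [(expr_list (mem:SI ...) (const_int 0))
		(expr_list (reg:SI 5) (const_int 4))])

   and only the (reg ...) entry generates a USE; NULL and MEM entries
   are skipped.  */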
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr (tree name, enum tree_code code)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode;
  enum insn_code icode;
  int reverse;
  /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
  rtx cst ATTRIBUTE_UNUSED;

  if (len == 0)
    return 1;

  if (! (memsetp
	 ? SET_BY_PIECES_P (len, align)
	 : STORE_BY_PIECES_P (len, align)))
    return 0;

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
	{
	  mode = widest_int_mode_for_size (max_size);

	  if (mode == VOIDmode)
	    break;

	  icode = optab_handler (mov_optab, mode);
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!LEGITIMATE_CONSTANT_P (cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
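
/* Usage sketch: a caller materializing a constant string supplies a
   callback with the signature

     rtx (*constfun) (void *data, HOST_WIDE_INT offset,
		      enum machine_mode mode);

   that returns the constant chunk at OFFSET in MODE, and asks
   can_store_by_pieces first before committing to store_by_pieces,
   as the string built-ins do.  */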
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		 void *constfundata, unsigned int align, bool memsetp, int endp)
{
  enum machine_mode to_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
  struct store_by_pieces_d data;

  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (memsetp
	      ? SET_BY_PIECES_P (len, align)
	      : STORE_BY_PIECES_P (len, align));
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_to_mode_reg (to_addr_mode,
						 plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces_d data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
		   unsigned int align ATTRIBUTE_UNUSED)
{
  enum machine_mode to_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.
	 MODE might not be used depending on the definitions of the
	 USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
	= widest_int_mode_for_size (max_size);

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
	{
	  data->to_addr = copy_to_mode_reg (to_addr_mode,
					    plus_constant (to_addr, data->len));
	  data->autinc_to = 1;
	  data->explicit_inc_to = -1;
	}

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
	  && ! data->autinc_to)
	{
	  data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
	  data->autinc_to = 1;
	  data->explicit_inc_to = 1;
	}

      if ( !data->autinc_to && CONSTANT_P (to_addr))
	data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		   struct store_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
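
/* Illustrative trace: on a 32-bit target with LEN == 7 and sufficient
   alignment, store_by_pieces_1 calls this routine first with SImode
   (one 4-byte store), then with HImode (one 2-byte store), then with
   QImode (one 1-byte store), leaving data->len == 0.  */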
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
		     unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  enum machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && CONST_INT_P (size)
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
	{
	  emit_move_insn (object, zero);
	  return NULL;
	}

      if (COMPLEX_MODE_P (mode))
	{
	  zero = CONST0_RTX (GET_MODE_INNER (mode));
	  if (zero != NULL)
	    {
	      write_complex_part (object, zero, 0);
	      write_complex_part (object, zero, 1);
	      return NULL;
	    }
	}
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (CONST_INT_P (size)
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
				   expected_align, expected_size))
    ;
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
    return set_storage_via_libcall (object, size, const0_rtx,
				    method == BLOCK_OP_TAILCALL);
  else
    gcc_unreachable ();

  return NULL;
}

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  return clear_storage_hints (object, size, method, 0, -1);
}
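
/* Strategy summary: a constant-size clear small enough for
   CLEAR_BY_PIECES_P is expanded inline, otherwise the target's setmem
   pattern is tried, and otherwise, for the generic address space, a
   memset libcall is emitted.  */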
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those into new pseudos into a VAR_DECL and use them later.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  size_tree = make_tree (sizetype, size);
  val_tree = make_tree (integer_type_node, val);

  fn = clear_storage_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of set_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       integer_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
			unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (setmem_optab, mode);

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than
	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
	     the mode mask, as it is returned by the macro, it will
	     definitely be less than the actual mode mask.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
	{
	  struct expand_operand ops[6];
	  unsigned int nops;

	  nops = insn_data[(int) code].n_operands;
	  create_fixed_operand (&ops[0], object);
	  /* The check above guarantees that this size conversion is valid.  */
	  create_convert_operand_to (&ops[1], size, mode, true);
	  create_convert_operand_from (&ops[2], val, byte_mode, true);
	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
	  if (nops >= 6)
	    {
	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
	      create_integer_operand (&ops[5], expected_size);
	    }
	  if (maybe_expand_insn (code, nops, ops))
	    return true;
	}
    }

  return false;
}
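
/* The operand layout above matches the setmem named pattern: operand 0
   is the destination MEM, operand 1 the byte count, operand 2 the value
   to store, operand 3 the known alignment, and (when the pattern takes
   six operands) operands 4 and 5 carry the expected-alignment and
   expected-size hints.  */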
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  enum machine_mode cmode;
  enum machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
					 imag_p ? GET_MODE_SIZE (imode) : 0),
		      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
				      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
	{
	  emit_move_insn (part, val);
	  return;
	}
      else
	/* simplify_gen_subreg may fail for sub-word MEMs.  */
	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
}
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  enum machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
	{
	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
	  if (CONSTANT_CLASS_P (part))
	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
	}
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
			      imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
				     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
	return ret;
      else
	/* simplify_gen_subreg may fail for sub-word MEMs.  */
	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
			    true, false, NULL_RTX, imode, imode);
}
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (enum machine_mode new_mode,
		       enum machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
	 size in bytes is supposed to be the same.  */
      if (reload_in_progress)
	{
	  /* Copy the MEM to change the mode and move any
	     substitutions from the old MEM to the new one.  */
	  ret = adjust_address_nv (x, new_mode, 0);
	  copy_replacements (x, ret);
	}
      else
	ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
	 that the new mode is ok for a hard register.  If we were to use
	 simplify_gen_subreg, we would create the subreg, but would
	 probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
	 we want.  Which is needed for CCmodes on some targets.  */
      if (force)
	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
	ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
{
  enum machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL_RTX;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL_RTX;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL_RTX;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL_RTX;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

static rtx
emit_move_resolve_push (enum machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (CONST_INT_P (XEXP (expr, 1)));
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
	val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
			      GEN_INT (adjust), stack_pointer_rtx,
			      0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
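
/* Example (hypothetical, stack grows downward, 4-byte mode):
   (mem:SI (pre_dec:SI (reg sp))) becomes an explicit sp = sp - 4
   followed by (mem:SI (reg sp)), while the post_dec form instead
   yields (mem:SI (plus (reg sp) (const_int 4))) after the same
   adjustment.  */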
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
{
  enum machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;
    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
		  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
			 read_complex_part (y, !imag_first));
}
/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once.  */

  /* Move floating point as parts.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
	   && (!STRICT_ALIGNMENT
	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx ret;

      /* For memory to memory moves, optimal behavior can be had with the
	 existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
	{
	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
			   BLOCK_OP_NO_LIBCALL);
	  return get_last_insn ();
	}

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
	return ret;
    }

  return emit_move_complex_parts (x, y);
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
{
  rtx ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
	{
	  x = emit_move_change_mode (CCmode, mode, x, true);
	  y = emit_move_change_mode (CCmode, mode, y, true);
	  return emit_insn (GEN_FCN (code) (x, y));
	}
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  enum machine_mode innermode, innermostmode;
  int offset;

  if (GET_CODE (op) != SUBREG)
    return false;
  innermode = GET_MODE (op);
  innermostmode = GET_MODE (SUBREG_REG (op));
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
  if (SUBREG_BYTE (op) == 0
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
    {
      int difference = (GET_MODE_SIZE (innermostmode)
			- GET_MODE_SIZE (innermode));
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  if (offset >= GET_MODE_SIZE (innermostmode)
      || offset <= -GET_MODE_SIZE (word_mode))
    return true;

  return false;
}
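
/* Example: for (subreg:DI (reg:SI 100) 0) on a little-endian 32-bit
   target, word 1 lies entirely in the paradoxical high part, so its
   contents are undefined and the caller may skip moving it.  */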
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx
emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
{
  rtx last_insn = 0;
  rtx seq, inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart;

      /* Do not generate code for a move if it would come entirely
	 from the undefined bits of a paradoxical subreg.  */
      if (undefined_operand_subword_p (y, i))
	continue;

      ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
	 constant.  Otherwise, force it into a register.  Then we must
	 be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
	{
	  y = use_anchored_address (force_const_mem (mode, y));
	  ypart = operand_subword (y, i, 1, mode);
	}
      else if (ypart == 0)
	ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_clobber (x);

  emit_insn (seq);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode);
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
	return result;
      else
	return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx ret = emit_move_via_integer (mode, x, y, false);
      if (ret)
	return ret;
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  gcc_assert (mode != BLKmode
	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
	  && (last_insn = compress_float_constant (x, y)))
	return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
	{
	  y = force_const_mem (mode, y);

	  /* If the target's cannot_force_const_mem prevented the spill,
	     assume that the target's move expanders will also take care
	     of the non-legitimate constant.  */
	  if (!y)
	    y = y_cst;
	  else
	    y = use_anchored_address (y);
	}
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
					 MEM_ADDR_SPACE (x))
	  && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
					MEM_ADDR_SPACE (y)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));

  return last_insn;
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;
  int oldcost, newcost;
  bool speed = optimize_insn_for_speed_p ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  if (LEGITIMATE_CONSTANT_P (y))
    oldcost = rtx_cost (y, SET, speed);
  else
    oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
	continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
	continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
	{
	  /* Skip if the target needs extra instructions to perform
	     the extension.  */
	  if (!insn_operand_matches (ic, 1, trunc_y))
	    continue;
	  /* This is valid, but may not be cheaper than the original.  */
	  newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
	  if (oldcost < newcost)
	    continue;
	}
      else if (float_extend_from_mem[dstmode][srcmode])
	{
	  trunc_y = force_const_mem (srcmode, trunc_y);
	  /* This is valid, but may not be cheaper than the original.  */
	  newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
	  if (oldcost < newcost)
	    continue;
	  trunc_y = validize_mem (trunc_y);
	}
      else
	continue;

      /* For CSE's benefit, force the compressed constant pool entry
	 into a new pseudo.  This constant may be used in different modes,
	 and if not, combine will put things back together for us.  */
      trunc_y = force_reg (srcmode, trunc_y);
      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (x))
	set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
	temp = plus_constant (temp, extra);
    }
  else
    {
      if (CONST_INT_P (size))
	temp = plus_constant (virtual_outgoing_args_rtx,
			      -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (size, extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = optab_handler (push_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];

      create_input_operand (&ops[0], x, mode);
      if (maybe_expand_insn (icode, 1, ops))
	return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
				    sub_optab,
#else
				    add_optab,
#endif
				    stack_pointer_rtx,
				    GEN_INT (rounded_size),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
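
/* Padding example (hypothetical): pushing an HImode value when
   PUSH_ROUNDING rounds 2 bytes up to 4 leaves padding_size == 2;
   with downward padding the stack pointer is adjusted by the full
   4 bytes first and the value is then stored 2 bytes above the new
   stack pointer.  */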
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
		unsigned int align, int partial, rtx reg, int extra,
		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
		rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x;

  if (mode == BLKmode
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      if (mode != BLKmode)
	{
	  /* A value is to be stored in an insufficiently aligned
	     stack slot; copy via a suitably aligned slot if
	     necessary.  */
	  size = GEN_INT (GET_MODE_SIZE (mode));
	  if (!MEM_P (xinner))
	    {
	      temp = assign_temp (type, 0, 1, 1);
	      emit_move_insn (temp, xinner);
	      xinner = temp;
	    }
	}

      gcc_assert (size);

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && CONST_INT_P (size)
	  && skip == 0
	  && MEM_ALIGN (xinner) >= align
	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
		  == (align / BITS_PER_UNIT)))
	  && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (CONST_INT_P (size))
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (CONST_INT_P (args_so_far))
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (target, align);

	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  if (CONST_INT_P (args_so_far))
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  dest = gen_rtx_MEM (mode, addr);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (dest, align);

	  emit_move_insn (dest, x);
	}
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, type, -1);
      else
	{
	  gcc_assert (partial % UNITS_PER_WORD == 0);
	  move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
	}
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	  /* Only registers can be subtargets.  */
	  || !REG_P (x)
	  /* Don't use hard regs to avoid extending their life.  */
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}
3925 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3926 FIELD is a bitfield. Returns true if the optimization was successful,
3927 and there's nothing else to do. */
3930 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize
,
3931 unsigned HOST_WIDE_INT bitpos
,
3932 enum machine_mode mode1
, rtx str_rtx
,
3935 enum machine_mode str_mode
= GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;
  gimple srcstmt;
  enum tree_code code;

  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  STRIP_NOPS (src);
  if (TREE_CODE (src) != SSA_NAME)
    return false;
  if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  srcstmt = get_gimple_for_ssa_name (src);
  if (!srcstmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
    return false;

  code = gimple_assign_rhs_code (srcstmt);

  op0 = gimple_assign_rhs1 (srcstmt);

  /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
     to find its initialization.  Hopefully the initialization will
     be from a bitfield load.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple op0stmt = get_gimple_for_ssa_name (op0);

      /* We want to eventually have OP0 be the same as TO, which
         should be a bitfield.  */
      if (!op0stmt
          || !is_gimple_assign (op0stmt)
          || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
        return false;
      op0 = gimple_assign_rhs1 (op0stmt);
    }

  op1 = gimple_assign_rhs2 (srcstmt);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
        str_mode = word_mode;
      str_mode = get_best_mode (bitsize, bitpos,
                                MEM_ALIGN (str_rtx), str_mode, 0);
      if (str_mode == VOIDmode)
        return false;
      str_bitsize = GET_MODE_BITSIZE (str_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
         where we don't need to do any masking and also
         1 bit bitfields where xor can be used.
         We might win by one instruction for the other bitfields
         too if insv/extv instructions aren't used, so that
         can be added later.  */
      if (bitpos + bitsize != str_bitsize
          && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
        break;

      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      binop = code == PLUS_EXPR ? add_optab : sub_optab;
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
        {
          value = expand_and (str_mode, value, const1_rtx, NULL);
          binop = xor_optab;
        }
      value = expand_shift (LSHIFT_EXPR, str_mode, value,
                            build_int_cst (NULL_TREE, bitpos),
                            NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
        break;
      value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
      value = convert_modes (GET_MODE (str_rtx),
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
        {
          rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
                              - 1);
          value = expand_and (GET_MODE (str_rtx), value, mask,
                              NULL_RTX);
        }
      value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
                            build_int_cst (NULL_TREE, bitpos),
                            NULL_RTX, 1);
      result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
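
/* Illustrative example (not from the original sources): the transformation
   above turns a read-modify-write of a bitfield into one arithmetic or
   logical operation on the word that contains it.  For instance, given

     struct S { unsigned lo : 29; unsigned top : 3; } s;
     s.top += 1;    the "topmost bitfield" case: a plain add on the
                    containing word, since the carry falls off the end
     s.lo |= 4;     the IOR-with-constant case: mask, shift, then one OR

   no extract/insert bitfield sequence is needed in either case.  */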
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */

void
expand_assignment (tree to, tree from, bool nontemporal)
{
  rtx to_rtx = 0;
  rtx result;
  enum machine_mode mode;
  int align;
  enum insn_code icode;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;

  mode = TYPE_MODE (TREE_TYPE (to));
  if ((TREE_CODE (to) == MEM_REF
       || TREE_CODE (to) == TARGET_MEM_REF)
      && mode != BLKmode
      && ((align = MAX (TYPE_ALIGN (TREE_TYPE (to)),
                        get_object_alignment (to, BIGGEST_ALIGNMENT)))
          < (signed) GET_MODE_ALIGNMENT (mode))
      && ((icode = optab_handler (movmisalign_optab, mode))
          != CODE_FOR_nothing))
    {
      struct expand_operand ops[2];
      enum machine_mode address_mode;
      rtx reg, op0, mem;

      reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      reg = force_not_mem (reg);

      if (TREE_CODE (to) == MEM_REF)
        {
          addr_space_t as
            = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 1))));
          tree base = TREE_OPERAND (to, 0);
          address_mode = targetm.addr_space.address_mode (as);
          op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
          op0 = convert_memory_address_addr_space (address_mode, op0, as);
          if (!integer_zerop (TREE_OPERAND (to, 1)))
            {
              rtx off
                = immed_double_int_const (mem_ref_offset (to), address_mode);
              op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
            }
          op0 = memory_address_addr_space (mode, op0, as);
          mem = gen_rtx_MEM (mode, op0);
          set_mem_attributes (mem, to, 0);
          set_mem_addr_space (mem, as);
        }
      else if (TREE_CODE (to) == TARGET_MEM_REF)
        {
          addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (to));
          struct mem_address addr;

          get_address_description (to, &addr);
          op0 = addr_for_mem_ref (&addr, as, true);
          op0 = memory_address_addr_space (mode, op0, as);
          mem = gen_rtx_MEM (mode, op0);
          set_mem_attributes (mem, to, 0);
          set_mem_addr_space (mem, as);
        }
      else
        gcc_unreachable ();
      if (TREE_THIS_VOLATILE (to))
        MEM_VOLATILE_P (mem) = 1;

      create_fixed_operand (&ops[0], mem);
      create_input_operand (&ops[1], reg, mode);
      /* The movmisalign<mode> pattern cannot fail, else the assignment would
         silently be omitted.  */
      expand_insn (icode, 2, ops);
      return;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */
  if (handled_component_p (to)
      /* ???  We only need to handle MEM_REF here if the access is not
         a full access of the base object.  */
      || (TREE_CODE (to) == MEM_REF
          && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &volatilep, true);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      to_rtx = expand_normal (tem);

      /* If the bitfield is volatile, we want to access it in the
         field's mode, not the computed mode.
         If a MEM has VOIDmode (external with incomplete type),
         use BLKmode for it instead.  */
      if (MEM_P (to_rtx))
        {
          if (volatilep && flag_strict_volatile_bitfields > 0)
            to_rtx = adjust_address (to_rtx, mode1, 0);
          else if (GET_MODE (to_rtx) == VOIDmode)
            to_rtx = adjust_address (to_rtx, BLKmode, 0);
        }

      if (offset != 0)
        {
          enum machine_mode address_mode;
          rtx offset_rtx;

          if (!MEM_P (to_rtx))
            {
              /* We can get constant negative offsets into arrays with broken
                 user code.  Translate this to a trap instead of ICEing.  */
              gcc_assert (TREE_CODE (offset) == INTEGER_CST);
              expand_builtin_trap ();
              to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
            }

          offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
          address_mode
            = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
          if (GET_MODE (offset_rtx) != address_mode)
            offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);

          /* A constant address in TO_RTX can have VOIDmode, we must not try
             to call force_reg for that case.  Avoid that case.  */
          if (MEM_P (to_rtx)
              && GET_MODE (to_rtx) == BLKmode
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
              && bitsize > 0
              && (bitpos % bitsize) == 0
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
              && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
            {
              to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
              bitpos = 0;
            }

          to_rtx = offset_address (to_rtx, offset_rtx,
                                   highest_pow2_factor_for_target (to,
                                                                   offset));
        }

      /* No action is needed if the target is not a memory and the field
         lies completely outside that target.  This can occur if the source
         code contains an out-of-bounds access to a small array.  */
      if (!MEM_P (to_rtx)
          && GET_MODE (to_rtx) != BLKmode
          && (unsigned HOST_WIDE_INT) bitpos
             >= GET_MODE_BITSIZE (GET_MODE (to_rtx)))
        {
          expand_normal (from);
          result = NULL;
        }
      /* Handle expand_expr of a complex value returning a CONCAT.  */
      else if (GET_CODE (to_rtx) == CONCAT)
        {
          if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from))))
            {
              gcc_assert (bitpos == 0);
              result = store_expr (from, to_rtx, false, nontemporal);
            }
          else
            {
              gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
              result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
                                   nontemporal);
            }
        }
      else
        {
          if (MEM_P (to_rtx))
            {
              /* If the field is at offset zero, we could have been given the
                 DECL_RTX of the parent struct.  Don't munge it.  */
              to_rtx = shallow_copy_rtx (to_rtx);

              set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);

              /* Deal with volatile and readonly fields.  The former is only
                 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
              if (volatilep)
                MEM_VOLATILE_P (to_rtx) = 1;
              if (component_uses_parent_alias_set (to))
                MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
            }

          if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
                                               to_rtx, to, from))
            result = NULL;
          else
            result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                                  TREE_TYPE (tem), get_alias_set (to),
                                  nontemporal);
        }

      if (result)
        preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  The same is true for SSA names.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && COMPLETE_TYPE_P (TREE_TYPE (from))
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
             && REG_P (DECL_RTL (to)))
            || TREE_CODE (to) == SSA_NAME))
    {
      rtx value;

      push_temp_slots ();
      value = expand_normal (from);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, value, TREE_TYPE (from),
                         int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
        emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
        {
          if (POINTER_TYPE_P (TREE_TYPE (to)))
            value = convert_memory_address_addr_space
                      (GET_MODE (to_rtx), value,
                       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));

          emit_move_insn (to_rtx, value);
        }
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);

      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, temp, TREE_TYPE (from),
                         int_size_in_bytes (TREE_TYPE (from)));
      else
        emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL
      && TREE_CODE (from) == INDIRECT_REF
      && ADDR_SPACE_GENERIC_P
           (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
      && refs_may_alias_p (to, from)
      && cfun->returns_struct
      && !cfun->returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      emit_library_call (memmove_libfunc, LCT_NORMAL,
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                         XEXP (from_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype),
                                          size, TYPE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, 0, nontemporal);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return;
}
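
/* Illustrative note (not from the original sources): the movmisalign path
   above is what handles stores such as

     struct __attribute__((packed)) P { int x; } *p;
     p->x = v;

   on STRICT_ALIGNMENT targets, where the deduced alignment of the MEM_REF
   is below GET_MODE_ALIGNMENT (SImode) and the movmisalign<mode> pattern
   is the only way to store without faulting.  */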
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

bool
emit_storent_insn (rtx to, rtx from)
{
  struct expand_operand ops[2];
  enum machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
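
/* Illustrative sketch (not from the original sources): a caller that wants
   a nontemporal store simply tries this function first and falls back to a
   normal move, roughly

     if (!nontemporal || !emit_storent_insn (target, temp))
       emit_move_insn (target, temp);

   which mirrors the way store_expr below uses it.  */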
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem.
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.  */

rtx
store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  location_t loc = EXPR_LOCATION (exp);

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
         branch and an rvalue in the other.  Here, we resolve attempts to
         store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
                         nontemporal);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
                  nontemporal);
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
                  nontemporal);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* We can do the conversion inside EXP, which will often result
         in some optimizations.  Do the conversion in two steps: first
         change the signedness, if needed, then the extend.  But don't
         do this if the type of EXP is a subtype of something else
         since then the conversion might involve more than just
         converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
          && TREE_TYPE (TREE_TYPE (exp)) == 0
          && GET_MODE_PRECISION (GET_MODE (target))
             == TYPE_PRECISION (TREE_TYPE (exp)))
        {
          if (TYPE_UNSIGNED (TREE_TYPE (exp))
              != SUBREG_PROMOTED_UNSIGNED_P (target))
            {
              /* Some types, e.g. Fortran's logical*4, won't have a signed
                 version, so use the mode instead.  */
              tree ntype
                = (signed_or_unsigned_type_for
                   (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
              if (ntype == NULL)
                ntype = lang_hooks.types.type_for_mode
                  (TYPE_MODE (TREE_TYPE (exp)),
                   SUBREG_PROMOTED_UNSIGNED_P (target));

              exp = fold_convert_loc (loc, ntype, exp);
            }

          exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
                                  (GET_MODE (SUBREG_REG (target)),
                                   SUBREG_PROMOTED_UNSIGNED_P (target)),
                                  exp);

          inner_target = SUBREG_REG (target);
        }

      temp = expand_expr (exp, inner_target, VOIDmode,
                          call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
         sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
        {
          temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                                temp, SUBREG_PROMOTED_UNSIGNED_P (target));
          temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                GET_MODE (target), temp,
                                SUBREG_PROMOTED_UNSIGNED_P (target));
        }

      convert_move (SUBREG_REG (target), temp,
                    SUBREG_PROMOTED_UNSIGNED_P (target));

      return NULL_RTX;
    }
  else if ((TREE_CODE (exp) == STRING_CST
            || (TREE_CODE (exp) == MEM_REF
                && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                   == STRING_CST
                && integer_zerop (TREE_OPERAND (exp, 1))))
           && !nontemporal && !call_param_p
           && MEM_P (target))
    {
      /* Optimize initialization of an array with a STRING_CST.  */
      HOST_WIDE_INT exp_len, str_copy_len;
      rtx dest_mem;
      tree str = TREE_CODE (exp) == STRING_CST
                 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

      exp_len = int_expr_size (exp);
      if (exp_len <= 0)
        goto normal_expr;

      if (TREE_STRING_LENGTH (str) <= 0)
        goto normal_expr;

      str_copy_len = strlen (TREE_STRING_POINTER (str));
      if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
        goto normal_expr;

      str_copy_len = TREE_STRING_LENGTH (str);
      if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
          && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
        {
          str_copy_len += STORE_MAX_PIECES - 1;
          str_copy_len &= ~(STORE_MAX_PIECES - 1);
        }
      str_copy_len = MIN (str_copy_len, exp_len);
      if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
                                CONST_CAST (char *, TREE_STRING_POINTER (str)),
                                MEM_ALIGN (target), false))
        goto normal_expr;

      dest_mem = target;

      dest_mem = store_by_pieces (dest_mem,
                                  str_copy_len, builtin_strncpy_read_str,
                                  CONST_CAST (char *,
                                              TREE_STRING_POINTER (str)),
                                  MEM_ALIGN (target), false,
                                  exp_len > str_copy_len ? 1 : 0);
      if (exp_len > str_copy_len)
        clear_storage (adjust_address (dest_mem, BLKmode, 0),
                       GEN_INT (exp_len - str_copy_len),
                       BLOCK_OP_NORMAL);
      return NULL_RTX;
    }
  else
    {
      rtx tmp_target;

  normal_expr:
      /* If we want to use a nontemporal store, force the value to
         register first.  */
      tmp_target = nontemporal ? NULL_RTX : target;
      temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
                               (call_param_p
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL),
                               &alt_rtl);
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                          temp, TYPE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
                              || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
         but TARGET is not valid memory reference, TEMP will differ
         from TARGET although it is really the same location.  */
      && !(alt_rtl
           && rtx_equal_p (alt_rtl, target)
           && !side_effects_p (alt_rtl)
           && !side_effects_p (target))
      /* If there's nothing to copy, don't bother.  Don't call
         expr_size unless necessary, because some front-ends (C++)
         expr_size-hook must not be given objects that are not
         supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
          if (GET_MODE (target) == BLKmode
              && GET_MODE (temp) == BLKmode)
            emit_block_move (target, temp, expr_size (exp),
                             (call_param_p
                              ? BLOCK_OP_CALL_PARM
                              : BLOCK_OP_NORMAL));
          else if (GET_MODE (target) == BLKmode)
            store_bit_field (target, INTVAL (expr_size (exp)) * BITS_PER_UNIT,
                             0, GET_MODE (temp), temp);
          else
            convert_move (target, temp, unsignedp);
        }

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.  The string
             constant may be shorter than the array.  So copy just the string's
             actual length, and clear the rest.  First get the size of the data
             type of the string, which is actually the size of the target.  */
          rtx size = expr_size (exp);

          if (CONST_INT_P (size)
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size,
                             (call_param_p
                              ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
          else
            {
              enum machine_mode pointer_mode
                = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
              enum machine_mode address_mode
                = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));

              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop_loc (loc, MIN_EXPR,
                                  make_tree (sizetype, size),
                                  size_int (TREE_STRING_LENGTH (exp)));
              rtx copy_size_rtx
                = expand_expr (copy_size, NULL_RTX, VOIDmode,
                               (call_param_p
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL));
              rtx label = 0;

              /* Copy that much.  */
              copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
                                               TYPE_UNSIGNED (sizetype));
              emit_block_move (target, temp, copy_size_rtx,
                               (call_param_p
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

              /* Figure out how much is left in TARGET that we have to clear.
                 Do all calculations in pointer_mode.  */
              if (CONST_INT_P (copy_size_rtx))
                {
                  size = plus_constant (size, -INTVAL (copy_size_rtx));
                  target = adjust_address (target, BLKmode,
                                           INTVAL (copy_size_rtx));
                }
              else
                {
                  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

                  if (GET_MODE (copy_size_rtx) != address_mode)
                    copy_size_rtx = convert_to_mode (address_mode,
                                                     copy_size_rtx,
                                                     TYPE_UNSIGNED (sizetype));

                  target = offset_address (target, copy_size_rtx,
                                           highest_pow2_factor (copy_size));
                  label = gen_label_rtx ();
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
                                           GET_MODE (size), 0, label);
                }

              if (size != const0_rtx)
                clear_storage (target, size, BLOCK_OP_NORMAL);

              if (label)
                emit_label (label);
            }
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        emit_group_load (target, temp, TREE_TYPE (exp),
                         int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp),
                         (call_param_p
                          ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else if (nontemporal
               && emit_storent_insn (target, temp))
        /* If we managed to emit a nontemporal store, there is nothing else to
           do.  */
        ;
      else
        {
          temp = force_operand (temp, target);
          if (temp != target)
            emit_move_insn (target, temp);
        }
    }

  return NULL_RTX;
}
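
/* Illustrative note (not from the original sources): the STRING_CST path
   above means that an initialization such as

     char buf[64] = "abc";

   is expanded as one store_by_pieces of the leading string bytes (rounded
   up to a STORE_MAX_PIECES boundary when the string is NUL-terminated)
   followed by a single clear_storage call for the remaining tail of the
   array, instead of a library memcpy plus memset.  */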
/* Helper for categorize_ctor_elements.  Identical interface.  */

static bool
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
                            HOST_WIDE_INT *p_elt_count,
                            bool *p_must_clear)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, elt_count;
  tree value, purpose;

  /* Whether CTOR is a valid constant initializer, in accordance with what
     initializer_constant_valid_p does.  If inferred from the constructor
     elements, true until proven otherwise.  */
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);

  nz_elts = 0;
  elt_count = 0;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (TREE_CODE (purpose) == RANGE_EXPR)
        {
          tree lo_index = TREE_OPERAND (purpose, 0);
          tree hi_index = TREE_OPERAND (purpose, 1);

          if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
            mult = (tree_low_cst (hi_index, 1)
                    - tree_low_cst (lo_index, 1) + 1);
        }

      switch (TREE_CODE (value))
        {
        case CONSTRUCTOR:
          {
            HOST_WIDE_INT nz = 0, ic = 0;

            bool const_elt_p
              = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);

            nz_elts += mult * nz;
            elt_count += mult * ic;

            if (const_from_elts_p && const_p)
              const_p = const_elt_p;
          }
          break;

        case INTEGER_CST:
        case REAL_CST:
        case FIXED_CST:
          if (!initializer_zerop (value))
            nz_elts += mult;
          elt_count += mult;
          break;

        case STRING_CST:
          nz_elts += mult * TREE_STRING_LENGTH (value);
          elt_count += mult * TREE_STRING_LENGTH (value);
          break;

        case COMPLEX_CST:
          if (!initializer_zerop (TREE_REALPART (value)))
            nz_elts += mult;
          if (!initializer_zerop (TREE_IMAGPART (value)))
            nz_elts += mult;
          elt_count += mult;
          break;

        case VECTOR_CST:
          {
            tree v;
            for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
              {
                if (!initializer_zerop (TREE_VALUE (v)))
                  nz_elts += mult;
                elt_count += mult;
              }
          }
          break;

        default:
          {
            HOST_WIDE_INT tc = count_type_elements (TREE_TYPE (value), true);
            if (tc < 1)
              tc = 1;
            nz_elts += mult * tc;
            elt_count += mult * tc;

            if (const_from_elts_p && const_p)
              const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
                        != NULL_TREE;
          }
          break;
        }
    }

  if (!*p_must_clear
      && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
          || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
    {
      tree init_sub_type;
      bool clear_this = true;

      if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
        {
          /* We don't expect more than one element of the union to be
             initialized.  Not sure what we should do otherwise... */
          gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
                      == 1);

          init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
                                                CONSTRUCTOR_ELTS (ctor),
                                                0)->value);

          /* ??? We could look at each element of the union, and find the
             largest element.  Which would avoid comparing the size of the
             initialized element against any tail padding in the union.
             Doesn't seem worth the effort...  */
          if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
                                TYPE_SIZE (init_sub_type)) == 1)
            {
              /* And now we have to find out if the element itself is fully
                 constructed.  E.g. for union { struct { int a, b; } s; } u
                 = { .s = { .a = 1 } }.  */
              if (elt_count == count_type_elements (init_sub_type, false))
                clear_this = false;
            }
        }

      *p_must_clear = clear_this;
    }

  *p_nz_elts += nz_elts;
  *p_elt_count += elt_count;

  return const_p;
}
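
/* Illustrative example (not from the original sources): for

     struct { int a, b, c; } x = { 1, 0, 2 };

   this returns with *p_nz_elts == 2 and *p_elt_count == 3; for the union
   example in the comment above, *p_must_clear is set because the inner
   constructor initializes only one of the two scalars of the member.  */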
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * if a type is a union, and the initializer from the constructor
     is not the largest element in the union, then set *p_must_clear.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */

bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
                          HOST_WIDE_INT *p_elt_count,
                          bool *p_must_clear)
{
  *p_nz_elts = 0;
  *p_elt_count = 0;
  *p_must_clear = false;

  return
    categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
}
/* Count the number of scalars in TYPE.  Return -1 on overflow or
   variable-sized.  If ALLOW_FLEXARR is true, don't count flexible
   array member at the end of the structure.  */

HOST_WIDE_INT
count_type_elements (const_tree type, bool allow_flexarr)
{
  const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
        tree telts = array_type_nelts (type);
        if (telts && host_integerp (telts, 1))
          {
            HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
            HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
            if (n == 0)
              return 0;
            else if (max / n > m)
              return n * m;
          }
        return -1;
      }

    case RECORD_TYPE:
      {
        HOST_WIDE_INT n = 0, t;
        tree f;

        for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
          if (TREE_CODE (f) == FIELD_DECL)
            {
              t = count_type_elements (TREE_TYPE (f), false);
              if (t < 0)
                {
                  /* Check for structures with flexible array member.  */
                  tree tf = TREE_TYPE (f);
                  if (allow_flexarr
                      && DECL_CHAIN (f) == NULL
                      && TREE_CODE (tf) == ARRAY_TYPE
                      && TYPE_DOMAIN (tf)
                      && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
                      && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
                      && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
                      && int_size_in_bytes (type) >= 0)
                    break;

                  return -1;
                }
              n += t;
            }

        return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return -1;

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
      return 1;

    default:
      gcc_unreachable ();
    }
}
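
/* Illustrative example (not from the original sources): for
   "struct { int a, b; } v[4]" this returns 4 * 2 == 8 scalars, while a
   union member, a variable-length array, or an overflowing product
   yields -1.  */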
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, count, elts;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
      if (must_clear)
        return 1;

      elts = count_type_elements (TREE_TYPE (exp), false);

      return nz_elts < elts / 4;
    }

  return initializer_zerop (exp);
}
/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, count;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
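
/* Illustrative example (not from the original sources): for
   "int a[8] = { 0, 0, 1, 0, 0, 0, 0, 0 }" mostly_zeros_p returns 1,
   since 1 nonzero element out of 8 is below the 1/4 threshold, while
   all_zeros_p returns 0.  */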
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
                         HOST_WIDE_INT bitpos, enum machine_mode mode,
                         tree exp, tree type, int cleared,
                         alias_set_type alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
         bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
         let store_field do the bitfield handling.  This is unlikely to
         generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
        target
          = adjust_address (target,
                            GET_MODE (target) == BLKmode
                            || 0 != (bitpos
                                     % GET_MODE_ALIGNMENT (GET_MODE (target)))
                            ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);


      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
          && MEM_ALIAS_SET (target) != 0)
        {
          target = copy_rtx (target);
          set_mem_alias_set (target, alias_set);
        }

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
}
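
/* Illustrative note (not from the original sources): the shortcut above is
   what keeps a nested initializer such as

     struct { int pad[16]; struct { int a, b; } i; } x = { .i = { 1 } };

   from clearing the inner struct twice: the outer store_constructor clears
   the whole object once (missing fields), and the recursive call sees
   CLEARED set and skips storing the zero of x.i.b.  */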
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        unsigned HOST_WIDE_INT idx;
        tree field, value;

        /* If size is zero or the target is already cleared, do nothing.  */
        if (size == 0 || cleared)
          cleared = 1;
        /* We either clear the aggregate or indicate the value is dead.  */
        else if ((TREE_CODE (type) == UNION_TYPE
                  || TREE_CODE (type) == QUAL_UNION_TYPE)
                 && ! CONSTRUCTOR_ELTS (exp))
          /* If the constructor is empty, clear the union.  */
          {
            clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
            cleared = 1;
          }

        /* If we are building a static constructor into a register,
           set the initial value as zero so we can fold the value into
           a constant.  But if more than one register is involved,
           this probably loses.  */
        else if (REG_P (target) && TREE_STATIC (exp)
                 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
          {
            emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
            cleared = 1;
          }

        /* If the constructor has fewer fields than the structure or
           if we are initializing the structure to mostly zeros, clear
           the whole structure first.  Don't do this if TARGET is a
           register whose mode size isn't equal to SIZE since
           clear_storage can't handle this case.  */
        else if (size > 0
                 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
                      != fields_length (type))
                     || mostly_zeros_p (exp))
                 && (!REG_P (target)
                     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
                         == size)))
          {
            clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
            cleared = 1;
          }

        if (REG_P (target) && !cleared)
          emit_clobber (target);

        /* Store each element of the constructor into the
           corresponding field of TARGET.  */
        FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
          {
            enum machine_mode mode;
            HOST_WIDE_INT bitsize;
            HOST_WIDE_INT bitpos = 0;
            tree offset;
            rtx to_rtx = target;

            /* Just ignore missing fields.  We cleared the whole
               structure, above, if any fields are missing.  */
            if (field == 0)
              continue;

            if (cleared && initializer_zerop (value))
              continue;

            if (host_integerp (DECL_SIZE (field), 1))
              bitsize = tree_low_cst (DECL_SIZE (field), 1);
            else
              bitsize = -1;

            mode = DECL_MODE (field);
            if (DECL_BIT_FIELD (field))
              mode = VOIDmode;

            offset = DECL_FIELD_OFFSET (field);
            if (host_integerp (offset, 0)
                && host_integerp (bit_position (field), 0))
              {
                bitpos = int_bit_position (field);
                offset = 0;
              }
            else
              bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

            if (offset)
              {
                enum machine_mode address_mode;
                rtx offset_rtx;

                offset
                  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
                                                    make_tree (TREE_TYPE (exp),
                                                               target));
                offset_rtx = expand_normal (offset);
                gcc_assert (MEM_P (to_rtx));

                address_mode
                  = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
                if (GET_MODE (offset_rtx) != address_mode)
                  offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);

                to_rtx = offset_address (to_rtx, offset_rtx,
                                         highest_pow2_factor (offset));
              }

#ifdef WORD_REGISTER_OPERATIONS
            /* If this initializes a field that is smaller than a
               word, at the start of a word, try to widen it to a full
               word.  This special case allows us to output C++ member
               function initializations in a form that the optimizers
               can understand.  */
            if (REG_P (target)
                && bitsize < BITS_PER_WORD
                && bitpos % BITS_PER_WORD == 0
                && GET_MODE_CLASS (mode) == MODE_INT
                && TREE_CODE (value) == INTEGER_CST
                && exp_size >= 0
                && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
              {
                tree type = TREE_TYPE (value);

                if (TYPE_PRECISION (type) < BITS_PER_WORD)
                  {
                    type = lang_hooks.types.type_for_size
                      (BITS_PER_WORD, TYPE_UNSIGNED (type));
                    value = fold_convert (type, value);
                  }

                if (BYTES_BIG_ENDIAN)
                  value
                    = fold_build2 (LSHIFT_EXPR, type, value,
                                   build_int_cst (type,
                                                  BITS_PER_WORD - bitsize));
                bitsize = BITS_PER_WORD;
                mode = word_mode;
              }
#endif

            if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
                && DECL_NONADDRESSABLE_P (field))
              {
                to_rtx = copy_rtx (to_rtx);
                MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
              }

            store_constructor_field (to_rtx, bitsize, bitpos, mode,
                                     value, type, cleared,
                                     get_alias_set (TREE_TYPE (field)));
          }
        break;
      }
    case ARRAY_TYPE:
      {
        tree value, index;
        unsigned HOST_WIDE_INT i;
        int need_to_clear;
        tree domain;
        tree elttype = TREE_TYPE (type);
        int const_bounds_p;
        HOST_WIDE_INT minelt = 0;
        HOST_WIDE_INT maxelt = 0;

        domain = TYPE_DOMAIN (type);
        const_bounds_p = (TYPE_MIN_VALUE (domain)
                          && TYPE_MAX_VALUE (domain)
                          && host_integerp (TYPE_MIN_VALUE (domain), 0)
                          && host_integerp (TYPE_MAX_VALUE (domain), 0));

        /* If we have constant bounds for the range of the type, get them.  */
        if (const_bounds_p)
          {
            minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
            maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
          }

        /* If the constructor has fewer elements than the array, clear
           the whole array first.  Similarly if this is static
           constructor of a non-BLKmode object.  */
        if (cleared)
          need_to_clear = 0;
        else if (REG_P (target) && TREE_STATIC (exp))
          need_to_clear = 1;
        else
          {
            unsigned HOST_WIDE_INT idx;
            tree index, value;
            HOST_WIDE_INT count = 0, zero_count = 0;
            need_to_clear = ! const_bounds_p;

            /* This loop is a more accurate version of the loop in
               mostly_zeros_p (it handles RANGE_EXPR in an index).  It
               is also needed to check for missing elements.  */
            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
              {
                HOST_WIDE_INT this_node_count;

                if (need_to_clear)
                  break;

                if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
                  {
                    tree lo_index = TREE_OPERAND (index, 0);
                    tree hi_index = TREE_OPERAND (index, 1);

                    if (! host_integerp (lo_index, 1)
                        || ! host_integerp (hi_index, 1))
                      {
                        need_to_clear = 1;
                        break;
                      }

                    this_node_count = (tree_low_cst (hi_index, 1)
                                       - tree_low_cst (lo_index, 1) + 1);
                  }
                else
                  this_node_count = 1;

                count += this_node_count;
                if (mostly_zeros_p (value))
                  zero_count += this_node_count;
              }

            /* Clear the entire array first if there are any missing
               elements, or if the incidence of zero elements is >=
               75%.  */
            if (! need_to_clear
                && (count < maxelt - minelt + 1
                    || 4 * zero_count >= 3 * count))
              need_to_clear = 1;
          }

        if (need_to_clear && size > 0)
          {
            if (REG_P (target))
              emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
            else
              clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
            cleared = 1;
          }

        if (!cleared && REG_P (target))
          /* Inform later passes that the old value is dead.  */
          emit_clobber (target);

        /* Store each element of the constructor into the
           corresponding element of TARGET, determined by counting the
           elements.  */
        FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
          {
            enum machine_mode mode;
            HOST_WIDE_INT bitsize;
            HOST_WIDE_INT bitpos;
            rtx xtarget = target;

            if (cleared && initializer_zerop (value))
              continue;

            mode = TYPE_MODE (elttype);
            if (mode == BLKmode)
              bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
                         ? tree_low_cst (TYPE_SIZE (elttype), 1)
                         : -1);
            else
              bitsize = GET_MODE_BITSIZE (mode);

            if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
              {
                tree lo_index = TREE_OPERAND (index, 0);
                tree hi_index = TREE_OPERAND (index, 1);
                rtx index_r, pos_rtx;
                HOST_WIDE_INT lo, hi, count;
                tree position;

                /* If the range is constant and "small", unroll the loop.  */
                if (const_bounds_p
                    && host_integerp (lo_index, 0)
                    && host_integerp (hi_index, 0)
                    && (lo = tree_low_cst (lo_index, 0),
                        hi = tree_low_cst (hi_index, 0),
                        count = hi - lo + 1,
                        (!MEM_P (target)
                         || count <= 2
                         || (host_integerp (TYPE_SIZE (elttype), 1)
                             && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
                                 <= 40 * 8)))))
                  {
                    lo -= minelt;  hi -= minelt;
                    for (; lo <= hi; lo++)
                      {
                        bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

                        if (MEM_P (target)
                            && !MEM_KEEP_ALIAS_SET_P (target)
                            && TREE_CODE (type) == ARRAY_TYPE
                            && TYPE_NONALIASED_COMPONENT (type))
                          {
                            target = copy_rtx (target);
                            MEM_KEEP_ALIAS_SET_P (target) = 1;
                          }

                        store_constructor_field
                          (target, bitsize, bitpos, mode, value, type, cleared,
                           get_alias_set (elttype));
                      }
                  }
                else
                  {
                    rtx loop_start = gen_label_rtx ();
                    rtx loop_end = gen_label_rtx ();
                    tree exit_cond;

                    expand_normal (hi_index);

                    index = build_decl (EXPR_LOCATION (exp),
                                        VAR_DECL, NULL_TREE, domain);
                    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
                    SET_DECL_RTL (index, index_r);
                    store_expr (lo_index, index_r, 0, false);

                    /* Build the head of the loop.  */
                    do_pending_stack_adjust ();
                    emit_label (loop_start);

                    /* Assign value to element index.  */
                    position =
                      fold_convert (ssizetype,
                                    fold_build2 (MINUS_EXPR,
                                                 TREE_TYPE (index),
                                                 index,
                                                 TYPE_MIN_VALUE (domain)));

                    position =
                      size_binop (MULT_EXPR, position,
                                  fold_convert (ssizetype,
                                                TYPE_SIZE_UNIT (elttype)));

                    pos_rtx = expand_normal (position);
                    xtarget = offset_address (target, pos_rtx,
                                              highest_pow2_factor (position));
                    xtarget = adjust_address (xtarget, mode, 0);
                    if (TREE_CODE (value) == CONSTRUCTOR)
                      store_constructor (value, xtarget, cleared,
                                         bitsize / BITS_PER_UNIT);
                    else
                      store_expr (value, xtarget, 0, false);

                    /* Generate a conditional jump to exit the loop.  */
                    exit_cond = build2 (LT_EXPR, integer_type_node,
                                        index, hi_index);
                    jumpif (exit_cond, loop_end, -1);

                    /* Update the loop counter, and jump to the head of
                       the loop.  */
                    expand_assignment (index,
                                       build2 (PLUS_EXPR, TREE_TYPE (index),
                                               index, integer_one_node),
                                       false);

                    emit_jump (loop_start);

                    /* Build the end of the loop.  */
                    emit_label (loop_end);
                  }
              }
            else if ((index != 0 && ! host_integerp (index, 0))
                     || ! host_integerp (TYPE_SIZE (elttype), 1))
              {
                tree position;

                if (index == 0)
                  index = ssize_int (1);

                if (minelt)
                  index = fold_convert (ssizetype,
                                        fold_build2 (MINUS_EXPR,
                                                     TREE_TYPE (index),
                                                     index,
                                                     TYPE_MIN_VALUE (domain)));

                position =
                  size_binop (MULT_EXPR, index,
                              fold_convert (ssizetype,
                                            TYPE_SIZE_UNIT (elttype)));
                xtarget = offset_address (target,
                                          expand_normal (position),
                                          highest_pow2_factor (position));
                xtarget = adjust_address (xtarget, mode, 0);
                store_expr (value, xtarget, 0, false);
              }
            else
              {
                if (index != 0)
                  bitpos = ((tree_low_cst (index, 0) - minelt)
                            * tree_low_cst (TYPE_SIZE (elttype), 1));
                else
                  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

                if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
                    && TREE_CODE (type) == ARRAY_TYPE
                    && TYPE_NONALIASED_COMPONENT (type))
                  {
                    target = copy_rtx (target);
                    MEM_KEEP_ALIAS_SET_P (target) = 1;
                  }
                store_constructor_field (target, bitsize, bitpos, mode, value,
                                         type, cleared, get_alias_set (elttype));
              }
          }
        break;
      }

    case VECTOR_TYPE:
      {
        unsigned HOST_WIDE_INT idx;
        constructor_elt *ce;
        int i;
        int need_to_clear;
        int icode = 0;
        tree elttype = TREE_TYPE (type);
        int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
        enum machine_mode eltmode = TYPE_MODE (elttype);
        HOST_WIDE_INT bitsize;
        HOST_WIDE_INT bitpos;
        rtvec vector = NULL;
        unsigned n_elts;
        alias_set_type alias;

        gcc_assert (eltmode != BLKmode);

        n_elts = TYPE_VECTOR_SUBPARTS (type);
        if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
          {
            enum machine_mode mode = GET_MODE (target);

            icode = (int) optab_handler (vec_init_optab, mode);
            if (icode != CODE_FOR_nothing)
              {
                unsigned int i;

                vector = rtvec_alloc (n_elts);
                for (i = 0; i < n_elts; i++)
                  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
              }
          }

        /* If the constructor has fewer elements than the vector,
           clear the whole array first.  Similarly if this is static
           constructor of a non-BLKmode object.  */
        if (cleared)
          need_to_clear = 0;
        else if (REG_P (target) && TREE_STATIC (exp))
          need_to_clear = 1;
        else
          {
            unsigned HOST_WIDE_INT count = 0, zero_count = 0;
            tree value;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
              {
                int n_elts_here = tree_low_cst
                  (int_const_binop (TRUNC_DIV_EXPR,
                                    TYPE_SIZE (TREE_TYPE (value)),
                                    TYPE_SIZE (elttype), 0), 1);

                count += n_elts_here;
                if (mostly_zeros_p (value))
                  zero_count += n_elts_here;
              }

            /* Clear the entire vector first if there are any missing elements,
               or if the incidence of zero elements is >= 75%.  */
            need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
          }

        if (need_to_clear && size > 0 && !vector)
          {
            if (REG_P (target))
              emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
            else
              clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
            cleared = 1;
          }

        /* Inform later passes that the old value is dead.  */
        if (!cleared && !vector && REG_P (target))
          emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

        if (MEM_P (target))
          alias = MEM_ALIAS_SET (target);
        else
          alias = get_alias_set (elttype);

        /* Store each element of the constructor into the corresponding
           element of TARGET, determined by counting the elements.  */
        for (idx = 0, i = 0;
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
             idx++, i += bitsize / elt_size)
          {
            HOST_WIDE_INT eltpos;
            tree value = ce->value;

            bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
            if (cleared && initializer_zerop (value))
              continue;

            if (ce->index)
              eltpos = tree_low_cst (ce->index, 1);
            else
              eltpos = i;

            if (vector)
              {
                /* Vector CONSTRUCTORs should only be built from smaller
                   vectors in the case of BLKmode vectors.  */
                gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
                RTVEC_ELT (vector, eltpos)
                  = expand_normal (value);
              }
            else
              {
                enum machine_mode value_mode =
                  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
                  ? TYPE_MODE (TREE_TYPE (value))
                  : eltmode;
                bitpos = eltpos * elt_size;
                store_constructor_field (target, bitsize, bitpos,
                                         value_mode, value, type,
                                         cleared, alias);
              }
          }

        if (vector)
          emit_insn (GEN_FCN (icode)
                     (target,
                      gen_rtx_PARALLEL (GET_MODE (target), vector)));
        break;
      }

    default:
      gcc_unreachable ();
    }
}
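
/* Illustrative note (not from the original sources): for an array
   constructor with a range index, e.g. the GNU extension

     int a[100] = { [0 ... 9] = 1 };

   the ARRAY_TYPE case above either unrolls the range (when the bounds are
   constant and the total size is "small") or emits an actual runtime loop
   with its own index VAR_DECL, as in the RANGE_EXPR arm.  */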
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   Always return const0_rtx unless we have something particular to
   return.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
             enum machine_mode mode, tree exp, tree type,
             alias_set_type alias_set, bool nontemporal)
{
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
     twice, once with emit_move_insn and once via store_field.  */

  if (mode == BLKmode
      && (REG_P (target) || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_temp (type, 0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
        emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
                   nontemporal);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      gcc_assert (!bitpos);
      return store_expr (exp, target, 0, nontemporal);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
         store it as a bit field.  */
      || (mode != BLKmode
          && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
                || bitpos % GET_MODE_ALIGNMENT (mode))
               && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
              || (bitpos % BITS_PER_UNIT != 0)))
      /* If the RHS and field are a constant size and the size of the
         RHS isn't the same size as the bitfield, we must use bitfield
         operations.  */
      || (bitsize >= 0
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
          && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
      /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
         decl we must use bitfield operations.  */
      || (bitsize >= 0
          && TREE_CODE (exp) == MEM_REF
          && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
          && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
          && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
    {
      rtx temp;
      gimple nop_def;

      /* If EXP is a NOP_EXPR of precision less than its mode, then that
         implies a mask operation.  If the precision is the same size as
         the field we're storing into, that mask is redundant.  This is
         particularly common with bit field assignments generated by the
         C front end.  */
      nop_def = get_def_for_expr (exp, NOP_EXPR);
      if (nop_def)
        {
          tree type = TREE_TYPE (exp);
          if (INTEGRAL_TYPE_P (type)
              && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
              && bitsize == TYPE_PRECISION (type))
            {
              tree op = gimple_assign_rhs1 (nop_def);
              type = TREE_TYPE (op);
              if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
                exp = op;
            }
        }

      temp = expand_normal (exp);

      /* If BITSIZE is narrower than the size of the type of EXP
         we will be narrowing TEMP.  Normally, what's wanted are the
         low-order bits.  However, if EXP's type is a record and this is
         big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
          && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
        temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
                             size_int (GET_MODE_BITSIZE (GET_MODE (temp))
                                       - bitsize),
                             NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
         MODE.  */
      if (mode != VOIDmode && mode != BLKmode
          && mode != TYPE_MODE (TREE_TYPE (exp)))
        temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TEMP and TARGET are both BLKmode, both
         must be in memory and BITPOS must be aligned on a byte
         boundary.  If so, we simply do a block copy.  Likewise
         for a BLKmode-like TARGET.  */
      if (GET_MODE (temp) == BLKmode
          && (GET_MODE (target) == BLKmode
              || (MEM_P (target)
                  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
                  && (bitpos % BITS_PER_UNIT) == 0
                  && (bitsize % BITS_PER_UNIT) == 0)))
        {
          gcc_assert (MEM_P (target) && MEM_P (temp)
                      && (bitpos % BITS_PER_UNIT) == 0);

          target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
          emit_block_move (target, temp,
                           GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                    / BITS_PER_UNIT),
                           BLOCK_OP_NORMAL);

          return const0_rtx;
        }

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
        to_rtx = copy_rtx (to_rtx);

      if (!MEM_SCALAR_P (to_rtx))
        MEM_IN_STRUCT_P (to_rtx) = 1;
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
        set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, 0, nontemporal);
    }
}
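
/* Illustrative note (not from the original sources): store_field is where
   a bit-field assignment such as "x.f = v" ends up, and the NOP_EXPR check
   above lets

     struct { unsigned f : 8; } x;
     x.f = (unsigned char) v;

   drop the redundant 8-bit masking, since store_bit_field only inserts the
   low 8 bits of the value anyway.  */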
5907 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5908 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5909 codes and find the ultimate containing object, which we return.
5911 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5912 bit position, and *PUNSIGNEDP to the signedness of the field.
5913 If the position of the field is variable, we store a tree
5914 giving the variable offset (in units) in *POFFSET.
5915 This offset is in addition to the bit position.
5916 If the position is not variable, we store 0 in *POFFSET.
5918 If any of the extraction expressions is volatile,
5919 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5921 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5922 Otherwise, it is a mode that can be used to access the field.
5924 If the field describes a variable-sized object, *PMODE is set to
5925 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5926 this case, but the address of the object can be found.
5928 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5929 look through nodes that serve as markers of a greater alignment than
5930 the one that can be deduced from the expression. These nodes make it
5931 possible for front-ends to prevent temporaries from being created by
5932 the middle-end on alignment considerations. For that purpose, the
5933 normal operating mode at high-level is to always pass FALSE so that
5934 the ultimate containing object is really returned; moreover, the
5935 associated predicate handled_component_p will always return TRUE
5936 on these nodes, thus indicating that they are essentially handled
5937 by get_inner_reference. TRUE should only be passed when the caller
5938 is scanning the expression in order to build another representation
5939 and specifically knows how to handle these nodes; as such, this is
5940 the normal operating mode in the RTL expanders. */
5943 get_inner_reference (tree exp
, HOST_WIDE_INT
*pbitsize
,
5944 HOST_WIDE_INT
*pbitpos
, tree
*poffset
,
5945 enum machine_mode
*pmode
, int *punsignedp
,
5946 int *pvolatilep
, bool keep_aligning
)
5949 enum machine_mode mode
= VOIDmode
;
5950 bool blkmode_bitfield
= false;
5951 tree offset
= size_zero_node
;
5952 double_int bit_offset
= double_int_zero
;
5954 /* First get the mode, signedness, and size. We do this from just the
5955 outermost expression. */
5957 if (TREE_CODE (exp
) == COMPONENT_REF
)
5959 tree field
= TREE_OPERAND (exp
, 1);
5960 size_tree
= DECL_SIZE (field
);
5961 if (!DECL_BIT_FIELD (field
))
5962 mode
= DECL_MODE (field
);
5963 else if (DECL_MODE (field
) == BLKmode
)
5964 blkmode_bitfield
= true;
5965 else if (TREE_THIS_VOLATILE (exp
)
5966 && flag_strict_volatile_bitfields
> 0)
5967 /* Volatile bitfields should be accessed in the mode of the
5968 field's type, not the mode computed based on the bit
5970 mode
= TYPE_MODE (DECL_BIT_FIELD_TYPE (field
));
5972 *punsignedp
= DECL_UNSIGNED (field
);
5974 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5976 size_tree
= TREE_OPERAND (exp
, 1);
5977 *punsignedp
= (! INTEGRAL_TYPE_P (TREE_TYPE (exp
))
5978 || TYPE_UNSIGNED (TREE_TYPE (exp
)));
5980 /* For vector types, with the correct size of access, use the mode of
5982 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == VECTOR_TYPE
5983 && TREE_TYPE (exp
) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0)))
5984 && tree_int_cst_equal (size_tree
, TYPE_SIZE (TREE_TYPE (exp
))))
5985 mode
= TYPE_MODE (TREE_TYPE (exp
));
5989 mode
= TYPE_MODE (TREE_TYPE (exp
));
5990 *punsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
5992 if (mode
== BLKmode
)
5993 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5995 *pbitsize
= GET_MODE_BITSIZE (mode
);
6000 if (! host_integerp (size_tree
, 1))
6001 mode
= BLKmode
, *pbitsize
= -1;
6003 *pbitsize
= tree_low_cst (size_tree
, 1);
  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset
	    = double_int_add (bit_offset,
			      tree_to_double_int (TREE_OPERAND (exp, 2)));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset = double_int_add (bit_offset,
					 tree_to_double_int
					   (DECL_FIELD_BIT_OFFSET (field)));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     fold_convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset = double_int_add (bit_offset,
				       uhwi_to_double_int (*pbitsize));
	  break;

	case VIEW_CONVERT_EXPR:
	  if (keep_aligning && STRICT_ALIGNMENT
	      && (TYPE_ALIGN (TREE_TYPE (exp))
		  > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  < BIGGEST_ALIGNMENT)
	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	    goto done;
	  break;

	case MEM_REF:
	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      tree off = TREE_OPERAND (exp, 1);
	      if (!integer_zerop (off))
		{
		  double_int boff, coff = mem_ref_offset (exp);
		  boff = double_int_lshift (coff,
					    BITS_PER_UNIT == 8
					    ? 3 : exact_log2 (BITS_PER_UNIT),
					    HOST_BITS_PER_DOUBLE_INT, true);
		  bit_offset = double_int_add (bit_offset, boff);
		}
	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	    }
	  goto done;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (host_integerp (offset, 0))
    {
      double_int tem = double_int_lshift (tree_to_double_int (offset),
					  BITS_PER_UNIT == 8
					  ? 3 : exact_log2 (BITS_PER_UNIT),
					  HOST_BITS_PER_DOUBLE_INT, true);
      tem = double_int_add (tem, bit_offset);
      if (double_int_fits_in_shwi_p (tem))
	{
	  *pbitpos = double_int_to_shwi (tem);
	  *poffset = offset = NULL_TREE;
	}
    }

  /* Otherwise, split it up.  */
  if (offset)
    {
      *pbitpos = double_int_to_shwi (bit_offset);
      *poffset = offset;
    }

  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
  if (mode == VOIDmode
      && blkmode_bitfield
      && (*pbitpos % BITS_PER_UNIT) == 0
      && (*pbitsize % BITS_PER_UNIT) == 0)
    *pmode = BLKmode;
  else
    *pmode = mode;

  return exp;
}
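/* Illustrative sketch, not part of GCC: for a C reference such as
   "s.f[i]", where field "f" starts 64 bits into "s" and has 4-byte
   elements, the walk above decomposes the access roughly as

     returned object : s
     *pbitpos        : 64                 (constant part, in bits)
     *poffset        : (sizetype) i * 4   (variable part, in bytes)

   so the byte address of the access is &s + offset + bitpos / 8.  */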
/* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
   ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
   EXP is marked as PACKED.  */

bool
contains_packed_reference (const_tree exp)
{
  bool packed_p = false;

  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    packed_p = DECL_PACKED (field)
		       || TYPE_PACKED (TREE_TYPE (field))
		       || TYPE_PACKED (TREE_TYPE (exp));
	    if (packed_p)
	      goto done;
	  }
	  break;

	case BIT_FIELD_REF:
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  break;

	default:
	  goto done;
	}
      exp = TREE_OPERAND (exp, 0);
    }
 done:
  return packed_p;
}
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
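/* Hedged example, not in the sources: for "int a[10]" an ARRAY_REF of
   "a" normally has no operand 3, so the size comes straight from
   TYPE_SIZE_UNIT (int), i.e. 4 on common targets.  When operand 3 is
   present it counts alignment units: a value of 2 with a 2-byte
   element alignment likewise denotes 2 * 2 = 4 bytes.  */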
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);
  location_t loc = EXPR_LOCATION (exp);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
      return size_binop_loc (loc, MULT_EXPR, aligned_offset,
			     size_int (DECL_OFFSET_ALIGN (field)
				       / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
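/* Hedged example, not in the sources: for "struct { char c; int i; } s",
   a COMPONENT_REF of "s.i" typically has DECL_FIELD_OFFSET (i) == 4 and
   DECL_FIELD_BIT_OFFSET (i) == 0, so this function returns size_int (4);
   any sub-byte remainder is reported separately by the bit offset.  */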
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate misaligning
     bitfield components, so need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case BIT_FIELD_REF:
      return 1;

    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}
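/* Hedged example, not in the sources: for a store to "s.f" where "f"
   sits in a packed struct, DECL_ALIGN (f) may be only 8 bits even
   though f's type would prefer 32; the MIN above then yields 8, so
   callers will not wrongly assume word alignment for the target.  */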
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value
	= simplify_gen_subreg (GET_MODE (value),
			       force_reg (GET_MODE (SUBREG_REG (value)),
					  force_operand (SUBREG_REG (value),
							 NULL_RTX)),
			       GET_MODE (SUBREG_REG (value)),
			       SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && CONST_INT_P (op2))
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && CONST_INT_P (op2)
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
      && (GET_MODE_SIZE (GET_MODE (value))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
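/* Hedged example, not in the sources: given VALUE = (plus:SI (mult:SI
   (reg:SI 60) (const_int 4)) (reg:SI 61)), force_operand emits a
   multiply and an add and returns a fresh pseudo holding the sum, so
   the caller can use the result wherever a general operand is
   required.  */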
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (const_rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == CONSTRUCTOR)
	{
	  constructor_elt *ce;
	  unsigned HOST_WIDE_INT idx;

	  FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce)
	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
		|| !safe_from_p (x, ce->value, 0))
	      return 0;
	  return 1;
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;
      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MEM_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_OPERAND_LENGTH (exp);
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x,
					rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
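/* Hedged example, not in the sources: when storing into a MEM target X,
   safe_from_p (X, exp, 1) returns 0 as soon as EXP contains a CALL_EXPR,
   since a call is assumed to clobber all of memory; the expander then
   evaluates EXP into a temporary rather than directly into X.  */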
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT c0, c1;

  switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
      /* We can find the lowest bit that's a one.  If the low
	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
	 We need to handle this case since we can find it in a COND_EXPR,
	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
      if (TREE_OVERFLOW (exp))
	return BIGGEST_ALIGNMENT;
      else
	{
	  /* Note: tree_low_cst is intentionally not used here,
	     we don't care about the upper bits.  */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
	}
      break;

    case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return MIN (c0, c1);

    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;

    case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      if (integer_pow2p (TREE_OPERAND (exp, 1))
	  && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
	  return MAX (1, c0 / c1);
	}
      break;

    case BIT_AND_EXPR:
      /* The highest power of two of a bit-and expression is the maximum of
	 that of its operands.  We typically get here for a complex LHS and
	 a constant negative power of two on the RHS to force an explicit
	 alignment, so don't bother looking at the LHS.  */
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    CASE_CONVERT:
    case SAVE_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 0));

    case COMPOUND_EXPR:
      return highest_pow2_factor (TREE_OPERAND (exp, 1));

    case COND_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
      return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
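/* Worked example (illustrative): for the offset tree "i * 8 + 4" the
   MULT_EXPR contributes a factor of at least 1 * 8, the constant 4
   contributes 4, and the PLUS_EXPR takes MIN (8, 4), so the whole
   expression is known to be a multiple of 4 and a MEM based on it can
   be marked 4-byte aligned.  */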
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
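/* Hedged note, not in the sources: the operand_equal_p shortcut means
   an expression such as "a + a" expands "a" only once and reuses a
   copy of the resulting rtx for the second operand, instead of
   emitting its code twice.  */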
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);
  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
			 enum expand_modifier modifier, addr_space_t as)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int volatilep, unsignedp;
  enum machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     yet.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (CONSTANT_CLASS_P (exp))
    return XEXP (expand_expr_constant (exp, 0, modifier), 0);

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case MEM_REF:
      {
	tree tem = TREE_OPERAND (exp, 0);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  tem = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
			tem,
			double_int_to_tree (sizetype, mem_ref_offset (exp)));
	return expand_expr (tem, target, tmode, modifier);
      }

    case CONST_DECL:
      /* Expand the initializer like constants above.  */
      return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Expand the rtl of
	 CONSTRUCTORs too, which should yield a memory reference for the
	 constructor's contents.  Assume language specific tree nodes can
	 be expanded in some interesting way.  */
      gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
      if (DECL_P (exp)
	  || TREE_CODE (exp) == CONSTRUCTOR
	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */
	  gcc_assert (MEM_P (result));
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp) && !TREE_USED (exp) == 0)
	    {
	      assemble_external (exp);
	      TREE_USED (exp) = 1;
	    }

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS)
	    result = force_operand (result, target);
	  return result;
	}

      /* Pass FALSE as the last argument to get_inner_reference although
	 we are expanding to RTL.  The rationale is that we know how to
	 handle "aligning nodes" here: we can just bypass them because
	 they won't change the final object whose address will be returned
	 (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep, false);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
     inner alignment, force the inner to be sufficiently aligned.  */
  if (CONSTANT_CLASS_P (inner)
      && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
    {
      inner = copy_node (inner);
      TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
      TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
      TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
    }
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL_RTX, tmode,
			 modifier == EXPAND_INITIALIZER
			 ? EXPAND_INITIALIZER : EXPAND_NORMAL);

      result = convert_memory_address_addr_space (tmode, result, as);
      tmp = convert_memory_address_addr_space (tmode, tmp, as);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	result = simplify_gen_binary (PLUS, tmode, result, tmp);
      else
	{
	  subtarget = bitpos ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
	 of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = plus_constant (result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
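/* Hedged example, not in the sources: for &a[i] this recursion bottoms
   out at the DECL "a", adds the expanded variable offset
   "(sizetype) i * 4" with expand_simple_binop, and any constant bitpos
   (e.g. from &s.f) is folded in last via plus_constant.  */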
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
		       enum expand_modifier modifier)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  enum machine_mode address_mode = Pmode;
  enum machine_mode pointer_mode = ptr_mode;
  enum machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      address_mode = targetm.addr_space.address_mode (as);
      pointer_mode = targetm.addr_space.pointer_mode (as);
    }

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != address_mode && tmode != pointer_mode)
    tmode = address_mode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    tmode, modifier, as);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address_addr_space (tmode, result, as);

  return result;
}
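/* Hedged note, not in the sources: the mode override above matters for
   code like "(short) &a", where TMODE arrives as a narrow integer mode;
   the address is computed in the address space's address_mode (commonly
   Pmode) and only then converted, instead of trusting the bogus narrow
   mode.  */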
/* Generate code for computing CONSTRUCTOR EXP.
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
   is TRUE, instead of creating a temporary variable in memory
   NULL is returned and the caller needs to handle it differently.  */

static rtx
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
		    bool avoid_temp_mem)
{
  tree type = TREE_TYPE (exp);
  enum machine_mode mode = TYPE_MODE (type);

  /* Try to avoid creating a temporary at all.  This is possible
     if all of the initializer is zero.
     FIXME: try to handle all [0..255] initializers we can handle
     with memset.  */
  if (TREE_STATIC (exp)
      && !TREE_ADDRESSABLE (exp)
      && target != 0 && mode == BLKmode
      && all_zeros_p (exp))
    {
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
      return target;
    }

  /* All elts simple constants => refer to a constant in memory.  But
     if this is a non-BLKmode mode, let it store a field at a time
     since that should make a CONST_INT or CONST_DOUBLE when we
     fold.  Likewise, if we have a target we can use, it is best to
     store directly into the target unless the type is large enough
     that memcpy will be used.  If we are making an initializer and
     all operands are constant, put it in memory as well.

     FIXME: Avoid trying to fill vector constructors piece-meal.
     Output them with output_constant_def below unless we're sure
     they're zeros.  This should go away when vector initializers
     are treated like VECTOR_CST instead of arrays.  */
  if ((TREE_STATIC (exp)
       && ((mode == BLKmode
	    && ! (target != 0 && safe_from_p (target, exp, 1)))
	   || TREE_ADDRESSABLE (exp)
	   || (host_integerp (TYPE_SIZE_UNIT (type), 1)
	       && (! MOVE_BY_PIECES_P
		   (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
		    TYPE_ALIGN (type)))
	       && ! mostly_zeros_p (exp))))
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
	  && TREE_CONSTANT (exp)))
    {
      rtx constructor;

      if (avoid_temp_mem)
	return NULL_RTX;

      constructor = expand_expr_constant (exp, 1, modifier);

      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	constructor = validize_mem (constructor);

      return constructor;
    }

  /* Handle calls that pass values in multiple non-contiguous
     locations.  The Irix 6 ABI has examples of this.  */
  if (target == 0 || ! safe_from_p (target, exp, 1)
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
    {
      if (avoid_temp_mem)
	return NULL_RTX;

      target
	= assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
		       0, TREE_ADDRESSABLE (exp), 1);
    }

  store_constructor (exp, target, 0, int_expr_size (exp));
  return target;
}
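/* Hedged example, not in the sources: for a BLKmode aggregate whose
   CONSTRUCTOR is entirely zero (e.g. "struct S s = { 0 };"), the first
   test above skips materializing a constant-pool copy and emits the
   equivalent of memset (&s, 0, sizeof s) via clear_storage.  */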
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   Intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */

rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx ret;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (cfun && EXPR_HAS_LOCATION (exp))
    {
      location_t saved_location = input_location;
      location_t saved_curr_loc = get_curr_insn_source_location ();
      tree saved_block = get_curr_insn_block ();
      input_location = EXPR_LOCATION (exp);
      set_curr_insn_source_location (input_location);

      /* Record where the insns produced belong.  */
      set_curr_insn_block (TREE_BLOCK (exp));

      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

      input_location = saved_location;
      set_curr_insn_block (saved_block);
      set_curr_insn_source_location (saved_curr_loc);
    }
  else
    ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

  return ret;
}
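/* Hedged note, not in the sources: the save/restore dance above is what
   keeps diagnostics and debug line information pointing at the
   expression being expanded; insns produced for an expression on line
   42 are tagged with that location even when expansion recurses through
   helpers that change input_location themselves.  */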
static rtx
expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier)
{
  rtx op0, op1, op2, temp;
  tree type;
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = ops->code;
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  bool reduce_bit_field;
  location_t loc = ops->location;
  tree treeop0, treeop1, treeop2;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))

  type = ops->type;
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = ops->op0;
  treeop1 = ops->op1;
  treeop2 = ops->op2;

  /* We should be called only on simple (binary or unary) expressions,
     exactly those that are valid in gimple expressions that aren't
     GIMPLE_SINGLE_RHS (or invalid).  */
  gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);

  ignore = (target == const0_rtx
	    || ((CONVERT_EXPR_CODE_P (code)
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* We should be called only if we need the result.  */
  gcc_assert (!ignore);

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;

  switch (code)
    {
    case NON_LVALUE_EXPR:
    case PAREN_EXPR:
    CASE_CONVERT:
      if (treeop0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (treeop0);

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attribute.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (treeop0, target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, type, 0);
	      return result;
	    }

	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
		target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (MEM_P (target))
	    /* Store data into beginning of memory target.  */
	    store_expr (treeop0,
			adjust_address (target, TYPE_MODE (valtype), 0),
			modifier == EXPAND_STACK_PARM,
			false);

	  else
	    {
	      gcc_assert (REG_P (target));

	      /* Store this field into a union of the proper type.  */
	      store_field (target,
			   MIN ((int_size_in_bytes (TREE_TYPE
						    (treeop0))
				 * BITS_PER_UNIT),
				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			   0, TYPE_MODE (valtype), treeop0,
			   type, 0, false);
	    }

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
	{
	  op0 = expand_expr (treeop0, target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return REDUCE_BIT_FIELD (op0);
	}

      op0 = expand_expr (treeop0, NULL_RTX, mode,
			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
      if (GET_MODE (op0) == mode)
	;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (treeop0);
	  enum machine_mode inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
				       subreg_lowpart_offset (mode,
							      inner_mode));
	  else
	    op0 = convert_modes (mode, inner_mode, op0,
				 TYPE_UNSIGNED (inner_type));
	}

      else if (modifier == EXPAND_INITIALIZER)
	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
	op0 = convert_to_mode (mode, op0,
			       TYPE_UNSIGNED (TREE_TYPE
					      (treeop0)));
      else
	{
	  convert_move (target, op0,
			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
	  op0 = target;
	}

      return REDUCE_BIT_FIELD (op0);
    case ADDR_SPACE_CONVERT_EXPR:
      {
	tree treeop0_type = TREE_TYPE (treeop0);
	addr_space_t as_to;
	addr_space_t as_from;

	gcc_assert (POINTER_TYPE_P (type));
	gcc_assert (POINTER_TYPE_P (treeop0_type));

	as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
	as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));

	/* Conversions between pointers to the same address space should
	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
	gcc_assert (as_to != as_from);

	/* Ask target code to handle conversion between pointers
	   to overlapping address spaces.  */
	if (targetm.addr_space.subset_p (as_to, as_from)
	    || targetm.addr_space.subset_p (as_from, as_to))
	  {
	    op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
	    gcc_assert (op0);
	    return op0;
	  }

	/* For disjoint address spaces, converting anything but
	   a null pointer invokes undefined behaviour.  We simply
	   always return a null pointer here.  */
	return CONST0_RTX (mode);
      }
    case POINTER_PLUS_EXPR:
      /* Even though the sizetype mode and the pointer's mode can be different
	 expand is able to handle this correctly and get the correct result out
	 of the PLUS_EXPR code.  */
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
	 if sizetype precision is smaller than pointer precision.  */
      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type,
				    fold_convert_loc (loc, ssizetype,
						      treeop1));
    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (treeop0) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
	  && TREE_CODE (treeop1) == VAR_DECL
	  && (DECL_RTL (treeop1) == frame_pointer_rtx
	      || DECL_RTL (treeop1) == stack_pointer_rtx
	      || DECL_RTL (treeop1) == arg_pointer_rtx))
	{
	  tree t = treeop1;

	  treeop1 = TREE_OPERAND (treeop0, 0);
	  TREE_OPERAND (treeop0, 0) = t;
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 index.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (treeop0) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (treeop1))
	    {
	      rtx constant_part;

	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (treeop0),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (treeop1)));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return REDUCE_BIT_FIELD (op1);
	    }

	  else if (TREE_CODE (treeop1) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
		   && TREE_CONSTANT (treeop0))
	    {
	      rtx constant_part;

	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (treeop1, NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (treeop1),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (treeop0)));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return REDUCE_BIT_FIELD (op0);
	    }
	}

      /* Use TER to expand pointer addition of a negated value
	 as pointer subtraction.  */
      if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
	  && TREE_CODE (treeop1) == SSA_NAME
	  && TYPE_MODE (TREE_TYPE (treeop0))
	     == TYPE_MODE (TREE_TYPE (treeop1)))
	{
	  gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
	  if (def)
	    {
	      treeop1 = gimple_assign_rhs1 (def);
	      code = MINUS_EXPR;
	      goto do_minus;
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  expand_operands (treeop0, treeop1,
			   subtarget, &op0, &op1, EXPAND_NORMAL);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));

    case MINUS_EXPR:
    do_minus:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (treeop0)
	  && really_constant_p (treeop1))
	{
	  expand_operands (treeop0, treeop1,
			   NULL_RTX, &op0, &op1, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (CONST_INT_P (op1))
	    return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
	  else
	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (CONST_INT_P (op1))
	{
	  op1 = negate_rtx (mode, op1);
	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
	}

      goto binop2;
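/* Hedged example, not in the sources: under EXPAND_SUM, expanding
   "&arr[3]" can return the bare rtx (plus (symbol_ref "arr")
   (const_int 12)) without emitting an add; only when the modifier
   forbids such sums does force_operand materialize the value in a
   register.  */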
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      op2 = expand_normal (treeop2);
      target = expand_widen_pattern_expr (ops, op0, op1, op2,
					  target, unsignedp);
      return target;

    case WIDEN_MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
	{
	  tree t1 = treeop0;
	  treeop0 = treeop1;
	  treeop1 = t1;
	}

      /* First, check if we have a multiplication of one signed and one
	 unsigned operand.  */
      if (TREE_CODE (treeop1) != INTEGER_CST
	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
	{
	  enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
	  this_optab = usmul_widen_optab;
	  if (mode == GET_MODE_2XWIDER_MODE (innermode))
	    {
	      if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
		{
		  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
		    expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
				     EXPAND_NORMAL);
		  else
		    expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
				     EXPAND_NORMAL);
		  goto binop3;
		}
	    }
	}
      /* Check for a multiplication with matching signedness.  */
      else if ((TREE_CODE (treeop1) == INTEGER_CST
		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
	{
	  tree op0type = TREE_TYPE (treeop0);
	  enum machine_mode innermode = TYPE_MODE (op0type);
	  bool zextend_p = TYPE_UNSIGNED (op0type);
	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

	  if (mode == GET_MODE_2XWIDER_MODE (innermode)
	      && TREE_CODE (treeop0) != INTEGER_CST)
	    {
	      if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
		{
		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
				   EXPAND_NORMAL);
		  temp = expand_widening_mult (mode, op0, op1, target,
					       unsignedp, this_optab);
		  return REDUCE_BIT_FIELD (temp);
		}
	      if (optab_handler (other_optab, mode) != CODE_FOR_nothing
		  && innermode == word_mode)
		{
		  rtx htem, hipart;

		  op0 = expand_normal (treeop0);
		  if (TREE_CODE (treeop1) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_normal (treeop1), unsignedp);
		  else
		    op1 = expand_normal (treeop1);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (innermode, temp);
		  htem = expand_mult_highpart_adjust (innermode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (htem != hipart)
		    emit_move_insn (hipart, htem);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}
      treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
      treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
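/* Hedged example, not in the sources: on a 32-bit target a DImode
   widening multiply of two SImode values prefers the matching
   [us]mul_widen_optab (e.g. a mulsidi3 pattern); if only the
   opposite-signedness pattern exists and the inner mode is word_mode,
   the code above uses it anyway and fixes the high part afterwards
   with expand_mult_highpart_adjust.  */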
    case FMA_EXPR:
      {
	optab opt = fma_optab;
	gimple def0, def2;

	/* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
	   call.  */
	if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
	  {
	    tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
	    tree call_expr;

	    gcc_assert (fn != NULL_TREE);
	    call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
	    return expand_builtin (call_expr, target, subtarget, mode, false);
	  }

	def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
	def2 = get_def_for_expr (treeop2, NEGATE_EXPR);

	op0 = op2 = NULL;

	if (def0 && def2
	    && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fnms_optab;
	    op0 = expand_normal (gimple_assign_rhs1 (def0));
	    op2 = expand_normal (gimple_assign_rhs1 (def2));
	  }
	else if (def0
		 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fnma_optab;
	    op0 = expand_normal (gimple_assign_rhs1 (def0));
	  }
	else if (def2
		 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fms_optab;
	    op2 = expand_normal (gimple_assign_rhs1 (def2));
	  }

	if (op0 == NULL)
	  op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
	if (op2 == NULL)
	  op2 = expand_normal (treeop2);
	op1 = expand_normal (treeop1);

	return expand_ternary_op (TYPE_MODE (type), opt,
				  op0, op1, op2, target, 0);
      }
    case MULT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_mult" doesn't support sat/no-sat fixed-point
	 multiplications.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
	{
	  tree t1 = treeop0;
	  treeop0 = treeop1;
	  treeop1 = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (treeop1, 0))
	{
	  tree exp1 = treeop1;

	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);

	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_low_cst (exp1, 0),
					     TYPE_MODE (TREE_TYPE (exp1)))));
	}

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
	 divisions.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, EXPAND_NORMAL);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, EXPAND_NORMAL);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIXED_CONVERT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);

      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
	  || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
      else
	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
      return target;

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type,
					       optab_default),
			  op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);

    case ABS_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, treeop0, 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (treeop0, treeop1,
		       target, &op0, &op1, EXPAND_NORMAL);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = optab_for_tree_code (code, type, optab_default);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (! REG_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}

      /* We generate better code and avoid problems with op1 mentioning
	 target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
	op1 = force_reg (mode, op1);

      {
	enum rtx_code comparison_code;
	rtx cmpop1 = op1;

	if (code == MAX_EXPR)
	  comparison_code = unsignedp ? GEU : GE;
	else
	  comparison_code = unsignedp ? LEU : LE;

	/* Canonicalize to comparisons against 0.  */
	if (op1 == const1_rtx)
	  {
	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
	       or (a != 0 ? a : 1) for unsigned.
	       For MIN we are safe converting (a <= 1 ? a : 1)
	       into (a <= 0 ? a : 1)  */
	    cmpop1 = const0_rtx;
	    if (code == MAX_EXPR)
	      comparison_code = unsignedp ? NE : GT;
	  }
	if (op1 == constm1_rtx && !unsignedp)
	  {
	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
	    cmpop1 = const0_rtx;
	    if (code == MIN_EXPR)
	      comparison_code = LT;
	  }
#ifdef HAVE_conditional_move
	/* Use a conditional move if possible.  */
	if (can_conditionally_move_p (mode))
	  {
	    rtx insn;

	    /* ??? Same problem as in expmed.c: emit_conditional_move
	       forces a stack adjustment via compare_from_rtx, and we
	       lose the stack adjustment if the sequence we are about
	       to create is discarded.  */
	    do_pending_stack_adjust ();

	    start_sequence ();

	    /* Try to emit the conditional move.  */
	    insn = emit_conditional_move (target, comparison_code,
					  op0, cmpop1, mode,
					  op0, op1, mode,
					  unsignedp);

	    /* If we could do the conditional move, emit the sequence,
	       and return.  */
	    if (insn)
	      {
		rtx seq = get_insns ();
		end_sequence ();
		emit_insn (seq);
		return target;
	      }

	    /* Otherwise discard the sequence and fall back to code with
	       branches.  */
	    end_sequence ();
	  }
#endif
	if (target != op0)
	  emit_move_insn (target, op0);

	temp = gen_label_rtx ();
	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
				 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
				 -1);
      }
      emit_move_insn (target, op1);
      emit_label (temp);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
      code = BIT_AND_EXPR;
    case BIT_AND_EXPR:
      goto binop;

    case TRUTH_OR_EXPR:
      code = BIT_IOR_EXPR;
    case BIT_IOR_EXPR:
      goto binop;

    case TRUTH_XOR_EXPR:
      code = BIT_XOR_EXPR;
    case BIT_XOR_EXPR:
      goto binop;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
		  || (GET_MODE_PRECISION (TYPE_MODE (type))
		      == TYPE_PRECISION (type)));
      /* fall through */
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_shift" doesn't support sat/no-sat fixed-point
	 shifts.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      if (! safe_from_p (subtarget, treeop1, 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      temp = expand_shift (code, mode, op0, treeop1, target,
			   unsignedp);
      if (code == LSHIFT_EXPR)
	temp = REDUCE_BIT_FIELD (temp);
      return temp;
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (ops,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode);
      if (temp)
	return temp;

      /* Use a compare and a jump for BLKmode comparisons, or for function
	 type comparisons if HAVE_canonicalize_funcptr_for_compare.  */

      if ((target == 0
	   || modifier == EXPAND_STACK_PARM
	   || ! safe_from_p (target, treeop0, 1)
	   || ! safe_from_p (target, treeop1, 1)
	   /* Make sure we don't have a hard reg (such as function's return
	      value) live across basic blocks, if not optimizing.  */
	   || (!optimize && REG_P (target)
	       && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot_1 (code, treeop0, treeop1, op1, -1);

      emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return target;
    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
      op0 = expand_expr (treeop0, target,
			 VOIDmode, EXPAND_NORMAL);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);

      /* Get the rtx values of the operands.  */
      op0 = expand_normal (treeop0);
      op1 = expand_normal (treeop1);

      target = gen_reg_rtx (TYPE_MODE (type));

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);
    case WIDEN_SUM_EXPR:
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
	op0 = expand_normal (treeop0);
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_unop (mode, this_optab, op0, target, unsignedp);

    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
	expand_operands (treeop0, treeop1,
			 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,

    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:
	expand_operands (treeop0, treeop1,
			 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
	target = expand_vec_shift_expr (ops, target);
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
	op0 = expand_normal (treeop0);
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,

    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
	op0 = expand_normal (treeop0);
	/* The signedness is determined from the input operand.  */
	this_optab = optab_for_tree_code (code,
					  TREE_TYPE (treeop0),
	temp = expand_widen_pattern_expr
	  (ops, op0, NULL_RTX, NULL_RTX,
	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
	gcc_assert (target);

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      mode = TYPE_MODE (TREE_TYPE (treeop0));

	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (ops, op0, op1, op2,

    case REALIGN_LOAD_EXPR:
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;

	this_optab = optab_for_tree_code (code, type, optab_default);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,

  /* Here to do an ordinary binary operator.  */
  expand_operands (treeop0, treeop1,
		   subtarget, &op0, &op1, EXPAND_NORMAL);
  this_optab = optab_for_tree_code (code, type, optab_default);
  if (modifier == EXPAND_STACK_PARM)
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  return REDUCE_BIT_FIELD (temp);

#undef REDUCE_BIT_FIELD
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl)
  rtx op0, op1, temp, decl_rtl;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  rtx subtarget, original_target;
  bool reduce_bit_field;
  location_t loc = EXPR_LOCATION (exp);
  struct separate_ops ops;
  tree treeop0, treeop1, treeop2;
  tree ssa_name = NULL_TREE;

  type = TREE_TYPE (exp);
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = treeop1 = treeop2 = NULL_TREE;
  if (!VL_EXP_CLASS_P (exp))
    switch (TREE_CODE_LENGTH (code))
      case 3: treeop2 = TREE_OPERAND (exp, 2);
      case 2: treeop1 = TREE_OPERAND (exp, 1);
      case 1: treeop0 = TREE_OPERAND (exp, 0);

  ignore = (target == const0_rtx
	    || ((CONVERT_EXPR_CODE_P (code)
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (!ignore
		      && TREE_CODE (type) == INTEGER_TYPE
		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
      if (! TREE_SIDE_EFFECTS (exp))

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	    temp = copy_to_reg (temp);

      if (TREE_CODE_CLASS (code) == tcc_unary
	  || code == COMPONENT_REF || code == INDIRECT_REF)
	return expand_expr (treeop0, const0_rtx, VOIDmode,

      else if (TREE_CODE_CLASS (code) == tcc_binary
	       || TREE_CODE_CLASS (code) == tcc_comparison
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);

      else if (code == BIT_FIELD_REF)
	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
	  expand_expr (treeop2, const0_rtx, VOIDmode, modifier);

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
	tree function = decl_function_context (exp);

	temp = label_rtx (exp);
	temp = gen_rtx_LABEL_REF (Pmode, temp);
	if (function != current_function_decl
	  LABEL_REF_NONLOCAL_P (temp) = 1;

	temp = gen_rtx_MEM (FUNCTION_MODE, temp);

      /* ??? ivopts calls expander, without any preparation from
	 out-of-ssa.  So fake instructions as if this were an access to the
	 base variable.  This unnecessarily allocates a pseudo, see how we can
	 reuse it, if partition base vars have it set already.  */
      if (!currently_expanding_to_rtl)
	return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,

      g = get_gimple_for_ssa_name (exp);
      /* For EXPAND_INITIALIZER try harder to get something simpler.  */
	  && modifier == EXPAND_INITIALIZER
	  && !SSA_NAME_IS_DEFAULT_DEF (exp)
	  && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
	  && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
	g = SSA_NAME_DEF_STMT (exp);
	return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,

      decl_rtl = get_rtx_for_ssa_name (ssa_name);
      exp = SSA_NAME_VAR (ssa_name);
      goto expand_decl_rtl;
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	layout_decl (exp, 0);

      /* ... fall through ... */

      decl_rtl = DECL_RTL (exp);
      gcc_assert (decl_rtl);
      decl_rtl = copy_rtx (decl_rtl);
      /* Record writes to register variables.  */
      if (modifier == EXPAND_WRITE && REG_P (decl_rtl)
	  && REGNO (decl_rtl) < FIRST_PSEUDO_REGISTER)
	  int i = REGNO (decl_rtl);
	  int nregs = hard_regno_nregs[i][GET_MODE (decl_rtl)];
	    SET_HARD_REG_BIT (crtl->asm_clobbers, i);

      /* Ensure the variable is marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	  assemble_external (exp);
	  TREE_USED (exp) = 1;

      /* Show we haven't gotten RTL for this yet.  */

      /* Variables inherited from containing functions should have
	 been lowered by this point.  */
      context = decl_function_context (exp);
      gcc_assert (!context
		  || context == current_function_decl
		  || TREE_STATIC (exp)
		  || DECL_EXTERNAL (exp)
		  /* ??? C++ creates functions that are not TREE_STATIC.  */
		  || TREE_CODE (exp) == FUNCTION_DECL);
      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */
      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
	temp = validize_mem (decl_rtl);

      /* If DECL_RTL is memory, we are in the normal case and the
	 address is not valid, get the address into a register.  */
      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
	    *alt_rtl = decl_rtl;
	  decl_rtl = use_anchored_address (decl_rtl);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM
	      && !memory_address_addr_space_p (DECL_MODE (exp),
					       MEM_ADDR_SPACE (decl_rtl)))
	    temp = replace_equiv_address (decl_rtl,
					  copy_rtx (XEXP (decl_rtl, 0)));

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */
      if (REG_P (decl_rtl) && GET_MODE (decl_rtl) != DECL_MODE (exp))
	  enum machine_mode pmode;

	  /* Get the signedness to be used for this variable.  Ensure we get
	     the same mode we got when the variable was declared.  */
	  if (code == SSA_NAME
	      && (g = SSA_NAME_DEF_STMT (ssa_name))
	      && gimple_code (g) == GIMPLE_CALL)
	    pmode = promote_function_mode (type, mode, &unsignedp,
					   (TREE_TYPE (gimple_call_fn (g))),
	    pmode = promote_decl_mode (exp, &unsignedp);
	  gcc_assert (GET_MODE (decl_rtl) == pmode);

	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);

	tree tmp = NULL_TREE;
	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
	  return const_vector_from_tree (exp);
	if (GET_MODE_CLASS (mode) == MODE_INT)
	    tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
	      tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
	  tmp = build_constructor_from_list (type,
					     TREE_VECTOR_CST_ELTS (exp));
	return expand_expr (tmp, ignore ? const0_rtx : target,

      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);

      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));

      return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);

	    emit_move_insn (rtarg, op0);
	    emit_move_insn (itarg, op1);

	  return original_target;

      /* ... fall through ... */

      temp = expand_expr_constant (exp, 1, modifier);

      /* temp contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
					    MEM_ADDR_SPACE (temp)))
	return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);

	if (!SAVE_EXPR_RESOLVED_P (exp))
	    /* We can indeed still hit this case, typically via builtin
	       expanders calling save_expr immediately before expanding
	       something.  Assume this means that we only have to deal
	       with non-BLKmode values.  */
	    gcc_assert (GET_MODE (ret) != BLKmode);

	    val = build_decl (EXPR_LOCATION (exp),
			      VAR_DECL, NULL, TREE_TYPE (exp));
	    DECL_ARTIFICIAL (val) = 1;
	    DECL_IGNORED_P (val) = 1;
	    TREE_OPERAND (exp, 0) = treeop0;
	    SAVE_EXPR_RESOLVED_P (exp) = 1;

	    if (!CONSTANT_P (ret))
	      ret = copy_to_reg (ret);
	    SET_DECL_RTL (val, ret);

      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
	  unsigned HOST_WIDE_INT idx;

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);

      return expand_constructor (exp, target, modifier, false);
    case TARGET_MEM_REF:
	addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
	struct mem_address addr;

	get_address_description (exp, &addr);
	op0 = addr_for_mem_ref (&addr, as, true);
	op0 = memory_address_addr_space (mode, op0, as);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
		     get_object_alignment (exp, BIGGEST_ALIGNMENT));
	    && (unsigned) align < GET_MODE_ALIGNMENT (mode)
	    /* If the target does not have special handling for unaligned
	       loads of mode then it can use regular moves for them.  */
	    && ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing))

	    /* We've already validated the memory, and we're creating a
	       new pseudo destination.  The predicates really can't fail.  */
	    reg = gen_reg_rtx (mode);

	    /* Nor can the insn generator.  */
	    insn = GEN_FCN (icode) (reg, temp);
	    gcc_assert (insn != NULL_RTX);
	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	enum machine_mode address_mode;
	tree base = TREE_OPERAND (exp, 0);

	/* Handle expansion of non-aliased memory with non-BLKmode.  That
	   might end up in a register.  */
	if (TREE_CODE (base) == ADDR_EXPR)
	    HOST_WIDE_INT offset = mem_ref_offset (exp).low;
	    base = TREE_OPERAND (base, 0);
	      base = get_addr_base_and_unit_offset (base, &off);

	    /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
	       decl we must use bitfield operations.  */
		&& !TREE_ADDRESSABLE (base)
		&& DECL_MODE (base) != BLKmode
		&& DECL_RTL_SET_P (base)
		&& !MEM_P (DECL_RTL (base)))
		    && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
		    && (GET_MODE_BITSIZE (DECL_MODE (base))
			== TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
		  return expand_expr (build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (exp), base),
				      target, tmode, modifier);
		bit_offset = bitsize_int (offset * BITS_PER_UNIT);
		bftype = TREE_TYPE (base);
		if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
		  bftype = TREE_TYPE (exp);
		return expand_expr (build3 (BIT_FIELD_REF, bftype,
					    TYPE_SIZE (TREE_TYPE (exp)),
				    target, tmode, modifier);
	address_mode = targetm.addr_space.address_mode (as);
	base = TREE_OPERAND (exp, 0);
	if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
	    tree mask = gimple_assign_rhs2 (def_stmt);
	    base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
			   gimple_assign_rhs1 (def_stmt), mask);
	    TREE_OPERAND (exp, 0) = base;

	align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
		     get_object_alignment (exp, BIGGEST_ALIGNMENT));
	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address_addr_space (address_mode, op0, as);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	      = immed_double_int_const (mem_ref_offset (exp), address_mode);
	    op0 = simplify_gen_binary (PLUS, address_mode, op0, off);

	op0 = memory_address_addr_space (mode, op0, as);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	if (TREE_THIS_VOLATILE (exp))
	  MEM_VOLATILE_P (temp) = 1;
	    && (unsigned) align < GET_MODE_ALIGNMENT (mode)
	    /* If the target does not have special handling for unaligned
	       loads of mode then it can use regular moves for them.  */
	    && ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing))

	    /* We've already validated the memory, and we're creating a
	       new pseudo destination.  The predicates really can't fail.  */
	    reg = gen_reg_rtx (mode);

	    /* Nor can the insn generator.  */
	    insn = GEN_FCN (icode) (reg, temp);
	tree array = treeop0;
	tree index = treeop1;

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */
	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY)
	    tree t = fold_read_from_constant_string (exp);
	      return expand_expr (t, target, tmode, modifier);

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */
	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == CONSTRUCTOR
	    && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST)
	    unsigned HOST_WIDE_INT ix;

	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
	      if (tree_int_cst_equal (field, index))
		  if (!TREE_SIDE_EFFECTS (value))
		    return expand_expr (fold (value), target, tmode, modifier);
	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && modifier != EXPAND_MEMORY
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
		 && const_value_known_p (array))
	    if (TREE_CODE (index) == INTEGER_CST)
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		    unsigned HOST_WIDE_INT ix;

		    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
		      if (tree_int_cst_equal (field, index))
			  if (TREE_SIDE_EFFECTS (value))
			  if (TREE_CODE (value) == CONSTRUCTOR)
			      /* If VALUE is a CONSTRUCTOR, this
				 optimization is only useful if
				 this doesn't store the CONSTRUCTOR
				 into memory.  If it does, it is more
				 efficient to just load the data from
				 the array directly.  */
			      rtx ret = expand_constructor (value, target,
			      if (ret == NULL_RTX)
			    return expand_expr (fold (value), target, tmode,
		else if (TREE_CODE (init) == STRING_CST)
		    tree index1 = index;
		    tree low_bound = array_ref_low_bound (exp);
		    index1 = fold_convert_loc (loc, sizetype,

		    /* Optimize the special case of a zero lower bound.

		       We convert low_bound to sizetype to avoid problems
		       with constant folding.  (E.g. suppose the lower bound
		       is 1, and its mode is QI.  Without the conversion,
		       (ARRAY + (INDEX - (unsigned char) 1)) becomes
		       ((ARRAY + (- (unsigned char) 1)) + INDEX), which
		       becomes (ARRAY + 255 + INDEX).  Oops!)  */
		    if (! integer_zerop (low_bound))
		      index1 = size_diffop_loc (loc, index1,
						fold_convert_loc (loc, sizetype,

		    if (0 > compare_tree_int (index1,
					      TREE_STRING_LENGTH (init)))
			tree type = TREE_TYPE (TREE_TYPE (init));
			enum machine_mode mode = TYPE_MODE (type);

			if (GET_MODE_CLASS (mode) == MODE_INT
			    && GET_MODE_SIZE (mode) == 1)
			  return gen_int_mode (TREE_STRING_POINTER (init)
					       [TREE_INT_CST_LOW (index1)],

      goto normal_inner_ref;
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  */
      if (TREE_CODE (treeop0) == CONSTRUCTOR)
	  unsigned HOST_WIDE_INT idx;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
	    if (field == treeop1
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (field)
		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (field))
			    <= HOST_BITS_PER_WIDE_INT))))
		if (DECL_BIT_FIELD (field)
		    && modifier == EXPAND_STACK_PARM)
		op0 = expand_expr (value, target, tmode, modifier);
		if (DECL_BIT_FIELD (field))
		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
		    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));

		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
			  = build_int_cst (NULL_TREE,
					   GET_MODE_BITSIZE (imode) - bitsize);
			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,

      goto normal_inner_ref;
    case ARRAY_RANGE_REF:
	enum machine_mode mode1, mode2;
	HOST_WIDE_INT bitsize, bitpos;
	int volatilep = 0, must_force_mem;
	bool packedp = false;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep, true);
	rtx orig_op0, memloc;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	gcc_assert (tem != exp);

	if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
	    || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
		&& DECL_PACKED (TREE_OPERAND (exp, 1))))

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to have to do.  This occurs in unchecked conversion in Ada.  */
	   (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
	    && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
	    && modifier != EXPAND_STACK_PARM
	    ? target : NULL_RTX),
	   (modifier == EXPAND_INITIALIZER
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_STACK_PARM)
	   ? modifier : EXPAND_NORMAL);

	/* If the bitfield is volatile, we want to access it in the
	   field's mode, not the computed mode.
	   If a MEM has VOIDmode (external with incomplete type),
	   use BLKmode for it instead.  */
	    if (volatilep && flag_strict_volatile_bitfields > 0)
	      op0 = adjust_address (op0, mode1, 0);
	    else if (GET_MODE (op0) == VOIDmode)
	      op0 = adjust_address (op0, BLKmode, 0);

	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);

	/* If we have either an offset, a BLKmode result, or a reference
	   outside the underlying object, we must force it to memory.
	   Such a case can occur in Ada if we have unchecked conversion
	   of an expression from a scalar type to an aggregate type or
	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
	   passed a partially uninitialized object or a view-conversion
	   to a larger size.  */
	must_force_mem = (offset
			  || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
	/* Handle CONCAT first.  */
	if (GET_CODE (op0) == CONCAT && !must_force_mem)
	      && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
		&& bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
		op0 = XEXP (op0, 0);
		mode2 = GET_MODE (op0);
	    else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
		     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
		op0 = XEXP (op0, 1);
		mode2 = GET_MODE (op0);
	    /* Otherwise force into memory.  */

	/* If this is a constant, put it in a register if it is a legitimate
	   constant and we don't need a memory reference.  */
	if (CONSTANT_P (op0)
	    && LEGITIMATE_CONSTANT_P (op0)
	  op0 = force_reg (mode2, op0);

	/* Otherwise, if this is a constant, try to force it to the constant
	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
	   is a legitimate constant.  */
	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
	  op0 = validize_mem (memloc);

	/* Otherwise, if this is a constant or the object is not in memory
	   and need be, put it there.  */
	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
	    tree nt = build_qualified_type (TREE_TYPE (tem),
					    (TYPE_QUALS (TREE_TYPE (tem))
					     | TYPE_QUAL_CONST));
	    memloc = assign_temp (nt, 1, 1, 1);
	    emit_move_insn (memloc, op0);

	    enum machine_mode address_mode;
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,

	    gcc_assert (MEM_P (op0));

	      = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
	    if (GET_MODE (offset_rtx) != address_mode)
	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);

	    if (GET_MODE (op0) == BLKmode
		/* A constant address in OP0 can have VOIDmode, we must
		   not try to call force_reg in that case.  */
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	      op0 = offset_address (op0, offset_rtx,
				    highest_pow2_factor (offset));

	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (MEM_P (op0) && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || REG_P (op0) || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER)
	    /* If the field is volatile, we always want an aligned
	       access.  */
	    || (volatilep && flag_strict_volatile_bitfields > 0)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
		     && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
		    && ((modifier == EXPAND_CONST_ADDRESS
			 || modifier == EXPAND_INITIALIZER)
			: SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
		|| (bitpos % BITS_PER_UNIT != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
		&& TYPE_SIZE (TREE_TYPE (exp))
		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && MEM_P (op0)
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
		  target = assign_temp (type, 0, 1, 1);

		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		gcc_assert (MEM_P (op0)
			    && (!target || MEM_P (target))
			    && !(bitpos % BITS_PER_UNIT));

		emit_block_move (target,
				 adjust_address (op0, VOIDmode,
						 bitpos / BITS_PER_UNIT),
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	    op0 = validize_mem (op0);

	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode);

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big-endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
	    /* If the result type is BLKmode, store the data into a temporary
	       of the appropriate type, but with the mode corresponding to the
	       mode for the data we have (op0's mode).  It's tempting to make
	       this a constant type, since we know it's only being stored once,
	       but that can cause problems if we are taking the address of this
	       COMPONENT_REF because the MEM of any reference via that address
	       will have flags corresponding to the type, which will not
	       necessarily be constant.  */
	    if (mode == BLKmode)
		HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);

		/* If the reference doesn't use the alias set of its type,
		   we cannot create the temporary using that type.  */
		if (component_uses_parent_alias_set (exp))
		    new_rtx = assign_stack_local (ext_mode, size, 0);
		    set_mem_alias_set (new_rtx, get_alias_set (exp));
		  new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);

		emit_move_insn (new_rtx, op0);
		op0 = copy_rtx (new_rtx);
		PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	set_mem_attributes (op0, exp, 0);
	if (REG_P (XEXP (op0, 0)))
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);

      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      if (CALL_EXPR_VA_ARG_PACK (exp))
	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
	tree fndecl = get_callee_fndecl (exp), attr;

	    && (attr = lookup_attribute ("error",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  error ("%Kcall to %qs declared with attribute error: %s",
		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
	    && (attr = lookup_attribute ("warning",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %qs declared with attribute warning: %s",
		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

	/* Check for a built-in function.  */
	if (fndecl && DECL_BUILT_IN (fndecl))
	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
	    return expand_builtin (exp, target, subtarget, tmode, ignore);

      return expand_call (exp, target, ignore);
    case VIEW_CONVERT_EXPR:
      /* If we are converting to BLKmode, try to avoid an intermediate
	 temporary by fetching an inner memory reference.  */
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
	  && handled_component_p (treeop0))
	  enum machine_mode mode1;
	  HOST_WIDE_INT bitsize, bitpos;

	    = get_inner_reference (treeop0, &bitsize, &bitpos,
				   &offset, &mode1, &unsignedp, &volatilep,

	  /* ??? We should work harder and deal with non-zero offsets.  */
	      && (bitpos % BITS_PER_UNIT) == 0
	      && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
	      /* See the normal_inner_ref case for the rationale.  */
		 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
		  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
		  && modifier != EXPAND_STACK_PARM
		  ? target : NULL_RTX),
		 (modifier == EXPAND_INITIALIZER
		  || modifier == EXPAND_CONST_ADDRESS
		  || modifier == EXPAND_STACK_PARM)
		 ? modifier : EXPAND_NORMAL);

	      if (MEM_P (orig_op0))
		  /* Get a reference to just this component.  */
		  if (modifier == EXPAND_CONST_ADDRESS
		      || modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
		    op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);

		  if (op0 == orig_op0)
		    op0 = copy_rtx (op0);

		  set_mem_attributes (op0, treeop0, 0);
		  if (REG_P (XEXP (op0, 0)))
		    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

		  MEM_VOLATILE_P (op0) |= volatilep;
	op0 = expand_expr (treeop0,
			   NULL_RTX, VOIDmode, modifier);

      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
      /* If neither mode is BLKmode, and both modes are the same size
	 then we can use gen_lowpart.  */
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
	       && !COMPLEX_MODE_P (GET_MODE (op0)))
	  if (GET_CODE (op0) == SUBREG)
	    op0 = force_reg (GET_MODE (op0), op0);
	  temp = gen_lowpart_common (mode, op0);
	      if (!REG_P (op0) && !MEM_P (op0))
		op0 = force_reg (GET_MODE (op0), op0);
	      op0 = gen_lowpart (mode, op0);
      /* If both types are integral, convert from one mode to the other.  */
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
	op0 = convert_modes (mode, GET_MODE (op0), op0,
			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (treeop0);

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);

      /* At this point, OP0 is in the correct mode.  If the output type is
	 such that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (STRICT_ALIGNMENT
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
	      tree inner_type = TREE_TYPE (treeop0);
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (mode));
		= assign_stack_temp_for_type (mode, temp_size, 0, type);
	      rtx new_with_op0_mode
		= adjust_address (new_rtx, GET_MODE (op0), 0);

	      gcc_assert (!TREE_ADDRESSABLE (exp));

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (mode)),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
		emit_move_insn (new_with_op0_mode, op0);

	  op0 = adjust_address (op0, mode, 0);
      /* Use a compare and a jump for BLKmode comparisons, or for function
	 type comparisons if HAVE_canonicalize_funcptr_for_compare.  */

      /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
	 are occasionally created by folding during expansion.  */
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
	   || modifier == EXPAND_STACK_PARM
	   || ! safe_from_p (target, treeop0, 1)
	   || ! safe_from_p (target, treeop1, 1)
	   /* Make sure we don't have a hard reg (such as function's return
	      value) live across basic blocks, if not optimizing.  */
	   || (!optimize && REG_P (target)
	       && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot_1 (code, treeop0, treeop1, op1, -1);

      emit_move_insn (target, const1_rtx);

      return ignore ? const0_rtx : target;
    case STATEMENT_LIST:
	tree_stmt_iterator iter;

	gcc_assert (ignore);

	for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);

      /* A COND_EXPR with its type being VOID_TYPE represents a
	 conditional jump and is handled in
	 expand_gimple_cond_expr.  */
      gcc_assert (!VOID_TYPE_P (type));

      /* Note that COND_EXPRs whose type is a structure or union
	 are required to be constructed to contain assignments of
	 a temporary variable, so that we can evaluate them here
	 for side effect only.  If type is void, we must do likewise.  */

      gcc_assert (!TREE_ADDRESSABLE (type)
		  && TREE_TYPE (treeop1) != void_type_node
		  && TREE_TYPE (treeop2) != void_type_node);

      /* If we are not to produce a result, we have no target.  Otherwise,
	 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary.  */
      if (modifier != EXPAND_STACK_PARM
	  && safe_from_p (original_target, treeop0, 1)
	  && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
	  && (! can_conditionally_move_p (mode)
	      || REG_P (original_target))
	  && !MEM_P (original_target))
	temp = original_target;
	temp = assign_temp (type, 0, 0, 1);

      do_pending_stack_adjust ();
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (treeop0, op0, -1);
      store_expr (treeop1, temp,
		  modifier == EXPAND_STACK_PARM,
      emit_jump_insn (gen_jump (op1));
      store_expr (treeop2, temp,
		  modifier == EXPAND_STACK_PARM,
      target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);

      gcc_assert (ignore);

      /* Check for |= or &= of a bitfield of size one into another bitfield
	 of size 1.  In this case, (unless we need the result of the
	 assignment) we can do this more efficiently with a
	 test followed by an assignment, if necessary.

	 ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	 things change so we do, this code should be enhanced to
	 support it.  */
      if (TREE_CODE (lhs) == COMPONENT_REF
	  && (TREE_CODE (rhs) == BIT_IOR_EXPR
	      || TREE_CODE (rhs) == BIT_AND_EXPR)
	  && TREE_OPERAND (rhs, 0) == lhs
	  && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	  && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	  && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  rtx label = gen_label_rtx ();
	  int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	  do_jump (TREE_OPERAND (rhs, 1),
		   value ? 0 : label, -1);
	  expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
			     MOVE_NONTEMPORAL (exp));
	  do_pending_stack_adjust ();

      expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
      return expand_expr_addr_expr (exp, target, tmode, modifier);

      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);

      /* Expanded in cfgexpand.c.  */

    case TRY_CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case CASE_LABEL_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Lowered by gimplify.c.  */

      /* Function descriptors are not valid except as
	 initialization constants, and should not be expanded.  */

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,

    case COMPOUND_LITERAL_EXPR:
	/* Initialize the anonymous variable declared in the compound
	   literal, then return the variable.  */
	tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);

	/* Create RTL for this variable.  */
	if (!DECL_RTL_SET_P (decl))
	    if (DECL_HARD_REGISTER (decl))
	      /* The user specified an assembler name for this variable.  */
	      rest_of_decl_compilation (decl, 0, 0);

	return expand_expr_real (decl, original_target, tmode,

  return expand_expr_real_2 (&ops, target, tmode, modifier);
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))

  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
  else if (TYPE_UNSIGNED (type))
      rtx mask = immed_double_int_const (double_int_mask (prec),
      return expand_and (GET_MODE (exp), exp, mask, target);
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */
is_aligning_offset (const_tree offset, const_tree exp)
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
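/* The pattern recognized above is the usual idiom for rounding an address
   up to an alignment boundary.  For instance (an illustrative sketch):

     aligned = addr + ((- (uintptr_t) addr) & (ALIGN - 1));

   Here the NEGATE_EXPR of the address is masked with ALIGN - 1, one less
   than a power of 2, so the resulting sum is known to be ALIGN-aligned.  */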
/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */
string_constant (tree arg, tree *ptr_offset)
  tree array, offset, lower_bound;

  if (TREE_CODE (arg) == ADDR_EXPR)
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)

	  /* Check if the array has a nonzero lower bound.  */
	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
	  if (!integer_zerop (lower_bound))
	      /* If the offset and base aren't both constants, return 0.  */
	      if (TREE_CODE (lower_bound) != INTEGER_CST)
	      if (TREE_CODE (offset) != INTEGER_CST)
	      /* Adjust offset by the lower bound.  */
	      offset = size_diffop (fold_convert (sizetype, offset),
				    fold_convert (sizetype, lower_bound));
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	  array = TREE_OPERAND (arg0, 0);
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	  array = TREE_OPERAND (arg1, 0);

  if (TREE_CODE (array) == STRING_CST)
      *ptr_offset = fold_convert (sizetype, offset);
  else if (TREE_CODE (array) == VAR_DECL
	   || TREE_CODE (array) == CONST_DECL)
      /* Variables initialized to string literals can be handled too.  */
      if (!const_value_known_p (array)
	  || !DECL_INITIAL (array)
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)

      /* If the variable is bigger than the string literal, OFFSET must be
	 constant and inside the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))

      *ptr_offset = offset;
      return DECL_INITIAL (array);
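/* Usage sketch (illustrative, not code from this file): for an argument
   such as &"hello"[2], ARG is an ADDR_EXPR of an ARRAY_REF of the
   STRING_CST "hello"; string_constant would return that STRING_CST and
   set *PTR_OFFSET to a sizetype constant 2.  */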
/* Generate code to calculate OPS, an exploded comparison expression,
   using a store-flag instruction and return an rtx for the result.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
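/* The set/jump/set fallback corresponds to code of this shape (an
   illustrative sketch, not emitted literally):

     target = 0;
     if (op0 CODE op1)
       target = 1;

   i.e. a move of 0, then a conditional jump around a move of 1.  */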
do_store_flag (sepops ops, rtx target, enum machine_mode mode)
  tree arg0, arg1, type;
  enum machine_mode operand_mode;
  rtx subtarget = target;
  location_t loc = ops->location;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
		  == FUNCTION_TYPE))))

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
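  /* For instance (illustrative): a signed "x < 1" becomes "x <= 0", and a
     signed "x > -1" becomes "x >= 0", so the code below only needs to
     recognize comparisons against zero.  */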
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
	code = unsignedp ? LTU : LT;

      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
	code = unsignedp ? LEU : LE;

      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
	code = unsignedp ? GTU : GT;

      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
	code = unsignedp ? GEU : GE;

    case UNORDERED_EXPR:
      gcc_unreachable ();

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
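  /* E.g. (an illustrative sketch): "(x & 8) != 0" becomes "(x >> 3) & 1",
     and the EQ form "(x & 8) == 0" additionally xors that result with 1.  */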
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (loc,
						code == NE ? NE_EXPR : EQ_EXPR,
			  target, VOIDmode, EXPAND_NORMAL);

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
				operand_mode, unsignedp, 1);
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
	    rtx fallback_label ATTRIBUTE_UNUSED)
  struct expand_operand ops[5];
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);

      if (TYPE_MODE (index_type) != index_mode)
	  index_type = lang_hooks.types.type_for_size (index_bits, 0);
	  index_expr = fold_convert (index_type, index_expr);
      index = expand_normal (index_expr);

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
				  : fallback_label));
  expand_jump_insn (CODE_FOR_casesi, 5, ops);
10120 /* Attempt to generate a tablejump instruction; same concept. */
10121 #ifndef HAVE_tablejump
10122 #define HAVE_tablejump 0
10123 #define gen_tablejump(x, y) (0)

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                             default_label);
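  /* For example, for case values 5 through 10 the caller passes
     INDEX = x - 5 and RANGE = 5.  An original x of 3 yields the huge
     unsigned value (unsigned) -2 and an x of 12 yields 7; both compare
     GTU against 5, so a single comparison catches underflow and
     overflow alike.  */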

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
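  /* On a target where Pmode is SImode and the case vector holds 4-byte
     entries, the address built above has the form
     (plus:SI (mult:SI index (const_int 4)) (label_ref:SI table_label)).  */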
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
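  /* For a switch whose cases run from 5 to 10, MINVAL is 5 and
     INDEX_EXPR becomes x - 5, satisfying do_tablejump's expectation
     that the lowest table value has already been subtracted.  */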
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else if (TREE_CODE (elt) == FIXED_CST)
        RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
                                                   inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);
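  /* For example, a V4SImode VECTOR_CST holding 1, 2, 3 and 4 yields
     (const_vector:V4SI [(const_int 1) (const_int 2) (const_int 3)
     (const_int 4)]); the loop above supplies zeros for any trailing
     elements omitted from the VECTOR_CST chain.  */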

  return gen_rtx_CONST_VECTOR (mode, v);
}

/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
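  /* For the prefix "gxx" used by the C++ front end, this produces
     "__gxx_personality_v0" with DWARF-2 unwinding and
     "__gxx_personality_sj0" with setjmp/longjmp exceptions.  */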

  type = build_function_type_list (integer_type_node, integer_type_node,
                                   long_long_unsigned_type_node,
                                   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}

/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);
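  /* PERSONALITY is now a FUNCTION_DECL such as the one built by
     build_personality_function above; its DECL_RTL is a MEM whose
     address is the SYMBOL_REF we return.  */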

  return XEXP (DECL_RTL (personality), 0);
}

#include "gt-expr.h"