1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
44 #include "typeclass.h"
46 #include "langhooks.h"
49 #include "tree-iterator.h"
50 #include "basic-block.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
53 #include "gimple-expr.h"
56 #include "gimple-ssa.h"
58 #include "tree-ssanames.h"
60 #include "common/common-target.h"
63 #include "diagnostic.h"
64 #include "tree-ssa-live.h"
65 #include "tree-outof-ssa.h"
66 #include "target-globals.h"
68 #include "tree-ssa-address.h"
69 #include "cfgexpand.h"
71 #ifndef STACK_PUSH_CODE
72 #ifdef STACK_GROWS_DOWNWARD
73 #define STACK_PUSH_CODE PRE_DEC
75 #define STACK_PUSH_CODE PRE_INC
80 /* If this is nonzero, we do not bother generating VOLATILE
81 around volatile memory references, and we are willing to
82 output indirect addresses. If cse is to follow, we reject
83 indirect addresses so a useful potential cse is generated;
84 if it is used only once, instruction combination will produce
85 the same indirect address eventually. */
88 /* This structure is used by move_by_pieces to describe the move to
90 struct move_by_pieces_d
99 int explicit_inc_from
;
100 unsigned HOST_WIDE_INT len
;
101 HOST_WIDE_INT offset
;
105 /* This structure is used by store_by_pieces to describe the clear to
108 struct store_by_pieces_d
114 unsigned HOST_WIDE_INT len
;
115 HOST_WIDE_INT offset
;
116 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
);
121 static void move_by_pieces_1 (insn_gen_fn
, machine_mode
,
122 struct move_by_pieces_d
*);
123 static bool block_move_libcall_safe_for_call_parm (void);
124 static bool emit_block_move_via_movmem (rtx
, rtx
, rtx
, unsigned, unsigned, HOST_WIDE_INT
,
125 unsigned HOST_WIDE_INT
, unsigned HOST_WIDE_INT
,
126 unsigned HOST_WIDE_INT
);
127 static tree
emit_block_move_libcall_fn (int);
128 static void emit_block_move_via_loop (rtx
, rtx
, rtx
, unsigned);
129 static rtx
clear_by_pieces_1 (void *, HOST_WIDE_INT
, enum machine_mode
);
130 static void clear_by_pieces (rtx
, unsigned HOST_WIDE_INT
, unsigned int);
131 static void store_by_pieces_1 (struct store_by_pieces_d
*, unsigned int);
132 static void store_by_pieces_2 (insn_gen_fn
, machine_mode
,
133 struct store_by_pieces_d
*);
134 static tree
clear_storage_libcall_fn (int);
135 static rtx
compress_float_constant (rtx
, rtx
);
136 static rtx
get_subtarget (rtx
);
137 static void store_constructor_field (rtx
, unsigned HOST_WIDE_INT
,
138 HOST_WIDE_INT
, enum machine_mode
,
139 tree
, int, alias_set_type
);
140 static void store_constructor (tree
, rtx
, int, HOST_WIDE_INT
);
141 static rtx
store_field (rtx
, HOST_WIDE_INT
, HOST_WIDE_INT
,
142 unsigned HOST_WIDE_INT
, unsigned HOST_WIDE_INT
,
143 enum machine_mode
, tree
, alias_set_type
, bool);
145 static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree
, const_tree
);
147 static int is_aligning_offset (const_tree
, const_tree
);
148 static void expand_operands (tree
, tree
, rtx
, rtx
*, rtx
*,
149 enum expand_modifier
);
150 static rtx
reduce_to_bit_field_precision (rtx
, rtx
, tree
);
151 static rtx
do_store_flag (sepops
, rtx
, enum machine_mode
);
153 static void emit_single_push_insn (enum machine_mode
, rtx
, tree
);
155 static void do_tablejump (rtx
, enum machine_mode
, rtx
, rtx
, rtx
, int);
156 static rtx
const_vector_from_tree (tree
);
157 static void write_complex_part (rtx
, rtx
, bool);
159 /* This macro is used to determine whether move_by_pieces should be called
160 to perform a structure copy. */
161 #ifndef MOVE_BY_PIECES_P
162 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
163 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
164 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
167 /* This macro is used to determine whether clear_by_pieces should be
168 called to clear storage. */
169 #ifndef CLEAR_BY_PIECES_P
170 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
171 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
172 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
175 /* This macro is used to determine whether store_by_pieces should be
176 called to "memset" storage with byte values other than zero. */
177 #ifndef SET_BY_PIECES_P
178 #define SET_BY_PIECES_P(SIZE, ALIGN) \
179 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
180 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
183 /* This macro is used to determine whether store_by_pieces should be
184 called to "memcpy" storage when the source is a constant string. */
185 #ifndef STORE_BY_PIECES_P
186 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
188 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
191 /* This is run to set up which modes can be used
192 directly in memory and to initialize the block move optab. It is run
193 at the beginning of compilation and when the target is reinitialized. */
196 init_expr_target (void)
199 enum machine_mode mode
;
204 /* Try indexing by frame ptr and try by stack ptr.
205 It is known that on the Convex the stack ptr isn't a valid index.
206 With luck, one or the other is valid on any machine. */
207 mem
= gen_rtx_MEM (VOIDmode
, stack_pointer_rtx
);
208 mem1
= gen_rtx_MEM (VOIDmode
, frame_pointer_rtx
);
210 /* A scratch register we can modify in-place below to avoid
211 useless RTL allocations. */
212 reg
= gen_rtx_REG (VOIDmode
, -1);
214 insn
= rtx_alloc (INSN
);
215 pat
= gen_rtx_SET (VOIDmode
, NULL_RTX
, NULL_RTX
);
216 PATTERN (insn
) = pat
;
218 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
219 mode
= (enum machine_mode
) ((int) mode
+ 1))
223 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
224 PUT_MODE (mem
, mode
);
225 PUT_MODE (mem1
, mode
);
226 PUT_MODE (reg
, mode
);
228 /* See if there is some register that can be used in this mode and
229 directly loaded or stored from memory. */
231 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
232 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
233 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
236 if (! HARD_REGNO_MODE_OK (regno
, mode
))
239 SET_REGNO (reg
, regno
);
242 SET_DEST (pat
) = reg
;
243 if (recog (pat
, insn
, &num_clobbers
) >= 0)
244 direct_load
[(int) mode
] = 1;
246 SET_SRC (pat
) = mem1
;
247 SET_DEST (pat
) = reg
;
248 if (recog (pat
, insn
, &num_clobbers
) >= 0)
249 direct_load
[(int) mode
] = 1;
252 SET_DEST (pat
) = mem
;
253 if (recog (pat
, insn
, &num_clobbers
) >= 0)
254 direct_store
[(int) mode
] = 1;
257 SET_DEST (pat
) = mem1
;
258 if (recog (pat
, insn
, &num_clobbers
) >= 0)
259 direct_store
[(int) mode
] = 1;
263 mem
= gen_rtx_MEM (VOIDmode
, gen_rtx_raw_REG (Pmode
, 10000));
265 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); mode
!= VOIDmode
;
266 mode
= GET_MODE_WIDER_MODE (mode
))
268 enum machine_mode srcmode
;
269 for (srcmode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); srcmode
!= mode
;
270 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
274 ic
= can_extend_p (mode
, srcmode
, 0);
275 if (ic
== CODE_FOR_nothing
)
278 PUT_MODE (mem
, srcmode
);
280 if (insn_operand_matches (ic
, 1, mem
))
281 float_extend_from_mem
[mode
][srcmode
] = true;
286 /* This is run at the start of compiling a function. */
291 memset (&crtl
->expr
, 0, sizeof (crtl
->expr
));
294 /* Copy data from FROM to TO, where the machine modes are not the same.
295 Both modes may be integer, or both may be floating, or both may be
297 UNSIGNEDP should be nonzero if FROM is an unsigned type.
298 This causes zero-extension instead of sign-extension. */
301 convert_move (rtx to
, rtx from
, int unsignedp
)
303 enum machine_mode to_mode
= GET_MODE (to
);
304 enum machine_mode from_mode
= GET_MODE (from
);
305 int to_real
= SCALAR_FLOAT_MODE_P (to_mode
);
306 int from_real
= SCALAR_FLOAT_MODE_P (from_mode
);
310 /* rtx code for making an equivalent value. */
311 enum rtx_code equiv_code
= (unsignedp
< 0 ? UNKNOWN
312 : (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
));
315 gcc_assert (to_real
== from_real
);
316 gcc_assert (to_mode
!= BLKmode
);
317 gcc_assert (from_mode
!= BLKmode
);
319 /* If the source and destination are already the same, then there's
324 /* If FROM is a SUBREG that indicates that we have already done at least
325 the required extension, strip it. We don't handle such SUBREGs as
328 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
329 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from
)))
330 >= GET_MODE_PRECISION (to_mode
))
331 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
332 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
334 gcc_assert (GET_CODE (to
) != SUBREG
|| !SUBREG_PROMOTED_VAR_P (to
));
336 if (to_mode
== from_mode
337 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
339 emit_move_insn (to
, from
);
343 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
345 gcc_assert (GET_MODE_BITSIZE (from_mode
) == GET_MODE_BITSIZE (to_mode
));
347 if (VECTOR_MODE_P (to_mode
))
348 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
350 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
352 emit_move_insn (to
, from
);
356 if (GET_CODE (to
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
358 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
359 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
368 gcc_assert ((GET_MODE_PRECISION (from_mode
)
369 != GET_MODE_PRECISION (to_mode
))
370 || (DECIMAL_FLOAT_MODE_P (from_mode
)
371 != DECIMAL_FLOAT_MODE_P (to_mode
)));
373 if (GET_MODE_PRECISION (from_mode
) == GET_MODE_PRECISION (to_mode
))
374 /* Conversion between decimal float and binary float, same size. */
375 tab
= DECIMAL_FLOAT_MODE_P (from_mode
) ? trunc_optab
: sext_optab
;
376 else if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
))
381 /* Try converting directly if the insn is supported. */
383 code
= convert_optab_handler (tab
, to_mode
, from_mode
);
384 if (code
!= CODE_FOR_nothing
)
386 emit_unop_insn (code
, to
, from
,
387 tab
== sext_optab
? FLOAT_EXTEND
: FLOAT_TRUNCATE
);
391 /* Otherwise use a libcall. */
392 libcall
= convert_optab_libfunc (tab
, to_mode
, from_mode
);
394 /* Is this conversion implemented yet? */
395 gcc_assert (libcall
);
398 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
400 insns
= get_insns ();
402 emit_libcall_block (insns
, to
, value
,
403 tab
== trunc_optab
? gen_rtx_FLOAT_TRUNCATE (to_mode
,
405 : gen_rtx_FLOAT_EXTEND (to_mode
, from
));
409 /* Handle pointer conversion. */ /* SPEE 900220. */
410 /* Targets are expected to provide conversion insns between PxImode and
411 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
412 if (GET_MODE_CLASS (to_mode
) == MODE_PARTIAL_INT
)
414 enum machine_mode full_mode
415 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode
), MODE_INT
);
417 gcc_assert (convert_optab_handler (trunc_optab
, to_mode
, full_mode
)
418 != CODE_FOR_nothing
);
420 if (full_mode
!= from_mode
)
421 from
= convert_to_mode (full_mode
, from
, unsignedp
);
422 emit_unop_insn (convert_optab_handler (trunc_optab
, to_mode
, full_mode
),
426 if (GET_MODE_CLASS (from_mode
) == MODE_PARTIAL_INT
)
429 enum machine_mode full_mode
430 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode
), MODE_INT
);
431 convert_optab ctab
= unsignedp
? zext_optab
: sext_optab
;
432 enum insn_code icode
;
434 icode
= convert_optab_handler (ctab
, full_mode
, from_mode
);
435 gcc_assert (icode
!= CODE_FOR_nothing
);
437 if (to_mode
== full_mode
)
439 emit_unop_insn (icode
, to
, from
, UNKNOWN
);
443 new_from
= gen_reg_rtx (full_mode
);
444 emit_unop_insn (icode
, new_from
, from
, UNKNOWN
);
446 /* else proceed to integer conversions below. */
447 from_mode
= full_mode
;
451 /* Make sure both are fixed-point modes or both are not. */
452 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode
) ==
453 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode
));
454 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode
))
456 /* If we widen from_mode to to_mode and they are in the same class,
457 we won't saturate the result.
458 Otherwise, always saturate the result to play safe. */
459 if (GET_MODE_CLASS (from_mode
) == GET_MODE_CLASS (to_mode
)
460 && GET_MODE_SIZE (from_mode
) < GET_MODE_SIZE (to_mode
))
461 expand_fixed_convert (to
, from
, 0, 0);
463 expand_fixed_convert (to
, from
, 0, 1);
467 /* Now both modes are integers. */
469 /* Handle expanding beyond a word. */
470 if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
)
471 && GET_MODE_PRECISION (to_mode
) > BITS_PER_WORD
)
478 enum machine_mode lowpart_mode
;
479 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
481 /* Try converting directly if the insn is supported. */
482 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
485 /* If FROM is a SUBREG, put it into a register. Do this
486 so that we always generate the same set of insns for
487 better cse'ing; if an intermediate assignment occurred,
488 we won't be doing the operation directly on the SUBREG. */
489 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
490 from
= force_reg (from_mode
, from
);
491 emit_unop_insn (code
, to
, from
, equiv_code
);
494 /* Next, try converting via full word. */
495 else if (GET_MODE_PRECISION (from_mode
) < BITS_PER_WORD
496 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
497 != CODE_FOR_nothing
))
499 rtx word_to
= gen_reg_rtx (word_mode
);
502 if (reg_overlap_mentioned_p (to
, from
))
503 from
= force_reg (from_mode
, from
);
506 convert_move (word_to
, from
, unsignedp
);
507 emit_unop_insn (code
, to
, word_to
, equiv_code
);
511 /* No special multiword conversion insn; do it by hand. */
514 /* Since we will turn this into a no conflict block, we must ensure the
515 the source does not overlap the target so force it into an isolated
516 register when maybe so. Likewise for any MEM input, since the
517 conversion sequence might require several references to it and we
518 must ensure we're getting the same value every time. */
520 if (MEM_P (from
) || reg_overlap_mentioned_p (to
, from
))
521 from
= force_reg (from_mode
, from
);
523 /* Get a copy of FROM widened to a word, if necessary. */
524 if (GET_MODE_PRECISION (from_mode
) < BITS_PER_WORD
)
525 lowpart_mode
= word_mode
;
527 lowpart_mode
= from_mode
;
529 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
531 lowpart
= gen_lowpart (lowpart_mode
, to
);
532 emit_move_insn (lowpart
, lowfrom
);
534 /* Compute the value to put in each remaining word. */
536 fill_value
= const0_rtx
;
538 fill_value
= emit_store_flag_force (gen_reg_rtx (word_mode
),
539 LT
, lowfrom
, const0_rtx
,
540 lowpart_mode
, 0, -1);
542 /* Fill the remaining words. */
543 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
545 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
546 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
548 gcc_assert (subword
);
550 if (fill_value
!= subword
)
551 emit_move_insn (subword
, fill_value
);
554 insns
= get_insns ();
561 /* Truncating multi-word to a word or less. */
562 if (GET_MODE_PRECISION (from_mode
) > BITS_PER_WORD
563 && GET_MODE_PRECISION (to_mode
) <= BITS_PER_WORD
)
566 && ! MEM_VOLATILE_P (from
)
567 && direct_load
[(int) to_mode
]
568 && ! mode_dependent_address_p (XEXP (from
, 0),
569 MEM_ADDR_SPACE (from
)))
571 || GET_CODE (from
) == SUBREG
))
572 from
= force_reg (from_mode
, from
);
573 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
577 /* Now follow all the conversions between integers
578 no more than a word long. */
580 /* For truncation, usually we can just refer to FROM in a narrower mode. */
581 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
582 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode
, from_mode
))
585 && ! MEM_VOLATILE_P (from
)
586 && direct_load
[(int) to_mode
]
587 && ! mode_dependent_address_p (XEXP (from
, 0),
588 MEM_ADDR_SPACE (from
)))
590 || GET_CODE (from
) == SUBREG
))
591 from
= force_reg (from_mode
, from
);
592 if (REG_P (from
) && REGNO (from
) < FIRST_PSEUDO_REGISTER
593 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
594 from
= copy_to_reg (from
);
595 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
599 /* Handle extension. */
600 if (GET_MODE_PRECISION (to_mode
) > GET_MODE_PRECISION (from_mode
))
602 /* Convert directly if that works. */
603 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
606 emit_unop_insn (code
, to
, from
, equiv_code
);
611 enum machine_mode intermediate
;
615 /* Search for a mode to convert via. */
616 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
617 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
618 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
620 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
621 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode
, intermediate
)))
622 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
623 != CODE_FOR_nothing
))
625 convert_move (to
, convert_to_mode (intermediate
, from
,
626 unsignedp
), unsignedp
);
630 /* No suitable intermediate mode.
631 Generate what we need with shifts. */
632 shift_amount
= (GET_MODE_PRECISION (to_mode
)
633 - GET_MODE_PRECISION (from_mode
));
634 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
635 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
637 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
640 emit_move_insn (to
, tmp
);
645 /* Support special truncate insns for certain modes. */
646 if (convert_optab_handler (trunc_optab
, to_mode
,
647 from_mode
) != CODE_FOR_nothing
)
649 emit_unop_insn (convert_optab_handler (trunc_optab
, to_mode
, from_mode
),
654 /* Handle truncation of volatile memrefs, and so on;
655 the things that couldn't be truncated directly,
656 and for which there was no special instruction.
658 ??? Code above formerly short-circuited this, for most integer
659 mode pairs, with a force_reg in from_mode followed by a recursive
660 call to this routine. Appears always to have been wrong. */
661 if (GET_MODE_PRECISION (to_mode
) < GET_MODE_PRECISION (from_mode
))
663 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
664 emit_move_insn (to
, temp
);
668 /* Mode combination is not recognized. */
672 /* Return an rtx for a value that would result
673 from converting X to mode MODE.
674 Both X and MODE may be floating, or both integer.
675 UNSIGNEDP is nonzero if X is an unsigned value.
676 This can be done by referring to a part of X in place
677 or by copying to a new temporary with conversion. */
680 convert_to_mode (enum machine_mode mode
, rtx x
, int unsignedp
)
682 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
685 /* Return an rtx for a value that would result
686 from converting X from mode OLDMODE to mode MODE.
687 Both modes may be floating, or both integer.
688 UNSIGNEDP is nonzero if X is an unsigned value.
690 This can be done by referring to a part of X in place
691 or by copying to a new temporary with conversion.
693 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
696 convert_modes (enum machine_mode mode
, enum machine_mode oldmode
, rtx x
, int unsignedp
)
700 /* If FROM is a SUBREG that indicates that we have already done at least
701 the required extension, strip it. */
703 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
704 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
705 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
706 x
= gen_lowpart (mode
, SUBREG_REG (x
));
708 if (GET_MODE (x
) != VOIDmode
)
709 oldmode
= GET_MODE (x
);
714 if (CONST_SCALAR_INT_P (x
) && GET_MODE_CLASS (mode
) == MODE_INT
)
716 /* If the caller did not tell us the old mode, then there is not
717 much to do with respect to canonicalization. We have to
718 assume that all the bits are significant. */
719 if (GET_MODE_CLASS (oldmode
) != MODE_INT
)
720 oldmode
= MAX_MODE_INT
;
721 wide_int w
= wide_int::from (std::make_pair (x
, oldmode
),
722 GET_MODE_PRECISION (mode
),
723 unsignedp
? UNSIGNED
: SIGNED
);
724 return immed_wide_int_const (w
, mode
);
727 /* We can do this with a gen_lowpart if both desired and current modes
728 are integer, and this is either a constant integer, a register, or a
730 if (GET_MODE_CLASS (mode
) == MODE_INT
731 && GET_MODE_CLASS (oldmode
) == MODE_INT
732 && GET_MODE_PRECISION (mode
) <= GET_MODE_PRECISION (oldmode
)
733 && ((MEM_P (x
) && !MEM_VOLATILE_P (x
) && direct_load
[(int) mode
])
735 && (!HARD_REGISTER_P (x
)
736 || HARD_REGNO_MODE_OK (REGNO (x
), mode
))
737 && TRULY_NOOP_TRUNCATION_MODES_P (mode
, GET_MODE (x
)))))
739 return gen_lowpart (mode
, x
);
741 /* Converting from integer constant into mode is always equivalent to an
743 if (VECTOR_MODE_P (mode
) && GET_MODE (x
) == VOIDmode
)
745 gcc_assert (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (oldmode
));
746 return simplify_gen_subreg (mode
, x
, oldmode
, 0);
749 temp
= gen_reg_rtx (mode
);
750 convert_move (temp
, x
, unsignedp
);
754 /* Return the largest alignment we can use for doing a move (or store)
755 of MAX_PIECES. ALIGN is the largest alignment we could use. */
758 alignment_for_piecewise_move (unsigned int max_pieces
, unsigned int align
)
760 enum machine_mode tmode
;
762 tmode
= mode_for_size (max_pieces
* BITS_PER_UNIT
, MODE_INT
, 1);
763 if (align
>= GET_MODE_ALIGNMENT (tmode
))
764 align
= GET_MODE_ALIGNMENT (tmode
);
767 enum machine_mode tmode
, xmode
;
769 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
771 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
772 if (GET_MODE_SIZE (tmode
) > max_pieces
773 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
776 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
782 /* Return the widest integer mode no wider than SIZE. If no such mode
783 can be found, return VOIDmode. */
785 static enum machine_mode
786 widest_int_mode_for_size (unsigned int size
)
788 enum machine_mode tmode
, mode
= VOIDmode
;
790 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
791 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
792 if (GET_MODE_SIZE (tmode
) < size
)
798 /* STORE_MAX_PIECES is the number of bytes at a time that we can
799 store efficiently. Due to internal GCC limitations, this is
800 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
801 for an immediate constant. */
803 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
805 /* Determine whether the LEN bytes can be moved by using several move
806 instructions. Return nonzero if a call to move_by_pieces should
810 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED
,
811 unsigned int align ATTRIBUTE_UNUSED
)
813 return MOVE_BY_PIECES_P (len
, align
);
816 /* Generate several move instructions to copy LEN bytes from block FROM to
817 block TO. (These are MEM rtx's with BLKmode).
819 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
820 used to push FROM to the stack.
822 ALIGN is maximum stack alignment we can assume.
824 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
825 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
829 move_by_pieces (rtx to
, rtx from
, unsigned HOST_WIDE_INT len
,
830 unsigned int align
, int endp
)
832 struct move_by_pieces_d data
;
833 enum machine_mode to_addr_mode
;
834 enum machine_mode from_addr_mode
= get_address_mode (from
);
835 rtx to_addr
, from_addr
= XEXP (from
, 0);
836 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
837 enum insn_code icode
;
839 align
= MIN (to
? MEM_ALIGN (to
) : align
, MEM_ALIGN (from
));
842 data
.from_addr
= from_addr
;
845 to_addr_mode
= get_address_mode (to
);
846 to_addr
= XEXP (to
, 0);
849 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
850 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
852 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
856 to_addr_mode
= VOIDmode
;
860 #ifdef STACK_GROWS_DOWNWARD
866 data
.to_addr
= to_addr
;
869 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
870 || GET_CODE (from_addr
) == POST_INC
871 || GET_CODE (from_addr
) == POST_DEC
);
873 data
.explicit_inc_from
= 0;
874 data
.explicit_inc_to
= 0;
875 if (data
.reverse
) data
.offset
= len
;
878 /* If copying requires more than two move insns,
879 copy addresses to registers (to make displacements shorter)
880 and use post-increment if available. */
881 if (!(data
.autinc_from
&& data
.autinc_to
)
882 && move_by_pieces_ninsns (len
, align
, max_size
) > 2)
884 /* Find the mode of the largest move...
885 MODE might not be used depending on the definitions of the
886 USE_* macros below. */
887 enum machine_mode mode ATTRIBUTE_UNUSED
888 = widest_int_mode_for_size (max_size
);
890 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
892 data
.from_addr
= copy_to_mode_reg (from_addr_mode
,
893 plus_constant (from_addr_mode
,
895 data
.autinc_from
= 1;
896 data
.explicit_inc_from
= -1;
898 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
900 data
.from_addr
= copy_to_mode_reg (from_addr_mode
, from_addr
);
901 data
.autinc_from
= 1;
902 data
.explicit_inc_from
= 1;
904 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
905 data
.from_addr
= copy_to_mode_reg (from_addr_mode
, from_addr
);
906 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
908 data
.to_addr
= copy_to_mode_reg (to_addr_mode
,
909 plus_constant (to_addr_mode
,
912 data
.explicit_inc_to
= -1;
914 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
916 data
.to_addr
= copy_to_mode_reg (to_addr_mode
, to_addr
);
918 data
.explicit_inc_to
= 1;
920 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
921 data
.to_addr
= copy_to_mode_reg (to_addr_mode
, to_addr
);
924 align
= alignment_for_piecewise_move (MOVE_MAX_PIECES
, align
);
926 /* First move what we can in the largest integer mode, then go to
927 successively smaller modes. */
929 while (max_size
> 1 && data
.len
> 0)
931 enum machine_mode mode
= widest_int_mode_for_size (max_size
);
933 if (mode
== VOIDmode
)
936 icode
= optab_handler (mov_optab
, mode
);
937 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
938 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
940 max_size
= GET_MODE_SIZE (mode
);
943 /* The code above should have handled everything. */
944 gcc_assert (!data
.len
);
950 gcc_assert (!data
.reverse
);
955 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
956 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
958 data
.to_addr
= copy_to_mode_reg (to_addr_mode
,
959 plus_constant (to_addr_mode
,
963 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
970 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
978 /* Return number of insns required to move L bytes by pieces.
979 ALIGN (in bits) is maximum alignment we can assume. */
981 unsigned HOST_WIDE_INT
982 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l
, unsigned int align
,
983 unsigned int max_size
)
985 unsigned HOST_WIDE_INT n_insns
= 0;
987 align
= alignment_for_piecewise_move (MOVE_MAX_PIECES
, align
);
989 while (max_size
> 1 && l
> 0)
991 enum machine_mode mode
;
992 enum insn_code icode
;
994 mode
= widest_int_mode_for_size (max_size
);
996 if (mode
== VOIDmode
)
999 icode
= optab_handler (mov_optab
, mode
);
1000 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1001 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1003 max_size
= GET_MODE_SIZE (mode
);
1010 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1011 with move instructions for mode MODE. GENFUN is the gen_... function
1012 to make a move insn for that mode. DATA has all the other info. */
1015 move_by_pieces_1 (insn_gen_fn genfun
, machine_mode mode
,
1016 struct move_by_pieces_d
*data
)
1018 unsigned int size
= GET_MODE_SIZE (mode
);
1019 rtx to1
= NULL_RTX
, from1
;
1021 while (data
->len
>= size
)
1024 data
->offset
-= size
;
1028 if (data
->autinc_to
)
1029 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1032 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1035 if (data
->autinc_from
)
1036 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1039 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1041 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1042 emit_insn (gen_add2_insn (data
->to_addr
,
1043 gen_int_mode (-(HOST_WIDE_INT
) size
,
1044 GET_MODE (data
->to_addr
))));
1045 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1046 emit_insn (gen_add2_insn (data
->from_addr
,
1047 gen_int_mode (-(HOST_WIDE_INT
) size
,
1048 GET_MODE (data
->from_addr
))));
1051 emit_insn ((*genfun
) (to1
, from1
));
1054 #ifdef PUSH_ROUNDING
1055 emit_single_push_insn (mode
, from1
, NULL
);
1061 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1062 emit_insn (gen_add2_insn (data
->to_addr
,
1064 GET_MODE (data
->to_addr
))));
1065 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1066 emit_insn (gen_add2_insn (data
->from_addr
,
1068 GET_MODE (data
->from_addr
))));
1070 if (! data
->reverse
)
1071 data
->offset
+= size
;
1077 /* Emit code to move a block Y to a block X. This may be done with
1078 string-move instructions, with multiple scalar move instructions,
1079 or with a library call.
1081 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1082 SIZE is an rtx that says how long they are.
1083 ALIGN is the maximum alignment we can assume they have.
1084 METHOD describes what kind of copy this is, and what mechanisms may be used.
1085 MIN_SIZE is the minimal size of block to move
1086 MAX_SIZE is the maximal size of block to move, if it can not be represented
1087 in unsigned HOST_WIDE_INT, than it is mask of all ones.
1089 Return the address of the new block, if memcpy is called and returns it,
1093 emit_block_move_hints (rtx x
, rtx y
, rtx size
, enum block_op_methods method
,
1094 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
1095 unsigned HOST_WIDE_INT min_size
,
1096 unsigned HOST_WIDE_INT max_size
,
1097 unsigned HOST_WIDE_INT probable_max_size
)
1104 if (CONST_INT_P (size
)
1105 && INTVAL (size
) == 0)
1110 case BLOCK_OP_NORMAL
:
1111 case BLOCK_OP_TAILCALL
:
1112 may_use_call
= true;
1115 case BLOCK_OP_CALL_PARM
:
1116 may_use_call
= block_move_libcall_safe_for_call_parm ();
1118 /* Make inhibit_defer_pop nonzero around the library call
1119 to force it to pop the arguments right away. */
1123 case BLOCK_OP_NO_LIBCALL
:
1124 may_use_call
= false;
1131 gcc_assert (MEM_P (x
) && MEM_P (y
));
1132 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1133 gcc_assert (align
>= BITS_PER_UNIT
);
1135 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1136 block copy is more efficient for other large modes, e.g. DCmode. */
1137 x
= adjust_address (x
, BLKmode
, 0);
1138 y
= adjust_address (y
, BLKmode
, 0);
1140 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1141 can be incorrect is coming from __builtin_memcpy. */
1142 if (CONST_INT_P (size
))
1144 x
= shallow_copy_rtx (x
);
1145 y
= shallow_copy_rtx (y
);
1146 set_mem_size (x
, INTVAL (size
));
1147 set_mem_size (y
, INTVAL (size
));
1150 if (CONST_INT_P (size
) && MOVE_BY_PIECES_P (INTVAL (size
), align
))
1151 move_by_pieces (x
, y
, INTVAL (size
), align
, 0);
1152 else if (emit_block_move_via_movmem (x
, y
, size
, align
,
1153 expected_align
, expected_size
,
1154 min_size
, max_size
, probable_max_size
))
1156 else if (may_use_call
1157 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
))
1158 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y
)))
1160 /* Since x and y are passed to a libcall, mark the corresponding
1161 tree EXPR as addressable. */
1162 tree y_expr
= MEM_EXPR (y
);
1163 tree x_expr
= MEM_EXPR (x
);
1165 mark_addressable (y_expr
);
1167 mark_addressable (x_expr
);
1168 retval
= emit_block_move_via_libcall (x
, y
, size
,
1169 method
== BLOCK_OP_TAILCALL
);
1173 emit_block_move_via_loop (x
, y
, size
, align
);
1175 if (method
== BLOCK_OP_CALL_PARM
)
1182 emit_block_move (rtx x
, rtx y
, rtx size
, enum block_op_methods method
)
1184 unsigned HOST_WIDE_INT max
, min
= 0;
1185 if (GET_CODE (size
) == CONST_INT
)
1186 min
= max
= UINTVAL (size
);
1188 max
= GET_MODE_MASK (GET_MODE (size
));
1189 return emit_block_move_hints (x
, y
, size
, method
, 0, -1,
1193 /* A subroutine of emit_block_move. Returns true if calling the
1194 block move libcall will not clobber any parameters which may have
1195 already been placed on the stack. */
1198 block_move_libcall_safe_for_call_parm (void)
1200 #if defined (REG_PARM_STACK_SPACE)
1204 /* If arguments are pushed on the stack, then they're safe. */
1208 /* If registers go on the stack anyway, any argument is sure to clobber
1209 an outgoing argument. */
1210 #if defined (REG_PARM_STACK_SPACE)
1211 fn
= emit_block_move_libcall_fn (false);
1212 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1213 depend on its argument. */
1215 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn
? NULL_TREE
: TREE_TYPE (fn
)))
1216 && REG_PARM_STACK_SPACE (fn
) != 0)
1220 /* If any argument goes in memory, then it might clobber an outgoing
1223 CUMULATIVE_ARGS args_so_far_v
;
1224 cumulative_args_t args_so_far
;
1227 fn
= emit_block_move_libcall_fn (false);
1228 INIT_CUMULATIVE_ARGS (args_so_far_v
, TREE_TYPE (fn
), NULL_RTX
, 0, 3);
1229 args_so_far
= pack_cumulative_args (&args_so_far_v
);
1231 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1232 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1234 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1235 rtx tmp
= targetm
.calls
.function_arg (args_so_far
, mode
,
1237 if (!tmp
|| !REG_P (tmp
))
1239 if (targetm
.calls
.arg_partial_bytes (args_so_far
, mode
, NULL
, 1))
1241 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
1248 /* A subroutine of emit_block_move. Expand a movmem pattern;
1249 return true if successful. */
1252 emit_block_move_via_movmem (rtx x
, rtx y
, rtx size
, unsigned int align
,
1253 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
1254 unsigned HOST_WIDE_INT min_size
,
1255 unsigned HOST_WIDE_INT max_size
,
1256 unsigned HOST_WIDE_INT probable_max_size
)
1258 int save_volatile_ok
= volatile_ok
;
1259 enum machine_mode mode
;
1261 if (expected_align
< align
)
1262 expected_align
= align
;
1263 if (expected_size
!= -1)
1265 if ((unsigned HOST_WIDE_INT
)expected_size
> probable_max_size
)
1266 expected_size
= probable_max_size
;
1267 if ((unsigned HOST_WIDE_INT
)expected_size
< min_size
)
1268 expected_size
= min_size
;
1271 /* Since this is a move insn, we don't care about volatility. */
1274 /* Try the most limited insn first, because there's no point
1275 including more than one in the machine description unless
1276 the more limited one has some advantage. */
1278 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1279 mode
= GET_MODE_WIDER_MODE (mode
))
1281 enum insn_code code
= direct_optab_handler (movmem_optab
, mode
);
1283 if (code
!= CODE_FOR_nothing
1284 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1285 here because if SIZE is less than the mode mask, as it is
1286 returned by the macro, it will definitely be less than the
1287 actual mode mask. Since SIZE is within the Pmode address
1288 space, we limit MODE to Pmode. */
1289 && ((CONST_INT_P (size
)
1290 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1291 <= (GET_MODE_MASK (mode
) >> 1)))
1292 || max_size
<= (GET_MODE_MASK (mode
) >> 1)
1293 || GET_MODE_BITSIZE (mode
) >= GET_MODE_BITSIZE (Pmode
)))
1295 struct expand_operand ops
[9];
1298 /* ??? When called via emit_block_move_for_call, it'd be
1299 nice if there were some way to inform the backend, so
1300 that it doesn't fail the expansion because it thinks
1301 emitting the libcall would be more efficient. */
1302 nops
= insn_data
[(int) code
].n_generator_args
;
1303 gcc_assert (nops
== 4 || nops
== 6 || nops
== 8 || nops
== 9);
1305 create_fixed_operand (&ops
[0], x
);
1306 create_fixed_operand (&ops
[1], y
);
1307 /* The check above guarantees that this size conversion is valid. */
1308 create_convert_operand_to (&ops
[2], size
, mode
, true);
1309 create_integer_operand (&ops
[3], align
/ BITS_PER_UNIT
);
1312 create_integer_operand (&ops
[4], expected_align
/ BITS_PER_UNIT
);
1313 create_integer_operand (&ops
[5], expected_size
);
1317 create_integer_operand (&ops
[6], min_size
);
1318 /* If we can not represent the maximal size,
1319 make parameter NULL. */
1320 if ((HOST_WIDE_INT
) max_size
!= -1)
1321 create_integer_operand (&ops
[7], max_size
);
1323 create_fixed_operand (&ops
[7], NULL
);
1327 /* If we can not represent the maximal size,
1328 make parameter NULL. */
1329 if ((HOST_WIDE_INT
) probable_max_size
!= -1)
1330 create_integer_operand (&ops
[8], probable_max_size
);
1332 create_fixed_operand (&ops
[8], NULL
);
1334 if (maybe_expand_insn (code
, nops
, ops
))
1336 volatile_ok
= save_volatile_ok
;
1342 volatile_ok
= save_volatile_ok
;
1346 /* A subroutine of emit_block_move. Expand a call to memcpy.
1347 Return the return value from memcpy, 0 otherwise. */
1350 emit_block_move_via_libcall (rtx dst
, rtx src
, rtx size
, bool tailcall
)
1352 rtx dst_addr
, src_addr
;
1353 tree call_expr
, fn
, src_tree
, dst_tree
, size_tree
;
1354 enum machine_mode size_mode
;
1357 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1358 pseudos. We can then place those new pseudos into a VAR_DECL and
1361 dst_addr
= copy_addr_to_reg (XEXP (dst
, 0));
1362 src_addr
= copy_addr_to_reg (XEXP (src
, 0));
1364 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1365 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1367 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1368 src_tree
= make_tree (ptr_type_node
, src_addr
);
1370 size_mode
= TYPE_MODE (sizetype
);
1372 size
= convert_to_mode (size_mode
, size
, 1);
1373 size
= copy_to_mode_reg (size_mode
, size
);
1375 /* It is incorrect to use the libcall calling conventions to call
1376 memcpy in this context. This could be a user call to memcpy and
1377 the user may wish to examine the return value from memcpy. For
1378 targets where libcalls and normal calls have different conventions
1379 for returning pointers, we could end up generating incorrect code. */
1381 size_tree
= make_tree (sizetype
, size
);
1383 fn
= emit_block_move_libcall_fn (true);
1384 call_expr
= build_call_expr (fn
, 3, dst_tree
, src_tree
, size_tree
);
1385 CALL_EXPR_TAILCALL (call_expr
) = tailcall
;
1387 retval
= expand_normal (call_expr
);
1392 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1393 for the function we use for block copies. */
1395 static GTY(()) tree block_move_fn
;
1398 init_block_move_fn (const char *asmspec
)
1402 tree args
, fn
, attrs
, attr_args
;
1404 fn
= get_identifier ("memcpy");
1405 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
1406 const_ptr_type_node
, sizetype
,
1409 fn
= build_decl (UNKNOWN_LOCATION
, FUNCTION_DECL
, fn
, args
);
1410 DECL_EXTERNAL (fn
) = 1;
1411 TREE_PUBLIC (fn
) = 1;
1412 DECL_ARTIFICIAL (fn
) = 1;
1413 TREE_NOTHROW (fn
) = 1;
1414 DECL_VISIBILITY (fn
) = VISIBILITY_DEFAULT
;
1415 DECL_VISIBILITY_SPECIFIED (fn
) = 1;
1417 attr_args
= build_tree_list (NULL_TREE
, build_string (1, "1"));
1418 attrs
= tree_cons (get_identifier ("fn spec"), attr_args
, NULL
);
1420 decl_attributes (&fn
, attrs
, ATTR_FLAG_BUILT_IN
);
1426 set_user_assembler_name (block_move_fn
, asmspec
);
1430 emit_block_move_libcall_fn (int for_call
)
1432 static bool emitted_extern
;
1435 init_block_move_fn (NULL
);
1437 if (for_call
&& !emitted_extern
)
1439 emitted_extern
= true;
1440 make_decl_rtl (block_move_fn
);
1443 return block_move_fn
;
1446 /* A subroutine of emit_block_move. Copy the data via an explicit
1447 loop. This is used only when libcalls are forbidden. */
1448 /* ??? It'd be nice to copy in hunks larger than QImode. */
1451 emit_block_move_via_loop (rtx x
, rtx y
, rtx size
,
1452 unsigned int align ATTRIBUTE_UNUSED
)
1454 rtx cmp_label
, top_label
, iter
, x_addr
, y_addr
, tmp
;
1455 enum machine_mode x_addr_mode
= get_address_mode (x
);
1456 enum machine_mode y_addr_mode
= get_address_mode (y
);
1457 enum machine_mode iter_mode
;
1459 iter_mode
= GET_MODE (size
);
1460 if (iter_mode
== VOIDmode
)
1461 iter_mode
= word_mode
;
1463 top_label
= gen_label_rtx ();
1464 cmp_label
= gen_label_rtx ();
1465 iter
= gen_reg_rtx (iter_mode
);
1467 emit_move_insn (iter
, const0_rtx
);
1469 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
1470 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
1471 do_pending_stack_adjust ();
1473 emit_jump (cmp_label
);
1474 emit_label (top_label
);
1476 tmp
= convert_modes (x_addr_mode
, iter_mode
, iter
, true);
1477 x_addr
= simplify_gen_binary (PLUS
, x_addr_mode
, x_addr
, tmp
);
1479 if (x_addr_mode
!= y_addr_mode
)
1480 tmp
= convert_modes (y_addr_mode
, iter_mode
, iter
, true);
1481 y_addr
= simplify_gen_binary (PLUS
, y_addr_mode
, y_addr
, tmp
);
1483 x
= change_address (x
, QImode
, x_addr
);
1484 y
= change_address (y
, QImode
, y_addr
);
1486 emit_move_insn (x
, y
);
1488 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
1489 true, OPTAB_LIB_WIDEN
);
1491 emit_move_insn (iter
, tmp
);
1493 emit_label (cmp_label
);
1495 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
1496 true, top_label
, REG_BR_PROB_BASE
* 90 / 100);
1499 /* Copy all or part of a value X into registers starting at REGNO.
1500 The number of registers to be filled is NREGS. */
1503 move_block_to_reg (int regno
, rtx x
, int nregs
, enum machine_mode mode
)
1506 #ifdef HAVE_load_multiple
1514 if (CONSTANT_P (x
) && !targetm
.legitimate_constant_p (mode
, x
))
1515 x
= validize_mem (force_const_mem (mode
, x
));
1517 /* See if the machine can do this with a load multiple insn. */
1518 #ifdef HAVE_load_multiple
1519 if (HAVE_load_multiple
)
1521 last
= get_last_insn ();
1522 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1530 delete_insns_since (last
);
1534 for (i
= 0; i
< nregs
; i
++)
1535 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1536 operand_subword_force (x
, i
, mode
));
1539 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1540 The number of registers to be filled is NREGS. */
1543 move_block_from_reg (int regno
, rtx x
, int nregs
)
1550 /* See if the machine can do this with a store multiple insn. */
1551 #ifdef HAVE_store_multiple
1552 if (HAVE_store_multiple
)
1554 rtx last
= get_last_insn ();
1555 rtx pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1563 delete_insns_since (last
);
1567 for (i
= 0; i
< nregs
; i
++)
1569 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1573 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1577 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1578 ORIG, where ORIG is a non-consecutive group of registers represented by
1579 a PARALLEL. The clone is identical to the original except in that the
1580 original set of registers is replaced by a new set of pseudo registers.
1581 The new set has the same modes as the original set. */
1584 gen_group_rtx (rtx orig
)
1589 gcc_assert (GET_CODE (orig
) == PARALLEL
);
1591 length
= XVECLEN (orig
, 0);
1592 tmps
= XALLOCAVEC (rtx
, length
);
1594 /* Skip a NULL entry in first slot. */
1595 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
1600 for (; i
< length
; i
++)
1602 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
1603 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
1605 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
1608 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
1611 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1612 except that values are placed in TMPS[i], and must later be moved
1613 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1616 emit_group_load_1 (rtx
*tmps
, rtx dst
, rtx orig_src
, tree type
, int ssize
)
1620 enum machine_mode m
= GET_MODE (orig_src
);
1622 gcc_assert (GET_CODE (dst
) == PARALLEL
);
1625 && !SCALAR_INT_MODE_P (m
)
1626 && !MEM_P (orig_src
)
1627 && GET_CODE (orig_src
) != CONCAT
)
1629 enum machine_mode imode
= int_mode_for_mode (GET_MODE (orig_src
));
1630 if (imode
== BLKmode
)
1631 src
= assign_stack_temp (GET_MODE (orig_src
), ssize
);
1633 src
= gen_reg_rtx (imode
);
1634 if (imode
!= BLKmode
)
1635 src
= gen_lowpart (GET_MODE (orig_src
), src
);
1636 emit_move_insn (src
, orig_src
);
1637 /* ...and back again. */
1638 if (imode
!= BLKmode
)
1639 src
= gen_lowpart (imode
, src
);
1640 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
1644 /* Check for a NULL entry, used to indicate that the parameter goes
1645 both on the stack and in registers. */
1646 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1651 /* Process the pieces. */
1652 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1654 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1655 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1656 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1659 /* Handle trailing fragments that run over the size of the struct. */
1660 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1662 /* Arrange to shift the fragment to where it belongs.
1663 extract_bit_field loads to the lsb of the reg. */
1665 #ifdef BLOCK_REG_PADDING
1666 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
1667 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1672 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1673 bytelen
= ssize
- bytepos
;
1674 gcc_assert (bytelen
> 0);
1677 /* If we won't be loading directly from memory, protect the real source
1678 from strange tricks we might play; but make sure that the source can
1679 be loaded directly into the destination. */
1681 if (!MEM_P (orig_src
)
1682 && (!CONSTANT_P (orig_src
)
1683 || (GET_MODE (orig_src
) != mode
1684 && GET_MODE (orig_src
) != VOIDmode
)))
1686 if (GET_MODE (orig_src
) == VOIDmode
)
1687 src
= gen_reg_rtx (mode
);
1689 src
= gen_reg_rtx (GET_MODE (orig_src
));
1691 emit_move_insn (src
, orig_src
);
1694 /* Optimize the access just a bit. */
1696 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (src
))
1697 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
1698 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1699 && bytelen
== GET_MODE_SIZE (mode
))
1701 tmps
[i
] = gen_reg_rtx (mode
);
1702 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
1704 else if (COMPLEX_MODE_P (mode
)
1705 && GET_MODE (src
) == mode
1706 && bytelen
== GET_MODE_SIZE (mode
))
1707 /* Let emit_move_complex do the bulk of the work. */
1709 else if (GET_CODE (src
) == CONCAT
)
1711 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
1712 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
1714 if ((bytepos
== 0 && bytelen
== slen0
)
1715 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
1717 /* The following assumes that the concatenated objects all
1718 have the same size. In this case, a simple calculation
1719 can be used to determine the object and the bit field
1721 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
1722 if (! CONSTANT_P (tmps
[i
])
1723 && (!REG_P (tmps
[i
]) || GET_MODE (tmps
[i
]) != mode
))
1724 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
1725 (bytepos
% slen0
) * BITS_PER_UNIT
,
1726 1, NULL_RTX
, mode
, mode
);
1732 gcc_assert (!bytepos
);
1733 mem
= assign_stack_temp (GET_MODE (src
), slen
);
1734 emit_move_insn (mem
, src
);
1735 tmps
[i
] = extract_bit_field (mem
, bytelen
* BITS_PER_UNIT
,
1736 0, 1, NULL_RTX
, mode
, mode
);
1739 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1740 SIMD register, which is currently broken. While we get GCC
1741 to emit proper RTL for these cases, let's dump to memory. */
1742 else if (VECTOR_MODE_P (GET_MODE (dst
))
1745 int slen
= GET_MODE_SIZE (GET_MODE (src
));
1748 mem
= assign_stack_temp (GET_MODE (src
), slen
);
1749 emit_move_insn (mem
, src
);
1750 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
1752 else if (CONSTANT_P (src
) && GET_MODE (dst
) != BLKmode
1753 && XVECLEN (dst
, 0) > 1)
1754 tmps
[i
] = simplify_gen_subreg (mode
, src
, GET_MODE (dst
), bytepos
);
1755 else if (CONSTANT_P (src
))
1757 HOST_WIDE_INT len
= (HOST_WIDE_INT
) bytelen
;
1765 /* TODO: const_wide_int can have sizes other than this... */
1766 gcc_assert (2 * len
== ssize
);
1767 split_double (src
, &first
, &second
);
1774 else if (REG_P (src
) && GET_MODE (src
) == mode
)
1777 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
1778 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
1782 tmps
[i
] = expand_shift (LSHIFT_EXPR
, mode
, tmps
[i
],
1787 /* Emit code to move a block SRC of type TYPE to a block DST,
1788 where DST is non-consecutive registers represented by a PARALLEL.
1789 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1793 emit_group_load (rtx dst
, rtx src
, tree type
, int ssize
)
1798 tmps
= XALLOCAVEC (rtx
, XVECLEN (dst
, 0));
1799 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
1801 /* Copy the extracted pieces into the proper (probable) hard regs. */
1802 for (i
= 0; i
< XVECLEN (dst
, 0); i
++)
1804 rtx d
= XEXP (XVECEXP (dst
, 0, i
), 0);
1807 emit_move_insn (d
, tmps
[i
]);
1811 /* Similar, but load SRC into new pseudos in a format that looks like
1812 PARALLEL. This can later be fed to emit_group_move to get things
1813 in the right place. */
1816 emit_group_load_into_temps (rtx parallel
, rtx src
, tree type
, int ssize
)
1821 vec
= rtvec_alloc (XVECLEN (parallel
, 0));
1822 emit_group_load_1 (&RTVEC_ELT (vec
, 0), parallel
, src
, type
, ssize
);
1824 /* Convert the vector to look just like the original PARALLEL, except
1825 with the computed values. */
1826 for (i
= 0; i
< XVECLEN (parallel
, 0); i
++)
1828 rtx e
= XVECEXP (parallel
, 0, i
);
1829 rtx d
= XEXP (e
, 0);
1833 d
= force_reg (GET_MODE (d
), RTVEC_ELT (vec
, i
));
1834 e
= alloc_EXPR_LIST (REG_NOTE_KIND (e
), d
, XEXP (e
, 1));
1836 RTVEC_ELT (vec
, i
) = e
;
1839 return gen_rtx_PARALLEL (GET_MODE (parallel
), vec
);
1842 /* Emit code to move a block SRC to block DST, where SRC and DST are
1843 non-consecutive groups of registers, each represented by a PARALLEL. */
1846 emit_group_move (rtx dst
, rtx src
)
1850 gcc_assert (GET_CODE (src
) == PARALLEL
1851 && GET_CODE (dst
) == PARALLEL
1852 && XVECLEN (src
, 0) == XVECLEN (dst
, 0));
1854 /* Skip first entry if NULL. */
1855 for (i
= XEXP (XVECEXP (src
, 0, 0), 0) ? 0 : 1; i
< XVECLEN (src
, 0); i
++)
1856 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0),
1857 XEXP (XVECEXP (src
, 0, i
), 0));
1860 /* Move a group of registers represented by a PARALLEL into pseudos. */
1863 emit_group_move_into_temps (rtx src
)
1865 rtvec vec
= rtvec_alloc (XVECLEN (src
, 0));
1868 for (i
= 0; i
< XVECLEN (src
, 0); i
++)
1870 rtx e
= XVECEXP (src
, 0, i
);
1871 rtx d
= XEXP (e
, 0);
1874 e
= alloc_EXPR_LIST (REG_NOTE_KIND (e
), copy_to_reg (d
), XEXP (e
, 1));
1875 RTVEC_ELT (vec
, i
) = e
;
1878 return gen_rtx_PARALLEL (GET_MODE (src
), vec
);
1881 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1882 where SRC is non-consecutive registers represented by a PARALLEL.
1883 SSIZE represents the total size of block ORIG_DST, or -1 if not
1887 emit_group_store (rtx orig_dst
, rtx src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
1890 int start
, finish
, i
;
1891 enum machine_mode m
= GET_MODE (orig_dst
);
1893 gcc_assert (GET_CODE (src
) == PARALLEL
);
1895 if (!SCALAR_INT_MODE_P (m
)
1896 && !MEM_P (orig_dst
) && GET_CODE (orig_dst
) != CONCAT
)
1898 enum machine_mode imode
= int_mode_for_mode (GET_MODE (orig_dst
));
1899 if (imode
== BLKmode
)
1900 dst
= assign_stack_temp (GET_MODE (orig_dst
), ssize
);
1902 dst
= gen_reg_rtx (imode
);
1903 emit_group_store (dst
, src
, type
, ssize
);
1904 if (imode
!= BLKmode
)
1905 dst
= gen_lowpart (GET_MODE (orig_dst
), dst
);
1906 emit_move_insn (orig_dst
, dst
);
1910 /* Check for a NULL entry, used to indicate that the parameter goes
1911 both on the stack and in registers. */
1912 if (XEXP (XVECEXP (src
, 0, 0), 0))
1916 finish
= XVECLEN (src
, 0);
1918 tmps
= XALLOCAVEC (rtx
, finish
);
  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
        {
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);
        }
      else
        tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[start],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  start++;
                }
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  finish--;
                }
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        adj_bytelen = ssize - bytepos;
      else
        adj_bytelen = bytelen;

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + adj_bytelen
              <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              enum machine_mode dest_mode = GET_MODE (dest);
              enum machine_mode tmp_mode = GET_MODE (tmps[i]);

              gcc_assert (bytepos == 0 && XVECLEN (src, 0));

              if (GET_MODE_ALIGNMENT (dest_mode)
                  >= GET_MODE_ALIGNMENT (tmp_mode))
                {
                  dest = assign_stack_temp (dest_mode,
                                            GET_MODE_SIZE (dest_mode));
                  emit_move_insn (adjust_address (dest,
                                                  tmp_mode,
                                                  bytepos),
                                  tmps[i]);
                  dst = dest;
                }
              else
                {
                  dest = assign_stack_temp (tmp_mode,
                                            GET_MODE_SIZE (tmp_mode));
                  emit_move_insn (dest, tmps[i]);
                  dst = adjust_address (dest, dest_mode, bytepos);
                }
              break;
            }
        }

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      shift, tmps[i], 0);
            }

          /* Make sure not to write past the end of the struct.  */
          store_bit_field (dest,
                           adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                           bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
                           VOIDmode, tmps[i]);
        }

      /* Optimize the access just a bit.  */
      else if (MEM_P (dest)
               && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
                   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
               && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
               && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);

      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         0, 0, mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}

/* Return a form of X that does not use a PARALLEL.  TYPE is the type
   of the value stored in X.  */

rtx
maybe_emit_group_store (rtx x, tree type)
{
  enum machine_mode mode = TYPE_MODE (type);
  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
  if (GET_CODE (x) == PARALLEL)
    {
      rtx result = gen_reg_rtx (mode);
      emit_group_store (result, x, type, int_size_in_bytes (type));
      return result;
    }
  return x;
}

/* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.

   This is used on targets that return BLKmode values in registers.  */

void
copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  enum machine_mode mode = GET_MODE (srcreg);
  enum machine_mode tmode = GET_MODE (target);
  enum machine_mode copy_mode;

  /* BLKmode registers created in the back-end shouldn't have survived.  */
  gcc_assert (mode != BLKmode);

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* We can use a single move if we have an exact mode for the size.  */
  else if (MEM_P (target)
           && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
               || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
           && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (adjust_address (target, mode, 0), srcreg);
      return;
    }

  /* And if we additionally have the same mode for a register.  */
  else if (REG_P (target)
           && GET_MODE (target) == mode
           && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (target, srcreg);
      return;
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */
  if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
    {
      srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
      mode = word_mode;
    }

  /* Copy the structure BITSIZE bits at a time.  If the target lives in
     memory, take care of not reading/writing past its end by selecting
     a copy mode suited to BITSIZE.  This should always be possible given
     how it is computed.

     If the target lives in a register, make sure not to select a copy mode
     larger than the mode of the register.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */

  copy_mode = word_mode;
  if (MEM_P (target))
    {
      enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
      if (mem_mode != BLKmode)
        copy_mode = mem_mode;
    }
  else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
    copy_mode = tmode;

  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
        dst = target;
      else if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, copy_mode, copy_mode));
    }
}

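/* Worked example (added for illustration, not in the original source):
   consider a 6-byte struct returned in two 32-bit words on a big-endian
   target whose ABI does not return in the MSB.  Then bytes % UNITS_PER_WORD
   is 6 % 4 = 2, so

     padding_correction = BITS_PER_WORD - (2 * BITS_PER_UNIT) = 32 - 16 = 16,

   i.e. the copy loop above starts XBITPOS at 16 and thereby skips the 16
   left-padding bits of the first source word before extracting data.  */
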
/* Copy BLKmode value SRC into a register of mode MODE.  Return the
   register if it contains any data, otherwise return null.

   This is used on targets that return BLKmode values in registers.  */

rtx
copy_blkmode_to_reg (enum machine_mode mode, tree src)
{
  int i, n_regs;
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
  unsigned int bitsize;
  rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
  enum machine_mode dst_mode;

  gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);

  x = expand_normal (src);

  bytes = int_size_in_bytes (TREE_TYPE (src));
  if (bytes == 0)
    return NULL_RTX;

  /* If the structure doesn't take up a whole number of words, see
     whether the register value should be padded on the left or on
     the right.  Set PADDING_CORRECTION to the number of padding
     bits needed on the left side.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (TREE_TYPE (src))
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
                                           * BITS_PER_UNIT));

  n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  dst_words = XALLOCAVEC (rtx, n_regs);
  bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);

  /* Copy the structure BITSIZE bits at a time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new destination pseudo each time xbitpos is
         on a word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        {
          /* Generate an appropriate register.  */
          dst_word = gen_reg_rtx (word_mode);
          dst_words[xbitpos / BITS_PER_WORD] = dst_word;

          /* Clear the destination before we move anything into it.  */
          emit_move_insn (dst_word, CONST0_RTX (word_mode));
        }

      /* We need a new source operand each time bitpos is on a word
         boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);

      /* Use bitpos for the source extraction (left justified) and
         xbitpos for the destination store (right justified).  */
      store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
                       0, 0, word_mode,
                       extract_bit_field (src_word, bitsize,
                                          bitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode));
    }

  if (mode == BLKmode)
    {
      /* Find the smallest integer mode large enough to hold the
         entire structure.  */
      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        /* Have we found a large enough mode?  */
        if (GET_MODE_SIZE (mode) >= bytes)
          break;

      /* A suitable mode should have been found.  */
      gcc_assert (mode != VOIDmode);
    }

  if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
    dst_mode = word_mode;
  else
    dst_mode = mode;
  dst = gen_reg_rtx (dst_mode);

  for (i = 0; i < n_regs; i++)
    emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);

  if (mode != dst_mode)
    dst = gen_lowpart (mode, dst);

  return dst;
}

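/* Illustrative sketch (added; compiled out): a hypothetical caller in the
   style of return-value expansion, packing a BLKmode value into a register
   via the helper above.  `retval_expr' is an assumed tree operand.  */
#if 0
static rtx
expand_blkmode_return_sketch (tree retval_expr)
{
  /* Pack the bits into a word-sized pseudo; a null result means the
     value occupied no bytes at all.  */
  rtx reg = copy_blkmode_to_reg (word_mode, retval_expr);
  return reg ? reg : const0_rtx;
}
#endif
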
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
clobber_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}

/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr (tree name, enum tree_code code)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}

#ifdef HAVE_conditional_move
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is CLASS.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
    return NULL;

  return def_stmt;
}
#endif

/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
                     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode;
  enum insn_code icode;
  int reverse;
  /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
  rtx cst ATTRIBUTE_UNUSED;

  if (len == 0)
    return 1;

  if (! (memsetp
         ? SET_BY_PIECES_P (len, align)
         : STORE_BY_PIECES_P (len, align)))
    return 0;

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1 && l > 0)
        {
          mode = widest_int_mode_for_size (max_size);

          if (mode == VOIDmode)
            break;

          icode = optab_handler (mov_optab, mode);
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!targetm.legitimate_constant_p (mode, cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}

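/* Illustrative sketch (added; compiled out): the shape of a CONSTFUN
   callback, in the style of the string builtins.  DATA is assumed to
   point at the constant string being stored; c_readstr is the helper
   the builtins use to turn its bytes into an rtx of mode MODE.  */
#if 0
static rtx
example_constfun (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
{
  const char *str = (const char *) data;
  /* Return the next GET_MODE_SIZE (mode) bytes of STR as a constant.  */
  return c_readstr (str + offset, mode);
}

/* Typical query: can_store_by_pieces (len, example_constfun,
                                       (void *) str, align, false).  */
#endif
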
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                 void *constfundata, unsigned int align, bool memsetp, int endp)
{
  enum machine_mode to_addr_mode = get_address_mode (to);
  struct store_by_pieces_d data;

  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (memsetp
              ? SET_BY_PIECES_P (len, align)
              : STORE_BY_PIECES_P (len, align));
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_to_mode_reg (to_addr_mode,
                                                 plus_constant (to_addr_mode,
                                                                data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}

/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces_d data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}

/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
                   unsigned int align ATTRIBUTE_UNUSED)
{
  enum machine_mode to_addr_mode = get_address_mode (data->to);
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.
         MODE might not be used depending on the definitions of the
         USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
        = widest_int_mode_for_size (max_size);

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_to_mode_reg (to_addr_mode,
                                            plus_constant (to_addr_mode,
                                                           to_addr,
                                                           data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if ( !data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1 && data->len > 0)
    {
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}

/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
                   struct store_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->to_addr))));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (size,
                                                GET_MODE (data->to_addr))));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}

/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
                     unsigned int expected_align, HOST_WIDE_INT expected_size,
                     unsigned HOST_WIDE_INT min_size,
                     unsigned HOST_WIDE_INT max_size,
                     unsigned HOST_WIDE_INT probable_max_size)
{
  enum machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && CONST_INT_P (size)
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
        {
          emit_move_insn (object, zero);
          return NULL;
        }

      if (COMPLEX_MODE_P (mode))
        {
          zero = CONST0_RTX (GET_MODE_INNER (mode));
          if (zero != NULL)
            {
              write_complex_part (object, zero, 0);
              write_complex_part (object, zero, 1);
              return NULL;
            }
        }
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (CONST_INT_P (size)
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
                                   expected_align, expected_size,
                                   min_size, max_size, probable_max_size))
    ;
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
    return set_storage_via_libcall (object, size, const0_rtx,
                                    method == BLOCK_OP_TAILCALL);
  else
    gcc_unreachable ();

  return NULL;
}

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return clear_storage_hints (object, size, method, 0, -1, min, max, max);
}

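/* Illustrative sketch (added; compiled out): how a caller might zero a
   fixed-size BLKmode object using the entry point above.  */
#if 0
static void
clear_object_sketch (rtx mem)
{
  gcc_assert (MEM_P (mem) && GET_MODE (mem) == BLKmode);
  clear_storage (mem, GEN_INT (64), BLOCK_OP_NORMAL);
}
#endif
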
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those into new pseudos into a VAR_DECL and use them later.  */

  object = copy_addr_to_reg (XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  size_tree = make_tree (sizetype, size);
  val_tree = make_tree (integer_type_node, val);

  fn = clear_storage_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}

/* A subroutine of set_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  */

tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       integer_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
    }

  return block_clear_fn;
}

/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
                        unsigned int expected_align, HOST_WIDE_INT expected_size,
                        unsigned HOST_WIDE_INT min_size,
                        unsigned HOST_WIDE_INT max_size,
                        unsigned HOST_WIDE_INT probable_max_size)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT)expected_size > max_size)
        expected_size = max_size;
      if ((unsigned HOST_WIDE_INT)expected_size < min_size)
        expected_size = min_size;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (setmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  Since SIZE is within the Pmode address
             space, we limit MODE to Pmode.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || max_size <= (GET_MODE_MASK (mode) >> 1)
              || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
        {
          struct expand_operand ops[9];
          unsigned int nops;

          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

          create_fixed_operand (&ops[0], object);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[1], size, mode, true);
          create_convert_operand_from (&ops[2], val, byte_mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops >= 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (nops >= 8)
            {
              create_integer_operand (&ops[6], min_size);
              /* If we can not represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) max_size != -1)
                create_integer_operand (&ops[7], max_size);
              else
                create_fixed_operand (&ops[7], NULL);
            }
          if (nops == 9)
            {
              /* If we can not represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) probable_max_size != -1)
                create_integer_operand (&ops[8], probable_max_size);
              else
                create_fixed_operand (&ops[8], NULL);
            }
          if (maybe_expand_insn (code, nops, ops))
            return true;
        }
    }

  return false;
}

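/* For reference (added note): the 4-, 6-, 8- and 9-operand setmem forms
   accepted above take (destination, length, value, alignment), optionally
   followed by (expected_align, expected_size) and then (min_size, max_size,
   probable_max_size), matching the operand setup code above.  */
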
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  enum machine_mode cmode;
  enum machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
                                         imag_p ? GET_MODE_SIZE (imode) : 0),
                      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
        {
          emit_move_insn (part, val);
          return;
        }
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
}

/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  enum machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
        {
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (CONSTANT_CLASS_P (part))
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
        }
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
                              imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
        return ret;
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
                            true, NULL_RTX, imode, imode);
}

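/* Illustrative sketch (added; compiled out): the two accessors above
   compose naturally; e.g. swapping the halves of a complex value.  */
#if 0
static void
emit_complex_swap_sketch (rtx dst, rtx src)
{
  rtx re = read_complex_part (src, false);
  rtx im = read_complex_part (src, true);
  write_complex_part (dst, im, false);  /* imag part into real slot.  */
  write_complex_part (dst, re, true);   /* real part into imag slot.  */
}
#endif
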
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (enum machine_mode new_mode,
                       enum machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
         size in bytes is supposed to be the same.  */
      if (reload_in_progress)
        {
          /* Copy the MEM to change the mode and move any
             substitutions from the old MEM to the new one.  */
          ret = adjust_address_nv (x, new_mode, 0);
          copy_replacements (x, ret);
        }
      else
        ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
         that the new mode is ok for a hard register.  If we were to use
         simplify_gen_subreg, we would create the subreg, but would
         probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
         we want.  Which is needed for CCmodes on some targets.  */
      if (force)
        ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
        ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}

/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
{
  enum machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL_RTX;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL_RTX;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL_RTX;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL_RTX;
  return emit_insn (GEN_FCN (code) (x, y));
}

/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

rtx
emit_move_resolve_push (enum machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (CONST_INT_P (XEXP (expr, 1)));
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
        val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
                              gen_int_mode (adjust, Pmode), stack_pointer_rtx,
                              0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}

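/* Worked example (added): for (mem:SI (pre_dec:SI sp)) on a 32-bit
   target, ADJUST is -4, so the code above emits sp = sp - 4 and returns
   (mem:SI sp).  For the post_dec variant the returned address is sp + 4,
   the location the auto-decrement form would have written to.  */
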
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
{
  enum machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;
    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                         read_complex_part (y, !imag_first));
}

/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}

/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once, except
     for floating point where we favor moving as parts if this is easy.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
      && !(REG_P (x)
           && HARD_REGISTER_P (x)
           && hard_regno_nregs[REGNO (x)][mode] == 1)
      && !(REG_P (y)
           && HARD_REGISTER_P (y)
           && hard_regno_nregs[REGNO (y)][mode] == 1))
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
           && (!STRICT_ALIGNMENT
               || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx ret;

      /* For memory to memory moves, optimal behavior can be had with the
         existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
        {
          emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
                           BLOCK_OP_NO_LIBCALL);
          return get_last_insn ();
        }

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
        return ret;
    }

  return emit_move_complex_parts (x, y);
}

/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
{
  rtx ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
        {
          x = emit_move_change_mode (CCmode, mode, x, true);
          y = emit_move_change_mode (CCmode, mode, y, true);
          return emit_insn (GEN_FCN (code) (x, y));
        }
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}

/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  enum machine_mode innermode, innermostmode;
  int offset;
  if (GET_CODE (op) != SUBREG)
    return false;
  innermode = GET_MODE (op);
  innermostmode = GET_MODE (SUBREG_REG (op));
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
  if (SUBREG_BYTE (op) == 0
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
    {
      int difference = (GET_MODE_SIZE (innermostmode)
                        - GET_MODE_SIZE (innermode));
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }
  if (offset >= GET_MODE_SIZE (innermostmode)
      || offset <= -GET_MODE_SIZE (word_mode))
    return true;
  return false;
}

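/* Worked example (added): take (subreg:DI (reg:SI) 0) on a 32-bit
   little-endian target.  For I == 1, OFFSET is 1 * UNITS_PER_WORD + 0
   = 4, which is >= GET_MODE_SIZE (SImode) == 4, so word 1 lies entirely
   in the undefined bits of the paradoxical subreg and the function
   returns true; emit_move_multi_word below can then skip that word.  */
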
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx
emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
{
  rtx last_insn = 0;
  rtx seq, inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart;

      /* Do not generate code for a move if it would come entirely
         from the undefined bits of a paradoxical subreg.  */
      if (undefined_operand_subword_p (y, i))
        continue;

      ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
         constant.  Otherwise, force it into a register.  Then we must
         be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
        {
          y = use_anchored_address (force_const_mem (mode, y));
          ypart = operand_subword (y, i, 1, mode);
        }
      else if (ypart == 0)
        ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_clobber (x);

  emit_insn (seq);

  return last_insn;
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode);
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
        return result;
      else
        return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);

      if (ret)
        {
          if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
            return ret;
        }
    }

  return emit_move_multi_word (mode, x, y);
}

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  gcc_assert (mode != BLKmode
              && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!targetm.legitimate_constant_p (mode, y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
          else
            y = use_anchored_address (y);
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
                                         MEM_ADDR_SPACE (x))
          && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
                                        MEM_ADDR_SPACE (y)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));

  return last_insn;
}

/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;
  int oldcost, newcost;
  bool speed = optimize_insn_for_speed_p ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  if (targetm.legitimate_constant_p (dstmode, y))
    oldcost = set_src_cost (y, speed);
  else
    oldcost = set_src_cost (force_const_mem (dstmode, y), speed);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (targetm.legitimate_constant_p (srcmode, trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (!insn_operand_matches (ic, 1, trunc_y))
            continue;
          /* This is valid, but may not be cheaper than the original.  */
          newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
                                  speed);
          if (oldcost < newcost)
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        {
          trunc_y = force_const_mem (srcmode, trunc_y);
          /* This is valid, but may not be cheaper than the original.  */
          newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
                                  speed);
          if (oldcost < newcost)
            continue;
          trunc_y = validize_mem (trunc_y);
        }
      else
        continue;

      /* For CSE's benefit, force the compressed constant pool entry
         into a new pseudo.  This constant may be used in different modes,
         and if not, combine will put things back together for us.  */
      trunc_y = force_reg (srcmode, trunc_y);

      /* If x is a hard register, perform the extension into a pseudo,
         so that e.g. stack realignment code is aware of it.  */
      rtx target = x;
      if (REG_P (x) && HARD_REGISTER_P (x))
        target = gen_reg_rtx (dstmode);

      emit_unop_insn (ic, target, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (target))
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      if (target == x)
        return last_insn;
      else
        return emit_move_insn (x, target);
    }

  return NULL_RTX;
}

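/* Worked example (added): moving the DFmode constant 1.5 into a
   register.  1.5 truncates exactly to SFmode, so on a target with an
   extendsfdf2 pattern whose SFmode constant (or constant-pool slot) is
   cheaper than the DFmode one, the move is emitted as a float_extend
   of the narrower constant instead of a plain DFmode load.  */
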
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (Pmode, size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp,
                             gen_int_mode (extra, Pmode),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (Pmode, temp, extra);
    }
  else
    {
      if (CONST_INT_P (size))
        temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (Pmode, size,
                                                               extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

/* A utility routine that returns the base of an auto-inc memory, or NULL.  */

static rtx
mem_autoinc_base (rtx mem)
{
  if (MEM_P (mem))
    {
      rtx addr = XEXP (mem, 0);
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
        return XEXP (addr, 0);
    }
  return NULL;
}

/* A utility routine used here, in reload, and in try_split.  The insns
   after PREV up to and including LAST are known to adjust the stack,
   with a final value of END_ARGS_SIZE.  Iterate backward from LAST
   placing notes as appropriate.  PREV may be NULL, indicating the
   entire insn sequence prior to LAST should be scanned.

   The set of allowed stack pointer modifications is small:
     (1) One or more auto-inc style memory references (aka pushes),
     (2) One or more addition/subtraction with the SP as destination,
     (3) A single move insn with the SP as destination,
     (4) A call_pop insn,
     (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.

   Insns in the sequence that do not modify the SP are ignored,
   except for noreturn calls.

   The return value is the amount of adjustment that can be trivially
   verified, via immediate operand or auto-inc.  If the adjustment
   cannot be trivially extracted, the return value is INT_MIN.  */

HOST_WIDE_INT
find_args_size_adjust (rtx insn)
{
  rtx dest, set, pat;
  int i;

  pat = PATTERN (insn);
  set = NULL;

  /* Look for a call_pop pattern.  */
  if (CALL_P (insn))
    {
      /* We have to allow non-call_pop patterns for the case
         of emit_single_push_insn of a TLS address.  */
      if (GET_CODE (pat) != PARALLEL)
        return 0;

      /* All call_pop have a stack pointer adjust in the parallel.
         The call itself is always first, and the stack adjust is
         usually last, so search from the end.  */
      for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
        {
          set = XVECEXP (pat, 0, i);
          if (GET_CODE (set) != SET)
            continue;
          dest = SET_DEST (set);
          if (dest == stack_pointer_rtx)
            break;
        }

      /* We'd better have found the stack pointer adjust.  */
      if (i == 0)
        return 0;

      /* Fall through to process the extracted SET and DEST
         as if it was a standalone insn.  */
    }
  else if (GET_CODE (pat) == SET)
    set = pat;
  else if ((set = single_set (insn)) != NULL)
    ;
  else if (GET_CODE (pat) == PARALLEL)
    {
      /* ??? Some older ports use a parallel with a stack adjust
         and a store for a PUSH_ROUNDING pattern, rather than a
         PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
      /* ??? See h8300 and m68k, pushqi1.  */
      for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
        {
          set = XVECEXP (pat, 0, i);
          if (GET_CODE (set) != SET)
            continue;
          dest = SET_DEST (set);
          if (dest == stack_pointer_rtx)
            break;

          /* We do not expect an auto-inc of the sp in the parallel.  */
          gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
          gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
                               != stack_pointer_rtx);
        }
      if (i < 0)
        return 0;
    }
  else
    return 0;

  dest = SET_DEST (set);

  /* Look for direct modifications of the stack pointer.  */
  if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
    {
      /* Look for a trivial adjustment, otherwise assume nothing.  */
      /* Note that the SPU restore_stack_block pattern refers to
         the stack pointer in V4SImode.  Consider that non-trivial.  */
      if (SCALAR_INT_MODE_P (GET_MODE (dest))
          && GET_CODE (SET_SRC (set)) == PLUS
          && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
          && CONST_INT_P (XEXP (SET_SRC (set), 1)))
        return INTVAL (XEXP (SET_SRC (set), 1));
      /* ??? Reload can generate no-op moves, which will be cleaned
         up later.  Recognize it and continue searching.  */
      else if (rtx_equal_p (dest, SET_SRC (set)))
        return 0;
      else
        return HOST_WIDE_INT_MIN;
    }
  else
    {
      rtx mem, addr;

      /* Otherwise only think about autoinc patterns.  */
      if (mem_autoinc_base (dest) == stack_pointer_rtx)
        {
          mem = dest;
          gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
                               != stack_pointer_rtx);
        }
      else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
        mem = SET_SRC (set);
      else
        return 0;

      addr = XEXP (mem, 0);
      switch (GET_CODE (addr))
        {
        case PRE_INC:
        case POST_INC:
          return GET_MODE_SIZE (GET_MODE (mem));
        case PRE_DEC:
        case POST_DEC:
          return -GET_MODE_SIZE (GET_MODE (mem));
        case PRE_MODIFY:
        case POST_MODIFY:
          addr = XEXP (addr, 1);
          gcc_assert (GET_CODE (addr) == PLUS);
          gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
          gcc_assert (CONST_INT_P (XEXP (addr, 1)));
          return INTVAL (XEXP (addr, 1));
        default:
          gcc_unreachable ();
        }
    }
}

int
fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
{
  int args_size = end_args_size;
  bool saw_unknown = false;
  rtx insn;

  for (insn = last; insn != prev; insn = PREV_INSN (insn))
    {
      HOST_WIDE_INT this_delta;

      if (!NONDEBUG_INSN_P (insn))
        continue;

      this_delta = find_args_size_adjust (insn);
      if (this_delta == 0)
        {
          if (!CALL_P (insn)
              || ACCUMULATE_OUTGOING_ARGS
              || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
            continue;
        }

      gcc_assert (!saw_unknown);
      if (this_delta == HOST_WIDE_INT_MIN)
        saw_unknown = true;

      add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
#ifdef STACK_GROWS_DOWNWARD
      this_delta = -(unsigned HOST_WIDE_INT) this_delta;
#endif
      args_size -= this_delta;
    }

  return saw_unknown ? INT_MIN : args_size;
}

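/* Worked example (added): after two SImode pushes on a 32-bit
   STACK_GROWS_DOWNWARD target, END_ARGS_SIZE is 8.  Walking backward,
   the second push receives a REG_ARGS_SIZE note of 8 and the first a
   note of 4: each push's adjustment of -4 is negated to +4 and
   subtracted from the running ARGS_SIZE.  */
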
#ifdef PUSH_ROUNDING
/* Emit single push insn.  */

static void
emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = optab_handler (push_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];

      create_input_operand (&ops[0], x, mode);
      if (maybe_expand_insn (icode, 1, ops))
        return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
                      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                                    sub_optab,
#else
                                    add_optab,
#endif
                                    stack_pointer_rtx,
                                    gen_int_mode (rounded_size, Pmode),
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
        /* We have already decremented the stack pointer, so get the
           previous value.  */
        offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
        /* We have already incremented the stack pointer, so get the
           previous value.  */
        offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                gen_int_mode (offset, Pmode));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                gen_int_mode (-(HOST_WIDE_INT) rounded_size,
                                              Pmode));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                gen_int_mode (rounded_size, Pmode));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (cfun->tail_call_marked)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}

/* Emit and annotate a single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  int delta, old_delta = stack_pointer_delta;
  rtx prev = get_last_insn ();
  rtx last;

  emit_single_push_insn_1 (mode, x, type);

  last = get_last_insn ();

  /* Notice the common case where we emitted exactly one insn.  */
  if (PREV_INSN (last) == prev)
    {
      add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
      return;
    }

  delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
  gcc_assert (delta == INT_MIN || delta == old_delta);
}
#endif

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
		unsigned int align, int partial, rtx reg, int extra,
		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
		rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x;

  if (mode == BLKmode
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      if (mode != BLKmode)
	{
	  /* A value is to be stored in an insufficiently aligned
	     stack slot; copy via a suitably aligned slot if
	     necessary.  */
	  size = GEN_INT (GET_MODE_SIZE (mode));
	  if (!MEM_P (xinner))
	    {
	      temp = assign_temp (type, 1, 1);
	      emit_move_insn (temp, xinner);
	      xinner = temp;
	    }
	}

      gcc_assert (size);

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && CONST_INT_P (size)
	  && skip == 0
	  && MEM_ALIGN (xinner) >= align
	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
		  == (align / BITS_PER_UNIT)))
	  && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
	}
      else
#endif /* PUSH_ROUNDING  */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (CONST_INT_P (size))
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     gen_int_mode (used, GET_MODE (size)),
				     NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (CONST_INT_P (args_so_far))
	    temp = memory_address (BLKmode,
				   plus_constant (Pmode, args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (Pmode,
						  gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (target, align);

	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
      for (i = size - 1; i >= not_stack; i--)
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  if (CONST_INT_P (args_so_far))
	    addr
	      = memory_address (mode,
				plus_constant (Pmode, args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  dest = gen_rtx_MEM (mode, addr);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (dest, align);

	  emit_move_insn (dest, x);
	}
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, type, -1);
      else
	{
	  gcc_assert (partial % UNITS_PER_WORD == 0);
	  move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
	}
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
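/* Illustrative sketch (added for exposition; the numbers are hypothetical):
   for a four-word scalar argument with PARTIAL covering the first two
   words, the "scalar partly in registers" arm above pushes only words 2
   and 3 via recursive emit_push_insn calls on word_mode subwords, and the
   trailing move_block_to_reg / emit_group_load step then loads words 0
   and 1 into REG.  The actual split is controlled by PARTIAL,
   PARM_BOUNDARY and UNITS_PER_WORD.  */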
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	   /* Only registers can be subtargets.  */
	   || !REG_P (x)
	   /* Don't use hard regs to avoid extending their life.  */
	   || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
				 unsigned HOST_WIDE_INT bitpos,
				 unsigned HOST_WIDE_INT bitregion_start,
				 unsigned HOST_WIDE_INT bitregion_end,
				 enum machine_mode mode1, rtx str_rtx,
				 tree to, tree src)
{
  enum machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;
  gimple srcstmt;
  enum tree_code code;

  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  STRIP_NOPS (src);
  if (TREE_CODE (src) != SSA_NAME)
    return false;
  if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  srcstmt = get_gimple_for_ssa_name (src);
  if (!srcstmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
    return false;

  code = gimple_assign_rhs_code (srcstmt);

  op0 = gimple_assign_rhs1 (srcstmt);

  /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
     to find its initialization.  Hopefully the initialization will
     be from a bitfield load.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple op0stmt = get_gimple_for_ssa_name (op0);

      /* We want to eventually have OP0 be the same as TO, which
	 should be a bitfield.  */
      if (!op0stmt
	  || !is_gimple_assign (op0stmt)
	  || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
	return false;
      op0 = gimple_assign_rhs1 (op0stmt);
    }

  op1 = gimple_assign_rhs2 (srcstmt);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
	str_mode = word_mode;
      str_mode = get_best_mode (bitsize, bitpos,
				bitregion_start, bitregion_end,
				MEM_ALIGN (str_rtx), str_mode, 0);
      if (str_mode == VOIDmode)
	return false;
      str_bitsize = GET_MODE_BITSIZE (str_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
	 where we don't need to do any masking and also
	 1 bit bitfields where xor can be used.
	 We might win by one instruction for the other bitfields
	 too if insv/extv instructions aren't used, so that
	 can be added later.  */
      if (bitpos + bitsize != str_bitsize
	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
	break;

      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = code == PLUS_EXPR ? add_optab : sub_optab;
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
	{
	  value = expand_and (str_mode, value, const1_rtx, NULL);
	  binop = xor_optab;
	}
      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
	break;
      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != str_bitsize)
	{
	  rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
				   str_mode);
	  value = expand_and (str_mode, value, mask, NULL_RTX);
	}
      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
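/* Illustrative sketch (added for exposition; hypothetical source code):
   given

       struct s { unsigned f : 1; } x;
       ...
       x.f ^= 1;

   the BIT_XOR_EXPR arm above can rewrite the read-modify-write of the
   bitfield as a single XOR of a shifted constant mask against the word
   containing the field, instead of an extract/modify/insert sequence.  */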
/* In the C++ memory model, consecutive bit fields in a structure are
   considered one memory location.

   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
   returns the bit range of consecutive bits in which this COMPONENT_REF
   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
   and *OFFSET may be adjusted in the process.

   If the access does not need to be restricted, 0 is returned in both
   *BITSTART and *BITEND.  */

static void
get_bit_range (unsigned HOST_WIDE_INT *bitstart,
	       unsigned HOST_WIDE_INT *bitend,
	       tree exp,
	       HOST_WIDE_INT *bitpos,
	       tree *offset)
{
  HOST_WIDE_INT bitoffset;
  tree field, repr;

  gcc_assert (TREE_CODE (exp) == COMPONENT_REF);

  field = TREE_OPERAND (exp, 1);
  repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
  /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
     need to limit the range we can access.  */
  if (!repr)
    {
      *bitstart = *bitend = 0;
      return;
    }

  /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
     part of a larger bit field, then the representative does not serve any
     useful purpose.  This can occur in Ada.  */
  if (handled_component_p (TREE_OPERAND (exp, 0)))
    {
      enum machine_mode rmode;
      HOST_WIDE_INT rbitsize, rbitpos;
      tree roffset;
      int unsignedp;
      int volatilep = 0;
      get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
			   &roffset, &rmode, &unsignedp, &volatilep, false);
      if ((rbitpos % BITS_PER_UNIT) != 0)
	{
	  *bitstart = *bitend = 0;
	  return;
	}
    }

  /* Compute the adjustment to bitpos from the offset of the field
     relative to the representative.  DECL_FIELD_OFFSET of field and
     repr are the same by construction if they are not constants,
     see finish_bitfield_layout.  */
  if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
      && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
    bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
		 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
  else
    bitoffset = 0;
  bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
		- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));

  /* If the adjustment is larger than bitpos, we would have a negative bit
     position for the lower bound and this may wreak havoc later.  Adjust
     offset and bitpos to make the lower bound non-negative in that case.  */
  if (bitoffset > *bitpos)
    {
      HOST_WIDE_INT adjust = bitoffset - *bitpos;
      gcc_assert ((adjust % BITS_PER_UNIT) == 0);

      *bitpos += adjust;
      if (*offset == NULL_TREE)
	*offset = size_int (-adjust / BITS_PER_UNIT);
      else
	*offset
	  = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
      *bitstart = 0;
    }
  else
    *bitstart = *bitpos - bitoffset;

  *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
}
/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
   in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
   DECL_RTL was not set yet, return NORTL.  */

static inline bool
addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
{
  if (TREE_CODE (addr) != ADDR_EXPR)
    return false;

  tree base = TREE_OPERAND (addr, 0);

  if (!DECL_P (base)
      || TREE_ADDRESSABLE (base)
      || DECL_MODE (base) == BLKmode)
    return false;

  if (!DECL_RTL_SET_P (base))
    return nortl;

  return (!MEM_P (DECL_RTL (base)));
}

/* Returns true if the MEM_REF REF refers to an object that does not
   reside in memory and has non-BLKmode.  */

static inline bool
mem_ref_refers_to_non_mem_p (tree ref)
{
  tree base = TREE_OPERAND (ref, 0);
  return addr_expr_of_non_mem_decl_p_1 (base, false);
}
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */

void
expand_assignment (tree to, tree from, bool nontemporal)
{
  rtx to_rtx = 0;
  rtx result;
  enum machine_mode mode;
  unsigned int align;
  enum insn_code icode;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;

  /* Handle misaligned stores.  */
  mode = TYPE_MODE (TREE_TYPE (to));
  if ((TREE_CODE (to) == MEM_REF
       || TREE_CODE (to) == TARGET_MEM_REF)
      && mode != BLKmode
      && !mem_ref_refers_to_non_mem_p (to)
      && ((align = get_object_alignment (to))
	  < GET_MODE_ALIGNMENT (mode))
      && (((icode = optab_handler (movmisalign_optab, mode))
	   != CODE_FOR_nothing)
	  || SLOW_UNALIGNED_ACCESS (mode, align)))
    {
      rtx reg, mem;

      reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      reg = force_not_mem (reg);
      mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      if (icode != CODE_FOR_nothing)
	{
	  struct expand_operand ops[2];

	  create_fixed_operand (&ops[0], mem);
	  create_input_operand (&ops[1], reg, mode);
	  /* The movmisalign<mode> pattern cannot fail, else the assignment
	     would silently be omitted.  */
	  expand_insn (icode, 2, ops);
	}
      else
	store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
      return;
    }
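  /* Illustrative sketch (added for exposition; hypothetical source code):
     a store such as

	 struct __attribute__((packed)) p { char c; int i; } *q;
	 ...
	 q->i = v;

     can reach the path above with ALIGN below GET_MODE_ALIGNMENT (SImode);
     it is then expanded through movmisalign<mode> when the target provides
     that pattern, and otherwise through store_bit_field.  */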
  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  Same for (partially) storing into a non-memory object.  */
  if (handled_component_p (to)
      || (TREE_CODE (to) == MEM_REF
	  && mem_ref_refers_to_non_mem_p (to))
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      unsigned HOST_WIDE_INT bitregion_start = 0;
      unsigned HOST_WIDE_INT bitregion_end = 0;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep, true);

      /* Make sure bitpos is not negative, it can wreak havoc later.  */
      if (bitpos < 0)
	{
	  gcc_assert (offset == NULL_TREE);
	  offset = size_int (bitpos >> (BITS_PER_UNIT == 8
					? 3 : exact_log2 (BITS_PER_UNIT)));
	  bitpos &= BITS_PER_UNIT - 1;
	}

      if (TREE_CODE (to) == COMPONENT_REF
	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
      /* The C++ memory model naturally applies to byte-aligned fields.
	 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
	 BITSIZE are not byte-aligned, there is no need to limit the range
	 we can access.  This can occur with packed structures in Ada.  */
      else if (bitsize > 0
	       && bitsize % BITS_PER_UNIT == 0
	       && bitpos % BITS_PER_UNIT == 0)
	{
	  bitregion_start = bitpos;
	  bitregion_end = bitpos + bitsize - 1;
	}

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* If the field has a mode, we want to access it in the
	 field's mode, not the computed mode.
	 If a MEM has VOIDmode (external with incomplete type),
	 use BLKmode for it instead.  */
      if (MEM_P (to_rtx))
	{
	  if (mode1 != VOIDmode)
	    to_rtx = adjust_address (to_rtx, mode1, 0);
	  else if (GET_MODE (to_rtx) == VOIDmode)
	    to_rtx = adjust_address (to_rtx, BLKmode, 0);
	}

      if (offset != 0)
	{
	  enum machine_mode address_mode;
	  rtx offset_rtx;

	  if (!MEM_P (to_rtx))
	    {
	      /* We can get constant negative offsets into arrays with broken
		 user code.  Translate this to a trap instead of ICEing.  */
	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
	      expand_builtin_trap ();
	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
	    }

	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
	  address_mode = get_address_mode (to_rtx);
	  if (GET_MODE (offset_rtx) != address_mode)
	    offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);

	  /* If we have an expression in OFFSET_RTX and a non-zero
	     byte offset in BITPOS, adding the byte offset before the
	     OFFSET_RTX results in better intermediate code, which makes
	     later rtl optimization passes perform better.

	     We prefer intermediate code like this:

	     r124:DI=r123:DI+0x18
	     [r124:DI]=r121:DI

	     ... instead of ...

	     r124:DI=r123:DI+0x10
	     [r124:DI+0x8]=r121:DI

	     This is only done for aligned data values, as these can
	     be expected to result in single move instructions.  */
	  if (mode1 != VOIDmode
	      && bitpos != 0
	      && bitsize > 0
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
	    {
	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
	      bitregion_start = 0;
	      if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
		bitregion_end -= bitpos;
	      bitpos = 0;
	    }

	  to_rtx = offset_address (to_rtx, offset_rtx,
				   highest_pow2_factor_for_target (to,
								   offset));
	}

      /* No action is needed if the target is not a memory and the field
	 lies completely outside that target.  This can occur if the source
	 code contains an out-of-bounds access to a small array.  */
      if (!MEM_P (to_rtx)
	  && GET_MODE (to_rtx) != BLKmode
	  && (unsigned HOST_WIDE_INT) bitpos
	     >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
	{
	  expand_normal (from);
	  result = NULL;
	}
      /* Handle expand_expr of a complex value returning a CONCAT.  */
      else if (GET_CODE (to_rtx) == CONCAT)
	{
	  unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
	  if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
	      && bitpos == 0
	      && bitsize == mode_bitsize)
	    result = store_expr (from, to_rtx, false, nontemporal);
	  else if (bitsize == mode_bitsize / 2
		   && (bitpos == 0 || bitpos == mode_bitsize / 2))
	    result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
				 nontemporal);
	  else if (bitpos + bitsize <= mode_bitsize / 2)
	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
				  bitregion_start, bitregion_end,
				  mode1, from,
				  get_alias_set (to), nontemporal);
	  else if (bitpos >= mode_bitsize / 2)
	    result = store_field (XEXP (to_rtx, 1), bitsize,
				  bitpos - mode_bitsize / 2,
				  bitregion_start, bitregion_end,
				  mode1, from,
				  get_alias_set (to), nontemporal);
	  else if (bitpos == 0 && bitsize == mode_bitsize)
	    {
	      rtx from_rtx;
	      result = expand_normal (from);
	      from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
					      TYPE_MODE (TREE_TYPE (from)), 0);
	      emit_move_insn (XEXP (to_rtx, 0),
			      read_complex_part (from_rtx, false));
	      emit_move_insn (XEXP (to_rtx, 1),
			      read_complex_part (from_rtx, true));
	    }
	  else
	    {
	      rtx temp = assign_stack_temp (GET_MODE (to_rtx),
					    GET_MODE_SIZE (GET_MODE (to_rtx)));
	      write_complex_part (temp, XEXP (to_rtx, 0), false);
	      write_complex_part (temp, XEXP (to_rtx, 1), true);
	      result = store_field (temp, bitsize, bitpos,
				    bitregion_start, bitregion_end,
				    mode1, from,
				    get_alias_set (to), nontemporal);
	      emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
	      emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
	    }
	}
      else
	{
	  if (MEM_P (to_rtx))
	    {
	      /* If the field is at offset zero, we could have been given the
		 DECL_RTX of the parent struct.  Don't munge it.  */
	      to_rtx = shallow_copy_rtx (to_rtx);
	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
	      if (volatilep)
		MEM_VOLATILE_P (to_rtx) = 1;
	    }

	  if (optimize_bitfield_assignment_op (bitsize, bitpos,
					       bitregion_start, bitregion_end,
					       mode1, to_rtx, to, from))
	    result = NULL;
	  else
	    result = store_field (to_rtx, bitsize, bitpos,
				  bitregion_start, bitregion_end,
				  mode1, from,
				  get_alias_set (to), nontemporal);
	}

      if (result)
	preserve_temp_slots (result);
      pop_temp_slots ();
      return;
    }
  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  The same is true for SSA names.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && COMPLETE_TYPE_P (TREE_TYPE (from))
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (((TREE_CODE (to) == VAR_DECL
	      || TREE_CODE (to) == PARM_DECL
	      || TREE_CODE (to) == RESULT_DECL)
	     && REG_P (DECL_RTL (to)))
	    || TREE_CODE (to) == SSA_NAME))
    {
      rtx value;

      push_temp_slots ();
      value = expand_normal (from);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	{
	  if (GET_CODE (value) == PARALLEL)
	    emit_group_move (to_rtx, value);
	  else
	    emit_group_load (to_rtx, value, TREE_TYPE (from),
			     int_size_in_bytes (TREE_TYPE (from)));
	}
      else if (GET_CODE (value) == PARALLEL)
	emit_group_store (to_rtx, value, TREE_TYPE (from),
			  int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	{
	  /* Handle calls that return BLKmode values in registers.  */
	  if (REG_P (value))
	    copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
	  else
	    emit_block_move (to_rtx, value, expr_size (from),
			     BLOCK_OP_NORMAL);
	}
      else
	{
	  if (POINTER_TYPE_P (TREE_TYPE (to)))
	    value = convert_memory_address_addr_space
		      (GET_MODE (to_rtx), value,
		       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));

	  emit_move_insn (to_rtx, value);
	}
      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
  to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();

      /* If the source is itself a return value, it still is in a pseudo at
	 this point so we can move it back to the return register directly.  */
      if (REG_P (to_rtx)
	  && TYPE_MODE (TREE_TYPE (from)) == BLKmode
	  && TREE_CODE (from) != CALL_EXPR)
	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
      else
	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	{
	  if (GET_CODE (temp) == PARALLEL)
	    emit_group_move (to_rtx, temp);
	  else
	    emit_group_load (to_rtx, temp, TREE_TYPE (from),
			     int_size_in_bytes (TREE_TYPE (from)));
	}
      else if (temp)
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL
      && TREE_CODE (from) == INDIRECT_REF
      && ADDR_SPACE_GENERIC_P
	   (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
      && refs_may_alias_p (to, from)
      && cfun->returns_struct
      && !cfun->returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      emit_library_call (memmove_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TYPE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, 0, nontemporal);
  preserve_temp_slots (result);
  pop_temp_slots ();
}
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

bool
emit_storent_insn (rtx to, rtx from)
{
  struct expand_operand ops[2];
  enum machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.  */

rtx
store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  location_t loc = curr_insn_location ();

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
	 branch and an rvalue in the other.  Here, we resolve attempts to
	 store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
			 nontemporal);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
		  nontemporal);
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
		  nontemporal);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* We can do the conversion inside EXP, which will often result
	 in some optimizations.  Do the conversion in two steps: first
	 change the signedness, if needed, then the extend.  But don't
	 do this if the type of EXP is a subtype of something else
	 since then the conversion might involve more than just
	 converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0
	  && GET_MODE_PRECISION (GET_MODE (target))
	     == TYPE_PRECISION (TREE_TYPE (exp)))
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    {
	      /* Some types, e.g. Fortran's logical*4, won't have a signed
		 version, so use the mode instead.  */
	      tree ntype
		= (signed_or_unsigned_type_for
		   (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
	      if (ntype == NULL)
		ntype = lang_hooks.types.type_for_mode
		  (TYPE_MODE (TREE_TYPE (exp)),
		   SUBREG_PROMOTED_UNSIGNED_P (target));

	      exp = fold_convert_loc (loc, ntype, exp);
	    }

	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
				  (GET_MODE (SUBREG_REG (target)),
				   SUBREG_PROMOTED_UNSIGNED_P (target)),
				  exp);

	  inner_target = SUBREG_REG (target);
	}

      temp = expand_expr (exp, inner_target, VOIDmode,
			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
				GET_MODE (target), temp,
				SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));

      return NULL_RTX;
    }
  else if ((TREE_CODE (exp) == STRING_CST
	    || (TREE_CODE (exp) == MEM_REF
		&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == STRING_CST
		&& integer_zerop (TREE_OPERAND (exp, 1))))
	   && !nontemporal && !call_param_p
	   && MEM_P (target))
    {
      /* Optimize initialization of an array with a STRING_CST.  */
      HOST_WIDE_INT exp_len, str_copy_len;
      rtx dest_mem;
      tree str = TREE_CODE (exp) == STRING_CST
		 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

      exp_len = int_expr_size (exp);
      if (exp_len <= 0)
	goto normal_expr;

      if (TREE_STRING_LENGTH (str) <= 0)
	goto normal_expr;

      str_copy_len = strlen (TREE_STRING_POINTER (str));
      if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
	goto normal_expr;

      str_copy_len = TREE_STRING_LENGTH (str);
      if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
	  && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
	{
	  str_copy_len += STORE_MAX_PIECES - 1;
	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
	}
      str_copy_len = MIN (str_copy_len, exp_len);
      if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
				CONST_CAST (char *, TREE_STRING_POINTER (str)),
				MEM_ALIGN (target), false))
	goto normal_expr;

      dest_mem = target;

      dest_mem = store_by_pieces (dest_mem,
				  str_copy_len, builtin_strncpy_read_str,
				  CONST_CAST (char *,
					      TREE_STRING_POINTER (str)),
				  MEM_ALIGN (target), false,
				  exp_len > str_copy_len ? 1 : 0);
      if (exp_len > str_copy_len)
	clear_storage (adjust_address (dest_mem, BLKmode, 0),
		       GEN_INT (exp_len - str_copy_len),
		       BLOCK_OP_NORMAL);
      return NULL_RTX;
    }
  else
    {
      rtx tmp_target;

  normal_expr:
      /* If we want to use a nontemporal store, force the value to
	 register first.  */
      tmp_target = nontemporal ? NULL_RTX : target;
      temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL),
			       &alt_rtl, false);
    }
  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
     only a word-sized value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
      && !(alt_rtl
	   && rtx_equal_p (alt_rtl, target)
	   && !side_effects_p (alt_rtl)
	   && !side_effects_p (target))
      /* If there's nothing to copy, don't bother.  Don't call
	 expr_size unless necessary, because some front-ends (C++)
	 expr_size-hook must not be given objects that are not
	 supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
	{
	  if (GET_MODE (target) == BLKmode)
	    {
	      /* Handle calls that return BLKmode values in registers.  */
	      if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
		copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
	      else
		store_bit_field (target,
				 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
				 0, 0, 0, GET_MODE (temp), temp);
	    }
	  else
	    convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.  The string
	     constant may be shorter than the array.  So copy just the string's
	     actual length, and clear the rest.  First get the size of the data
	     type of the string, which is actually the size of the target.  */
	  rtx size = expr_size (exp);

	  if (CONST_INT_P (size)
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     (call_param_p
			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  else
	    {
	      enum machine_mode pointer_mode
		= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
	      enum machine_mode address_mode = get_address_mode (target);

	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop_loc (loc, MIN_EXPR,
				  make_tree (sizetype, size),
				  size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx
		= expand_expr (copy_size, NULL_RTX, VOIDmode,
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL));
	      rtx label = 0;

	      /* Copy that much.  */
	      copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
					       TYPE_UNSIGNED (sizetype));
	      emit_block_move (target, temp, copy_size_rtx,
			       (call_param_p
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in pointer_mode.  */
	      if (CONST_INT_P (copy_size_rtx))
		{
		  size = plus_constant (address_mode, size,
					-INTVAL (copy_size_rtx));
		  target = adjust_address (target, BLKmode,
					   INTVAL (copy_size_rtx));
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  if (GET_MODE (copy_size_rtx) != address_mode)
		    copy_size_rtx = convert_to_mode (address_mode,
						     copy_size_rtx,
						     TYPE_UNSIGNED (sizetype));

		  target = offset_address (target, copy_size_rtx,
					   highest_pow2_factor (copy_size));
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	      if (size != const0_rtx)
		clear_storage (target, size, BLOCK_OP_NORMAL);

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	{
	  if (GET_CODE (temp) == PARALLEL)
	    emit_group_move (target, temp);
	  else
	    emit_group_load (target, temp, TREE_TYPE (exp),
			     int_size_in_bytes (TREE_TYPE (exp)));
	}
      else if (GET_CODE (temp) == PARALLEL)
	emit_group_store (target, temp, TREE_TYPE (exp),
			  int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (call_param_p
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      /* If we emit a nontemporal store, there is nothing else to do.  */
      else if (nontemporal && emit_storent_insn (target, temp))
	;
      else
	{
	  temp = force_operand (temp, target);
	  if (temp != target)
	    emit_move_insn (target, temp);
	}
    }

  return NULL_RTX;
}
/* Return true if field F of structure TYPE is a flexible array.  */

static bool
flexible_array_member_p (const_tree f, const_tree type)
{
  const_tree tf;

  tf = TREE_TYPE (f);
  return (DECL_CHAIN (f) == NULL
	  && TREE_CODE (tf) == ARRAY_TYPE
	  && TYPE_DOMAIN (tf)
	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
	  && int_size_in_bytes (type) >= 0);
}
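/* Illustrative example (added for exposition): in

       struct s { int n; char data[]; };

   DATA is a flexible array member: it is the last field, its array type
   has a zero lower bound and no upper bound, and the enclosing struct
   still has a known constant size -- exactly what the predicate above
   checks.  */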
/* If FOR_CTOR_P, return the number of top-level elements that a constructor
   must have in order for it to completely initialize a value of type TYPE.
   Return -1 if the number isn't known.

   If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */

static HOST_WIDE_INT
count_type_elements (const_tree type, bool for_ctor_p)
{
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree nelts;

	nelts = array_type_nelts (type);
	if (nelts && tree_fits_uhwi_p (nelts))
	  {
	    unsigned HOST_WIDE_INT n;

	    n = tree_to_uhwi (nelts) + 1;
	    if (n == 0 || for_ctor_p)
	      return n;
	    else
	      return n * count_type_elements (TREE_TYPE (type), false);
	  }
	return for_ctor_p ? -1 : 1;
      }

    case RECORD_TYPE:
      {
	unsigned HOST_WIDE_INT n;
	tree f;

	n = 0;
	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!for_ctor_p)
		n += count_type_elements (TREE_TYPE (f), false);
	      else if (!flexible_array_member_p (f, type))
		/* Don't count flexible arrays, which are not supposed
		   to be initialized.  */
		n += 1;
	    }

	return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f;
	HOST_WIDE_INT n, m;

	gcc_assert (!for_ctor_p);
	/* Estimate the number of scalars in each field and pick the
	   maximum.  Other estimates would do instead; the idea is simply
	   to make sure that the estimate is not sensitive to the ordering
	   of the fields.  */
	n = 1;
	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      m = count_type_elements (TREE_TYPE (f), false);
	      /* If the field doesn't span the whole union, add an extra
		 scalar for the rest.  */
	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
				    TYPE_SIZE (type)) != 1)
		m++;
	      if (n < m)
		n = m;
	    }
	return n;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
      return 1;

    default:
      gcc_unreachable ();
    }
}
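/* Worked example (added for exposition; hypothetical type): for

       struct s { int a; float b[4]; };

   count_type_elements (..., false) estimates 1 + 4 = 5 scalars, while
   with FOR_CTOR_P a complete top-level constructor needs 2 elements,
   one per field.  */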
/* Helper for categorize_ctor_elements.  Identical interface.  */

static bool
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, init_elts, num_fields;
  tree value, purpose, elt_type;

  /* Whether CTOR is a valid constant initializer, in accordance with what
     initializer_constant_valid_p does.  If inferred from the constructor
     elements, true until proven otherwise.  */
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);

  nz_elts = 0;
  init_elts = 0;
  num_fields = 0;
  elt_type = NULL_TREE;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
	    mult = (tree_to_uhwi (hi_index)
		    - tree_to_uhwi (lo_index) + 1);
	}
      num_fields += mult;
      elt_type = TREE_TYPE (value);

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, ic = 0;

	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
							   p_complete);

	    nz_elts += mult * nz;
	    init_elts += mult * ic;

	    if (const_from_elts_p && const_p)
	      const_p = const_elt_p;
	  }
	  break;

	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	  if (!initializer_zerop (value))
	    nz_elts += mult;
	  init_elts += mult;
	  break;

	case STRING_CST:
	  nz_elts += mult * TREE_STRING_LENGTH (value);
	  init_elts += mult * TREE_STRING_LENGTH (value);
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    nz_elts += mult;
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    nz_elts += mult;
	  init_elts += mult;
	  break;

	case VECTOR_CST:
	  {
	    unsigned i;
	    for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
	      {
		tree v = VECTOR_CST_ELT (value, i);
		if (!initializer_zerop (v))
		  nz_elts += mult;
		init_elts += mult;
	      }
	  }
	  break;

	default:
	  {
	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
	    nz_elts += mult * tc;
	    init_elts += mult * tc;

	    if (const_from_elts_p && const_p)
	      const_p = initializer_constant_valid_p (value, elt_type)
			!= NULL_TREE;
	  }
	  break;
	}
    }

  if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
						num_fields, elt_type))
    *p_complete = false;

  *p_nz_elts += nz_elts;
  *p_init_elts += init_elts;

  return const_p;
}
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * whether the constructor is complete -- in the sense that every
     meaningful byte is explicitly given a value --
     and place it in *P_COMPLETE.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */

bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  *p_nz_elts = 0;
  *p_init_elts = 0;
  *p_complete = true;

  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
}
/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
   of which had type LAST_TYPE.  Each element was itself a complete
   initializer, in the sense that every meaningful byte was explicitly
   given a value.  Return true if the same is true for the constructor
   as a whole.  */

bool
complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
			  const_tree last_type)
{
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      if (num_elts == 0)
	return false;

      gcc_assert (num_elts == 1 && last_type);

      /* ??? We could look at each element of the union, and find the
	 largest element.  Which would avoid comparing the size of the
	 initialized element against any tail padding in the union.
	 Doesn't seem worth the effort...  */
      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
    }

  return count_type_elements (type, true) == num_elts;
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return !complete_p || nz_elts < init_elts / 4;
    }

  return initializer_zerop (exp);
}
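/* Worked example (added for exposition): an initializer such as

       int v[8] = { 0, 0, 0, 0, 0, 0, 0, 1 };

   has nz_elts == 1 and init_elts == 8, so 1 < 8/4 holds and the
   constructor counts as mostly zeros; a short initializer like { 1 } for
   the same array is incomplete and counts as well.  */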
/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
			 HOST_WIDE_INT bitpos, enum machine_mode mode,
			 tree exp, int cleared, alias_set_type alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);


      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
}
/* Returns the number of FIELD_DECLs in TYPE.  */

static int
fields_length (const_tree type)
{
  tree t = TYPE_FIELDS (type);
  int count = 0;

  for (; t; t = DECL_CHAIN (t))
    if (TREE_CODE (t) == FIELD_DECL)
      ++count;

  return count;
}
5861 /* Store the value of constructor EXP into the rtx TARGET.
5862 TARGET is either a REG or a MEM; we know it cannot conflict, since
5863 safe_from_p has been called.
5864 CLEARED is true if TARGET is known to have been zero'd.
5865 SIZE is the number of bytes of TARGET we are allowed to modify: this
5866 may not be the same as the size of EXP if we are assigning to a field
5867 which has been packed to exclude padding bits. */
5870 store_constructor (tree exp
, rtx target
, int cleared
, HOST_WIDE_INT size
)
5872 tree type
= TREE_TYPE (exp
);
5873 #ifdef WORD_REGISTER_OPERATIONS
5874 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
5877 switch (TREE_CODE (type
))
5881 case QUAL_UNION_TYPE
:
5883 unsigned HOST_WIDE_INT idx
;
5886 /* If size is zero or the target is already cleared, do nothing. */
5887 if (size
== 0 || cleared
)
5889 /* We either clear the aggregate or indicate the value is dead. */
5890 else if ((TREE_CODE (type
) == UNION_TYPE
5891 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
5892 && ! CONSTRUCTOR_ELTS (exp
))
5893 /* If the constructor is empty, clear the union. */
5895 clear_storage (target
, expr_size (exp
), BLOCK_OP_NORMAL
);
5899 /* If we are building a static constructor into a register,
5900 set the initial value as zero so we can fold the value into
5901 a constant. But if more than one register is involved,
5902 this probably loses. */
5903 else if (REG_P (target
) && TREE_STATIC (exp
)
5904 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
5906 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
5910 /* If the constructor has fewer fields than the structure or
5911 if we are initializing the structure to mostly zeros, clear
5912 the whole structure first. Don't do this if TARGET is a
5913 register whose mode size isn't equal to SIZE since
5914 clear_storage can't handle this case. */
5916 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp
))
5917 != fields_length (type
))
5918 || mostly_zeros_p (exp
))
5920 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
5923 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
5927 if (REG_P (target
) && !cleared
)
5928 emit_clobber (target
);
5930 /* Store each element of the constructor into the
5931 corresponding field of TARGET. */
5932 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), idx
, field
, value
)
5934 enum machine_mode mode
;
5935 HOST_WIDE_INT bitsize
;
5936 HOST_WIDE_INT bitpos
= 0;
5938 rtx to_rtx
= target
;
5940 /* Just ignore missing fields. We cleared the whole
5941 structure, above, if any fields are missing. */
5945 if (cleared
&& initializer_zerop (value
))
5948 if (tree_fits_uhwi_p (DECL_SIZE (field
)))
5949 bitsize
= tree_to_uhwi (DECL_SIZE (field
));
5953 mode
= DECL_MODE (field
);
5954 if (DECL_BIT_FIELD (field
))
5957 offset
= DECL_FIELD_OFFSET (field
);
5958 if (tree_fits_shwi_p (offset
)
5959 && tree_fits_shwi_p (bit_position (field
)))
5961 bitpos
= int_bit_position (field
);
5965 bitpos
= tree_to_shwi (DECL_FIELD_BIT_OFFSET (field
));
5969 enum machine_mode address_mode
;
5973 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset
,
5974 make_tree (TREE_TYPE (exp
),
5977 offset_rtx
= expand_normal (offset
);
5978 gcc_assert (MEM_P (to_rtx
));
5980 address_mode
= get_address_mode (to_rtx
);
5981 if (GET_MODE (offset_rtx
) != address_mode
)
5982 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
5984 to_rtx
= offset_address (to_rtx
, offset_rtx
,
5985 highest_pow2_factor (offset
));
5988 #ifdef WORD_REGISTER_OPERATIONS
5989 /* If this initializes a field that is smaller than a
5990 word, at the start of a word, try to widen it to a full
5991 word. This special case allows us to output C++ member
5992 function initializations in a form that the optimizers
5995 && bitsize
< BITS_PER_WORD
5996 && bitpos
% BITS_PER_WORD
== 0
5997 && GET_MODE_CLASS (mode
) == MODE_INT
5998 && TREE_CODE (value
) == INTEGER_CST
6000 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
6002 tree type
= TREE_TYPE (value
);
6004 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
6006 type
= lang_hooks
.types
.type_for_mode
6007 (word_mode
, TYPE_UNSIGNED (type
));
6008 value
= fold_convert (type
, value
);
6011 if (BYTES_BIG_ENDIAN
)
6013 = fold_build2 (LSHIFT_EXPR
, type
, value
,
6014 build_int_cst (type
,
6015 BITS_PER_WORD
- bitsize
));
6016 bitsize
= BITS_PER_WORD
;
6021 if (MEM_P (to_rtx
) && !MEM_KEEP_ALIAS_SET_P (to_rtx
)
6022 && DECL_NONADDRESSABLE_P (field
))
6024 to_rtx
= copy_rtx (to_rtx
);
6025 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
6028 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
6030 get_alias_set (TREE_TYPE (field
)));
    case ARRAY_TYPE:
      {
	tree value, index;
	unsigned HOST_WIDE_INT i;
	int need_to_clear;
	tree domain;
	tree elttype = TREE_TYPE (type);
	int const_bounds_p;
	HOST_WIDE_INT minelt = 0;
	HOST_WIDE_INT maxelt = 0;

	domain = TYPE_DOMAIN (type);
	const_bounds_p = (TYPE_MIN_VALUE (domain)
			  && TYPE_MAX_VALUE (domain)
			  && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
			  && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));

	/* If we have constant bounds for the range of the type, get them.  */
	if (const_bounds_p)
	  {
	    minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
	    maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
	  }

	/* If the constructor has fewer elements than the array, clear
	   the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT idx;
	    tree index, value;
	    HOST_WIDE_INT count = 0, zero_count = 0;
	    need_to_clear = ! const_bounds_p;

	    /* This loop is a more accurate version of the loop in
	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
	       is also needed to check for missing elements.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
	      {
		HOST_WIDE_INT this_node_count;

		if (need_to_clear)
		  break;

		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		  {
		    tree lo_index = TREE_OPERAND (index, 0);
		    tree hi_index = TREE_OPERAND (index, 1);

		    if (! tree_fits_uhwi_p (lo_index)
			|| ! tree_fits_uhwi_p (hi_index))
		      {
			need_to_clear = 1;
			break;
		      }

		    this_node_count = (tree_to_uhwi (hi_index)
				       - tree_to_uhwi (lo_index) + 1);
		  }
		else
		  this_node_count = 1;

		count += this_node_count;
		if (mostly_zeros_p (value))
		  zero_count += this_node_count;
	      }

	    /* Clear the entire array first if there are any missing
	       elements, or if the incidence of zero elements is >=
	       75%.  */
	    if (! need_to_clear
		&& (count < maxelt - minelt + 1
		    || 4 * zero_count >= 3 * count))
	      need_to_clear = 1;
	  }

	if (need_to_clear && size > 0)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (!cleared && REG_P (target))
	  /* Inform later passes that the old value is dead.  */
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding element of TARGET, determined by counting the
	   elements.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
	  {
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos;
	    rtx xtarget = target;

	    if (cleared && initializer_zerop (value))
	      continue;

	    mode = TYPE_MODE (elttype);
	    if (mode == BLKmode)
	      bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
			 ? tree_to_uhwi (TYPE_SIZE (elttype))
			 : -1);
	    else
	      bitsize = GET_MODE_BITSIZE (mode);

	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	      {
		tree lo_index = TREE_OPERAND (index, 0);
		tree hi_index = TREE_OPERAND (index, 1);
		rtx index_r, pos_rtx;
		HOST_WIDE_INT lo, hi, count;
		tree position;

		/* If the range is constant and "small", unroll the loop.  */
		if (const_bounds_p
		    && tree_fits_shwi_p (lo_index)
		    && tree_fits_shwi_p (hi_index)
		    && (lo = tree_to_shwi (lo_index),
			hi = tree_to_shwi (hi_index),
			count = hi - lo + 1,
			(!MEM_P (target)
			 || count <= 2
			 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
			     && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
				 <= 40 * 8)))))
		  {
		    lo -= minelt;  hi -= minelt;
		    for (; lo <= hi; lo++)
		      {
			bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));

			if (MEM_P (target)
			    && !MEM_KEEP_ALIAS_SET_P (target)
			    && TREE_CODE (type) == ARRAY_TYPE
			    && TYPE_NONALIASED_COMPONENT (type))
			  {
			    target = copy_rtx (target);
			    MEM_KEEP_ALIAS_SET_P (target) = 1;
			  }

			store_constructor_field
			  (target, bitsize, bitpos, mode, value, cleared,
			   get_alias_set (elttype));
		      }
		  }
		else
		  {
		    rtx loop_start = gen_label_rtx ();
		    rtx loop_end = gen_label_rtx ();
		    tree exit_cond;

		    expand_normal (hi_index);

		    index = build_decl (EXPR_LOCATION (exp),
					VAR_DECL, NULL_TREE, domain);
		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
		    SET_DECL_RTL (index, index_r);
		    store_expr (lo_index, index_r, 0, false);

		    /* Build the head of the loop.  */
		    do_pending_stack_adjust ();
		    emit_label (loop_start);

		    /* Assign value to element index.  */
		    position =
		      fold_convert (ssizetype,
				    fold_build2 (MINUS_EXPR,
						 TREE_TYPE (index),
						 index,
						 TYPE_MIN_VALUE (domain)));

		    position =
		      size_binop (MULT_EXPR, position,
				  fold_convert (ssizetype,
						TYPE_SIZE_UNIT (elttype)));

		    pos_rtx = expand_normal (position);
		    xtarget = offset_address (target, pos_rtx,
					      highest_pow2_factor (position));
		    xtarget = adjust_address (xtarget, mode, 0);
		    if (TREE_CODE (value) == CONSTRUCTOR)
		      store_constructor (value, xtarget, cleared,
					 bitsize / BITS_PER_UNIT);
		    else
		      store_expr (value, xtarget, 0, false);

		    /* Generate a conditional jump to exit the loop.  */
		    exit_cond = build2 (LT_EXPR, integer_type_node,
					index, hi_index);
		    jumpif (exit_cond, loop_end, -1);

		    /* Update the loop counter, and jump to the head of
		       the loop.  */
		    expand_assignment (index,
				       build2 (PLUS_EXPR, TREE_TYPE (index),
					       index, integer_one_node),
				       false);

		    emit_jump (loop_start);

		    /* Build the end of the loop.  */
		    emit_label (loop_end);
		  }
	      }
	    else if ((index != 0 && ! tree_fits_shwi_p (index))
		     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
	      {
		tree position;

		if (index == 0)
		  index = ssize_int (1);

		if (minelt)
		  index = fold_convert (ssizetype,
					fold_build2 (MINUS_EXPR,
						     TREE_TYPE (index),
						     index,
						     TYPE_MIN_VALUE (domain)));

		position =
		  size_binop (MULT_EXPR, index,
			      fold_convert (ssizetype,
					    TYPE_SIZE_UNIT (elttype)));
		xtarget = offset_address (target,
					  expand_normal (position),
					  highest_pow2_factor (position));
		xtarget = adjust_address (xtarget, mode, 0);
		store_expr (value, xtarget, 0, false);
	      }
	    else
	      {
		if (index != 0)
		  bitpos = ((tree_to_shwi (index) - minelt)
			    * tree_to_uhwi (TYPE_SIZE (elttype)));
		else
		  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));

		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		    && TREE_CODE (type) == ARRAY_TYPE
		    && TYPE_NONALIASED_COMPONENT (type))
		  {
		    target = copy_rtx (target);
		    MEM_KEEP_ALIAS_SET_P (target) = 1;
		  }
		store_constructor_field (target, bitsize, bitpos, mode, value,
					 cleared, get_alias_set (elttype));
	      }
	  }
	break;
      }
    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;
	int i;
	int need_to_clear;
	int icode = CODE_FOR_nothing;
	tree elttype = TREE_TYPE (type);
	int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
	enum machine_mode eltmode = TYPE_MODE (elttype);
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	rtvec vector = NULL;
	unsigned n_elts;
	alias_set_type alias;

	gcc_assert (eltmode != BLKmode);

	n_elts = TYPE_VECTOR_SUBPARTS (type);
	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
	  {
	    enum machine_mode mode = GET_MODE (target);

	    icode = (int) optab_handler (vec_init_optab, mode);
	    /* Don't use vec_init<mode> if some elements have VECTOR_TYPE.  */
	    if (icode != CODE_FOR_nothing)
	      {
		tree value;

		FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
		  if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
		    {
		      icode = CODE_FOR_nothing;
		      break;
		    }
	      }
	    if (icode != CODE_FOR_nothing)
	      {
		unsigned int i;

		vector = rtvec_alloc (n_elts);
		for (i = 0; i < n_elts; i++)
		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
	      }
	  }

	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
	    tree value;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	      {
		int n_elts_here = tree_to_uhwi
		  (int_const_binop (TRUNC_DIV_EXPR,
				    TYPE_SIZE (TREE_TYPE (value)),
				    TYPE_SIZE (elttype)));

		count += n_elts_here;
		if (mostly_zeros_p (value))
		  zero_count += n_elts_here;
	      }

	    /* Clear the entire vector first if there are any missing elements,
	       or if the incidence of zero elements is >= 75%.  */
	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
	  }

	if (need_to_clear && size > 0 && !vector)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* Inform later passes that the old value is dead.  */
	if (!cleared && !vector && REG_P (target))
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

	if (MEM_P (target))
	  alias = MEM_ALIAS_SET (target);
	else
	  alias = get_alias_set (elttype);

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */
	for (idx = 0, i = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
	     idx++, i += bitsize / elt_size)
	  {
	    HOST_WIDE_INT eltpos;
	    tree value = ce->value;

	    bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (ce->index)
	      eltpos = tree_to_uhwi (ce->index);
	    else
	      eltpos = i;

	    if (vector)
	      {
		/* vec_init<mode> should not be used if there are VECTOR_TYPE
		   elements.  */
		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
		RTVEC_ELT (vector, eltpos)
		  = expand_normal (value);
	      }
	    else
	      {
		enum machine_mode value_mode =
		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
		  ? TYPE_MODE (TREE_TYPE (value))
		  : eltmode;
		bitpos = eltpos * elt_size;
		store_constructor_field (target, bitsize, bitpos, value_mode,
					 value, cleared, alias);
	      }
	  }

	if (vector)
	  emit_insn (GEN_FCN (icode)
		     (target,
		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
	break;
      }

    default:
      gcc_unreachable ();
    }
}
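/* Worked example of the clearing heuristic used by both the array and
   vector cases above (illustrative commentary only): for a 16-element
   initializer in which 13 elements are mostly zero, COUNT == 16 and
   ZERO_COUNT == 13, so 4 * 13 >= 3 * 16 (52 >= 48) holds and the whole
   object is cleared up front; only the 3 nonzero elements are then
   stored individually.  With only 11 zeros, 44 < 48 and every element
   is stored explicitly.  Missing elements (COUNT below the number of
   slots) force a clear regardless, so uninitialized slots end up
   zero.  */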
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   BITREGION_START is bitpos of the first bitfield in this region.
   BITREGION_END is the bitpos of the ending bitfield in this region.
   These two fields are 0, if the C++ memory model does not apply,
   or we are not interested in keeping track of bitfield regions.

   Always return const0_rtx unless we have something particular to
   return.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     unsigned HOST_WIDE_INT bitregion_start,
	     unsigned HOST_WIDE_INT bitregion_end,
	     enum machine_mode mode, tree exp,
	     alias_set_type alias_set, bool nontemporal)
{
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */
      gcc_assert (!bitpos);
      return store_expr (exp, target, 0, nontemporal);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| bitpos % GET_MODE_ALIGNMENT (mode))
	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	      || (bitpos % BITS_PER_UNIT != 0)))
      || (bitsize >= 0 && mode != BLKmode
	  && GET_MODE_BITSIZE (mode) > bitsize)
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
      /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
	 decl we must use bitfield operations.  */
      || (bitsize >= 0
	  && TREE_CODE (exp) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
    {
      rtx temp;
      gimple nop_def;

      /* If EXP is a NOP_EXPR of precision less than its mode, then that
	 implies a mask operation.  If the precision is the same size as
	 the field we're storing into, that mask is redundant.  This is
	 particularly common with bit field assignments generated by the
	 C front end.  */
      nop_def = get_def_for_expr (exp, NOP_EXPR);
      if (nop_def)
	{
	  tree type = TREE_TYPE (exp);
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
	      && bitsize == TYPE_PRECISION (type))
	    {
	      tree op = gimple_assign_rhs1 (nop_def);
	      type = TREE_TYPE (op);
	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
		exp = op;
	    }
	}

      temp = expand_normal (exp);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
			     NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TEMP and TARGET are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  Likewise
	 for a BLKmode-like TARGET.  */
      if (GET_MODE (temp) == BLKmode
	  && (GET_MODE (target) == BLKmode
	      || (MEM_P (target)
		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
		  && (bitpos % BITS_PER_UNIT) == 0
		  && (bitsize % BITS_PER_UNIT) == 0)))
	{
	  gcc_assert (MEM_P (target) && MEM_P (temp)
		      && (bitpos % BITS_PER_UNIT) == 0);

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);

	  return const0_rtx;
	}

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (temp) == PARALLEL)
	{
	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
	  rtx temp_target;
	  if (mode == BLKmode)
	    mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
	  temp_target = gen_reg_rtx (mode);
	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
	  temp = temp_target;
	}
      else if (mode == BLKmode)
	{
	  /* Handle calls that return BLKmode values in registers.  */
	  if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
	    {
	      rtx temp_target = gen_reg_rtx (GET_MODE (temp));
	      copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
	      temp = temp_target;
	    }
	  else
	    {
	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
	      rtx temp_target;
	      mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
	      temp_target = gen_reg_rtx (mode);
	      temp_target
		= extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
				     temp_target, mode, mode);
	      temp = temp_target;
	    }
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos,
		       bitregion_start, bitregion_end,
		       mode, temp);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, 0, nontemporal);
    }
}
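/* Illustrative sketch of a store_field call (hypothetical argument
   values, not taken from a real caller): storing EXP into a 3-bit
   field that starts 5 bits into TARGET, with no C++ memory-model
   bitfield region:

     store_field (to_rtx, /,*bitsize=*,/ 3, /,*bitpos=*,/ 5,
		  /,*bitregion_start=*,/ 0, /,*bitregion_end=*,/ 0,
		  VOIDmode, exp, alias_set, /,*nontemporal=*,/ false);

   MODE == VOIDmode marks this as a bit-field store, so the function
   takes the store_bit_field path rather than building a memref.  */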
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
   Otherwise, it is a mode that can be used to access the field.

   If the field describes a variable-sized object, *PMODE is set to
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.

   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
   look through nodes that serve as markers of a greater alignment than
   the one that can be deduced from the expression.  These nodes make it
   possible for front-ends to prevent temporaries from being created by
   the middle-end on alignment considerations.  For that purpose, the
   normal operating mode at high-level is to always pass FALSE so that
   the ultimate containing object is really returned; moreover, the
   associated predicate handled_component_p will always return TRUE
   on these nodes, thus indicating that they are essentially handled
   by get_inner_reference.  TRUE should only be passed when the caller
   is scanning the expression in order to build another representation
   and specifically knows how to handle these nodes; as such, this is
   the normal operating mode in the RTL expanders.  */

tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
		     HOST_WIDE_INT *pbitpos, tree *poffset,
		     enum machine_mode *pmode, int *punsignedp,
		     int *pvolatilep, bool keep_aligning)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  bool blkmode_bitfield = false;
  tree offset = size_zero_node;
  offset_int bit_offset = 0;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  *pbitsize = -1;
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (exp, 1);
      size_tree = DECL_SIZE (field);
      if (flag_strict_volatile_bitfields > 0
	  && TREE_THIS_VOLATILE (exp)
	  && DECL_BIT_FIELD_TYPE (field)
	  && DECL_MODE (field) != BLKmode)
	/* Volatile bitfields should be accessed in the mode of the
	   field's type, not the mode computed based on the bit
	   size.  */
	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
      else if (!DECL_BIT_FIELD (field))
	mode = DECL_MODE (field);
      else if (DECL_MODE (field) == BLKmode)
	blkmode_bitfield = true;

      *punsignedp = DECL_UNSIGNED (field);
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
		     || TYPE_UNSIGNED (TREE_TYPE (exp)));

      /* For vector types, with the correct size of access, use the mode of
	 the inner type.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
	mode = TYPE_MODE (TREE_TYPE (exp));
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! tree_fits_uhwi_p (size_tree))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_to_uhwi (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     fold_convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset += *pbitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  if (keep_aligning && STRICT_ALIGNMENT
	      && (TYPE_ALIGN (TREE_TYPE (exp))
		  > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  < BIGGEST_ALIGNMENT)
	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	    goto done;
	  break;

	case MEM_REF:
	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      tree off = TREE_OPERAND (exp, 1);
	      if (!integer_zerop (off))
		{
		  offset_int boff, coff = mem_ref_offset (exp);
		  boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
		  bit_offset += boff;
		}
	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	    }
	  goto done;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (TREE_CODE (offset) == INTEGER_CST)
    {
      offset_int tem = wi::sext (wi::to_offset (offset),
				 TYPE_PRECISION (sizetype));
      tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
      tem += bit_offset;
      if (wi::fits_shwi_p (tem))
	{
	  *pbitpos = tem.to_shwi ();
	  *poffset = offset = NULL_TREE;
	}
    }

  /* Otherwise, split it up.  */
  if (offset)
    {
      /* Avoid returning a negative bitpos as this may wreak havoc later.  */
      if (wi::neg_p (bit_offset))
	{
	  offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
	  offset_int tem = bit_offset.and_not (mask);
	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
	     Subtract it to BIT_OFFSET and add it (scaled) to OFFSET.  */
	  bit_offset -= tem;
	  tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
	  offset = size_binop (PLUS_EXPR, offset,
			       wide_int_to_tree (sizetype, tem));
	}

      *pbitpos = bit_offset.to_shwi ();
      *poffset = offset;
    }

  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
  if (mode == VOIDmode
      && blkmode_bitfield
      && (*pbitpos % BITS_PER_UNIT) == 0
      && (*pbitsize % BITS_PER_UNIT) == 0)
    *pmode = BLKmode;
  else
    *pmode = mode;

  return exp;
}
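/* Illustrative sketch of how the RTL expanders call get_inner_reference
   (the variable names are hypothetical but mirror real callers):

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				      &mode1, &unsignedp, &volatilep, false);

   For EXP == s.f, BASE is the decl `s', BITPOS/BITSIZE locate `f' within
   it, and OFFSET is null.  For EXP == a[i] with a variable `i', OFFSET
   holds `i * sizeof (a[0])' in units and BITPOS carries only the
   constant residue.  */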
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
/* Returns true if REF is an array reference to an array at the end of
   a structure.  If this is the case, the array may be allocated larger
   than its upper bound implies.  */

bool
array_at_struct_end_p (tree ref)
{
  if (TREE_CODE (ref) != ARRAY_REF
      && TREE_CODE (ref) != ARRAY_RANGE_REF)
    return false;

  while (handled_component_p (ref))
    {
      /* If the reference chain contains a component reference to a
	 non-union type and there follows another field the reference
	 is not at the end of a structure.  */
      if (TREE_CODE (ref) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
	{
	  tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
	  while (nextf && TREE_CODE (nextf) != FIELD_DECL)
	    nextf = DECL_CHAIN (nextf);
	  if (nextf)
	    return false;
	}

      ref = TREE_OPERAND (ref, 0);
    }

  /* If the reference is based on a declared entity, the size of the array
     is constrained by its given domain.  */
  if (DECL_P (ref))
    return false;

  return true;
}
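/* Illustrative example (commentary only): for `struct s { int n;
   int a[1]; } *p;', the reference p->a[i] has `a' as the last field
   and is based on a pointer dereference rather than a declared
   object, so the walk above reaches a MEM_REF base, DECL_P fails,
   and the function returns true: callers may assume the array can
   extend past its declared bound.  A stack object `struct s v;' with
   v.a[i] ends the walk at the decl `v' and returns false.  */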
/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);
  location_t loc = EXPR_LOCATION (exp);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
      return size_binop_loc (loc, MULT_EXPR, aligned_offset,
			     size_int (DECL_OFFSET_ALIGN (field)
				       / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
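/* Worked example (illustrative commentary only): for a field with
   DECL_OFFSET_ALIGN of 64 bits and an explicit COMPONENT_REF offset
   operand of 2, the function above returns 2 * (64 / 8) = 16 bytes.
   The same scaling idea appears in array_ref_element_size, where an
   explicit size operand counts TYPE_ALIGN_UNIT-sized chunks of the
   element type.  */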
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate
     misaligning bit-field components, so we need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case BIT_FIELD_REF:
      return 1;

    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value
	= simplify_gen_subreg (GET_MODE (value),
			       force_reg (GET_MODE (SUBREG_REG (value)),
					  force_operand (SUBREG_REG (value),
							 NULL_RTX)),
			       GET_MODE (SUBREG_REG (value)),
			       SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && CONST_INT_P (op2))
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && CONST_INT_P (op2)
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
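/* Illustrative sketch (hypothetical rtxes, commentary only): given
   VALUE == (plus:SI (reg:SI 100) (const_int 4)) and TARGET == 0,
   force_operand takes the ARITHMETIC_P branch above, recursively
   forces both operands, and returns whatever expand_simple_binop
   (..., PLUS, ...) produces -- typically a fresh pseudo REG holding
   the sum, which is then directly usable as an instruction operand.  */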
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

static int
safe_from_p (const_rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == CONSTRUCTOR)
	{
	  constructor_elt *ce;
	  unsigned HOST_WIDE_INT idx;

	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
		|| !safe_from_p (x, ce->value, 0))
	      return 0;
	  return 1;
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;
      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MEM_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_OPERAND_LENGTH (exp);
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
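/* Example of the conservative answer (illustrative commentary only):
   if X is a MEM and EXP contains a CALL_EXPR, safe_from_p returns 0
   because the call is assumed to clobber all of memory; the caller
   must then evaluate EXP into a fresh temporary instead of reusing X.
   Returning 0 is always correct here -- it only costs a missed
   optimization, never wrong code.  */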
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT ret;
  int trailing_zeros = tree_ctz (exp);
  if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
    return BIGGEST_ALIGNMENT;
  ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
  if (ret > BIGGEST_ALIGNMENT)
    return BIGGEST_ALIGNMENT;
  return ret;
}
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}
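/* Worked example (illustrative commentary only): for EXP == i * 12,
   tree_ctz reports 2 known trailing zero bits (12 == 4 * 3, and
   nothing is known about i), so highest_pow2_factor returns
   1 << 2 == 4: the expression is a multiple of 4 but not provably of
   8.  A constant 0 has maximal trailing zeros and yields
   BIGGEST_ALIGNMENT.  The _for_target variant then takes the MAX with
   the byte alignment deducible from the assignment target itself.  */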
#ifdef HAVE_conditional_move
/* Convert the tree comparison code TCODE to the rtl one where the
   signedness is UNSIGNEDP.  */

static enum rtx_code
convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
{
  enum rtx_code code;
  switch (tcode)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;
    default:
      gcc_unreachable ();
    }
  return code;
}
#endif
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
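/* Illustrative note: for operands that are structurally identical,
   such as the two sides of `x + x', operand_equal_p triggers above
   and the expression is expanded only once, with *OP1 made a
   copy_rtx of *OP0, instead of being evaluated twice.  */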
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);
  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
			 enum expand_modifier modifier, addr_space_t as)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int volatilep, unsignedp;
  enum machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     yet.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (CONSTANT_CLASS_P (exp))
    {
      result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;
    }

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case MEM_REF:
      {
	tree tem = TREE_OPERAND (exp, 0);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
	return expand_expr (tem, target, tmode, modifier);
      }

    case CONST_DECL:
      /* Expand the initializer like constants above.  */
      result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
					   0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    case COMPOUND_LITERAL_EXPR:
      /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
	 rtl_for_decl_init is called on DECL_INITIAL with
	 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified.  */
      if (modifier == EXPAND_INITIALIZER
	  && COMPOUND_LITERAL_EXPR_DECL (exp))
	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
					target, tmode, modifier, as);
      /* FALLTHRU */
    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Expand the rtl of
	 CONSTRUCTORs too, which should yield a memory reference for the
	 constructor's contents.  Assume language specific tree nodes can
	 be expanded in some interesting way.  */
      gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
      if (DECL_P (exp)
	  || TREE_CODE (exp) == CONSTRUCTOR
	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */

	  if (TREE_ADDRESSABLE (exp)
	      && ! MEM_P (result)
	      && ! targetm.calls.allocate_stack_slots_for_args ())
	    {
	      error ("local frame unavailable (naked function?)");
	      return result;
	    }
	  else
	    gcc_assert (MEM_P (result));
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp))
	    TREE_USED (exp) = 1;

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM)
	    result = force_operand (result, target);
	  return result;
	}

      /* Pass FALSE as the last argument to get_inner_reference although
	 we are expanding to RTL.  The rationale is that we know how to
	 handle "aligning nodes" here: we can just bypass them because
	 they won't change the final object whose address will be returned
	 (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep, false);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
     inner alignment, force the inner to be sufficiently aligned.  */
  if (CONSTANT_CLASS_P (inner)
      && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
    {
      inner = copy_node (inner);
      TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
      TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
      TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
    }
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL_RTX, tmode,
			 modifier == EXPAND_INITIALIZER
			 ? EXPAND_INITIALIZER : EXPAND_NORMAL);

      /* expand_expr is allowed to return an object in a mode other
	 than TMODE.  If it did, we need to convert.  */
      if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
	tmp = convert_modes (tmode, GET_MODE (tmp),
			     tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
      result = convert_memory_address_addr_space (tmode, result, as);
      tmp = convert_memory_address_addr_space (tmode, tmp, as);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	result = simplify_gen_binary (PLUS, tmode, result, tmp);
      else
	{
	  subtarget = bitpos ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
	 of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = convert_memory_address_addr_space (tmode, result, as);
      result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
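/* Illustrative sketch (hypothetical expression, commentary only): for
   EXP == a[i] with a variable `i', get_inner_reference above returns
   the decl `a' with OFFSET == i * sizeof (a[0]) and BITPOS == 0.  The
   recursive call yields the address of `a'; OFFSET is then expanded
   and added with simplify_gen_binary (PLUS, ...) under EXPAND_SUM /
   EXPAND_INITIALIZER, or with expand_simple_binop otherwise, giving
   the address of a[i].  A constant BITPOS (e.g. from &s.f) is folded
   in as a plus_constant of BITPOS / BITS_PER_UNIT bytes.  */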
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
		       enum expand_modifier modifier)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  enum machine_mode address_mode = Pmode;
  enum machine_mode pointer_mode = ptr_mode;
  enum machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      address_mode = targetm.addr_space.address_mode (as);
      pointer_mode = targetm.addr_space.pointer_mode (as);
    }

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != address_mode && tmode != pointer_mode)
    tmode = address_mode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    tmode, modifier, as);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address_addr_space (tmode, result, as);

  return result;
}
/* Generate code for computing CONSTRUCTOR EXP.
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
   is TRUE, instead of creating a temporary variable in memory
   NULL is returned and the caller needs to handle it differently.  */

static rtx
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
		    bool avoid_temp_mem)
{
  tree type = TREE_TYPE (exp);
  enum machine_mode mode = TYPE_MODE (type);

  /* Try to avoid creating a temporary at all.  This is possible
     if all of the initializer is zero.
     FIXME: try to handle all [0..255] initializers we can handle
     with memset.  */
  if (TREE_STATIC (exp)
      && !TREE_ADDRESSABLE (exp)
      && target != 0 && mode == BLKmode
      && all_zeros_p (exp))
    {
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
      return target;
    }

  /* All elts simple constants => refer to a constant in memory.  But
     if this is a non-BLKmode mode, let it store a field at a time
     since that should make a CONST_INT, CONST_WIDE_INT or
     CONST_DOUBLE when we fold.  Likewise, if we have a target we can
     use, it is best to store directly into the target unless the type
     is large enough that memcpy will be used.  If we are making an
     initializer and all operands are constant, put it in memory as
     well.

     FIXME: Avoid trying to fill vector constructors piece-meal.
     Output them with output_constant_def below unless we're sure
     they're zeros.  This should go away when vector initializers
     are treated like VECTOR_CST instead of arrays.  */
  if ((TREE_STATIC (exp)
       && ((mode == BLKmode
	    && ! (target != 0 && safe_from_p (target, exp, 1)))
	   || TREE_ADDRESSABLE (exp)
	   || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
	       && (! MOVE_BY_PIECES_P
		   (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
		    TYPE_ALIGN (type)))
	       && ! mostly_zeros_p (exp))))
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
	  && TREE_CONSTANT (exp)))
    {
      rtx constructor;

      if (avoid_temp_mem)
	return NULL_RTX;

      constructor = expand_expr_constant (exp, 1, modifier);

      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	constructor = validize_mem (constructor);

      return constructor;
    }

  /* Handle calls that pass values in multiple non-contiguous
     locations.  The Irix 6 ABI has examples of this.  */
  if (target == 0 || ! safe_from_p (target, exp, 1)
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
    {
      if (avoid_temp_mem)
	return NULL_RTX;

      target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
    }

  store_constructor (exp, target, 0, int_expr_size (exp));
  return target;
}
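/* Illustrative example (commentary only): a static all-zero
   initializer such as `static struct s x = { 0, 0, 0 };' satisfies
   all_zeros_p, so with a non-null BLKmode target the fast path above
   emits a single clear_storage call and no per-field stores at
   all.  */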
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.

   If INNER_REFERENCE_P is true, we are expanding an inner reference.
   In this case, we don't adjust a returned MEM rtx that wouldn't be
   sufficiently aligned for its mode; instead, it's up to the caller
   to deal with it afterwards.  This is used to make sure that unaligned
   base objects for which out-of-bounds accesses are supported, for
   example record types with trailing arrays, aren't realigned behind
   the back of the caller.
   The normal operating mode is to pass FALSE for this parameter.  */

rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl,
		  bool inner_reference_p)
{
  rtx ret;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
			    inner_reference_p);
  return ret;
}
/* Try to expand the conditional expression which is represented by
   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds,
   return the rtl reg which represents the result.  Otherwise return
   NULL_RTX.  */

static rtx
expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
			      tree treeop1 ATTRIBUTE_UNUSED,
			      tree treeop2 ATTRIBUTE_UNUSED)
{
#ifdef HAVE_conditional_move
  rtx insn;
  rtx op00, op01, op1, op2;
  enum rtx_code comparison_code;
  enum machine_mode comparison_mode;
  gimple srcstmt;
  rtx temp;
  tree type = TREE_TYPE (treeop1);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode mode = TYPE_MODE (type);
  enum machine_mode orig_mode = mode;

  /* If we cannot do a conditional move on the mode, try doing it
     with the promoted mode.  */
  if (!can_conditionally_move_p (mode))
    {
      mode = promote_mode (type, mode, &unsignedp);
      if (!can_conditionally_move_p (mode))
	return NULL_RTX;
      temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
    }
  else
    temp = assign_temp (type, 0, 1);

  start_sequence ();
  expand_operands (treeop1, treeop2,
		   temp, &op1, &op2, EXPAND_NORMAL);

  if (TREE_CODE (treeop0) == SSA_NAME
      && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
      enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
      op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
      op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
      comparison_mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
    {
      tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
      enum tree_code cmpcode = TREE_CODE (treeop0);
      op00 = expand_normal (TREE_OPERAND (treeop0, 0));
      op01 = expand_normal (TREE_OPERAND (treeop0, 1));
      unsignedp = TYPE_UNSIGNED (type);
      comparison_mode = TYPE_MODE (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else
    {
      op00 = expand_normal (treeop0);
      op01 = const0_rtx;
      comparison_code = NE;
      comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
    }

  if (GET_MODE (op1) != mode)
    op1 = gen_lowpart (mode, op1);

  if (GET_MODE (op2) != mode)
    op2 = gen_lowpart (mode, op2);

  /* Try to emit the conditional move.  */
  insn = emit_conditional_move (temp, comparison_code,
				op00, op01, comparison_mode,
				op1, op2, mode,
				unsignedp);

  /* If we could do the conditional move, emit the sequence,
     and return.  */
  if (insn)
    {
      rtx seq = get_insns ();
      end_sequence ();
      emit_insn (seq);
      return convert_modes (orig_mode, mode, temp, 0);
    }

  /* Otherwise discard the sequence and fall back to code with
     branches.  */
  end_sequence ();
#endif
  return NULL_RTX;
}
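/* Illustrative sketch (hypothetical gimple, commentary only): for
   `x = a < b ? c : d' where the comparison is the defining statement
   of the predicate SSA name, the code above expands to roughly

     temp = d;  temp = (a < b) ? c : temp;

   via a single emit_conditional_move, avoiding the branch that the
   generic COND_EXPR expansion would emit.  If the target cannot do a
   conditional move in MODE or its promotion, NULL_RTX is returned and
   the caller falls back to branchy code.  */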
rtx
expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier)
{
  rtx op0, op1, op2, temp;
  tree type;
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = ops->code;
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  bool reduce_bit_field;
  location_t loc = ops->location;
  tree treeop0, treeop1, treeop2;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))

  type = ops->type;
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = ops->op0;
  treeop1 = ops->op1;
  treeop2 = ops->op2;

  /* We should be called only on simple (binary or unary) expressions,
     exactly those that are valid in gimple expressions that aren't
     GIMPLE_SINGLE_RHS (or invalid).  */
  gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);

  ignore = (target == const0_rtx
	    || ((CONVERT_EXPR_CODE_P (code)
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* We should be called only if we need the result.  */
  gcc_assert (!ignore);

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (INTEGRAL_TYPE_P (type)
		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
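  /* REDUCE_BIT_FIELD matters for integral types whose precision is
     narrower than their mode.  For instance, with

	 struct s { unsigned v : 3; };

     V lives in QImode (8 bits) but has only 3 bits of precision, so the
     result of an arithmetic operation on it must be masked (or
     sign-extended, for signed types) back to 3 bits before use.  */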
  switch (code)
    {
    case NON_LVALUE_EXPR:
    case PAREN_EXPR:
    CASE_CONVERT:
      if (treeop0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (treeop0);

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attributes.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (treeop0, target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, type, 0);
	      return result;
	    }

	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
		target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 1, 1);
	    }

	  if (MEM_P (target))
	    /* Store data into beginning of memory target.  */
	    store_expr (treeop0,
			adjust_address (target, TYPE_MODE (valtype), 0),
			modifier == EXPAND_STACK_PARM,
			false);
	  else
	    {
	      gcc_assert (REG_P (target));

	      /* Store this field into a union of the proper type.  */
	      store_field (target,
			   MIN ((int_size_in_bytes (TREE_TYPE (treeop0))
				 * BITS_PER_UNIT),
				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			   0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
	    }

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
	{
	  op0 = expand_expr (treeop0, target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return REDUCE_BIT_FIELD (op0);
	}

      op0 = expand_expr (treeop0, NULL_RTX, mode,
			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
      if (GET_MODE (op0) == mode)
	;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (treeop0);
	  enum machine_mode inner_mode = GET_MODE (op0);

	  if (inner_mode == VOIDmode)
	    inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
				       subreg_lowpart_offset (mode,
							      inner_mode));
	  else
	    op0 = convert_modes (mode, inner_mode, op0,
				 TYPE_UNSIGNED (inner_type));
	}

      else if (modifier == EXPAND_INITIALIZER)
	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
	op0 = convert_to_mode (mode, op0,
			       TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      else
	{
	  convert_move (target, op0,
			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
	  op0 = target;
	}

      return REDUCE_BIT_FIELD (op0);
    case ADDR_SPACE_CONVERT_EXPR:
      {
	tree treeop0_type = TREE_TYPE (treeop0);
	addr_space_t as_to;
	addr_space_t as_from;

	gcc_assert (POINTER_TYPE_P (type));
	gcc_assert (POINTER_TYPE_P (treeop0_type));

	as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
	as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));

	/* Conversions between pointers to the same address space should
	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
	gcc_assert (as_to != as_from);

	/* Ask target code to handle conversion between pointers
	   to overlapping address spaces.  */
	if (targetm.addr_space.subset_p (as_to, as_from)
	    || targetm.addr_space.subset_p (as_from, as_to))
	  {
	    op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
	    gcc_assert (op0);
	    return op0;
	  }

	/* For disjoint address spaces, converting anything but
	   a null pointer invokes undefined behaviour.  We simply
	   always return a null pointer here.  */
	return CONST0_RTX (mode);
      }
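      /* As an example, on targets with a named address space that is a
	 subset of the generic one (such as a small-data or flash-like
	 space), casting a pointer between the two spaces goes through
	 the targetm.addr_space.convert hook above; for two disjoint
	 spaces the conversion of any non-null pointer is undefined, so
	 a null pointer is all that can be produced.  */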
    case POINTER_PLUS_EXPR:
      /* Even though the sizetype mode and the pointer's mode can be
	 different, expand is able to handle this correctly and get the
	 correct result out of the PLUS_EXPR code.  */
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
	 if sizetype precision is smaller than pointer precision.  */
      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type,
				    fold_convert_loc (loc, ssizetype,
						      treeop1));
      /* If sizetype precision is larger than pointer precision, truncate the
	 offset to have matching modes.  */
      else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type, treeop1);

    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (treeop0) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
	  && TREE_CODE (treeop1) == VAR_DECL
	  && (DECL_RTL (treeop1) == frame_pointer_rtx
	      || DECL_RTL (treeop1) == stack_pointer_rtx
	      || DECL_RTL (treeop1) == arg_pointer_rtx))
	{
	  gcc_unreachable ();
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 index.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (treeop0) == INTEGER_CST
	      && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (treeop1))
	    {
	      rtx constant_part;
	      HOST_WIDE_INT wc;
	      enum machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));

	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use wi::shwi to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      wc = TREE_INT_CST_LOW (treeop0);
	      constant_part =
		immed_wide_int_const (wi::shwi (wc, wmode), wmode);
	      op1 = plus_constant (mode, op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return REDUCE_BIT_FIELD (op1);
	    }

	  else if (TREE_CODE (treeop1) == INTEGER_CST
		   && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
		   && TREE_CONSTANT (treeop0))
	    {
	      rtx constant_part;
	      HOST_WIDE_INT wc;
	      enum machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));

	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (treeop1, NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
	      /* Use wi::shwi to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      wc = TREE_INT_CST_LOW (treeop1);
	      constant_part
		= immed_wide_int_const (wi::shwi (wc, wmode), wmode);
	      op0 = plus_constant (mode, op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return REDUCE_BIT_FIELD (op0);
	    }
	}

      /* Use TER to expand pointer addition of a negated value
	 as pointer subtraction.  */
      if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
	  && TREE_CODE (treeop1) == SSA_NAME
	  && TYPE_MODE (TREE_TYPE (treeop0))
	     == TYPE_MODE (TREE_TYPE (treeop1)))
	{
	  gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
	  if (def)
	    {
	      treeop1 = gimple_assign_rhs1 (def);
	      code = MINUS_EXPR;
	      goto do_minus;
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  expand_operands (treeop0, treeop1,
			   subtarget, &op0, &op1, EXPAND_NORMAL);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
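      /* Under EXPAND_SUM the code above may return an unsimplified
	 (plus (symbol_ref "arr") (const_int 8)), letting a caller that
	 is computing the address of, say, "arr[2]" (int arr[]) fold the
	 whole sum into a single addressing mode instead of
	 materialising it in a register first.  */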
    case MINUS_EXPR:
    do_minus:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (treeop0)
	  && really_constant_p (treeop1))
	{
	  expand_operands (treeop0, treeop1,
			   NULL_RTX, &op0, &op1, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (CONST_INT_P (op1))
	    return REDUCE_BIT_FIELD (plus_constant (mode, op0,
						    -INTVAL (op1)));
	  else
	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (CONST_INT_P (op1))
	{
	  op1 = negate_rtx (mode, op1);
	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
	}

      goto binop2;
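      /* Thus "p - 4" is emitted as "p + (-4)": targets only ever see
	 PLUS with a possibly negative constant, which is the canonical
	 RTL form for address arithmetic.  */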
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      op2 = expand_normal (treeop2);
      target = expand_widen_pattern_expr (ops, op0, op1, op2,
					  target, unsignedp);
      return target;
    case WIDEN_MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
	{
	  tree t1 = treeop0;
	  treeop0 = treeop1;
	  treeop1 = t1;
	}

      /* First, check if we have a multiplication of one signed and one
	 unsigned operand.  */
      if (TREE_CODE (treeop1) != INTEGER_CST
	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
	{
	  enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
	  this_optab = usmul_widen_optab;
	  if (find_widening_optab_handler (this_optab, mode, innermode, 0)
	      != CODE_FOR_nothing)
	    {
	      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
		expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
				 EXPAND_NORMAL);
	      else
		expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
				 EXPAND_NORMAL);
	      /* op0 and op1 might still be constant, despite the above
		 != INTEGER_CST check.  Handle it.  */
	      if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		{
		  op0 = convert_modes (innermode, mode, op0, true);
		  op1 = convert_modes (innermode, mode, op1, false);
		  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
							target, unsignedp));
		}
	      goto widen_mult_const;
	    }
	}
      /* Check for a multiplication with matching signedness.  */
      else if ((TREE_CODE (treeop1) == INTEGER_CST
		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
	{
	  tree op0type = TREE_TYPE (treeop0);
	  enum machine_mode innermode = TYPE_MODE (op0type);
	  bool zextend_p = TYPE_UNSIGNED (op0type);
	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

	  if (TREE_CODE (treeop0) != INTEGER_CST)
	    {
	      if (find_widening_optab_handler (this_optab, mode, innermode, 0)
		  != CODE_FOR_nothing)
		{
		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
				   EXPAND_NORMAL);
		  /* op0 and op1 might still be constant, despite the above
		     != INTEGER_CST check.  Handle it.  */
		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		    {
		     widen_mult_const:
		      op0 = convert_modes (innermode, mode, op0, zextend_p);
		      op1
			= convert_modes (innermode, mode, op1,
					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
		      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
							    target,
							    unsignedp));
		    }
		  temp = expand_widening_mult (mode, op0, op1, target,
					       unsignedp, this_optab);
		  return REDUCE_BIT_FIELD (temp);
		}
	      if (find_widening_optab_handler (other_optab, mode, innermode, 0)
		  != CODE_FOR_nothing
		  && innermode == word_mode)
		{
		  rtx htem, hipart;
		  op0 = expand_normal (treeop0);
		  if (TREE_CODE (treeop1) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_normal (treeop1),
					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
		  else
		    op1 = expand_normal (treeop1);
		  /* op0 and op1 might still be constant, despite the above
		     != INTEGER_CST check.  Handle it.  */
		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		    goto widen_mult_const;
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (innermode, temp);
		  htem = expand_mult_highpart_adjust (innermode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (htem != hipart)
		    emit_move_insn (hipart, htem);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}
      treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
      treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
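      /* For example, "(long long) a * b" with 32-bit ints selects a
	 widening-multiply pattern such as mulsidi3/umulsidi3 when the
	 target provides one; failing that, a highpart-multiply
	 adjustment is tried, and only as a last resort are both
	 operands extended to the wide type and multiplied there.  */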
    case FMA_EXPR:
      {
	optab opt = fma_optab;
	gimple def0, def2;

	/* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
	   call.  */
	if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
	  {
	    tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
	    tree call_expr;

	    gcc_assert (fn != NULL_TREE);
	    call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
	    return expand_builtin (call_expr, target, subtarget, mode, false);
	  }

	def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
	def2 = get_def_for_expr (treeop2, NEGATE_EXPR);

	op0 = op2 = NULL;

	if (def0 && def2
	    && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fnms_optab;
	    op0 = expand_normal (gimple_assign_rhs1 (def0));
	    op2 = expand_normal (gimple_assign_rhs1 (def2));
	  }
	else if (def0
		 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fnma_optab;
	    op0 = expand_normal (gimple_assign_rhs1 (def0));
	  }
	else if (def2
		 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fms_optab;
	    op2 = expand_normal (gimple_assign_rhs1 (def2));
	  }

	if (op0 == NULL)
	  op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
	if (op2 == NULL)
	  op2 = expand_normal (treeop2);
	op1 = expand_normal (treeop1);

	return expand_ternary_op (TYPE_MODE (type), opt,
				  op0, op1, op2, target, 0);
      }
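      /* Negations feeding the FMA are fused as well: -x*y + z selects
	 fnma, x*y - z selects fms, and -x*y - z selects fnms, in each
	 case only when the target implements the corresponding
	 optab.  */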
    case MULT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_mult" doesn't support sat/no-sat fixed-point
	 multiplications.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
	{
	  tree t1 = treeop0;
	  treeop0 = treeop1;
	  treeop1 = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && tree_fits_shwi_p (treeop1))
	{
	  tree exp1 = treeop1;

	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);

	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_to_shwi (exp1),
					     TYPE_MODE (TREE_TYPE (exp1)))));
	}

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
	 divisions.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, EXPAND_NORMAL);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      goto binop;

    case MULT_HIGHPART_EXPR:
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
      gcc_assert (temp);
      return temp;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, EXPAND_NORMAL);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
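      /* Both families funnel into expand_divmod, whose first argument
	 selects quotient (0) or remainder (1).  On a target without a
	 hardware modulus, "a % b" may thus come back as a divide,
	 multiply and subtract sequence, or as a library call.  */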
    case FIXED_CONVERT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);

      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
	  || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
      else
	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
      return target;

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type,
					       optab_default),
			  op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);

    case ABS_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, treeop0, 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (treeop0, treeop1,
		       target, &op0, &op1, EXPAND_NORMAL);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = optab_for_tree_code (code, type, optab_default);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (! REG_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}

      /* We generate better code and avoid problems with op1 mentioning
	 target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
	op1 = force_reg (mode, op1);

      {
	enum rtx_code comparison_code;
	rtx cmpop1 = op1;

	if (code == MAX_EXPR)
	  comparison_code = unsignedp ? GEU : GE;
	else
	  comparison_code = unsignedp ? LEU : LE;

	/* Canonicalize to comparisons against 0.  */
	if (op1 == const1_rtx)
	  {
	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
	       or (a != 0 ? a : 1) for unsigned.
	       For MIN we are safe converting (a <= 1 ? a : 1)
	       into (a <= 0 ? a : 1)  */
	    cmpop1 = const0_rtx;
	    if (code == MAX_EXPR)
	      comparison_code = unsignedp ? NE : GT;
	  }
	if (op1 == constm1_rtx && !unsignedp)
	  {
	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
	    cmpop1 = const0_rtx;
	    if (code == MIN_EXPR)
	      comparison_code = LT;
	  }
#ifdef HAVE_conditional_move
	/* Use a conditional move if possible.  */
	if (can_conditionally_move_p (mode))
	  {
	    rtx insn;

	    start_sequence ();

	    /* Try to emit the conditional move.  */
	    insn = emit_conditional_move (target, comparison_code,
					  op0, cmpop1, mode,
					  op0, op1, mode,
					  unsignedp);

	    /* If we could do the conditional move, emit the sequence,
	       and return.  */
	    if (insn)
	      {
		rtx seq = get_insns ();
		end_sequence ();
		emit_insn (seq);
		return target;
	      }

	    /* Otherwise discard the sequence and fall back to code with
	       branches.  */
	    end_sequence ();
	  }
#endif
	if (target != op0)
	  emit_move_insn (target, op0);

	temp = gen_label_rtx ();
	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
				 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
				 -1);
      }
      emit_move_insn (target, op1);
      emit_label (temp);
      return target;
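      /* So for "m = a > b ? a : b" this case tries, in order: a native
	 smax/umax pattern, a conditional move, and finally a compare
	 and branch; progressively cheaper fallbacks for targets with
	 weaker instruction sets.  */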
    case BIT_NOT_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* In case we have to reduce the result to bitfield precision
	 for unsigned bitfield expand this as XOR with a proper constant
	 instead.  */
      if (reduce_bit_field && TYPE_UNSIGNED (type))
	{
	  wide_int mask = wi::mask (TYPE_PRECISION (type),
				    false, GET_MODE_PRECISION (mode));

	  temp = expand_binop (mode, xor_optab, op0,
			       immed_wide_int_const (mask, mode),
			       target, 1, OPTAB_LIB_WIDEN);
	}
      else
	temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      goto binop;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
		  || (GET_MODE_PRECISION (TYPE_MODE (type))
		      == TYPE_PRECISION (type)));
      /* fall through */

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_shift" doesn't support sat/no-sat fixed-point
	 shifts.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      if (! safe_from_p (subtarget, treeop1, 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      temp = expand_variable_shift (code, mode, op0, treeop1, target,
				    unsignedp);
      if (code == LSHIFT_EXPR)
	temp = REDUCE_BIT_FIELD (temp);
      return temp;
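      /* Only left shifts need the reduction: shifting the 3-bit value
	 0b101 left by one in QImode yields 0b1010, and REDUCE_BIT_FIELD
	 masks it back to 0b010 so that the excess mode bits never leak
	 into later uses.  Right shifts cannot set bits above the
	 precision.  */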
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (ops,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode);
      if (temp)
	return temp;

      /* Use a compare and a jump for BLKmode comparisons, or for function
	 type comparisons if HAVE_canonicalize_funcptr_for_compare.  */

      if ((target == 0
	   || modifier == EXPAND_STACK_PARM
	   || ! safe_from_p (target, treeop0, 1)
	   || ! safe_from_p (target, treeop1, 1)
	   /* Make sure we don't have a hard reg (such as function's return
	      value) live across basic blocks, if not optimizing.  */
	   || (!optimize && REG_P (target)
	       && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot_1 (code, treeop0, treeop1, op1, -1);

      if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
	emit_move_insn (target, constm1_rtx);
      else
	emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return target;
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (treeop0);
      op1 = expand_normal (treeop1);

      if (!target)
	target = gen_reg_rtx (TYPE_MODE (type));
      else
	/* If target overlaps with op1, then either we need to force
	   op1 into a pseudo (if target also overlaps with op0),
	   or write the complex parts in reverse order.  */
	switch (GET_CODE (target))
	  {
	  case CONCAT:
	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
	      {
		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
		  {
		  complex_expr_force_op1:
		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
		    emit_move_insn (temp, op1);
		    op1 = temp;
		    break;
		  }
	      complex_expr_swap_order:
		/* Move the imaginary (op1) and real (op0) parts to their
		   location.  */
		write_complex_part (target, op1, true);
		write_complex_part (target, op0, false);

		return target;
	      }
	    break;
	  case MEM:
	    temp = adjust_address_nv (target,
				      GET_MODE_INNER (GET_MODE (target)), 0);
	    if (reg_overlap_mentioned_p (temp, op1))
	      {
		enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
		temp = adjust_address_nv (target, imode,
					  GET_MODE_SIZE (imode));
		if (reg_overlap_mentioned_p (temp, op0))
		  goto complex_expr_force_op1;
		goto complex_expr_swap_order;
	      }
	    break;
	  default:
	    if (reg_overlap_mentioned_p (target, op1))
	      {
		if (reg_overlap_mentioned_p (target, op0))
		  goto complex_expr_force_op1;
		goto complex_expr_swap_order;
	      }
	    break;
	  }

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);
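      /* The overlap checks matter for assignments such as
	 "c = COMPLEX_EXPR <__imag c, __real c>": writing the real part
	 first would clobber an input, so either the parts are written
	 in reverse order or op1 is copied into a fresh pseudo when both
	 halves overlap.  */

      return target;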
    case WIDEN_SUM_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
					    target, unsignedp);
	return target;
      }

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
	op0 = expand_normal (treeop0);
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_unop (mode, this_optab, op0, target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	target = expand_vec_shift_expr (ops, target);
	return target;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
      {
	op0 = expand_normal (treeop0);
	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
					  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      {
	op0 = expand_normal (treeop0);
	/* The signedness is determined from input operand.  */
	temp = expand_widen_pattern_expr
	  (ops, op0, NULL_RTX, NULL_RTX,
	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));

	gcc_assert (temp);
	return temp;
      }

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
					  target, unsignedp);
      gcc_assert (target);
      return target;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      mode = TYPE_MODE (TREE_TYPE (treeop0));
      goto binop;

    case VEC_PERM_EXPR:
      expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
      op2 = expand_normal (treeop2);

      /* Careful here: if the target doesn't support integral vector modes,
	 a constant selection vector could wind up smooshed into a normal
	 integral constant.  */
      if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
	{
	  tree sel_type = TREE_TYPE (treeop2);
	  enum machine_mode vmode
	    = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
			       TYPE_VECTOR_SUBPARTS (sel_type));
	  gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
	  op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
	  gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
	}
      else
	gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);

      temp = expand_vec_perm (mode, op0, op1, op2, target);
      gcc_assert (temp);
      return temp;
    case DOT_PROD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;
	rtx op2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (ops, op0, op1, op2,
					    target, unsignedp);
	return target;
      }

    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;
	rtx op2;

	this_optab = optab_for_tree_code (code, type, optab_default);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }
    case COND_EXPR:
      /* A COND_EXPR with its type being VOID_TYPE represents a
	 conditional jump and is handled in
	 expand_gimple_cond_expr.  */
      gcc_assert (!VOID_TYPE_P (type));

      /* Note that COND_EXPRs whose type is a structure or union
	 are required to be constructed to contain assignments of
	 a temporary variable, so that we can evaluate them here
	 for side effect only.  If type is void, we must do likewise.  */

      gcc_assert (!TREE_ADDRESSABLE (type)
		  && !ignore
		  && TREE_TYPE (treeop1) != void_type_node
		  && TREE_TYPE (treeop2) != void_type_node);

      temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
      if (temp)
	return temp;

      /* If we are not to produce a result, we have no target.  Otherwise,
	 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary.  */

      if (modifier != EXPAND_STACK_PARM
	  && original_target
	  && safe_from_p (original_target, treeop0, 1)
	  && GET_MODE (original_target) == mode
	  && !MEM_P (original_target))
	temp = original_target;
      else
	temp = assign_temp (type, 0, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (treeop0, op0, -1);
      store_expr (treeop1, temp,
		  modifier == EXPAND_STACK_PARM,
		  false);

      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
      store_expr (treeop2, temp,
		  modifier == EXPAND_STACK_PARM,
		  false);

      emit_label (op1);
      OK_DEFER_POP;
      return temp;

    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
      return target;

    default:
      gcc_unreachable ();
    }
  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (treeop0, treeop1,
		   subtarget, &op0, &op1, EXPAND_NORMAL);
 binop2:
  this_optab = optab_for_tree_code (code, type, optab_default);
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  /* Bitwise operations do not need bitfield reduction as we expect their
     operands to be properly truncated.  */
  if (code == BIT_XOR_EXPR
      || code == BIT_AND_EXPR
      || code == BIT_IOR_EXPR)
    return temp;
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Return TRUE if expression STMT is suitable for replacement.
   Never consider memory loads as replaceable, because those don't ever lead
   into constant expressions.  */

static bool
stmt_is_replaceable_p (gimple stmt)
{
  if (ssa_is_replaceable_p (stmt))
    {
      /* Don't move around loads.  */
      if (!gimple_assign_single_p (stmt)
	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
	return true;
    }

  return false;
}
rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl,
		    bool inner_reference_p)
{
  rtx op0, op1, temp, decl_rtl;
  tree type;
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field;
  location_t loc = EXPR_LOCATION (exp);
  struct separate_ops ops;
  tree treeop0, treeop1, treeop2;
  tree ssa_name = NULL_TREE;
  gimple g;

  type = TREE_TYPE (exp);
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = treeop1 = treeop2 = NULL_TREE;
  if (!VL_EXP_CLASS_P (exp))
    switch (TREE_CODE_LENGTH (code))
      {
	default:
	case 3: treeop2 = TREE_OPERAND (exp, 2);
	case 2: treeop1 = TREE_OPERAND (exp, 1);
	case 1: treeop0 = TREE_OPERAND (exp, 0);
	case 0: break;
      }
  ops.code = code;
  ops.type = type;
  ops.op0 = treeop0;
  ops.op1 = treeop1;
  ops.op2 = treeop2;
  ops.location = loc;

  ignore = (target == const0_rtx
	    || ((CONVERT_EXPR_CODE_P (code)
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (!ignore
		      && INTEGRAL_TYPE_P (type)
		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (MEM_P (temp))
	    copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == tcc_unary
	  || code == BIT_FIELD_REF
	  || code == COMPONENT_REF
	  || code == INDIRECT_REF)
	return expand_expr (treeop0, const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
	       || TREE_CODE_CLASS (code) == tcc_comparison
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);

	temp = label_rtx (exp);
	temp = gen_rtx_LABEL_REF (Pmode, temp);

	if (function != current_function_decl
	    && function != 0)
	  LABEL_REF_NONLOCAL_P (temp) = 1;

	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
	return temp;
      }
    case SSA_NAME:
      /* ??? ivopts calls expander, without any preparation from
	 out-of-ssa.  So fake instructions as if this was an access to the
	 base variable.  This unnecessarily allocates a pseudo, see how we can
	 reuse it, if partition base vars have it set already.  */
      if (!currently_expanding_to_rtl)
	{
	  tree var = SSA_NAME_VAR (exp);
	  if (var && DECL_RTL_SET_P (var))
	    return DECL_RTL (var);
	  return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
			      LAST_VIRTUAL_REGISTER + 1);
	}

      g = get_gimple_for_ssa_name (exp);
      /* For EXPAND_INITIALIZER try harder to get something simpler.  */
      if (g == NULL
	  && modifier == EXPAND_INITIALIZER
	  && !SSA_NAME_IS_DEFAULT_DEF (exp)
	  && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
	  && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
	g = SSA_NAME_DEF_STMT (exp);
      if (g)
	{
	  rtx r;

	  ops.code = gimple_assign_rhs_code (g);
	  switch (get_gimple_rhs_class (ops.code))
	    {
	    case GIMPLE_TERNARY_RHS:
	      ops.op2 = gimple_assign_rhs3 (g);
	      /* Fallthru */
	    case GIMPLE_BINARY_RHS:
	      ops.op1 = gimple_assign_rhs2 (g);
	      /* Fallthru */
	    case GIMPLE_UNARY_RHS:
	      ops.op0 = gimple_assign_rhs1 (g);
	      ops.type = TREE_TYPE (gimple_assign_lhs (g));
	      ops.location = gimple_location (g);
	      r = expand_expr_real_2 (&ops, target, tmode, modifier);
	      break;
	    case GIMPLE_SINGLE_RHS:
	      {
		location_t saved_loc = curr_insn_location ();
		set_curr_insn_location (gimple_location (g));
		r = expand_expr_real (gimple_assign_rhs1 (g), target,
				      tmode, modifier, NULL, inner_reference_p);
		set_curr_insn_location (saved_loc);
		break;
	      }
	    default:
	      gcc_unreachable ();
	    }
	  if (REG_P (r) && !REG_EXPR (r))
	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
	  return r;
	}

      ssa_name = exp;
      decl_rtl = get_rtx_for_ssa_name (ssa_name);
      exp = SSA_NAME_VAR (ssa_name);
      goto expand_decl_rtl;
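      /* This is where TER (temporary expression replacement) pays off:
	 for "_5 = a_2 + b_3" followed by a single use of _5, the
	 defining statement is re-expanded at the use site, letting the
	 expander fold the addition directly into, for example, an
	 addressing mode.  */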
    case PARM_DECL:
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	layout_decl (exp, 0);

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      decl_rtl = DECL_RTL (exp);
    expand_decl_rtl:
      gcc_assert (decl_rtl);
      decl_rtl = copy_rtx (decl_rtl);
      /* Record writes to register variables.  */
      if (modifier == EXPAND_WRITE
	  && REG_P (decl_rtl)
	  && HARD_REGISTER_P (decl_rtl))
	add_to_hard_reg_set (&crtl->asm_clobbers,
			     GET_MODE (decl_rtl), REGNO (decl_rtl));

      /* Ensure the variable is marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      TREE_USED (exp) = 1;

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Variables inherited from containing functions should have
	 been lowered by this point.  */
      context = decl_function_context (exp);
      gcc_assert (SCOPE_FILE_SCOPE_P (context)
		  || context == current_function_decl
		  || TREE_STATIC (exp)
		  || DECL_EXTERNAL (exp)
		  /* ??? C++ creates functions that are not TREE_STATIC.  */
		  || TREE_CODE (exp) == FUNCTION_DECL);

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 ??? We aren't parsing while expanding anymore.  */

      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
	temp = validize_mem (decl_rtl);

      /* If DECL_RTL is memory, we are in the normal case and the
	 address is not valid, get the address into a register.  */

      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
	{
	  if (alt_rtl)
	    *alt_rtl = decl_rtl;
	  decl_rtl = use_anchored_address (decl_rtl);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM
	      && !memory_address_addr_space_p (DECL_MODE (exp),
					       XEXP (decl_rtl, 0),
					       MEM_ADDR_SPACE (decl_rtl)))
	    temp = replace_equiv_address (decl_rtl,
					  copy_rtx (XEXP (decl_rtl, 0)));
	}

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl,
	 there are two cases: we are dealing with a BLKmode value
	 that is returned in a register, or we are dealing with
	 a promoted value.  In the latter case, return a SUBREG
	 of the wanted mode, but mark it so that we know that it
	 was already extended.  */
      if (REG_P (decl_rtl)
	  && DECL_MODE (exp) != BLKmode
	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
	{
	  enum machine_mode pmode;

	  /* Get the signedness to be used for this variable.  Ensure we get
	     the same mode we got when the variable was declared.  */
	  if (code == SSA_NAME
	      && (g = SSA_NAME_DEF_STMT (ssa_name))
	      && gimple_code (g) == GIMPLE_CALL
	      && !gimple_call_internal_p (g))
	    pmode = promote_function_mode (type, mode, &unsignedp,
					   gimple_call_fntype (g),
					   2);
	  else
	    pmode = promote_decl_mode (exp, &unsignedp);
	  gcc_assert (GET_MODE (decl_rtl) == pmode);

	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return decl_rtl;
    case INTEGER_CST:
      /* Given that TYPE_PRECISION (type) is not always equal to
	 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
	 the former to the latter according to the signedness of the
	 type.  */
      temp = immed_wide_int_const (wide_int::from
				   (exp,
				    GET_MODE_PRECISION (TYPE_MODE (type)),
				    TYPE_SIGN (type)),
				   TYPE_MODE (type));
      return temp;
    case VECTOR_CST:
      {
	tree tmp = NULL_TREE;
	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
	  return const_vector_from_tree (exp);
	if (GET_MODE_CLASS (mode) == MODE_INT)
	  {
	    tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
	    if (type_for_mode)
	      tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
	  }
	if (!tmp)
	  {
	    vec<constructor_elt, va_gc> *v;
	    unsigned i;

	    vec_alloc (v, VECTOR_CST_NELTS (exp));
	    for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
	      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
	    tmp = build_constructor (type, v);
	  }
	return expand_expr (tmp, ignore ? const0_rtx : target,
			    tmode, modifier);
      }
    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));

    case FIXED_CST:
      return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
	{
	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);

	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}

      /* ... fall through ...  */

    case STRING_CST:
      temp = expand_expr_constant (exp, 1, modifier);

      /* temp contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
					    MEM_ADDR_SPACE (temp)))
	return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;
    case SAVE_EXPR:
      {
	tree val = treeop0;
	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
				      inner_reference_p);

	if (!SAVE_EXPR_RESOLVED_P (exp))
	  {
	    /* We can indeed still hit this case, typically via builtin
	       expanders calling save_expr immediately before expanding
	       something.  Assume this means that we only have to deal
	       with non-BLKmode values.  */
	    gcc_assert (GET_MODE (ret) != BLKmode);

	    val = build_decl (curr_insn_location (),
			      VAR_DECL, NULL, TREE_TYPE (exp));
	    DECL_ARTIFICIAL (val) = 1;
	    DECL_IGNORED_P (val) = 1;
	    treeop0 = val;
	    TREE_OPERAND (exp, 0) = treeop0;
	    SAVE_EXPR_RESOLVED_P (exp) = 1;

	    if (!CONSTANT_P (ret))
	      ret = copy_to_reg (ret);
	    SET_DECL_RTL (val, ret);
	  }

        return ret;
      }
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree value;

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);

	  return const0_rtx;
	}

      return expand_constructor (exp, target, modifier, false);
    case TARGET_MEM_REF:
      {
	addr_space_t as
	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	enum insn_code icode;
	unsigned int align;

	op0 = addr_for_mem_ref (exp, as, true);
	op0 = memory_address_addr_space (mode, op0, as);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	align = get_object_alignment (exp);
	if (modifier != EXPAND_WRITE
	    && modifier != EXPAND_MEMORY
	    && mode != BLKmode
	    && align < GET_MODE_ALIGNMENT (mode)
	    /* If the target does not have special handling for unaligned
	       loads of mode then it can use regular moves for them.  */
	    && ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing))
	  {
	    struct expand_operand ops[2];

	    /* We've already validated the memory, and we're creating a
	       new pseudo destination.  The predicates really can't fail,
	       nor can the generator.  */
	    create_output_operand (&ops[0], NULL_RTX, mode);
	    create_fixed_operand (&ops[1], temp);
	    expand_insn (icode, 2, ops);
	    temp = ops[0].value;
	  }
	return temp;
      }
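      /* The movmisalign path is how, for example, a vector load through
	 a pointer with only element alignment is expanded: when the
	 target defines a "movmisalign<mode>" pattern it is used here,
	 while the MEM_REF case below additionally falls back to
	 extract_bit_field on targets where unaligned accesses are
	 slow.  */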
    case MEM_REF:
      {
	addr_space_t as
	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	enum machine_mode address_mode;
	tree base = TREE_OPERAND (exp, 0);
	gimple def_stmt;
	enum insn_code icode;
	unsigned align;
	/* Handle expansion of non-aliased memory with non-BLKmode.  That
	   might end up in a register.  */
	if (mem_ref_refers_to_non_mem_p (exp))
	  {
	    HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
	    base = TREE_OPERAND (base, 0);
	    if (offset == 0
		&& tree_fits_uhwi_p (TYPE_SIZE (type))
		&& (GET_MODE_BITSIZE (DECL_MODE (base))
		    == tree_to_uhwi (TYPE_SIZE (type))))
	      return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
				  target, tmode, modifier);
	    if (TYPE_MODE (type) == BLKmode)
	      {
		temp = assign_stack_temp (DECL_MODE (base),
					  GET_MODE_SIZE (DECL_MODE (base)));
		store_expr (base, temp, 0, false);
		temp = adjust_address (temp, BLKmode, offset);
		set_mem_size (temp, int_size_in_bytes (type));
		return temp;
	      }
	    exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
			  bitsize_int (offset * BITS_PER_UNIT));
	    return expand_expr (exp, target, tmode, modifier);
	  }
	address_mode = targetm.addr_space.address_mode (as);
	base = TREE_OPERAND (exp, 0);
	if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
	  {
	    tree mask = gimple_assign_rhs2 (def_stmt);
	    base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
			   gimple_assign_rhs1 (def_stmt), mask);
	    TREE_OPERAND (exp, 0) = base;
	  }
	align = get_object_alignment (exp);
	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address_addr_space (mode, op0, as);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  {
	    rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
	    op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
	    op0 = memory_address_addr_space (mode, op0, as);
	  }
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	if (TREE_THIS_VOLATILE (exp))
	  MEM_VOLATILE_P (temp) = 1;
	if (modifier != EXPAND_WRITE
	    && modifier != EXPAND_MEMORY
	    && !inner_reference_p
	    && mode != BLKmode
	    && align < GET_MODE_ALIGNMENT (mode))
	  {
	    if ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing)
	      {
		struct expand_operand ops[2];

		/* We've already validated the memory, and we're creating a
		   new pseudo destination.  The predicates really can't fail,
		   nor can the generator.  */
		create_output_operand (&ops[0], NULL_RTX, mode);
		create_fixed_operand (&ops[1], temp);
		expand_insn (icode, 2, ops);
		temp = ops[0].value;
	      }
	    else if (SLOW_UNALIGNED_ACCESS (mode, align))
	      temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
					0, TYPE_UNSIGNED (TREE_TYPE (exp)),
					(modifier == EXPAND_STACK_PARM
					 ? NULL_RTX : target),
					mode, mode);
	  }
	return temp;
      }
    case ARRAY_REF:
      {
	tree array = treeop0;
	tree index = treeop1;
	tree init;

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY)
	  {
	    tree t = fold_read_from_constant_string (exp);

	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == CONSTRUCTOR
	    && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST)
	  {
	    unsigned HOST_WIDE_INT ix;
	    tree field, value;

	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
				      field, value)
	      if (tree_int_cst_equal (field, index))
		{
		  if (!TREE_SIDE_EFFECTS (value))
		    return expand_expr (fold (value), target, tmode, modifier);
		  break;
		}
	  }

	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && modifier != EXPAND_MEMORY
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (index) == INTEGER_CST
		 && (TREE_CODE (array) == VAR_DECL
		     || TREE_CODE (array) == CONST_DECL)
		 && (init = ctor_for_folding (array)) != error_mark_node)
	  {
	    if (init == NULL_TREE)
	      {
		tree value = build_zero_cst (type);
		if (TREE_CODE (value) == CONSTRUCTOR)
		  {
		    /* If VALUE is a CONSTRUCTOR, this optimization is only
		       useful if this doesn't store the CONSTRUCTOR into
		       memory.  If it does, it is more efficient to just
		       load the data from the array directly.  */
		    rtx ret = expand_constructor (value, target,
						  modifier, true);
		    if (ret == NULL_RTX)
		      value = NULL_TREE;
		  }

		if (value)
		  return expand_expr (value, target, tmode, modifier);
	      }
	    else if (TREE_CODE (init) == CONSTRUCTOR)
	      {
		unsigned HOST_WIDE_INT ix;
		tree field, value;

		FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
					  field, value)
		  if (tree_int_cst_equal (field, index))
		    {
		      if (TREE_SIDE_EFFECTS (value))
			break;

		      if (TREE_CODE (value) == CONSTRUCTOR)
			{
			  /* If VALUE is a CONSTRUCTOR, this
			     optimization is only useful if
			     this doesn't store the CONSTRUCTOR
			     into memory.  If it does, it is more
			     efficient to just load the data from
			     the array directly.  */
			  rtx ret = expand_constructor (value, target,
							modifier, true);
			  if (ret == NULL_RTX)
			    break;
			}

		      return
			expand_expr (fold (value), target, tmode, modifier);
		    }
	      }
	    else if (TREE_CODE (init) == STRING_CST)
	      {
		tree low_bound = array_ref_low_bound (exp);
		tree index1 = fold_convert_loc (loc, sizetype, treeop1);

		/* Optimize the special case of a zero lower bound.

		   We convert the lower bound to sizetype to avoid problems
		   with constant folding.  E.g. suppose the lower bound is
		   1 and its mode is QI.  Without the conversion
		      (ARRAY + (INDEX - (unsigned char)1))
		   becomes
		      (ARRAY + (-(unsigned char)1) + INDEX)
		   which becomes
		      (ARRAY + 255 + INDEX).  Oops!  */
		if (!integer_zerop (low_bound))
		  index1 = size_diffop_loc (loc, index1,
					    fold_convert_loc (loc, sizetype,
							      low_bound));

		if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    enum machine_mode mode = TYPE_MODE (type);

		    if (GET_MODE_CLASS (mode) == MODE_INT
			&& GET_MODE_SIZE (mode) == 1)
		      return gen_int_mode (TREE_STRING_POINTER (init)
					   [TREE_INT_CST_LOW (index1)],
					   mode);
		  }
	      }
	  }
      }
      goto normal_inner_ref;
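      /* The STRING_CST arm above is what folds a constant index into a
	 constant string at expansion time, e.g. "abc"[1] becomes the
	 character constant 'b', but only for single-byte element modes
	 and an index that is in range.  */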
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  */
      if (TREE_CODE (treeop0) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
				    idx, field, value)
	    if (field == treeop1
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (field)
		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
			&& (GET_MODE_PRECISION (DECL_MODE (field))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (field)
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (value, target, tmode, modifier);
		if (DECL_BIT_FIELD (field))
		  {
		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
		    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));

		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
		      {
			op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
					    imode);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			int count = GET_MODE_PRECISION (imode) - bitsize;

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      goto normal_inner_ref;
9986 case ARRAY_RANGE_REF
:
9989 enum machine_mode mode1
, mode2
;
9990 HOST_WIDE_INT bitsize
, bitpos
;
9992 int volatilep
= 0, must_force_mem
;
9993 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
9994 &mode1
, &unsignedp
, &volatilep
, true);
9995 rtx orig_op0
, memloc
;
9996 bool mem_attrs_from_type
= false;
9998 /* If we got back the original object, something is wrong. Perhaps
9999 we are evaluating an expression too early. In any event, don't
10000 infinitely recurse. */
10001 gcc_assert (tem
!= exp
);
10003 /* If TEM's type is a union of variable size, pass TARGET to the inner
10004 computation, since it will need a temporary and TARGET is known
10005 to have to do. This occurs in unchecked conversion in Ada. */
10007 = expand_expr_real (tem
,
10008 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
10009 && COMPLETE_TYPE_P (TREE_TYPE (tem
))
10010 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
10012 && modifier
!= EXPAND_STACK_PARM
10013 ? target
: NULL_RTX
),
10015 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
,
10018 /* If the field has a mode, we want to access it in the
10019 field's mode, not the computed mode.
10020 If a MEM has VOIDmode (external with incomplete type),
10021 use BLKmode for it instead. */
10024 if (mode1
!= VOIDmode
)
10025 op0
= adjust_address (op0
, mode1
, 0);
10026 else if (GET_MODE (op0
) == VOIDmode
)
10027 op0
= adjust_address (op0
, BLKmode
, 0);
10031 = CONSTANT_P (op0
) ? TYPE_MODE (TREE_TYPE (tem
)) : GET_MODE (op0
);
10033 /* If we have either an offset, a BLKmode result, or a reference
10034 outside the underlying object, we must force it to memory.
10035 Such a case can occur in Ada if we have unchecked conversion
10036 of an expression from a scalar type to an aggregate type or
10037 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10038 passed a partially uninitialized object or a view-conversion
10039 to a larger size. */
10040 must_force_mem
= (offset
10041 || mode1
== BLKmode
10042 || bitpos
+ bitsize
> GET_MODE_BITSIZE (mode2
));
10044 /* Handle CONCAT first. */
10045 if (GET_CODE (op0
) == CONCAT
&& !must_force_mem
)
10048 && bitsize
== GET_MODE_BITSIZE (GET_MODE (op0
)))
10051 && bitsize
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
10054 op0
= XEXP (op0
, 0);
10055 mode2
= GET_MODE (op0
);
10057 else if (bitpos
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
10058 && bitsize
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 1)))
10062 op0
= XEXP (op0
, 1);
10064 mode2
= GET_MODE (op0
);
10067 /* Otherwise force into memory. */
10068 must_force_mem
= 1;
	/* If this is a constant, put it in a register if it is a legitimate
	   constant and we don't need a memory reference.  */
	if (CONSTANT_P (op0)
	    && mode2 != BLKmode
	    && targetm.legitimate_constant_p (mode2, op0)
	    && !must_force_mem)
	  op0 = force_reg (mode2, op0);

	/* Otherwise, if this is a constant, try to force it to the constant
	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
	   is a legitimate constant.  */
	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
	  op0 = validize_mem (memloc);

	/* Otherwise, if this is a constant or the object is not in memory
	   and needs to be, put it there.  */
	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
	  {
	    memloc = assign_temp (TREE_TYPE (tem), 1, 1);
	    emit_move_insn (memloc, op0);
	    op0 = memloc;
	    mem_attrs_from_type = true;
	  }
	if (offset)
	  {
	    enum machine_mode address_mode;
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    gcc_assert (MEM_P (op0));

	    address_mode = get_address_mode (op0);
	    if (GET_MODE (offset_rtx) != address_mode)
	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);

	    /* See the comment in expand_assignment for the rationale.  */
	    if (mode1 != VOIDmode
		&& bitpos != 0
		&& bitsize > 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }
	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (MEM_P (op0) && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }
	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || REG_P (op0) || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& modifier != EXPAND_MEMORY)
	    /* If the bitfield is volatile and the bitsize
	       is narrower than the access size of the bitfield,
	       we need to extract bitfields from the access.  */
	    || (volatilep && TREE_CODE (exp) == COMPONENT_REF
		&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
		&& mode1 != BLKmode
		&& bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
		      || (MEM_P (op0)
			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
		     && modifier != EXPAND_MEMORY
		     && ((modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_INITIALIZER)
			 ? STRICT_ALIGNMENT
			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
		    || (bitpos % BITS_PER_UNIT != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& TYPE_SIZE (TREE_TYPE (exp))
		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    enum machine_mode ext_mode = mode;
	    if (ext_mode == BLKmode
		&& ! (target != 0 && MEM_P (op0)
		      && MEM_P (target)
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		if (target == 0)
		  target = assign_temp (type, 1, 1);

		/* ??? Unlike the similar test a few lines below, this one is
		   very likely obsolete.  */
		if (bitsize == 0)
		  return target;

		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		gcc_assert (MEM_P (op0)
			    && (!target || MEM_P (target))
			    && !(bitpos % BITS_PER_UNIT));

		emit_block_move (target,
				 adjust_address (op0, VOIDmode,
						 bitpos / BITS_PER_UNIT),
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }
	    /* If we have nothing to extract, the result will be 0 for targets
	       with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
	       return 0 for the sake of consistency, as reading a zero-sized
	       bitfield is valid in Ada and the value is fully specified.  */
	    if (bitsize == 0)
	      return const0_rtx;

	    op0 = validize_mem (op0);

	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode);
	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  GET_MODE_BITSIZE (GET_MODE (op0))
				  - bitsize, op0, 1);
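
	    /* For example (illustrative): on a 32-bit big-endian target, a
	       24-bit record extracted by extract_bit_field into an SImode
	       register sits in the low-order bits; the shift above by
	       32 - 24 = 8 moves it into the high-order bits, where a
	       left-justified value of record type is expected.  */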
	    /* If the result type is BLKmode, store the data into a temporary
	       of the appropriate type, but with the mode corresponding to the
	       mode for the data we have (op0's mode).  */
	    if (mode == BLKmode)
	      {
		rtx new_rtx
		  = assign_stack_temp_for_type (ext_mode,
						GET_MODE_BITSIZE (ext_mode),
						type);
		emit_move_insn (new_rtx, op0);
		op0 = copy_rtx (new_rtx);
		PUT_MODE (op0, BLKmode);
	      }

	    return op0;
	  }
	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	/* If op0 is a temporary because of forcing to memory, pass only the
	   type to set_mem_attributes so that the original expression is never
	   marked as ADDRESSABLE through MEM_EXPR of the temporary.  */
	if (mem_attrs_from_type)
	  set_mem_attributes (op0, type, 0);
	else
	  set_mem_attributes (op0, exp, 0);

	if (REG_P (XEXP (op0, 0)))
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
    case CALL_EXPR:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      if (CALL_EXPR_VA_ARG_PACK (exp))
	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      {
	tree fndecl = get_callee_fndecl (exp), attr;

	if (fndecl
	    && (attr = lookup_attribute ("error",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  error ("%Kcall to %qs declared with attribute error: %s",
		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
	if (fndecl
	    && (attr = lookup_attribute ("warning",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %qs declared with attribute warning: %s",
		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

	/* Check for a built-in function.  */
	if (fndecl && DECL_BUILT_IN (fndecl))
	  {
	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	  }
      }
      return expand_call (exp, target, ignore);
    case VIEW_CONVERT_EXPR:
      op0 = NULL_RTX;

      /* If we are converting to BLKmode, try to avoid an intermediate
	 temporary by fetching an inner memory reference.  */
      if (mode == BLKmode
	  && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
	  && handled_component_p (treeop0))
	{
	  enum machine_mode mode1;
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  int unsignedp;
	  int volatilep = 0;
	  tree tem
	    = get_inner_reference (treeop0, &bitsize, &bitpos,
				   &offset, &mode1, &unsignedp, &volatilep,
				   true);
	  rtx orig_op0;

	  /* ??? We should work harder and deal with non-zero offsets.  */
	  if (!offset
	      && (bitpos % BITS_PER_UNIT) == 0
	      && bitsize >= 0
	      && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
	    {
	      /* See the normal_inner_ref case for the rationale.  */
	      orig_op0
		= expand_expr_real (tem,
				    (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
				     && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
					 != INTEGER_CST)
				     && modifier != EXPAND_STACK_PARM
				     ? target : NULL_RTX),
				    VOIDmode,
				    modifier == EXPAND_SUM
				    ? EXPAND_NORMAL : modifier,
				    NULL, true);

	      if (MEM_P (orig_op0))
		{
		  op0 = orig_op0;

		  /* Get a reference to just this component.  */
		  if (modifier == EXPAND_CONST_ADDRESS
		      || modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
		  else
		    op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);

		  if (op0 == orig_op0)
		    op0 = copy_rtx (op0);

		  set_mem_attributes (op0, treeop0, 0);
		  if (REG_P (XEXP (op0, 0)))
		    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

		  MEM_VOLATILE_P (op0) |= volatilep;
		}
	    }
	}
      if (!op0)
	op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
				NULL, inner_reference_p);

      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
	;
      /* If neither mode is BLKmode, and both modes are the same size
	 then we can use gen_lowpart.  */
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
	       && (GET_MODE_PRECISION (mode)
		   == GET_MODE_PRECISION (GET_MODE (op0)))
	       && !COMPLEX_MODE_P (GET_MODE (op0)))
	{
	  if (GET_CODE (op0) == SUBREG)
	    op0 = force_reg (GET_MODE (op0), op0);
	  temp = gen_lowpart_common (mode, op0);
	  if (temp)
	    op0 = temp;
	  else
	    {
	      if (!REG_P (op0) && !MEM_P (op0))
		op0 = force_reg (GET_MODE (op0), op0);
	      op0 = gen_lowpart (mode, op0);
	    }
	}
      /* If both types are integral, convert from one mode to the other.  */
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
	op0 = convert_modes (mode, GET_MODE (op0), op0,
			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      /* If the output type is a bit-field type, do an extraction.  */
      else if (reduce_bit_field)
	return extract_bit_field (op0, TYPE_PRECISION (type), 0,
				  TYPE_UNSIGNED (type), NULL_RTX,
				  mode, mode);
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (treeop0);

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}
      /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
	 output type is such that the operand is known to be aligned, indicate
	 that it is.  Otherwise, we need only be concerned about alignment for
	 non-BLKmode results.  */
      if (MEM_P (op0))
	{
	  enum insn_code icode;

	  if (TYPE_ALIGN_OK (type))
	    {
	      /* ??? Copying the MEM without substantially changing it might
		 run afoul of the code handling volatile memory references in
		 store_expr, which assumes that TARGET is returned unmodified
		 if it has been used.  */
	      op0 = copy_rtx (op0);
	      set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	    }
	  else if (modifier != EXPAND_WRITE
		   && modifier != EXPAND_MEMORY
		   && !inner_reference_p
		   && mode != BLKmode
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
	    {
	      /* If the target has special handling for unaligned
		 loads of this mode, use it.  */
	      if ((icode = optab_handler (movmisalign_optab, mode))
		  != CODE_FOR_nothing)
		{
		  rtx reg, insn;

		  op0 = adjust_address (op0, mode, 0);
		  /* We've already validated the memory, and we're creating a
		     new pseudo destination.  The predicates really can't
		     fail.  */
		  reg = gen_reg_rtx (mode);

		  /* Nor can the insn generator.  */
		  insn = GEN_FCN (icode) (reg, op0);
		  emit_insn (insn);
		  return reg;
		}
	      else if (STRICT_ALIGNMENT)
		{
		  tree inner_type = TREE_TYPE (treeop0);
		  HOST_WIDE_INT temp_size
		    = MAX (int_size_in_bytes (inner_type),
			   (HOST_WIDE_INT) GET_MODE_SIZE (mode));
		  rtx new_rtx
		    = assign_stack_temp_for_type (mode, temp_size, type);
		  rtx new_with_op0_mode
		    = adjust_address (new_rtx, GET_MODE (op0), 0);

		  gcc_assert (!TREE_ADDRESSABLE (exp));

		  if (GET_MODE (op0) == BLKmode)
		    emit_block_move (new_with_op0_mode, op0,
				     GEN_INT (GET_MODE_SIZE (mode)),
				     (modifier == EXPAND_STACK_PARM
				      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
		  else
		    emit_move_insn (new_with_op0_mode, op0);

		  op0 = new_rtx;
		}
	    }

	  op0 = adjust_address (op0, mode, 0);
	}

      return op0;
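
      /* For example (illustrative): a VIEW_CONVERT_EXPR reinterpreting a
	 float as a 32-bit int takes the gen_lowpart path above, since
	 SFmode and SImode have the same precision; only size-changing or
	 otherwise incompatible conversions reach the spill-and-reload
	 fallback through a stack temporary.  */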
    case MODIFY_EXPR:
      {
	tree lhs = treeop0;
	tree rhs = treeop1;
	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label, -1);
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
			       false);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs, false);
	return const0_rtx;
      }
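
      /* For example (illustrative, not from the original source): given

	   struct s { unsigned int a : 1, b : 1; } x;
	   x.a |= x.b;

	 the single-bit path above emits the equivalent of

	   if (x.b) x.a = 1;

	 (and, for x.a &= x.b, "if (!x.b) x.a = 0;"), so the store is
	 skipped entirely whenever it would not change anything.  */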
    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);

    case REALPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);
    case RETURN_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case SWITCH_EXPR:
    case ASM_EXPR:
      /* Expanded in cfgexpand.c.  */
      gcc_unreachable ();

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case COMPOUND_LITERAL_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
			       modifier, alt_rtl, inner_reference_p);

    default:
      return expand_expr_real_2 (&ops, target, tmode, modifier);
    }
}
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      enum machine_mode mode = GET_MODE (exp);
      rtx mask = immed_wide_int_const
	(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
      return expand_and (mode, exp, mask, target);
    }
  else
    {
      int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
			  exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
			   exp, count, target, 0);
    }
}
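
/* For example (illustrative): reducing an SImode value X to a 6-bit
   type with 32-bit SImode yields X & 0x3f in the unsigned case, and
   (X << 26) >> 26 with arithmetic shifts in the signed case, which
   also propagates the sign bit of the field.  */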
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
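
/* For example (illustrative), this matches an OFFSET of the form

     (sizetype) (-(long) &exp & (ALIGN - 1))

   with ALIGN a power of 2 whose byte value exceeds
   BIGGEST_ALIGNMENT / BITS_PER_UNIT: adding such an offset to &exp
   rounds the address up to the next ALIGN-byte boundary.  */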
/* Return the tree node if ARG corresponds to a string constant, or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;

	  /* Check if the array has a nonzero lower bound.  */
	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
	  if (!integer_zerop (lower_bound))
	    {
	      /* If the offset and base aren't both constants, return 0.  */
	      if (TREE_CODE (lower_bound) != INTEGER_CST)
		return 0;
	      if (TREE_CODE (offset) != INTEGER_CST)
		return 0;
	      /* Adjust offset by the lower bound.  */
	      offset = size_diffop (fold_convert (sizetype, offset),
				    fold_convert (sizetype, lower_bound));
	    }
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != ADDR_EXPR)
	    return 0;
	  array = TREE_OPERAND (array, 0);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL
	   || TREE_CODE (array) == CONST_DECL)
    {
      int length;
      tree init = ctor_for_folding (array);

      /* Variables initialized to string literals can be handled too.  */
      if (init == error_mark_node
	  || !init
	  || TREE_CODE (init) != STRING_CST)
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (init)) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If the variable is bigger than the string literal, OFFSET must be
	 constant and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! tree_fits_uhwi_p (offset)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return init;
    }

  return 0;
}
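
/* For example, for ARG == &"hello"[1], or the equivalent
   POINTER_PLUS_EXPR form "hello" + 1, this returns the STRING_CST
   "hello" and sets *PTR_OFFSET to (sizetype) 1.  */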
/* Generate code to calculate the expression described by OPS (an
   exploded comparison) using a store-flag instruction, and return an
   rtx for the result.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (sepops ops, rtx target, enum machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;
  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);
  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      tree if_true = constant_boolean_node (true, ops->type);
      tree if_false = constant_boolean_node (false, ops->type);
      return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false,
				   target);
    }
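
  /* For example (illustrative): a V4SImode comparison a < b is
     rewritten above as VEC_COND_EXPR <a < b, {-1,-1,-1,-1},
     {0,0,0,0}>, producing the element-wise all-ones/all-zeros mask
     directly.  */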
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;
    default:
      gcc_unreachable ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
      if (srcstmt
	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
	{
	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
				       gimple_assign_rhs1 (srcstmt),
				       gimple_assign_rhs2 (srcstmt));
	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
	  if (temp)
	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
	}
    }
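
  /* For example, with this transformation "(x & 8) != 0" expands as
     "(x >> 3) & 1" and "(x & 8) == 0" as "((x >> 3) & 1) ^ 1",
     needing only a shift and a mask instead of a store-flag insn.  */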
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
				operand_mode, unsignedp,
				(TYPE_PRECISION (ops->type) == 1
				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label, rtx default_label, rtx fallback_label,
	    int default_probability)
{
  struct expand_operand ops[5];
  enum machine_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
      > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label,
				 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
				  ? default_label
				  : fallback_label));
  expand_jump_insn (CODE_FOR_casesi, 5, ops);
  return 1;
}
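
/* For example (illustrative): for a switch whose index is wider than
   SImode, the code above biases the index by MINVAL in the original
   mode, jumps to the default label if RANGE <u INDEX while still in
   that mode, and only then truncates the index to SImode for the
   casesi pattern, so no out-of-range value survives the narrowing.  */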
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label, int default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
					     Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
			       gen_rtx_LABEL_REF (Pmode, table_label));

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
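
/* For example: for "switch (i)" with cases 5 through 10, the caller
   passes INDEX = i - 5 and RANGE = 5; the single unsigned test
   "INDEX >u RANGE" above then rejects both i < 5 (which wrapped
   around to a huge unsigned value) and i > 10 in one comparison.  */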
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label, int default_probability)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label, default_probability);
  return 1;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i;
  int units;
  tree elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else if (TREE_CODE (elt) == FIXED_CST)
	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
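
/* For example, a V4SImode VECTOR_CST {1, 2, 3, 4} becomes
   (const_vector:V4SI [1 2 3 4]), while an all-zeros constant is
   returned directly as CONST0_RTX (V4SImode).  */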
/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
				   long_long_unsigned_type_node,
				   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
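
/* For example, build_personality_function ("gxx") yields a decl named
   "__gxx_personality_v0" under DWARF2 unwinding, or
   "__gxx_personality_sj0" under setjmp/longjmp unwinding.  */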
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}

#include "gt-expr.h"