1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
44 #include "typeclass.h"
47 #include "langhooks.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
56 #include "diagnostic.h"
57 #include "ssaexpand.h"
59 /* Decide whether a function's arguments should be processed
60 from first to last or from last to first.
62 They should if the stack and args grow in opposite directions, but
63 only if we have push insns. */
67 #ifndef PUSH_ARGS_REVERSED
68 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
69 #define PUSH_ARGS_REVERSED /* If it's last to first. */
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
79 #define STACK_PUSH_CODE PRE_INC
84 /* If this is nonzero, we do not bother generating VOLATILE
85 around volatile memory references, and we are willing to
86 output indirect addresses. If cse is to follow, we reject
87 indirect addresses so a useful potential cse is generated;
88 if it is used only once, instruction combination will produce
89 the same indirect address eventually. */
92 /* This structure is used by move_by_pieces to describe the move to
103 int explicit_inc_from
;
104 unsigned HOST_WIDE_INT len
;
105 HOST_WIDE_INT offset
;
109 /* This structure is used by store_by_pieces to describe the clear to
112 struct store_by_pieces
118 unsigned HOST_WIDE_INT len
;
119 HOST_WIDE_INT offset
;
120 rtx (*constfun
) (void *, HOST_WIDE_INT
, enum machine_mode
);
125 static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT
,
128 static void move_by_pieces_1 (rtx (*) (rtx
, ...), enum machine_mode
,
129 struct move_by_pieces
*);
130 static bool block_move_libcall_safe_for_call_parm (void);
131 static bool emit_block_move_via_movmem (rtx
, rtx
, rtx
, unsigned, unsigned, HOST_WIDE_INT
);
132 static tree
emit_block_move_libcall_fn (int);
133 static void emit_block_move_via_loop (rtx
, rtx
, rtx
, unsigned);
134 static rtx
clear_by_pieces_1 (void *, HOST_WIDE_INT
, enum machine_mode
);
135 static void clear_by_pieces (rtx
, unsigned HOST_WIDE_INT
, unsigned int);
136 static void store_by_pieces_1 (struct store_by_pieces
*, unsigned int);
137 static void store_by_pieces_2 (rtx (*) (rtx
, ...), enum machine_mode
,
138 struct store_by_pieces
*);
139 static tree
clear_storage_libcall_fn (int);
140 static rtx
compress_float_constant (rtx
, rtx
);
141 static rtx
get_subtarget (rtx
);
142 static void store_constructor_field (rtx
, unsigned HOST_WIDE_INT
,
143 HOST_WIDE_INT
, enum machine_mode
,
144 tree
, tree
, int, alias_set_type
);
145 static void store_constructor (tree
, rtx
, int, HOST_WIDE_INT
);
146 static rtx
store_field (rtx
, HOST_WIDE_INT
, HOST_WIDE_INT
, enum machine_mode
,
147 tree
, tree
, alias_set_type
, bool);
149 static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree
, const_tree
);
151 static int is_aligning_offset (const_tree
, const_tree
);
152 static void expand_operands (tree
, tree
, rtx
, rtx
*, rtx
*,
153 enum expand_modifier
);
154 static rtx
reduce_to_bit_field_precision (rtx
, rtx
, tree
);
155 static rtx
do_store_flag (tree
, rtx
, enum machine_mode
);
157 static void emit_single_push_insn (enum machine_mode
, rtx
, tree
);
159 static void do_tablejump (rtx
, enum machine_mode
, rtx
, rtx
, rtx
);
160 static rtx
const_vector_from_tree (tree
);
161 static void write_complex_part (rtx
, rtx
, bool);
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
167 static char direct_load
[NUM_MACHINE_MODES
];
168 static char direct_store
[NUM_MACHINE_MODES
];
170 /* Record for each mode whether we can float-extend from memory. */
172 static bool float_extend_from_mem
[NUM_MACHINE_MODES
][NUM_MACHINE_MODES
];
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
182 /* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero. */
192 #ifndef SET_BY_PIECES_P
193 #define SET_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
195 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
198 /* This macro is used to determine whether store_by_pieces should be
199 called to "memcpy" storage when the source is a constant string. */
200 #ifndef STORE_BY_PIECES_P
201 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
202 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
203 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
206 /* This array records the insn_code of insns to perform block moves. */
207 enum insn_code movmem_optab
[NUM_MACHINE_MODES
];
209 /* This array records the insn_code of insns to perform block sets. */
210 enum insn_code setmem_optab
[NUM_MACHINE_MODES
];
212 /* These arrays record the insn_code of three different kinds of insns
213 to perform block compares. */
214 enum insn_code cmpstr_optab
[NUM_MACHINE_MODES
];
215 enum insn_code cmpstrn_optab
[NUM_MACHINE_MODES
];
216 enum insn_code cmpmem_optab
[NUM_MACHINE_MODES
];
218 /* Synchronization primitives. */
219 enum insn_code sync_add_optab
[NUM_MACHINE_MODES
];
220 enum insn_code sync_sub_optab
[NUM_MACHINE_MODES
];
221 enum insn_code sync_ior_optab
[NUM_MACHINE_MODES
];
222 enum insn_code sync_and_optab
[NUM_MACHINE_MODES
];
223 enum insn_code sync_xor_optab
[NUM_MACHINE_MODES
];
224 enum insn_code sync_nand_optab
[NUM_MACHINE_MODES
];
225 enum insn_code sync_old_add_optab
[NUM_MACHINE_MODES
];
226 enum insn_code sync_old_sub_optab
[NUM_MACHINE_MODES
];
227 enum insn_code sync_old_ior_optab
[NUM_MACHINE_MODES
];
228 enum insn_code sync_old_and_optab
[NUM_MACHINE_MODES
];
229 enum insn_code sync_old_xor_optab
[NUM_MACHINE_MODES
];
230 enum insn_code sync_old_nand_optab
[NUM_MACHINE_MODES
];
231 enum insn_code sync_new_add_optab
[NUM_MACHINE_MODES
];
232 enum insn_code sync_new_sub_optab
[NUM_MACHINE_MODES
];
233 enum insn_code sync_new_ior_optab
[NUM_MACHINE_MODES
];
234 enum insn_code sync_new_and_optab
[NUM_MACHINE_MODES
];
235 enum insn_code sync_new_xor_optab
[NUM_MACHINE_MODES
];
236 enum insn_code sync_new_nand_optab
[NUM_MACHINE_MODES
];
237 enum insn_code sync_compare_and_swap
[NUM_MACHINE_MODES
];
238 enum insn_code sync_lock_test_and_set
[NUM_MACHINE_MODES
];
239 enum insn_code sync_lock_release
[NUM_MACHINE_MODES
];
241 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
243 #ifndef SLOW_UNALIGNED_ACCESS
244 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
247 /* This is run to set up which modes can be used
248 directly in memory and to initialize the block move optab. It is run
249 at the beginning of compilation and when the target is reinitialized. */
252 init_expr_target (void)
255 enum machine_mode mode
;
260 /* Try indexing by frame ptr and try by stack ptr.
261 It is known that on the Convex the stack ptr isn't a valid index.
262 With luck, one or the other is valid on any machine. */
263 mem
= gen_rtx_MEM (VOIDmode
, stack_pointer_rtx
);
264 mem1
= gen_rtx_MEM (VOIDmode
, frame_pointer_rtx
);
266 /* A scratch register we can modify in-place below to avoid
267 useless RTL allocations. */
268 reg
= gen_rtx_REG (VOIDmode
, -1);
270 insn
= rtx_alloc (INSN
);
271 pat
= gen_rtx_SET (VOIDmode
, NULL_RTX
, NULL_RTX
);
272 PATTERN (insn
) = pat
;
274 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
275 mode
= (enum machine_mode
) ((int) mode
+ 1))
279 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
280 PUT_MODE (mem
, mode
);
281 PUT_MODE (mem1
, mode
);
282 PUT_MODE (reg
, mode
);
284 /* See if there is some register that can be used in this mode and
285 directly loaded or stored from memory. */
287 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
288 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
289 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
292 if (! HARD_REGNO_MODE_OK (regno
, mode
))
295 SET_REGNO (reg
, regno
);
298 SET_DEST (pat
) = reg
;
299 if (recog (pat
, insn
, &num_clobbers
) >= 0)
300 direct_load
[(int) mode
] = 1;
302 SET_SRC (pat
) = mem1
;
303 SET_DEST (pat
) = reg
;
304 if (recog (pat
, insn
, &num_clobbers
) >= 0)
305 direct_load
[(int) mode
] = 1;
308 SET_DEST (pat
) = mem
;
309 if (recog (pat
, insn
, &num_clobbers
) >= 0)
310 direct_store
[(int) mode
] = 1;
313 SET_DEST (pat
) = mem1
;
314 if (recog (pat
, insn
, &num_clobbers
) >= 0)
315 direct_store
[(int) mode
] = 1;
319 mem
= gen_rtx_MEM (VOIDmode
, gen_rtx_raw_REG (Pmode
, 10000));
321 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); mode
!= VOIDmode
;
322 mode
= GET_MODE_WIDER_MODE (mode
))
324 enum machine_mode srcmode
;
325 for (srcmode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); srcmode
!= mode
;
326 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
330 ic
= can_extend_p (mode
, srcmode
, 0);
331 if (ic
== CODE_FOR_nothing
)
334 PUT_MODE (mem
, srcmode
);
336 if ((*insn_data
[ic
].operand
[1].predicate
) (mem
, srcmode
))
337 float_extend_from_mem
[mode
][srcmode
] = true;
342 /* This is run at the start of compiling a function. */
347 memset (&crtl
->expr
, 0, sizeof (crtl
->expr
));
350 /* Copy data from FROM to TO, where the machine modes are not the same.
351 Both modes may be integer, or both may be floating, or both may be
353 UNSIGNEDP should be nonzero if FROM is an unsigned type.
354 This causes zero-extension instead of sign-extension. */
357 convert_move (rtx to
, rtx from
, int unsignedp
)
359 enum machine_mode to_mode
= GET_MODE (to
);
360 enum machine_mode from_mode
= GET_MODE (from
);
361 int to_real
= SCALAR_FLOAT_MODE_P (to_mode
);
362 int from_real
= SCALAR_FLOAT_MODE_P (from_mode
);
366 /* rtx code for making an equivalent value. */
367 enum rtx_code equiv_code
= (unsignedp
< 0 ? UNKNOWN
368 : (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
));
371 gcc_assert (to_real
== from_real
);
372 gcc_assert (to_mode
!= BLKmode
);
373 gcc_assert (from_mode
!= BLKmode
);
375 /* If the source and destination are already the same, then there's
380 /* If FROM is a SUBREG that indicates that we have already done at least
381 the required extension, strip it. We don't handle such SUBREGs as
384 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
385 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from
)))
386 >= GET_MODE_SIZE (to_mode
))
387 && SUBREG_PROMOTED_UNSIGNED_P (from
) == unsignedp
)
388 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
390 gcc_assert (GET_CODE (to
) != SUBREG
|| !SUBREG_PROMOTED_VAR_P (to
));
392 if (to_mode
== from_mode
393 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
395 emit_move_insn (to
, from
);
399 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
401 gcc_assert (GET_MODE_BITSIZE (from_mode
) == GET_MODE_BITSIZE (to_mode
));
403 if (VECTOR_MODE_P (to_mode
))
404 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
406 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
408 emit_move_insn (to
, from
);
412 if (GET_CODE (to
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
414 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
415 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
424 gcc_assert ((GET_MODE_PRECISION (from_mode
)
425 != GET_MODE_PRECISION (to_mode
))
426 || (DECIMAL_FLOAT_MODE_P (from_mode
)
427 != DECIMAL_FLOAT_MODE_P (to_mode
)));
429 if (GET_MODE_PRECISION (from_mode
) == GET_MODE_PRECISION (to_mode
))
430 /* Conversion between decimal float and binary float, same size. */
431 tab
= DECIMAL_FLOAT_MODE_P (from_mode
) ? trunc_optab
: sext_optab
;
432 else if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
))
437 /* Try converting directly if the insn is supported. */
439 code
= convert_optab_handler (tab
, to_mode
, from_mode
)->insn_code
;
440 if (code
!= CODE_FOR_nothing
)
442 emit_unop_insn (code
, to
, from
,
443 tab
== sext_optab
? FLOAT_EXTEND
: FLOAT_TRUNCATE
);
447 /* Otherwise use a libcall. */
448 libcall
= convert_optab_libfunc (tab
, to_mode
, from_mode
);
450 /* Is this conversion implemented yet? */
451 gcc_assert (libcall
);
454 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
456 insns
= get_insns ();
458 emit_libcall_block (insns
, to
, value
,
459 tab
== trunc_optab
? gen_rtx_FLOAT_TRUNCATE (to_mode
,
461 : gen_rtx_FLOAT_EXTEND (to_mode
, from
));
465 /* Handle pointer conversion. */ /* SPEE 900220. */
466 /* Targets are expected to provide conversion insns between PxImode and
467 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
468 if (GET_MODE_CLASS (to_mode
) == MODE_PARTIAL_INT
)
470 enum machine_mode full_mode
471 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode
), MODE_INT
);
473 gcc_assert (convert_optab_handler (trunc_optab
, to_mode
, full_mode
)->insn_code
474 != CODE_FOR_nothing
);
476 if (full_mode
!= from_mode
)
477 from
= convert_to_mode (full_mode
, from
, unsignedp
);
478 emit_unop_insn (convert_optab_handler (trunc_optab
, to_mode
, full_mode
)->insn_code
,
482 if (GET_MODE_CLASS (from_mode
) == MODE_PARTIAL_INT
)
485 enum machine_mode full_mode
486 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode
), MODE_INT
);
488 gcc_assert (convert_optab_handler (sext_optab
, full_mode
, from_mode
)->insn_code
489 != CODE_FOR_nothing
);
491 if (to_mode
== full_mode
)
493 emit_unop_insn (convert_optab_handler (sext_optab
, full_mode
, from_mode
)->insn_code
,
498 new_from
= gen_reg_rtx (full_mode
);
499 emit_unop_insn (convert_optab_handler (sext_optab
, full_mode
, from_mode
)->insn_code
,
500 new_from
, from
, UNKNOWN
);
502 /* else proceed to integer conversions below. */
503 from_mode
= full_mode
;
507 /* Make sure both are fixed-point modes or both are not. */
508 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode
) ==
509 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode
));
510 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode
))
512 /* If we widen from_mode to to_mode and they are in the same class,
513 we won't saturate the result.
514 Otherwise, always saturate the result to play safe. */
515 if (GET_MODE_CLASS (from_mode
) == GET_MODE_CLASS (to_mode
)
516 && GET_MODE_SIZE (from_mode
) < GET_MODE_SIZE (to_mode
))
517 expand_fixed_convert (to
, from
, 0, 0);
519 expand_fixed_convert (to
, from
, 0, 1);
523 /* Now both modes are integers. */
525 /* Handle expanding beyond a word. */
526 if (GET_MODE_BITSIZE (from_mode
) < GET_MODE_BITSIZE (to_mode
)
527 && GET_MODE_BITSIZE (to_mode
) > BITS_PER_WORD
)
534 enum machine_mode lowpart_mode
;
535 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
537 /* Try converting directly if the insn is supported. */
538 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
541 /* If FROM is a SUBREG, put it into a register. Do this
542 so that we always generate the same set of insns for
543 better cse'ing; if an intermediate assignment occurred,
544 we won't be doing the operation directly on the SUBREG. */
545 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
546 from
= force_reg (from_mode
, from
);
547 emit_unop_insn (code
, to
, from
, equiv_code
);
550 /* Next, try converting via full word. */
551 else if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
552 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
553 != CODE_FOR_nothing
))
555 rtx word_to
= gen_reg_rtx (word_mode
);
558 if (reg_overlap_mentioned_p (to
, from
))
559 from
= force_reg (from_mode
, from
);
562 convert_move (word_to
, from
, unsignedp
);
563 emit_unop_insn (code
, to
, word_to
, equiv_code
);
567 /* No special multiword conversion insn; do it by hand. */
570 /* Since we will turn this into a no conflict block, we must ensure
571 that the source does not overlap the target. */
573 if (reg_overlap_mentioned_p (to
, from
))
574 from
= force_reg (from_mode
, from
);
576 /* Get a copy of FROM widened to a word, if necessary. */
577 if (GET_MODE_BITSIZE (from_mode
) < BITS_PER_WORD
)
578 lowpart_mode
= word_mode
;
580 lowpart_mode
= from_mode
;
582 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
584 lowpart
= gen_lowpart (lowpart_mode
, to
);
585 emit_move_insn (lowpart
, lowfrom
);
587 /* Compute the value to put in each remaining word. */
589 fill_value
= const0_rtx
;
591 fill_value
= emit_store_flag (gen_reg_rtx (word_mode
),
592 LT
, lowfrom
, const0_rtx
,
595 /* Fill the remaining words. */
596 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
598 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
599 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
601 gcc_assert (subword
);
603 if (fill_value
!= subword
)
604 emit_move_insn (subword
, fill_value
);
607 insns
= get_insns ();
614 /* Truncating multi-word to a word or less. */
615 if (GET_MODE_BITSIZE (from_mode
) > BITS_PER_WORD
616 && GET_MODE_BITSIZE (to_mode
) <= BITS_PER_WORD
)
619 && ! MEM_VOLATILE_P (from
)
620 && direct_load
[(int) to_mode
]
621 && ! mode_dependent_address_p (XEXP (from
, 0)))
623 || GET_CODE (from
) == SUBREG
))
624 from
= force_reg (from_mode
, from
);
625 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
629 /* Now follow all the conversions between integers
630 no more than a word long. */
632 /* For truncation, usually we can just refer to FROM in a narrower mode. */
633 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
634 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
635 GET_MODE_BITSIZE (from_mode
)))
638 && ! MEM_VOLATILE_P (from
)
639 && direct_load
[(int) to_mode
]
640 && ! mode_dependent_address_p (XEXP (from
, 0)))
642 || GET_CODE (from
) == SUBREG
))
643 from
= force_reg (from_mode
, from
);
644 if (REG_P (from
) && REGNO (from
) < FIRST_PSEUDO_REGISTER
645 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
646 from
= copy_to_reg (from
);
647 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
651 /* Handle extension. */
652 if (GET_MODE_BITSIZE (to_mode
) > GET_MODE_BITSIZE (from_mode
))
654 /* Convert directly if that works. */
655 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
658 emit_unop_insn (code
, to
, from
, equiv_code
);
663 enum machine_mode intermediate
;
667 /* Search for a mode to convert via. */
668 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
669 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
670 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
672 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
673 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode
),
674 GET_MODE_BITSIZE (intermediate
))))
675 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
676 != CODE_FOR_nothing
))
678 convert_move (to
, convert_to_mode (intermediate
, from
,
679 unsignedp
), unsignedp
);
683 /* No suitable intermediate mode.
684 Generate what we need with shifts. */
685 shift_amount
= build_int_cst (NULL_TREE
,
686 GET_MODE_BITSIZE (to_mode
)
687 - GET_MODE_BITSIZE (from_mode
));
688 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
689 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
691 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
694 emit_move_insn (to
, tmp
);
699 /* Support special truncate insns for certain modes. */
700 if (convert_optab_handler (trunc_optab
, to_mode
, from_mode
)->insn_code
!= CODE_FOR_nothing
)
702 emit_unop_insn (convert_optab_handler (trunc_optab
, to_mode
, from_mode
)->insn_code
,
707 /* Handle truncation of volatile memrefs, and so on;
708 the things that couldn't be truncated directly,
709 and for which there was no special instruction.
711 ??? Code above formerly short-circuited this, for most integer
712 mode pairs, with a force_reg in from_mode followed by a recursive
713 call to this routine. Appears always to have been wrong. */
714 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
716 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
717 emit_move_insn (to
, temp
);
721 /* Mode combination is not recognized. */
725 /* Return an rtx for a value that would result
726 from converting X to mode MODE.
727 Both X and MODE may be floating, or both integer.
728 UNSIGNEDP is nonzero if X is an unsigned value.
729 This can be done by referring to a part of X in place
730 or by copying to a new temporary with conversion. */
733 convert_to_mode (enum machine_mode mode
, rtx x
, int unsignedp
)
735 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
738 /* Return an rtx for a value that would result
739 from converting X from mode OLDMODE to mode MODE.
740 Both modes may be floating, or both integer.
741 UNSIGNEDP is nonzero if X is an unsigned value.
743 This can be done by referring to a part of X in place
744 or by copying to a new temporary with conversion.
746 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
749 convert_modes (enum machine_mode mode
, enum machine_mode oldmode
, rtx x
, int unsignedp
)
753 /* If FROM is a SUBREG that indicates that we have already done at least
754 the required extension, strip it. */
756 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
757 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
758 && SUBREG_PROMOTED_UNSIGNED_P (x
) == unsignedp
)
759 x
= gen_lowpart (mode
, x
);
761 if (GET_MODE (x
) != VOIDmode
)
762 oldmode
= GET_MODE (x
);
767 /* There is one case that we must handle specially: If we are converting
768 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
769 we are to interpret the constant as unsigned, gen_lowpart will do
770 the wrong if the constant appears negative. What we want to do is
771 make the high-order word of the constant zero, not all ones. */
773 if (unsignedp
&& GET_MODE_CLASS (mode
) == MODE_INT
774 && GET_MODE_BITSIZE (mode
) == 2 * HOST_BITS_PER_WIDE_INT
775 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < 0)
777 HOST_WIDE_INT val
= INTVAL (x
);
779 if (oldmode
!= VOIDmode
780 && HOST_BITS_PER_WIDE_INT
> GET_MODE_BITSIZE (oldmode
))
782 int width
= GET_MODE_BITSIZE (oldmode
);
784 /* We need to zero extend VAL. */
785 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
788 return immed_double_const (val
, (HOST_WIDE_INT
) 0, mode
);
791 /* We can do this with a gen_lowpart if both desired and current modes
792 are integer, and this is either a constant integer, a register, or a
793 non-volatile MEM. Except for the constant case where MODE is no
794 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
796 if ((GET_CODE (x
) == CONST_INT
797 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
798 || (GET_MODE_CLASS (mode
) == MODE_INT
799 && GET_MODE_CLASS (oldmode
) == MODE_INT
800 && (GET_CODE (x
) == CONST_DOUBLE
801 || (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (oldmode
)
802 && ((MEM_P (x
) && ! MEM_VOLATILE_P (x
)
803 && direct_load
[(int) mode
])
805 && (! HARD_REGISTER_P (x
)
806 || HARD_REGNO_MODE_OK (REGNO (x
), mode
))
807 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
808 GET_MODE_BITSIZE (GET_MODE (x
)))))))))
810 /* ?? If we don't know OLDMODE, we have to assume here that
811 X does not need sign- or zero-extension. This may not be
812 the case, but it's the best we can do. */
813 if (GET_CODE (x
) == CONST_INT
&& oldmode
!= VOIDmode
814 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (oldmode
))
816 HOST_WIDE_INT val
= INTVAL (x
);
817 int width
= GET_MODE_BITSIZE (oldmode
);
819 /* We must sign or zero-extend in this case. Start by
820 zero-extending, then sign extend if we need to. */
821 val
&= ((HOST_WIDE_INT
) 1 << width
) - 1;
823 && (val
& ((HOST_WIDE_INT
) 1 << (width
- 1))))
824 val
|= (HOST_WIDE_INT
) (-1) << width
;
826 return gen_int_mode (val
, mode
);
829 return gen_lowpart (mode
, x
);
832 /* Converting from integer constant into mode is always equivalent to an
834 if (VECTOR_MODE_P (mode
) && GET_MODE (x
) == VOIDmode
)
836 gcc_assert (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (oldmode
));
837 return simplify_gen_subreg (mode
, x
, oldmode
, 0);
840 temp
= gen_reg_rtx (mode
);
841 convert_move (temp
, x
, unsignedp
);
845 /* STORE_MAX_PIECES is the number of bytes at a time that we can
846 store efficiently. Due to internal GCC limitations, this is
847 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
848 for an immediate constant. */
850 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
852 /* Determine whether the LEN bytes can be moved by using several move
853 instructions. Return nonzero if a call to move_by_pieces should
857 can_move_by_pieces (unsigned HOST_WIDE_INT len
,
858 unsigned int align ATTRIBUTE_UNUSED
)
860 return MOVE_BY_PIECES_P (len
, align
);
863 /* Generate several move instructions to copy LEN bytes from block FROM to
864 block TO. (These are MEM rtx's with BLKmode).
866 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
867 used to push FROM to the stack.
869 ALIGN is maximum stack alignment we can assume.
871 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
872 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
876 move_by_pieces (rtx to
, rtx from
, unsigned HOST_WIDE_INT len
,
877 unsigned int align
, int endp
)
879 struct move_by_pieces data
;
880 rtx to_addr
, from_addr
= XEXP (from
, 0);
881 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
882 enum machine_mode mode
= VOIDmode
, tmode
;
883 enum insn_code icode
;
885 align
= MIN (to
? MEM_ALIGN (to
) : align
, MEM_ALIGN (from
));
888 data
.from_addr
= from_addr
;
891 to_addr
= XEXP (to
, 0);
894 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
895 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
897 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
904 #ifdef STACK_GROWS_DOWNWARD
910 data
.to_addr
= to_addr
;
913 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
914 || GET_CODE (from_addr
) == POST_INC
915 || GET_CODE (from_addr
) == POST_DEC
);
917 data
.explicit_inc_from
= 0;
918 data
.explicit_inc_to
= 0;
919 if (data
.reverse
) data
.offset
= len
;
922 /* If copying requires more than two move insns,
923 copy addresses to registers (to make displacements shorter)
924 and use post-increment if available. */
925 if (!(data
.autinc_from
&& data
.autinc_to
)
926 && move_by_pieces_ninsns (len
, align
, max_size
) > 2)
928 /* Find the mode of the largest move... */
929 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
930 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
931 if (GET_MODE_SIZE (tmode
) < max_size
)
934 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
936 data
.from_addr
= copy_addr_to_reg (plus_constant (from_addr
, len
));
937 data
.autinc_from
= 1;
938 data
.explicit_inc_from
= -1;
940 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
942 data
.from_addr
= copy_addr_to_reg (from_addr
);
943 data
.autinc_from
= 1;
944 data
.explicit_inc_from
= 1;
946 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
947 data
.from_addr
= copy_addr_to_reg (from_addr
);
948 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
950 data
.to_addr
= copy_addr_to_reg (plus_constant (to_addr
, len
));
952 data
.explicit_inc_to
= -1;
954 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
956 data
.to_addr
= copy_addr_to_reg (to_addr
);
958 data
.explicit_inc_to
= 1;
960 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
961 data
.to_addr
= copy_addr_to_reg (to_addr
);
964 tmode
= mode_for_size (MOVE_MAX_PIECES
* BITS_PER_UNIT
, MODE_INT
, 1);
965 if (align
>= GET_MODE_ALIGNMENT (tmode
))
966 align
= GET_MODE_ALIGNMENT (tmode
);
969 enum machine_mode xmode
;
971 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
973 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
974 if (GET_MODE_SIZE (tmode
) > MOVE_MAX_PIECES
975 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
978 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
981 /* First move what we can in the largest integer mode, then go to
982 successively smaller modes. */
986 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
987 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
988 if (GET_MODE_SIZE (tmode
) < max_size
)
991 if (mode
== VOIDmode
)
994 icode
= optab_handler (mov_optab
, mode
)->insn_code
;
995 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
996 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
998 max_size
= GET_MODE_SIZE (mode
);
1001 /* The code above should have handled everything. */
1002 gcc_assert (!data
.len
);
1008 gcc_assert (!data
.reverse
);
1013 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
1014 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
1016 data
.to_addr
= copy_addr_to_reg (plus_constant (data
.to_addr
,
1019 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
1026 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
1034 /* Return number of insns required to move L bytes by pieces.
1035 ALIGN (in bits) is maximum alignment we can assume. */
1037 static unsigned HOST_WIDE_INT
1038 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l
, unsigned int align
,
1039 unsigned int max_size
)
1041 unsigned HOST_WIDE_INT n_insns
= 0;
1042 enum machine_mode tmode
;
1044 tmode
= mode_for_size (MOVE_MAX_PIECES
* BITS_PER_UNIT
, MODE_INT
, 1);
1045 if (align
>= GET_MODE_ALIGNMENT (tmode
))
1046 align
= GET_MODE_ALIGNMENT (tmode
);
1049 enum machine_mode tmode
, xmode
;
1051 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
1053 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
1054 if (GET_MODE_SIZE (tmode
) > MOVE_MAX_PIECES
1055 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
1058 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
1061 while (max_size
> 1)
1063 enum machine_mode mode
= VOIDmode
;
1064 enum insn_code icode
;
1066 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1067 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1068 if (GET_MODE_SIZE (tmode
) < max_size
)
1071 if (mode
== VOIDmode
)
1074 icode
= optab_handler (mov_optab
, mode
)->insn_code
;
1075 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
1076 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
1078 max_size
= GET_MODE_SIZE (mode
);
1085 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1086 with move instructions for mode MODE. GENFUN is the gen_... function
1087 to make a move insn for that mode. DATA has all the other info. */
1090 move_by_pieces_1 (rtx (*genfun
) (rtx
, ...), enum machine_mode mode
,
1091 struct move_by_pieces
*data
)
1093 unsigned int size
= GET_MODE_SIZE (mode
);
1094 rtx to1
= NULL_RTX
, from1
;
1096 while (data
->len
>= size
)
1099 data
->offset
-= size
;
1103 if (data
->autinc_to
)
1104 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1107 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1110 if (data
->autinc_from
)
1111 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1114 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1116 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1117 emit_insn (gen_add2_insn (data
->to_addr
,
1118 GEN_INT (-(HOST_WIDE_INT
)size
)));
1119 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1120 emit_insn (gen_add2_insn (data
->from_addr
,
1121 GEN_INT (-(HOST_WIDE_INT
)size
)));
1124 emit_insn ((*genfun
) (to1
, from1
));
1127 #ifdef PUSH_ROUNDING
1128 emit_single_push_insn (mode
, from1
, NULL
);
1134 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1135 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
1136 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1137 emit_insn (gen_add2_insn (data
->from_addr
, GEN_INT (size
)));
1139 if (! data
->reverse
)
1140 data
->offset
+= size
;
1146 /* Emit code to move a block Y to a block X. This may be done with
1147 string-move instructions, with multiple scalar move instructions,
1148 or with a library call.
1150 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1151 SIZE is an rtx that says how long they are.
1152 ALIGN is the maximum alignment we can assume they have.
1153 METHOD describes what kind of copy this is, and what mechanisms may be used.
1155 Return the address of the new block, if memcpy is called and returns it,
1159 emit_block_move_hints (rtx x
, rtx y
, rtx size
, enum block_op_methods method
,
1160 unsigned int expected_align
, HOST_WIDE_INT expected_size
)
1168 case BLOCK_OP_NORMAL
:
1169 case BLOCK_OP_TAILCALL
:
1170 may_use_call
= true;
1173 case BLOCK_OP_CALL_PARM
:
1174 may_use_call
= block_move_libcall_safe_for_call_parm ();
1176 /* Make inhibit_defer_pop nonzero around the library call
1177 to force it to pop the arguments right away. */
1181 case BLOCK_OP_NO_LIBCALL
:
1182 may_use_call
= false;
1189 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1191 gcc_assert (MEM_P (x
));
1192 gcc_assert (MEM_P (y
));
1195 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1196 block copy is more efficient for other large modes, e.g. DCmode. */
1197 x
= adjust_address (x
, BLKmode
, 0);
1198 y
= adjust_address (y
, BLKmode
, 0);
1200 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1201 can be incorrect is coming from __builtin_memcpy. */
1202 if (GET_CODE (size
) == CONST_INT
)
1204 if (INTVAL (size
) == 0)
1207 x
= shallow_copy_rtx (x
);
1208 y
= shallow_copy_rtx (y
);
1209 set_mem_size (x
, size
);
1210 set_mem_size (y
, size
);
1213 if (GET_CODE (size
) == CONST_INT
&& MOVE_BY_PIECES_P (INTVAL (size
), align
))
1214 move_by_pieces (x
, y
, INTVAL (size
), align
, 0);
1215 else if (emit_block_move_via_movmem (x
, y
, size
, align
,
1216 expected_align
, expected_size
))
1218 else if (may_use_call
)
1219 retval
= emit_block_move_via_libcall (x
, y
, size
,
1220 method
== BLOCK_OP_TAILCALL
);
1222 emit_block_move_via_loop (x
, y
, size
, align
);
1224 if (method
== BLOCK_OP_CALL_PARM
)
1231 emit_block_move (rtx x
, rtx y
, rtx size
, enum block_op_methods method
)
1233 return emit_block_move_hints (x
, y
, size
, method
, 0, -1);
1236 /* A subroutine of emit_block_move. Returns true if calling the
1237 block move libcall will not clobber any parameters which may have
1238 already been placed on the stack. */
1241 block_move_libcall_safe_for_call_parm (void)
1243 #if defined (REG_PARM_STACK_SPACE)
1247 /* If arguments are pushed on the stack, then they're safe. */
1251 /* If registers go on the stack anyway, any argument is sure to clobber
1252 an outgoing argument. */
1253 #if defined (REG_PARM_STACK_SPACE)
1254 fn
= emit_block_move_libcall_fn (false);
1255 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn
? NULL_TREE
: TREE_TYPE (fn
)))
1256 && REG_PARM_STACK_SPACE (fn
) != 0)
1260 /* If any argument goes in memory, then it might clobber an outgoing
1263 CUMULATIVE_ARGS args_so_far
;
1266 fn
= emit_block_move_libcall_fn (false);
1267 INIT_CUMULATIVE_ARGS (args_so_far
, TREE_TYPE (fn
), NULL_RTX
, 0, 3);
1269 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1270 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1272 enum machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1273 rtx tmp
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
1274 if (!tmp
|| !REG_P (tmp
))
1276 if (targetm
.calls
.arg_partial_bytes (&args_so_far
, mode
, NULL
, 1))
1278 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, NULL_TREE
, 1);
1284 /* A subroutine of emit_block_move. Expand a movmem pattern;
1285 return true if successful. */
1288 emit_block_move_via_movmem (rtx x
, rtx y
, rtx size
, unsigned int align
,
1289 unsigned int expected_align
, HOST_WIDE_INT expected_size
)
1291 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
1292 int save_volatile_ok
= volatile_ok
;
1293 enum machine_mode mode
;
1295 if (expected_align
< align
)
1296 expected_align
= align
;
1298 /* Since this is a move insn, we don't care about volatility. */
1301 /* Try the most limited insn first, because there's no point
1302 including more than one in the machine description unless
1303 the more limited one has some advantage. */
1305 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1306 mode
= GET_MODE_WIDER_MODE (mode
))
1308 enum insn_code code
= movmem_optab
[(int) mode
];
1309 insn_operand_predicate_fn pred
;
1311 if (code
!= CODE_FOR_nothing
1312 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1313 here because if SIZE is less than the mode mask, as it is
1314 returned by the macro, it will definitely be less than the
1315 actual mode mask. */
1316 && ((GET_CODE (size
) == CONST_INT
1317 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1318 <= (GET_MODE_MASK (mode
) >> 1)))
1319 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
1320 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
1321 || (*pred
) (x
, BLKmode
))
1322 && ((pred
= insn_data
[(int) code
].operand
[1].predicate
) == 0
1323 || (*pred
) (y
, BLKmode
))
1324 && ((pred
= insn_data
[(int) code
].operand
[3].predicate
) == 0
1325 || (*pred
) (opalign
, VOIDmode
)))
1328 rtx last
= get_last_insn ();
1331 op2
= convert_to_mode (mode
, size
, 1);
1332 pred
= insn_data
[(int) code
].operand
[2].predicate
;
1333 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
1334 op2
= copy_to_mode_reg (mode
, op2
);
1336 /* ??? When called via emit_block_move_for_call, it'd be
1337 nice if there were some way to inform the backend, so
1338 that it doesn't fail the expansion because it thinks
1339 emitting the libcall would be more efficient. */
1341 if (insn_data
[(int) code
].n_operands
== 4)
1342 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
);
1344 pat
= GEN_FCN ((int) code
) (x
, y
, op2
, opalign
,
1345 GEN_INT (expected_align
1347 GEN_INT (expected_size
));
1351 volatile_ok
= save_volatile_ok
;
1355 delete_insns_since (last
);
1359 volatile_ok
= save_volatile_ok
;
1363 /* A subroutine of emit_block_move. Expand a call to memcpy.
1364 Return the return value from memcpy, 0 otherwise. */
1367 emit_block_move_via_libcall (rtx dst
, rtx src
, rtx size
, bool tailcall
)
1369 rtx dst_addr
, src_addr
;
1370 tree call_expr
, fn
, src_tree
, dst_tree
, size_tree
;
1371 enum machine_mode size_mode
;
1374 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1375 pseudos. We can then place those new pseudos into a VAR_DECL and
1378 dst_addr
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
1379 src_addr
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
1381 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1382 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1384 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1385 src_tree
= make_tree (ptr_type_node
, src_addr
);
1387 size_mode
= TYPE_MODE (sizetype
);
1389 size
= convert_to_mode (size_mode
, size
, 1);
1390 size
= copy_to_mode_reg (size_mode
, size
);
1392 /* It is incorrect to use the libcall calling conventions to call
1393 memcpy in this context. This could be a user call to memcpy and
1394 the user may wish to examine the return value from memcpy. For
1395 targets where libcalls and normal calls have different conventions
1396 for returning pointers, we could end up generating incorrect code. */
1398 size_tree
= make_tree (sizetype
, size
);
1400 fn
= emit_block_move_libcall_fn (true);
1401 call_expr
= build_call_expr (fn
, 3, dst_tree
, src_tree
, size_tree
);
1402 CALL_EXPR_TAILCALL (call_expr
) = tailcall
;
1404 retval
= expand_normal (call_expr
);
1409 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1410 for the function we use for block copies. The first time FOR_CALL
1411 is true, we call assemble_external. */
1413 static GTY(()) tree block_move_fn
;
1416 init_block_move_fn (const char *asmspec
)
1422 fn
= get_identifier ("memcpy");
1423 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
1424 const_ptr_type_node
, sizetype
,
1427 fn
= build_decl (FUNCTION_DECL
, fn
, args
);
1428 DECL_EXTERNAL (fn
) = 1;
1429 TREE_PUBLIC (fn
) = 1;
1430 DECL_ARTIFICIAL (fn
) = 1;
1431 TREE_NOTHROW (fn
) = 1;
1432 DECL_VISIBILITY (fn
) = VISIBILITY_DEFAULT
;
1433 DECL_VISIBILITY_SPECIFIED (fn
) = 1;
1439 set_user_assembler_name (block_move_fn
, asmspec
);
1443 emit_block_move_libcall_fn (int for_call
)
1445 static bool emitted_extern
;
1448 init_block_move_fn (NULL
);
1450 if (for_call
&& !emitted_extern
)
1452 emitted_extern
= true;
1453 make_decl_rtl (block_move_fn
);
1454 assemble_external (block_move_fn
);
1457 return block_move_fn
;
1460 /* A subroutine of emit_block_move. Copy the data via an explicit
1461 loop. This is used only when libcalls are forbidden. */
1462 /* ??? It'd be nice to copy in hunks larger than QImode. */
1465 emit_block_move_via_loop (rtx x
, rtx y
, rtx size
,
1466 unsigned int align ATTRIBUTE_UNUSED
)
1468 rtx cmp_label
, top_label
, iter
, x_addr
, y_addr
, tmp
;
1469 enum machine_mode iter_mode
;
1471 iter_mode
= GET_MODE (size
);
1472 if (iter_mode
== VOIDmode
)
1473 iter_mode
= word_mode
;
1475 top_label
= gen_label_rtx ();
1476 cmp_label
= gen_label_rtx ();
1477 iter
= gen_reg_rtx (iter_mode
);
1479 emit_move_insn (iter
, const0_rtx
);
1481 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
1482 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
1483 do_pending_stack_adjust ();
1485 emit_jump (cmp_label
);
1486 emit_label (top_label
);
1488 tmp
= convert_modes (Pmode
, iter_mode
, iter
, true);
1489 x_addr
= gen_rtx_PLUS (Pmode
, x_addr
, tmp
);
1490 y_addr
= gen_rtx_PLUS (Pmode
, y_addr
, tmp
);
1491 x
= change_address (x
, QImode
, x_addr
);
1492 y
= change_address (y
, QImode
, y_addr
);
1494 emit_move_insn (x
, y
);
1496 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
1497 true, OPTAB_LIB_WIDEN
);
1499 emit_move_insn (iter
, tmp
);
1501 emit_label (cmp_label
);
1503 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
1507 /* Copy all or part of a value X into registers starting at REGNO.
1508 The number of registers to be filled is NREGS. */
1511 move_block_to_reg (int regno
, rtx x
, int nregs
, enum machine_mode mode
)
1514 #ifdef HAVE_load_multiple
1522 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
1523 x
= validize_mem (force_const_mem (mode
, x
));
1525 /* See if the machine can do this with a load multiple insn. */
1526 #ifdef HAVE_load_multiple
1527 if (HAVE_load_multiple
)
1529 last
= get_last_insn ();
1530 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1538 delete_insns_since (last
);
1542 for (i
= 0; i
< nregs
; i
++)
1543 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1544 operand_subword_force (x
, i
, mode
));
1547 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1548 The number of registers to be filled is NREGS. */
1551 move_block_from_reg (int regno
, rtx x
, int nregs
)
1558 /* See if the machine can do this with a store multiple insn. */
1559 #ifdef HAVE_store_multiple
1560 if (HAVE_store_multiple
)
1562 rtx last
= get_last_insn ();
1563 rtx pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1571 delete_insns_since (last
);
1575 for (i
= 0; i
< nregs
; i
++)
1577 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1581 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1585 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1586 ORIG, where ORIG is a non-consecutive group of registers represented by
1587 a PARALLEL. The clone is identical to the original except in that the
1588 original set of registers is replaced by a new set of pseudo registers.
1589 The new set has the same modes as the original set. */
1592 gen_group_rtx (rtx orig
)
1597 gcc_assert (GET_CODE (orig
) == PARALLEL
);
1599 length
= XVECLEN (orig
, 0);
1600 tmps
= XALLOCAVEC (rtx
, length
);
1602 /* Skip a NULL entry in first slot. */
1603 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
1608 for (; i
< length
; i
++)
1610 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
1611 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
1613 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
1616 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
1619 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1620 except that values are placed in TMPS[i], and must later be moved
1621 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1624 emit_group_load_1 (rtx
*tmps
, rtx dst
, rtx orig_src
, tree type
, int ssize
)
1628 enum machine_mode m
= GET_MODE (orig_src
);
1630 gcc_assert (GET_CODE (dst
) == PARALLEL
);
1633 && !SCALAR_INT_MODE_P (m
)
1634 && !MEM_P (orig_src
)
1635 && GET_CODE (orig_src
) != CONCAT
)
1637 enum machine_mode imode
= int_mode_for_mode (GET_MODE (orig_src
));
1638 if (imode
== BLKmode
)
1639 src
= assign_stack_temp (GET_MODE (orig_src
), ssize
, 0);
1641 src
= gen_reg_rtx (imode
);
1642 if (imode
!= BLKmode
)
1643 src
= gen_lowpart (GET_MODE (orig_src
), src
);
1644 emit_move_insn (src
, orig_src
);
1645 /* ...and back again. */
1646 if (imode
!= BLKmode
)
1647 src
= gen_lowpart (imode
, src
);
1648 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
1652 /* Check for a NULL entry, used to indicate that the parameter goes
1653 both on the stack and in registers. */
1654 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1659 /* Process the pieces. */
1660 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1662 enum machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1663 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1664 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1667 /* Handle trailing fragments that run over the size of the struct. */
1668 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1670 /* Arrange to shift the fragment to where it belongs.
1671 extract_bit_field loads to the lsb of the reg. */
1673 #ifdef BLOCK_REG_PADDING
1674 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
1675 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1680 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1681 bytelen
= ssize
- bytepos
;
1682 gcc_assert (bytelen
> 0);
1685 /* If we won't be loading directly from memory, protect the real source
1686 from strange tricks we might play; but make sure that the source can
1687 be loaded directly into the destination. */
1689 if (!MEM_P (orig_src
)
1690 && (!CONSTANT_P (orig_src
)
1691 || (GET_MODE (orig_src
) != mode
1692 && GET_MODE (orig_src
) != VOIDmode
)))
1694 if (GET_MODE (orig_src
) == VOIDmode
)
1695 src
= gen_reg_rtx (mode
);
1697 src
= gen_reg_rtx (GET_MODE (orig_src
));
1699 emit_move_insn (src
, orig_src
);
1702 /* Optimize the access just a bit. */
1704 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (src
))
1705 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
1706 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1707 && bytelen
== GET_MODE_SIZE (mode
))
1709 tmps
[i
] = gen_reg_rtx (mode
);
1710 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
1712 else if (COMPLEX_MODE_P (mode
)
1713 && GET_MODE (src
) == mode
1714 && bytelen
== GET_MODE_SIZE (mode
))
1715 /* Let emit_move_complex do the bulk of the work. */
1717 else if (GET_CODE (src
) == CONCAT
)
1719 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
1720 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
1722 if ((bytepos
== 0 && bytelen
== slen0
)
1723 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
1725 /* The following assumes that the concatenated objects all
1726 have the same size. In this case, a simple calculation
1727 can be used to determine the object and the bit field
1729 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
1730 if (! CONSTANT_P (tmps
[i
])
1731 && (!REG_P (tmps
[i
]) || GET_MODE (tmps
[i
]) != mode
))
1732 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
1733 (bytepos
% slen0
) * BITS_PER_UNIT
,
1734 1, NULL_RTX
, mode
, mode
);
1740 gcc_assert (!bytepos
);
1741 mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1742 emit_move_insn (mem
, src
);
1743 tmps
[i
] = extract_bit_field (mem
, bytelen
* BITS_PER_UNIT
,
1744 0, 1, NULL_RTX
, mode
, mode
);
1747 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1748 SIMD register, which is currently broken. While we get GCC
1749 to emit proper RTL for these cases, let's dump to memory. */
1750 else if (VECTOR_MODE_P (GET_MODE (dst
))
1753 int slen
= GET_MODE_SIZE (GET_MODE (src
));
1756 mem
= assign_stack_temp (GET_MODE (src
), slen
, 0);
1757 emit_move_insn (mem
, src
);
1758 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
1760 else if (CONSTANT_P (src
) && GET_MODE (dst
) != BLKmode
1761 && XVECLEN (dst
, 0) > 1)
1762 tmps
[i
] = simplify_gen_subreg (mode
, src
, GET_MODE(dst
), bytepos
);
1763 else if (CONSTANT_P (src
))
1765 HOST_WIDE_INT len
= (HOST_WIDE_INT
) bytelen
;
1773 gcc_assert (2 * len
== ssize
);
1774 split_double (src
, &first
, &second
);
1781 else if (REG_P (src
) && GET_MODE (src
) == mode
)
1784 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
1785 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
1789 tmps
[i
] = expand_shift (LSHIFT_EXPR
, mode
, tmps
[i
],
1790 build_int_cst (NULL_TREE
, shift
), tmps
[i
], 0);
1794 /* Emit code to move a block SRC of type TYPE to a block DST,
1795 where DST is non-consecutive registers represented by a PARALLEL.
1796 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1800 emit_group_load (rtx dst
, rtx src
, tree type
, int ssize
)
1805 tmps
= XALLOCAVEC (rtx
, XVECLEN (dst
, 0));
1806 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
1808 /* Copy the extracted pieces into the proper (probable) hard regs. */
1809 for (i
= 0; i
< XVECLEN (dst
, 0); i
++)
1811 rtx d
= XEXP (XVECEXP (dst
, 0, i
), 0);
1814 emit_move_insn (d
, tmps
[i
]);
1818 /* Similar, but load SRC into new pseudos in a format that looks like
1819 PARALLEL. This can later be fed to emit_group_move to get things
1820 in the right place. */
1823 emit_group_load_into_temps (rtx parallel
, rtx src
, tree type
, int ssize
)
1828 vec
= rtvec_alloc (XVECLEN (parallel
, 0));
1829 emit_group_load_1 (&RTVEC_ELT (vec
, 0), parallel
, src
, type
, ssize
);
1831 /* Convert the vector to look just like the original PARALLEL, except
1832 with the computed values. */
1833 for (i
= 0; i
< XVECLEN (parallel
, 0); i
++)
1835 rtx e
= XVECEXP (parallel
, 0, i
);
1836 rtx d
= XEXP (e
, 0);
1840 d
= force_reg (GET_MODE (d
), RTVEC_ELT (vec
, i
));
1841 e
= alloc_EXPR_LIST (REG_NOTE_KIND (e
), d
, XEXP (e
, 1));
1843 RTVEC_ELT (vec
, i
) = e
;
1846 return gen_rtx_PARALLEL (GET_MODE (parallel
), vec
);
1849 /* Emit code to move a block SRC to block DST, where SRC and DST are
1850 non-consecutive groups of registers, each represented by a PARALLEL. */
1853 emit_group_move (rtx dst
, rtx src
)
1857 gcc_assert (GET_CODE (src
) == PARALLEL
1858 && GET_CODE (dst
) == PARALLEL
1859 && XVECLEN (src
, 0) == XVECLEN (dst
, 0));
1861 /* Skip first entry if NULL. */
1862 for (i
= XEXP (XVECEXP (src
, 0, 0), 0) ? 0 : 1; i
< XVECLEN (src
, 0); i
++)
1863 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0),
1864 XEXP (XVECEXP (src
, 0, i
), 0));
1867 /* Move a group of registers represented by a PARALLEL into pseudos. */
1870 emit_group_move_into_temps (rtx src
)
1872 rtvec vec
= rtvec_alloc (XVECLEN (src
, 0));
1875 for (i
= 0; i
< XVECLEN (src
, 0); i
++)
1877 rtx e
= XVECEXP (src
, 0, i
);
1878 rtx d
= XEXP (e
, 0);
1881 e
= alloc_EXPR_LIST (REG_NOTE_KIND (e
), copy_to_reg (d
), XEXP (e
, 1));
1882 RTVEC_ELT (vec
, i
) = e
;
1885 return gen_rtx_PARALLEL (GET_MODE (src
), vec
);
1888 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
        {
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);
        }
      else
        tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[start], inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  start++;
                }
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1], inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  finish--;
                }
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen = bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        adj_bytelen = ssize - bytepos;

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + adj_bytelen
              <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              enum machine_mode dest_mode = GET_MODE (dest);
              enum machine_mode tmp_mode = GET_MODE (tmps[i]);

              gcc_assert (bytepos == 0 && XVECLEN (src, 0));

              if (GET_MODE_ALIGNMENT (dest_mode)
                  >= GET_MODE_ALIGNMENT (tmp_mode))
                {
                  dest = assign_stack_temp (dest_mode,
                                            GET_MODE_SIZE (dest_mode),
                                            0);
                  emit_move_insn (adjust_address (dest,
                                                  tmp_mode,
                                                  bytepos),
                                  tmps[i]);
                  dst = dest;
                }
              else
                {
                  dest = assign_stack_temp (tmp_mode,
                                            GET_MODE_SIZE (tmp_mode),
                                            0);
                  emit_move_insn (dest, tmps[i]);
                  dst = adjust_address (dest, dest_mode, bytepos);
                }
              break;
            }
        }

      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      build_int_cst (NULL_TREE, shift),
                                      tmps[i], 0);
            }
          bytelen = adj_bytelen;
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
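
/* Usage sketch (illustrative, not part of this file): storing a value
   that lives in two hypothetical 8-byte hard registers into a 16-byte
   stack temporary.  The PARALLEL follows the (expr_list (reg)
   (const_int byte-offset)) convention handled above; the register
   numbers are made up.

     rtx dst = assign_stack_temp (BLKmode, 16, 0);
     rtx par = gen_rtx_PARALLEL
       (VOIDmode,
        gen_rtvec (2,
                   gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 0),
                                      const0_rtx),
                   gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 1),
                                      GEN_INT (8))));
     emit_group_store (dst, par, NULL_TREE, 16);  */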
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  enum machine_mode copy_mode;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.  If the target lives in
     memory, take care of not reading/writing past its end by selecting
     a copy mode suited to BITSIZE.  This should always be possible given
     how it is computed.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */

  copy_mode = word_mode;
  if (MEM_P (tgtblk))
    {
      enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
      if (mem_mode != BLKmode)
        copy_mode = mem_mode;
    }

  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, copy_mode, copy_mode));
    }

  return tgtblk;
}
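
/* Usage sketch (illustrative): expanding a call whose small BLKmode
   struct result arrives in registers.  Passing a null TGTBLK lets the
   routine allocate the stack temporary itself; SRCREG would be the
   (possibly multi-word) return-value hard register of the call.

     rtx blk = copy_blkmode_from_reg (NULL_RTX, srcreg, TREE_TYPE (exp));  */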
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
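
/* Usage sketch (illustrative): noting that a call reads the static
   chain register, so dataflow keeps it live across the call; the list
   built this way becomes the CALL_INSN_FUNCTION_USAGE of the call.

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, static_chain_rtx);  */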
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr (tree name, enum tree_code code)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
                     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (len == 0)
    return 1;

  if (! (memsetp
         ? SET_BY_PIECES_P (len, align)
         : STORE_BY_PIECES_P (len, align)))
    return 0;

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
        {
          for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
            if (GET_MODE_SIZE (tmode) < max_size)
              mode = tmode;

          if (mode == VOIDmode)
            break;

          icode = optab_handler (mov_optab, mode)->insn_code;
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!LEGITIMATE_CONSTANT_P (cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
                 void *constfundata, unsigned int align, bool memsetp, int endp)
{
  struct store_by_pieces data;

  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (memsetp
              ? SET_BY_PIECES_P (len, align)
              : STORE_BY_PIECES_P (len, align));
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
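
/* Usage sketch (illustrative): callers pair can_store_by_pieces with
   store_by_pieces, supplying a callback that materializes each
   MODE-sized chunk as a constant.  The callback below is hypothetical;
   builtins.c uses similar readers when expanding memset and strcpy.

     static rtx
     all_zeros (void *data ATTRIBUTE_UNUSED, HOST_WIDE_INT offset,
                enum machine_mode mode)
     {
       return CONST0_RTX (mode);
     }

     if (can_store_by_pieces (len, all_zeros, NULL, align, true))
       store_by_pieces (to, len, all_zeros, NULL, align, true, 0);  */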
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
                   unsigned int align ATTRIBUTE_UNUSED)
{
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
          data->autinc_to = 1;
          data->explicit_inc_to = -1;
        }

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
          && ! data->autinc_to)
        {
          data->to_addr = copy_addr_to_reg (to_addr);
          data->autinc_to = 1;
          data->explicit_inc_to = 1;
        }

      if (!data->autinc_to && CONSTANT_P (to_addr))
        data->to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                   struct store_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                         data->offset);
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
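
/* Worked example (illustrative): on a 32-bit target with
   STORE_MAX_PIECES == 4, store_by_pieces_1 covers a 7-byte block by
   walking the integer modes from widest to narrowest:
     an SImode store at offset 0 (4 bytes),
     a HImode store at offset 4 (2 bytes),
     a QImode store at offset 6 (1 byte),
   each emitted by store_by_pieces_2 with the matching mov pattern.  */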
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
                     unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  enum machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && GET_CODE (size) == CONST_INT
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
        {
          emit_move_insn (object, zero);
          return NULL;
        }

      if (COMPLEX_MODE_P (mode))
        {
          zero = CONST0_RTX (GET_MODE_INNER (mode));
          if (zero != NULL)
            {
              write_complex_part (object, zero, 0);
              write_complex_part (object, zero, 1);
              return NULL;
            }
        }
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (GET_CODE (size) == CONST_INT
      && CLEAR_BY_PIECES_P (INTVAL (size), align))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
                                   expected_align, expected_size))
    ;
  else
    return set_storage_via_libcall (object, size, const0_rtx,
                                    method == BLOCK_OP_TAILCALL);

  return NULL;
}
rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  return clear_storage_hints (object, size, method, 0, -1);
}
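
/* Usage sketch (illustrative): zeroing a 32-byte BLKmode stack
   temporary; clear_storage picks clear_by_pieces, a setmem pattern, or
   a memset libcall as the hints and target allow.

     rtx mem = assign_stack_temp (BLKmode, 32, 0);
     clear_storage (mem, GEN_INT (32), BLOCK_OP_NORMAL);  */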
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those into new pseudos into a VAR_DECL and use them later.  */

  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  if (GET_CODE (val) != CONST_INT)
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  size_tree = make_tree (sizetype, size);
  val_tree = make_tree (integer_type_node, val);

  fn = clear_storage_libcall_fn (true);
  call_expr = build_call_expr (fn, 3,
                               object_tree, integer_zero_node, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of set_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  The first time FOR_CALL
   is true, we call assemble_external.  */

tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       integer_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
      assemble_external (block_clear_fn);
    }

  return block_clear_fn;
}
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
                        unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = setmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than
             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
             the mode mask, as it is returned by the macro, it will
             definitely be less than the actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (object, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx opsize, opchar;
          enum machine_mode char_mode;
          rtx last = get_last_insn ();
          rtx pat;

          opsize = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[1].predicate;
          if (pred != 0 && ! (*pred) (opsize, mode))
            opsize = copy_to_mode_reg (mode, opsize);

          opchar = val;
          char_mode = insn_data[(int) code].operand[2].mode;
          if (char_mode != VOIDmode)
            {
              opchar = convert_to_mode (char_mode, opchar, 1);
              pred = insn_data[(int) code].operand[2].predicate;
              if (pred != 0 && ! (*pred) (opchar, char_mode))
                opchar = copy_to_mode_reg (char_mode, opchar);
            }

          if (insn_data[(int) code].n_operands == 4)
            pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
          else
            pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
                                        GEN_INT (expected_align
                                                 / BITS_PER_UNIT),
                                        GEN_INT (expected_size));
          if (pat)
            {
              emit_insn (pat);
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  return false;
}
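
/* For reference, the operands a setmem pattern receives here: 0 is the
   destination MEM, 1 the byte count, 2 the fill value, 3 the known
   alignment, and the six-operand form adds the expected alignment and
   expected size passed in as expansion hints above.  */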
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  enum machine_mode cmode;
  enum machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
                                         imag_p ? GET_MODE_SIZE (imode) : 0),
                      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
        {
          emit_move_insn (part, val);
          return;
        }
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
}
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  enum machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
        {
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (CONSTANT_CLASS_P (part))
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
        }
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
                              imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
        return ret;
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
                            true, NULL_RTX, imode, imode);
}
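
/* Usage sketch (illustrative): assembling a complex value from its
   halves and reading one back; when CPLX is a CONCAT both calls reduce
   to plain moves of the parts.  RE and IM stand for any SFmode rtxes.

     rtx c = gen_reg_rtx (SCmode);
     write_complex_part (c, re, false);
     write_complex_part (c, im, true);
     rtx imag = read_complex_part (c, true);  */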
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (enum machine_mode new_mode,
                       enum machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
         size in bytes is supposed to be the same.  */
      if (reload_in_progress)
        {
          /* Copy the MEM to change the mode and move any
             substitutions from the old MEM to the new one.  */
          ret = adjust_address_nv (x, new_mode, 0);
          copy_replacements (x, ret);
        }
      else
        ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
         that the new mode is ok for a hard register.  If we were to use
         simplify_gen_subreg, we would create the subreg, but would
         probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
         we want.  Which is needed for CCmodes on some targets.  */
      if (force)
        ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
        ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
{
  enum machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL_RTX;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode)->insn_code;
  if (code == CODE_FOR_nothing)
    return NULL_RTX;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL_RTX;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL_RTX;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

static rtx
emit_move_resolve_push (enum machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
        val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
                              GEN_INT (adjust), stack_pointer_rtx,
                              0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
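
/* Worked example (illustrative): for SImode on a target whose stack
   grows downward, (mem:SI (pre_dec (reg sp))) becomes an explicit
   "sp = sp - 4" followed by (mem:SI (reg sp)), while a post_dec form
   yields (mem:SI (plus (reg sp) (const_int 4))) so the store still
   lands on the bytes the original push would have written.  */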
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
{
  enum machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;
    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                         read_complex_part (y, !imag_first));
}
/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once.  */

  /* Move floating point as parts.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
           && (!STRICT_ALIGNMENT
               || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx ret;

      /* For memory to memory moves, optimal behavior can be had with the
         existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
        {
          emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
                           BLOCK_OP_NO_LIBCALL);
          return get_last_insn ();
        }

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
        return ret;
    }

  return emit_move_complex_parts (x, y);
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
{
  rtx ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
      if (code != CODE_FOR_nothing)
        {
          x = emit_move_change_mode (CCmode, mode, x, true);
          y = emit_move_change_mode (CCmode, mode, y, true);
          return emit_insn (GEN_FCN (code) (x, y));
        }
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  enum machine_mode innermode, innermostmode;
  int offset;
  if (GET_CODE (op) != SUBREG)
    return false;
  innermode = GET_MODE (op);
  innermostmode = GET_MODE (SUBREG_REG (op));
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
  if (SUBREG_BYTE (op) == 0
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
    {
      int difference = (GET_MODE_SIZE (innermostmode)
                        - GET_MODE_SIZE (innermode));
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  if (offset >= GET_MODE_SIZE (innermostmode)
      || offset <= -GET_MODE_SIZE (word_mode))
    return true;

  return false;
}
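
/* Worked example (illustrative): with 32-bit words, word 1 of
   (subreg:DI (reg:SI N) 0) lies wholly in the undefined upper half of
   the paradoxical subreg, so this predicate returns true and
   emit_move_multi_word below simply skips that word's move.  */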
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx
emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
{
  rtx last_insn = 0;
  rtx seq, inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart;

      /* Do not generate code for a move if it would come entirely
         from the undefined bits of a paradoxical subreg.  */
      if (undefined_operand_subword_p (y, i))
        continue;

      ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
         constant.  Otherwise, force it into a register.  Then we must
         be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
        {
          y = use_anchored_address (force_const_mem (mode, y));
          ypart = operand_subword (y, i, 1, mode);
        }
      else if (ypart == 0)
        ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_clobber (x);

  emit_insn (seq);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode)->insn_code;
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
        return result;
      else
        return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx ret = emit_move_via_integer (mode, x, y, false);
      if (ret)
        return ret;
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  gcc_assert (mode != BLKmode
              && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!LEGITIMATE_CONSTANT_P (y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
          else
            y = use_anchored_address (y);
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_p (GET_MODE (x), XEXP (x, 0))
          && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_p (GET_MODE (y), XEXP (y, 0)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
}
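
/* Usage sketch (illustrative): the canonical way to copy one rtx into
   another; constants are legitimized or spilled to memory as above.

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, GEN_INT (42));  */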
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;
  int oldcost, newcost;
  bool speed = optimize_insn_for_speed_p ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  if (LEGITIMATE_CONSTANT_P (y))
    oldcost = rtx_cost (y, SET, speed);
  else
    oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
        continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
            continue;
          /* This is valid, but may not be cheaper than the original.  */
          newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
          if (oldcost < newcost)
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        {
          trunc_y = force_const_mem (srcmode, trunc_y);
          /* This is valid, but may not be cheaper than the original.  */
          newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
          if (oldcost < newcost)
            continue;
          trunc_y = validize_mem (trunc_y);
        }
      else
        continue;

      /* For CSE's benefit, force the compressed constant pool entry
         into a new pseudo.  This constant may be used in different modes,
         and if not, combine will put things back together for us.  */
      trunc_y = force_reg (srcmode, trunc_y);
      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (x))
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
}
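
/* Worked example (illustrative): moving the DFmode constant 1.5 into a
   register on a target providing extendsfdf2.  Since 1.5 truncates
   exactly to SFmode, the move is emitted as a float_extend of the
   narrower constant, which is usually cheaper to materialize.  */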
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
        temp = plus_constant (virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (size, extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = optab_handler (push_optab, mode)->insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
           && !((*pred) (x, mode))))
        x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
                      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                                    sub_optab,
#else
                                    add_optab,
#endif
                                    stack_pointer_rtx,
                                    GEN_INT (rounded_size),
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
        /* We have already decremented the stack pointer, so get the
           previous value.  */
        offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
        /* We have already incremented the stack pointer, so get the
           previous value.  */
        offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
                unsigned int align, int partial, rtx reg, int extra,
                rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
                rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x;

  if (mode == BLKmode
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      if (mode != BLKmode)
        {
          /* A value is to be stored in an insufficiently aligned
             stack slot; copy via a suitably aligned slot if
             necessary.  */
          size = GEN_INT (GET_MODE_SIZE (mode));
          if (!MEM_P (xinner))
            {
              temp = assign_temp (type, 0, 1, 1);
              emit_move_insn (temp, xinner);
              xinner = temp;
            }
        }

      gcc_assert (size);

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && MEM_ALIGN (xinner) >= align
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || (PUSH_ROUNDING (align / BITS_PER_UNIT)
                  == (align / BITS_PER_UNIT)))
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
        }
      else
#endif /* PUSH_ROUNDING */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));

          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          target = gen_rtx_MEM (BLKmode, temp);

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (target, align);

          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          if (GET_CODE (args_so_far) == CONST_INT)
            addr
              = memory_address (mode,
                                plus_constant (args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                       args_so_far));
          dest = gen_rtx_MEM (mode, addr);

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (dest, align);

          emit_move_insn (dest, x);
        }
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, type, -1);
      else
        {
          gcc_assert (partial % UNITS_PER_WORD == 0);
          move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
        }
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
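
/* Usage sketch (illustrative): how a caller such as calls.c might push
   a BLKmode argument that goes entirely on the stack, with no partial
   registers and no preallocated argument block; the names are
   placeholders and most operands are the neutral defaults.

     emit_push_insn (arg_rtx, BLKmode, arg_type, size_rtx,
                     PARM_BOUNDARY, 0, NULL_RTX, 0,
                     NULL_RTX, const0_rtx, 0, NULL_RTX);  */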
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
          || x == 0
          /* Only registers can be subtargets.  */
          || !REG_P (x)
          /* Don't use hard regs to avoid extending their life.  */
          || REGNO (x) < FIRST_PSEUDO_REGISTER
          ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
                                 unsigned HOST_WIDE_INT bitpos,
                                 enum machine_mode mode1, rtx str_rtx,
                                 tree to, tree src)
{
  enum machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;

  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  STRIP_NOPS (src);
  if (!BINARY_CLASS_P (src)
      || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  op0 = TREE_OPERAND (src, 0);
  op1 = TREE_OPERAND (src, 1);
  STRIP_NOPS (op0);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
        str_mode = word_mode;
      str_mode = get_best_mode (bitsize, bitpos,
                                MEM_ALIGN (str_rtx), str_mode, 0);
      if (str_mode == VOIDmode)
        return false;
      str_bitsize = GET_MODE_BITSIZE (str_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (TREE_CODE (src))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
         where we don't need to do any masking and also
         1 bit bitfields where xor can be used.
         We might win by one instruction for the other bitfields
         too if insv/extv instructions aren't used, so that
         can be added later.  */
      if (bitpos + bitsize != str_bitsize
          && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
        break;

      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
        {
          value = expand_and (str_mode, value, const1_rtx, NULL);
          binop = xor_optab;
        }
      value = expand_shift (LSHIFT_EXPR, str_mode, value,
                            build_int_cst (NULL_TREE, bitpos),
                            NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
        break;
      value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
      value = convert_modes (GET_MODE (str_rtx),
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
        {
          rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
                              - 1);
          value = expand_and (GET_MODE (str_rtx), value, mask,
                              NULL_RTX);
        }
      value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
                            build_int_cst (NULL_TREE, bitpos),
                            NULL_RTX, 1);
      result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
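
/* Illustrative sketch (ours, not from the original comments): given

     struct S { unsigned lo : 3; unsigned hi : 29; } s;
     s.hi += 1;   the topmost-bits PLUS_EXPR path, no masking needed
     s.lo ^= 4;   the BIT_XOR_EXPR path with an explicit mask

   both assignments are emitted as a single read-modify-write of the
   word containing S, instead of an extract/modify/insert sequence.  */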
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */

void
expand_assignment (tree to, tree from, bool nontemporal)
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */
  if (handled_component_p (to)
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &volatilep, true);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      to_rtx = expand_normal (tem);

      if (offset != 0)
        {
          rtx offset_rtx;

          if (!MEM_P (to_rtx))
            {
              /* We can get constant negative offsets into arrays with broken
                 user code.  Translate this to a trap instead of ICEing.  */
              gcc_assert (TREE_CODE (offset) == INTEGER_CST);
              expand_builtin_trap ();
              to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
            }

          offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (offset_rtx) != Pmode)
            offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
          if (GET_MODE (offset_rtx) != ptr_mode)
            offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

          /* A constant address in TO_RTX can have VOIDmode, we must not try
             to call force_reg for that case.  Avoid that case.  */
          if (MEM_P (to_rtx)
              && GET_MODE (to_rtx) == BLKmode
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
              && bitsize > 0
              && (bitpos % bitsize) == 0
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
              && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
            {
              to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
              bitpos = 0;
            }

          to_rtx = offset_address (to_rtx, offset_rtx,
                                   highest_pow2_factor_for_target (to,
                                                                   offset));
        }

      /* Handle expand_expr of a complex value returning a CONCAT.  */
      if (GET_CODE (to_rtx) == CONCAT)
        {
          if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
            {
              gcc_assert (bitpos == 0);
              result = store_expr (from, to_rtx, false, nontemporal);
            }
          else
            {
              gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
              result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
                                   nontemporal);
            }
        }
      else
        {
          if (MEM_P (to_rtx))
            {
              /* If the field is at offset zero, we could have been given the
                 DECL_RTX of the parent struct.  Don't munge it.  */
              to_rtx = shallow_copy_rtx (to_rtx);

              set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);

              /* Deal with volatile and readonly fields.  The former is only
                 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
              if (volatilep)
                MEM_VOLATILE_P (to_rtx) = 1;
              if (component_uses_parent_alias_set (to))
                MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
            }

          if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
                                               to_rtx, to, from))
            result = NULL;
          else
            result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                                  TREE_TYPE (tem), get_alias_set (to),
                                  nontemporal);
        }

      if (result)
        preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  The same is true for SSA names.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && COMPLETE_TYPE_P (TREE_TYPE (from))
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
             && REG_P (DECL_RTL (to)))
            || TREE_CODE (to) == SSA_NAME))
    {
      rtx value;

      push_temp_slots ();
      value = expand_normal (from);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, value, TREE_TYPE (from),
                         int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
        emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
        {
          if (POINTER_TYPE_P (TREE_TYPE (to)))
            value = convert_memory_address (GET_MODE (to_rtx), value);
          emit_move_insn (to_rtx, value);
        }
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);

      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, temp, TREE_TYPE (from),
                         int_size_in_bytes (TREE_TYPE (from)));
      else
        emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && cfun->returns_struct
      && !cfun->returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      emit_library_call (memmove_libfunc, LCT_NORMAL,
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                         XEXP (from_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype),
                                          size, TYPE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, 0, nontemporal);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return;
}
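
/* Worked example (illustrative, ours): for

     __complex__ double c;
     __real__ c = 1.0;

   expand_expr of C yields a CONCAT of two DFmode values, so the CONCAT
   branch above stores 1.0 into XEXP (to_rtx, 0) directly; BITPOS picks
   the real or imaginary half.  */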
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

bool
emit_storent_insn (rtx to, rtx from)
{
  enum machine_mode mode = GET_MODE (to), imode;
  enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
  rtx pattern;

  if (code == CODE_FOR_nothing)
    return false;

  imode = insn_data[code].operand[0].mode;
  if (!insn_data[code].operand[0].predicate (to, imode))
    return false;

  imode = insn_data[code].operand[1].mode;
  if (!insn_data[code].operand[1].predicate (from, imode))
    {
      from = copy_to_mode_reg (imode, from);
      if (!insn_data[code].operand[1].predicate (from, imode))
        return false;
    }

  pattern = GEN_FCN (code) (to, from);
  if (pattern == NULL_RTX)
    return false;

  emit_insn (pattern);
  return true;
}
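
/* Usage note (ours): store_expr below tries this helper only when its
   NONTEMPORAL argument is true; on a target without a storent pattern
   for MODE the optab lookup above yields CODE_FOR_nothing and the
   caller falls back to an ordinary move, so no fallback is needed
   here.  */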
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem.
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.  */

rtx
store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  int dont_return_target = 0;

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
         branch and an rvalue in the other. Here, we resolve attempts to
         store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
                         nontemporal);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
                  nontemporal);
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
                  nontemporal);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* We can do the conversion inside EXP, which will often result
         in some optimizations.  Do the conversion in two steps: first
         change the signedness, if needed, then the extend.  But don't
         do this if the type of EXP is a subtype of something else
         since then the conversion might involve more than just
         converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
          && TREE_TYPE (TREE_TYPE (exp)) == 0
          && GET_MODE_PRECISION (GET_MODE (target))
             == TYPE_PRECISION (TREE_TYPE (exp)))
        {
          if (TYPE_UNSIGNED (TREE_TYPE (exp))
              != SUBREG_PROMOTED_UNSIGNED_P (target))
            {
              /* Some types, e.g. Fortran's logical*4, won't have a signed
                 version, so use the mode instead.  */
              tree ntype
                = (signed_or_unsigned_type_for
                   (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
              if (ntype == NULL)
                ntype = lang_hooks.types.type_for_mode
                  (TYPE_MODE (TREE_TYPE (exp)),
                   SUBREG_PROMOTED_UNSIGNED_P (target));

              exp = fold_convert (ntype, exp);
            }

          exp = fold_convert (lang_hooks.types.type_for_mode
                                (GET_MODE (SUBREG_REG (target)),
                                 SUBREG_PROMOTED_UNSIGNED_P (target)),
                              exp);

          inner_target = SUBREG_REG (target);
        }

      temp = expand_expr (exp, inner_target, VOIDmode,
                          call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
         sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
        {
          temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                                temp, SUBREG_PROMOTED_UNSIGNED_P (target));
          temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                GET_MODE (target), temp,
                                SUBREG_PROMOTED_UNSIGNED_P (target));
        }

      convert_move (SUBREG_REG (target), temp,
                    SUBREG_PROMOTED_UNSIGNED_P (target));

      return NULL_RTX;
    }
  else if (TREE_CODE (exp) == STRING_CST
           && !nontemporal && !call_param_p
           && TREE_STRING_LENGTH (exp) > 0
           && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
    {
      /* Optimize initialization of an array with a STRING_CST.  */
      HOST_WIDE_INT exp_len, str_copy_len;
      rtx dest_mem;

      exp_len = int_expr_size (exp);
      if (exp_len <= 0)
        goto normal_expr;

      str_copy_len = strlen (TREE_STRING_POINTER (exp));
      if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
        goto normal_expr;

      str_copy_len = TREE_STRING_LENGTH (exp);
      if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
        {
          str_copy_len += STORE_MAX_PIECES - 1;
          str_copy_len &= ~(STORE_MAX_PIECES - 1);
        }
      str_copy_len = MIN (str_copy_len, exp_len);
      if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
                                CONST_CAST(char *, TREE_STRING_POINTER (exp)),
                                MEM_ALIGN (target), false))
        goto normal_expr;

      dest_mem = target;

      dest_mem = store_by_pieces (dest_mem,
                                  str_copy_len, builtin_strncpy_read_str,
                                  CONST_CAST(char *, TREE_STRING_POINTER (exp)),
                                  MEM_ALIGN (target), false,
                                  exp_len > str_copy_len ? 1 : 0);
      if (exp_len > str_copy_len)
        clear_storage (adjust_address (dest_mem, BLKmode, 0),
                       GEN_INT (exp_len - str_copy_len),
                       BLOCK_OP_NORMAL);
      return NULL_RTX;
    }
  else
    {
      rtx tmp_target;

  normal_expr:
      /* If we want to use a nontemporal store, force the value to
         register first.  */
      tmp_target = nontemporal ? NULL_RTX : target;
      temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
                               (call_param_p
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL),
                               &alt_rtl);
      /* Return TARGET if it's a specified hardware register.
         If TARGET is a volatile mem ref, either return TARGET
         or return a reg copied *from* TARGET; ANSI requires this.

         Otherwise, if TEMP is not TARGET, return TEMP
         if it is constant (for efficiency),
         or if we really want the correct value.  */
      if (!(target && REG_P (target)
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
          && !(MEM_P (target) && MEM_VOLATILE_P (target))
          && ! rtx_equal_p (temp, target)
          && CONSTANT_P (temp))
        dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                          temp, TYPE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
                              || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
         but TARGET is not valid memory reference, TEMP will differ
         from TARGET although it is really the same location.  */
      && !(alt_rtl && rtx_equal_p (alt_rtl, target))
      /* If there's nothing to copy, don't bother.  Don't call
         expr_size unless necessary, because some front-ends (C++)
         expr_size-hook must not be given objects that are not
         supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
          if (dont_return_target)
            {
              /* In this case, we will return TEMP,
                 so make sure it has the proper mode.
                 But don't forget to store the value into TARGET.  */
              temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
              emit_move_insn (target, temp);
            }
          else if (GET_MODE (target) == BLKmode
                   || GET_MODE (temp) == BLKmode)
            emit_block_move (target, temp, expr_size (exp),
                             (call_param_p
                              ? BLOCK_OP_CALL_PARM
                              : BLOCK_OP_NORMAL));
          else
            convert_move (target, temp, unsignedp);
        }

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.  The string
             constant may be shorter than the array.  So copy just the string's
             actual length, and clear the rest.  First get the size of the data
             type of the string, which is actually the size of the target.  */
          rtx size = expr_size (exp);

          if (GET_CODE (size) == CONST_INT
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size,
                             (call_param_p
                              ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
          else
            {
              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop (MIN_EXPR,
                              make_tree (sizetype, size),
                              size_int (TREE_STRING_LENGTH (exp)));
              rtx copy_size_rtx
                = expand_expr (copy_size, NULL_RTX, VOIDmode,
                               (call_param_p
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL));
              rtx label = 0;

              /* Copy that much.  */
              copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
                                               TYPE_UNSIGNED (sizetype));
              emit_block_move (target, temp, copy_size_rtx,
                               (call_param_p
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

              /* Figure out how much is left in TARGET that we have to clear.
                 Do all calculations in ptr_mode.  */
              if (GET_CODE (copy_size_rtx) == CONST_INT)
                {
                  size = plus_constant (size, -INTVAL (copy_size_rtx));
                  target = adjust_address (target, BLKmode,
                                           INTVAL (copy_size_rtx));
                }
              else
                {
                  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
                  if (GET_MODE (copy_size_rtx) != Pmode)
                    copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
                                                     TYPE_UNSIGNED (sizetype));
#endif

                  target = offset_address (target, copy_size_rtx,
                                           highest_pow2_factor (copy_size));
                  label = gen_label_rtx ();
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
                                           GET_MODE (size), 0, label);
                }

              if (size != const0_rtx)
                clear_storage (target, size, BLOCK_OP_NORMAL);

              if (label)
                emit_label (label);
            }
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        emit_group_load (target, temp, TREE_TYPE (exp),
                         int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp),
                         (call_param_p
                          ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else if (nontemporal
               && emit_storent_insn (target, temp))
        /* If we managed to emit a nontemporal store, there is nothing else to
           do.  */
        ;
      else
        {
          temp = force_operand (temp, target);
          if (temp != target)
            emit_move_insn (target, temp);
        }
    }

  return NULL_RTX;
}
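
/* Worked example (illustrative, ours): for

     char buf[8] = "hi";

   the STRING_CST handling above copies the string data, including the
   terminating NUL, into BUF, then uses clear_storage to zero the
   remaining tail of the array rather than reading past the end of the
   string constant.  */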
/* Helper for categorize_ctor_elements.  Identical interface.  */

static bool
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
                            HOST_WIDE_INT *p_elt_count,
                            bool *p_must_clear)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, elt_count;
  tree value, purpose;

  /* Whether CTOR is a valid constant initializer, in accordance with what
     initializer_constant_valid_p does.  If inferred from the constructor
     elements, true until proven otherwise.  */
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);

  nz_elts = 0;
  elt_count = 0;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (TREE_CODE (purpose) == RANGE_EXPR)
        {
          tree lo_index = TREE_OPERAND (purpose, 0);
          tree hi_index = TREE_OPERAND (purpose, 1);

          if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
            mult = (tree_low_cst (hi_index, 1)
                    - tree_low_cst (lo_index, 1) + 1);
        }

      switch (TREE_CODE (value))
        {
        case CONSTRUCTOR:
          {
            HOST_WIDE_INT nz = 0, ic = 0;

            bool const_elt_p
              = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);

            nz_elts += mult * nz;
            elt_count += mult * ic;

            if (const_from_elts_p && const_p)
              const_p = const_elt_p;
          }
          break;

        case INTEGER_CST:
        case REAL_CST:
        case FIXED_CST:
          if (!initializer_zerop (value))
            nz_elts += mult;
          elt_count += mult;
          break;

        case STRING_CST:
          nz_elts += mult * TREE_STRING_LENGTH (value);
          elt_count += mult * TREE_STRING_LENGTH (value);
          break;

        case COMPLEX_CST:
          if (!initializer_zerop (TREE_REALPART (value)))
            nz_elts += mult;
          if (!initializer_zerop (TREE_IMAGPART (value)))
            nz_elts += mult;
          elt_count += mult;
          break;

        case VECTOR_CST:
          {
            tree v;
            for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
              {
                if (!initializer_zerop (TREE_VALUE (v)))
                  nz_elts += mult;
                elt_count += mult;
              }
          }
          break;

        default:
          nz_elts += mult;
          elt_count += mult;

          if (const_from_elts_p && const_p)
            const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
                      != NULL_TREE;
          break;
        }
    }

  if (!*p_must_clear
      && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
          || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
    {
      tree init_sub_type;
      bool clear_this = true;

      if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
        {
          /* We don't expect more than one element of the union to be
             initialized.  Not sure what we should do otherwise... */
          gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
                      == 1);

          init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
                                                CONSTRUCTOR_ELTS (ctor),
                                                0)->value);

          /* ??? We could look at each element of the union, and find the
             largest element.  Which would avoid comparing the size of the
             initialized element against any tail padding in the union.
             Doesn't seem worth the effort...  */
          if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
                                TYPE_SIZE (init_sub_type)) == 1)
            {
              /* And now we have to find out if the element itself is fully
                 constructed.  E.g. for union { struct { int a, b; } s; } u
                 = { .s = { .a = 1 } }.  */
              if (elt_count == count_type_elements (init_sub_type, false))
                clear_this = false;
            }
        }

      *p_must_clear = clear_this;
    }

  *p_nz_elts += nz_elts;
  *p_elt_count += elt_count;

  return const_p;
}
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * if a type is a union, and the initializer from the constructor
     is not the largest element in the union, then set *p_must_clear.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */

bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
                          HOST_WIDE_INT *p_elt_count,
                          bool *p_must_clear)
{
  *p_nz_elts = 0;
  *p_elt_count = 0;
  *p_must_clear = false;

  return
    categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
}
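
/* Example (illustrative, ours): for the initializer { 1, 0, 0, 2 } of
   an int[4], this sets *P_NZ_ELTS to 2 and *P_ELT_COUNT to 4, and
   returns true, since every element is a valid constant
   initializer.  */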
/* Count the number of scalars in TYPE.  Return -1 on overflow or
   variable-sized.  If ALLOW_FLEXARR is true, don't count flexible
   array member at the end of the structure.  */

HOST_WIDE_INT
count_type_elements (const_tree type, bool allow_flexarr)
{
  const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
        tree telts = array_type_nelts (type);
        if (telts && host_integerp (telts, 1))
          {
            HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
            HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
            if (n == 0)
              return 0;
            else if (max / n > m)
              return n * m;
          }
        return -1;
      }

    case RECORD_TYPE:
      {
        HOST_WIDE_INT n = 0, t;
        tree f;

        for (f = TYPE_FIELDS (type); f; f = TREE_CHAIN (f))
          if (TREE_CODE (f) == FIELD_DECL)
            {
              t = count_type_elements (TREE_TYPE (f), false);
              if (t < 0)
                {
                  /* Check for structures with flexible array member.  */
                  tree tf = TREE_TYPE (f);
                  if (allow_flexarr
                      && TREE_CHAIN (f) == NULL
                      && TREE_CODE (tf) == ARRAY_TYPE
                      && TYPE_DOMAIN (tf)
                      && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
                      && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
                      && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
                      && int_size_in_bytes (type) >= 0)
                    break;

                  return -1;
                }
              n += t;
            }

        return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return -1;

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
      return 1;

    default:
      gcc_unreachable ();
    }
}
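
/* Example (illustrative, ours): "struct { int a; int b[3]; }" counts
   as 4 scalars (A plus three elements of B); any union counts as -1
   because we cannot tell which member's scalars will be live.  */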
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, count, elts;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
      if (must_clear)
        return 1;

      elts = count_type_elements (TREE_TYPE (exp), false);

      return nz_elts < elts / 4;
    }

  return initializer_zerop (exp);
}

/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, count;
      bool must_clear;

      categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
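
/* Example (illustrative, ours): { 0, 0, 0, 0, 0, 0, 0, 1 } over int[8]
   has nz_elts == 1 and elts / 4 == 2, so mostly_zeros_p returns 1,
   while all_zeros_p returns 0 because of the single nonzero
   element.  */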
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
                         HOST_WIDE_INT bitpos, enum machine_mode mode,
                         tree exp, tree type, int cleared,
                         alias_set_type alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
         bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
         let store_field do the bitfield handling.  This is unlikely to
         generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
        target
          = adjust_address (target,
                            GET_MODE (target) == BLKmode
                            || 0 != (bitpos
                                     % GET_MODE_ALIGNMENT (GET_MODE (target)))
                            ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);


      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
          && MEM_ALIAS_SET (target) != 0)
        {
          target = copy_rtx (target);
          set_mem_alias_set (target, alias_set);
        }

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        unsigned HOST_WIDE_INT idx;
        tree field, value;

        /* If size is zero or the target is already cleared, do nothing.  */
        if (size == 0 || cleared)
          cleared = 1;
        /* We either clear the aggregate or indicate the value is dead.  */
        else if ((TREE_CODE (type) == UNION_TYPE
                  || TREE_CODE (type) == QUAL_UNION_TYPE)
                 && ! CONSTRUCTOR_ELTS (exp))
          /* If the constructor is empty, clear the union.  */
          {
            clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
            cleared = 1;
          }

        /* If we are building a static constructor into a register,
           set the initial value as zero so we can fold the value into
           a constant.  But if more than one register is involved,
           this probably loses.  */
        else if (REG_P (target) && TREE_STATIC (exp)
                 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
          {
            emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
            cleared = 1;
          }

        /* If the constructor has fewer fields than the structure or
           if we are initializing the structure to mostly zeros, clear
           the whole structure first.  Don't do this if TARGET is a
           register whose mode size isn't equal to SIZE since
           clear_storage can't handle this case.  */
        else if (size > 0
                 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
                      != fields_length (type))
                     || mostly_zeros_p (exp))
                 && (!REG_P (target)
                     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
                         == size)))
          {
            clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
            cleared = 1;
          }

        if (REG_P (target) && !cleared)
          emit_clobber (target);

        /* Store each element of the constructor into the
           corresponding field of TARGET.  */
        FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
          {
            enum machine_mode mode;
            HOST_WIDE_INT bitsize;
            HOST_WIDE_INT bitpos = 0;
            tree offset;
            rtx to_rtx = target;

            /* Just ignore missing fields.  We cleared the whole
               structure, above, if any fields are missing.  */
            if (field == 0)
              continue;

            if (cleared && initializer_zerop (value))
              continue;

            if (host_integerp (DECL_SIZE (field), 1))
              bitsize = tree_low_cst (DECL_SIZE (field), 1);
            else
              bitsize = -1;

            mode = DECL_MODE (field);
            if (DECL_BIT_FIELD (field))
              mode = VOIDmode;

            offset = DECL_FIELD_OFFSET (field);
            if (host_integerp (offset, 0)
                && host_integerp (bit_position (field), 0))
              {
                bitpos = int_bit_position (field);
                offset = 0;
              }
            else
              bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

            if (offset)
              {
                rtx offset_rtx;

                offset
                  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
                                                    make_tree (TREE_TYPE (exp),
                                                               target));

                offset_rtx = expand_normal (offset);
                gcc_assert (MEM_P (to_rtx));

#ifdef POINTERS_EXTEND_UNSIGNED
                if (GET_MODE (offset_rtx) != Pmode)
                  offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
                if (GET_MODE (offset_rtx) != ptr_mode)
                  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

                to_rtx = offset_address (to_rtx, offset_rtx,
                                         highest_pow2_factor (offset));
              }

#ifdef WORD_REGISTER_OPERATIONS
            /* If this initializes a field that is smaller than a
               word, at the start of a word, try to widen it to a full
               word.  This special case allows us to output C++ member
               function initializations in a form that the optimizers
               can understand.  */
            if (REG_P (target)
                && bitsize < BITS_PER_WORD
                && bitpos % BITS_PER_WORD == 0
                && GET_MODE_CLASS (mode) == MODE_INT
                && TREE_CODE (value) == INTEGER_CST
                && exp_size >= 0
                && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
              {
                tree type = TREE_TYPE (value);

                if (TYPE_PRECISION (type) < BITS_PER_WORD)
                  {
                    type = lang_hooks.types.type_for_size
                      (BITS_PER_WORD, TYPE_UNSIGNED (type));
                    value = fold_convert (type, value);
                  }

                if (BYTES_BIG_ENDIAN)
                  value
                    = fold_build2 (LSHIFT_EXPR, type, value,
                                   build_int_cst (type,
                                                  BITS_PER_WORD - bitsize));
                bitsize = BITS_PER_WORD;
                mode = word_mode;
              }
#endif

            if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
                && DECL_NONADDRESSABLE_P (field))
              {
                to_rtx = copy_rtx (to_rtx);
                MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
              }

            store_constructor_field (to_rtx, bitsize, bitpos, mode,
                                     value, type, cleared,
                                     get_alias_set (TREE_TYPE (field)));
          }
        break;
      }
    case ARRAY_TYPE:
      {
        tree value, index;
        unsigned HOST_WIDE_INT i;
        int need_to_clear;
        tree domain;
        tree elttype = TREE_TYPE (type);
        int const_bounds_p;
        HOST_WIDE_INT minelt = 0;
        HOST_WIDE_INT maxelt = 0;

        domain = TYPE_DOMAIN (type);
        const_bounds_p = (TYPE_MIN_VALUE (domain)
                          && TYPE_MAX_VALUE (domain)
                          && host_integerp (TYPE_MIN_VALUE (domain), 0)
                          && host_integerp (TYPE_MAX_VALUE (domain), 0));

        /* If we have constant bounds for the range of the type, get them.  */
        if (const_bounds_p)
          {
            minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
            maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
          }

        /* If the constructor has fewer elements than the array, clear
           the whole array first.  Similarly if this is static
           constructor of a non-BLKmode object.  */
        if (cleared)
          need_to_clear = 0;
        else if (REG_P (target) && TREE_STATIC (exp))
          need_to_clear = 1;
        else
          {
            unsigned HOST_WIDE_INT idx;
            tree index, value;
            HOST_WIDE_INT count = 0, zero_count = 0;
            need_to_clear = ! const_bounds_p;

            /* This loop is a more accurate version of the loop in
               mostly_zeros_p (it handles RANGE_EXPR in an index).  It
               is also needed to check for missing elements.  */
            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
              {
                HOST_WIDE_INT this_node_count;

                if (need_to_clear)
                  break;

                if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
                  {
                    tree lo_index = TREE_OPERAND (index, 0);
                    tree hi_index = TREE_OPERAND (index, 1);

                    if (! host_integerp (lo_index, 1)
                        || ! host_integerp (hi_index, 1))
                      {
                        need_to_clear = 1;
                        break;
                      }

                    this_node_count = (tree_low_cst (hi_index, 1)
                                       - tree_low_cst (lo_index, 1) + 1);
                  }
                else
                  this_node_count = 1;

                count += this_node_count;
                if (mostly_zeros_p (value))
                  zero_count += this_node_count;
              }

            /* Clear the entire array first if there are any missing
               elements, or if the incidence of zero elements is >=
               75%.  */
            if (! need_to_clear
                && (count < maxelt - minelt + 1
                    || 4 * zero_count >= 3 * count))
              need_to_clear = 1;
          }

        if (need_to_clear && size > 0)
          {
            if (REG_P (target))
              emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
            else
              clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
            cleared = 1;
          }

        if (!cleared && REG_P (target))
          /* Inform later passes that the old value is dead.  */
          emit_clobber (target);

        /* Store each element of the constructor into the
           corresponding element of TARGET, determined by counting the
           elements.  */
        FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
          {
            enum machine_mode mode;
            HOST_WIDE_INT bitsize;
            HOST_WIDE_INT bitpos;
            int unsignedp;
            rtx xtarget = target;

            if (cleared && initializer_zerop (value))
              continue;

            unsignedp = TYPE_UNSIGNED (elttype);
            mode = TYPE_MODE (elttype);
            if (mode == BLKmode)
              bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
                         ? tree_low_cst (TYPE_SIZE (elttype), 1)
                         : -1);
            else
              bitsize = GET_MODE_BITSIZE (mode);

            if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
              {
                tree lo_index = TREE_OPERAND (index, 0);
                tree hi_index = TREE_OPERAND (index, 1);
                rtx index_r, pos_rtx;
                HOST_WIDE_INT lo, hi, count;
                tree position;

                /* If the range is constant and "small", unroll the loop.  */
                if (const_bounds_p
                    && host_integerp (lo_index, 0)
                    && host_integerp (hi_index, 0)
                    && (lo = tree_low_cst (lo_index, 0),
                        hi = tree_low_cst (hi_index, 0),
                        count = hi - lo + 1,
                        (!MEM_P (target)
                         || count <= 2
                         || (host_integerp (TYPE_SIZE (elttype), 1)
                             && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
                                 <= 40 * 8)))))
                  {
                    lo -= minelt;  hi -= minelt;
                    for (; lo <= hi; lo++)
                      {
                        bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

                        if (MEM_P (target)
                            && !MEM_KEEP_ALIAS_SET_P (target)
                            && TREE_CODE (type) == ARRAY_TYPE
                            && TYPE_NONALIASED_COMPONENT (type))
                          {
                            target = copy_rtx (target);
                            MEM_KEEP_ALIAS_SET_P (target) = 1;
                          }

                        store_constructor_field
                          (target, bitsize, bitpos, mode, value, type, cleared,
                           get_alias_set (elttype));
                      }
                  }
                else
                  {
                    rtx loop_start = gen_label_rtx ();
                    rtx loop_end = gen_label_rtx ();
                    tree exit_cond;

                    expand_normal (hi_index);
                    unsignedp = TYPE_UNSIGNED (domain);

                    index = build_decl (VAR_DECL, NULL_TREE, domain);

                    index_r
                      = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
                                                   &unsignedp, 0));
                    SET_DECL_RTL (index, index_r);
                    store_expr (lo_index, index_r, 0, false);

                    /* Build the head of the loop.  */
                    do_pending_stack_adjust ();
                    emit_label (loop_start);

                    /* Assign value to element index.  */
                    position =
                      fold_convert (ssizetype,
                                    fold_build2 (MINUS_EXPR,
                                                 TREE_TYPE (index),
                                                 index,
                                                 TYPE_MIN_VALUE (domain)));

                    position =
                        size_binop (MULT_EXPR, position,
                                    fold_convert (ssizetype,
                                                  TYPE_SIZE_UNIT (elttype)));

                    pos_rtx = expand_normal (position);
                    xtarget = offset_address (target, pos_rtx,
                                              highest_pow2_factor (position));
                    xtarget = adjust_address (xtarget, mode, 0);
                    if (TREE_CODE (value) == CONSTRUCTOR)
                      store_constructor (value, xtarget, cleared,
                                         bitsize / BITS_PER_UNIT);
                    else
                      store_expr (value, xtarget, 0, false);

                    /* Generate a conditional jump to exit the loop.  */
                    exit_cond = build2 (LT_EXPR, integer_type_node,
                                        index, hi_index);
                    jumpif (exit_cond, loop_end);

                    /* Update the loop counter, and jump to the head of
                       the loop.  */
                    expand_assignment (index,
                                       build2 (PLUS_EXPR, TREE_TYPE (index),
                                               index, integer_one_node),
                                       false);

                    emit_jump (loop_start);

                    /* Build the end of the loop.  */
                    emit_label (loop_end);
                  }
              }
            else if ((index != 0 && ! host_integerp (index, 0))
                     || ! host_integerp (TYPE_SIZE (elttype), 1))
              {
                tree position;

                if (index == 0)
                  index = ssize_int (1);

                if (minelt)
                  index = fold_convert (ssizetype,
                                        fold_build2 (MINUS_EXPR,
                                                     TREE_TYPE (index),
                                                     index,
                                                     TYPE_MIN_VALUE (domain)));

                position =
                  size_binop (MULT_EXPR, index,
                              fold_convert (ssizetype,
                                            TYPE_SIZE_UNIT (elttype)));
                xtarget = offset_address (target,
                                          expand_normal (position),
                                          highest_pow2_factor (position));
                xtarget = adjust_address (xtarget, mode, 0);
                store_expr (value, xtarget, 0, false);
              }
            else
              {
                if (index != 0)
                  bitpos = ((tree_low_cst (index, 0) - minelt)
                            * tree_low_cst (TYPE_SIZE (elttype), 1));
                else
                  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

                if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
                    && TREE_CODE (type) == ARRAY_TYPE
                    && TYPE_NONALIASED_COMPONENT (type))
                  {
                    target = copy_rtx (target);
                    MEM_KEEP_ALIAS_SET_P (target) = 1;
                  }
                store_constructor_field (target, bitsize, bitpos, mode, value,
                                         type, cleared, get_alias_set (elttype));
              }
          }
        break;
      }

    case VECTOR_TYPE:
      {
        unsigned HOST_WIDE_INT idx;
        constructor_elt *ce;
        int i;
        int need_to_clear;
        int icode = 0;
        tree elttype = TREE_TYPE (type);
        int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
        enum machine_mode eltmode = TYPE_MODE (elttype);
        HOST_WIDE_INT bitsize;
        HOST_WIDE_INT bitpos;
        rtvec vector = NULL;
        unsigned n_elts;
        alias_set_type alias;

        gcc_assert (eltmode != BLKmode);

        n_elts = TYPE_VECTOR_SUBPARTS (type);
        if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
          {
            enum machine_mode mode = GET_MODE (target);

            icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
            if (icode != CODE_FOR_nothing)
              {
                unsigned int i;

                vector = rtvec_alloc (n_elts);
                for (i = 0; i < n_elts; i++)
                  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
              }
          }

        /* If the constructor has fewer elements than the vector,
           clear the whole array first.  Similarly if this is static
           constructor of a non-BLKmode object.  */
        if (cleared)
          need_to_clear = 0;
        else if (REG_P (target) && TREE_STATIC (exp))
          need_to_clear = 1;
        else
          {
            unsigned HOST_WIDE_INT count = 0, zero_count = 0;
            tree value;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
              {
                int n_elts_here = tree_low_cst
                  (int_const_binop (TRUNC_DIV_EXPR,
                                    TYPE_SIZE (TREE_TYPE (value)),
                                    TYPE_SIZE (elttype), 0), 1);

                count += n_elts_here;
                if (mostly_zeros_p (value))
                  zero_count += n_elts_here;
              }

            /* Clear the entire vector first if there are any missing elements,
               or if the incidence of zero elements is >= 75%.  */
            need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
          }

        if (need_to_clear && size > 0 && !vector)
          {
            if (REG_P (target))
              emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
            else
              clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
            cleared = 1;
          }

        /* Inform later passes that the old value is dead.  */
        if (!cleared && !vector && REG_P (target))
          emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

        if (MEM_P (target))
          alias = MEM_ALIAS_SET (target);
        else
          alias = get_alias_set (elttype);

        /* Store each element of the constructor into the corresponding
           element of TARGET, determined by counting the elements.  */
        for (idx = 0, i = 0;
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
             idx++, i += bitsize / elt_size)
          {
            HOST_WIDE_INT eltpos;
            tree value = ce->value;

            bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
            if (cleared && initializer_zerop (value))
              continue;

            if (ce->index)
              eltpos = tree_low_cst (ce->index, 1);
            else
              eltpos = i;

            if (vector)
              {
                /* Vector CONSTRUCTORs should only be built from smaller
                   vectors in the case of BLKmode vectors.  */
                gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
                RTVEC_ELT (vector, eltpos)
                  = expand_normal (value);
              }
            else
              {
                enum machine_mode value_mode =
                  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
                  ? TYPE_MODE (TREE_TYPE (value))
                  : eltmode;
                bitpos = eltpos * elt_size;
                store_constructor_field (target, bitsize, bitpos,
                                         value_mode, value, type,
                                         cleared, alias);
              }
          }

        if (vector)
          emit_insn (GEN_FCN (icode)
                     (target,
                      gen_rtx_PARALLEL (GET_MODE (target), vector)));
        break;
      }

    default:
      gcc_unreachable ();
    }
}
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   Always return const0_rtx unless we have something particular to
   return.

   TYPE is the type of the underlying object,

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
             enum machine_mode mode, tree exp, tree type,
             alias_set_type alias_set, bool nontemporal)
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
     twice, once with emit_move_insn and once via store_field.  */

  if (mode == BLKmode
      && (REG_P (target) || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_temp (type, 0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
        emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
                   nontemporal);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      gcc_assert (!bitpos);
      return store_expr (exp, target, 0, nontemporal);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
         store it as a bit field.  */
      || (mode != BLKmode
          && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
                || bitpos % GET_MODE_ALIGNMENT (mode))
               && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
              || (bitpos % BITS_PER_UNIT != 0)))
      /* If the RHS and field are a constant size and the size of the
         RHS isn't the same size as the bitfield, we must use bitfield
         operations.  */
      || (bitsize >= 0
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
          && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp;
      gimple nop_def;

      /* If EXP is a NOP_EXPR of precision less than its mode, then that
         implies a mask operation.  If the precision is the same size as
         the field we're storing into, that mask is redundant.  This is
         particularly common with bit field assignments generated by the
         C front end.  */
      nop_def = get_def_for_expr (exp, NOP_EXPR);
      if (nop_def)
        {
          tree type = TREE_TYPE (exp);
          if (INTEGRAL_TYPE_P (type)
              && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
              && bitsize == TYPE_PRECISION (type))
            {
              tree op = gimple_assign_rhs1 (nop_def);
              type = TREE_TYPE (op);
              if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
                exp = op;
            }
        }

      temp = expand_normal (exp);

      /* If BITSIZE is narrower than the size of the type of EXP
         we will be narrowing TEMP.  Normally, what's wanted are the
         low-order bits.  However, if EXP's type is a record and this is
         big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
          && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
        temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
                             size_int (GET_MODE_BITSIZE (GET_MODE (temp))
                                       - bitsize),
                             NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
         MODE.  */
      if (mode != VOIDmode && mode != BLKmode
          && mode != TYPE_MODE (TREE_TYPE (exp)))
        temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TEMP and TARGET are both BLKmode, both
         must be in memory and BITPOS must be aligned on a byte
         boundary.  If so, we simply do a block copy.  Likewise
         for a BLKmode-like TARGET.  */
      if (GET_MODE (temp) == BLKmode
          && (GET_MODE (target) == BLKmode
              || (MEM_P (target)
                  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
                  && (bitpos % BITS_PER_UNIT) == 0
                  && (bitsize % BITS_PER_UNIT) == 0)))
        {
          gcc_assert (MEM_P (target) && MEM_P (temp)
                      && (bitpos % BITS_PER_UNIT) == 0);

          target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
          emit_block_move (target, temp,
                           GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                    / BITS_PER_UNIT),
                           BLOCK_OP_NORMAL);

          return const0_rtx;
        }

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
        to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
        set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, 0, nontemporal);
    }
}
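
/* Sketch (ours): storing to "s.f" where F is a 5-bit field at bit
   offset 3 of a byte-addressed struct takes the bit-field branch above
   and bottoms out in store_bit_field (target, 5, 3, VOIDmode, temp);
   get_inner_reference arranges for MODE == VOIDmode in exactly this
   bit-field case.  */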
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
   Otherwise, it is a mode that can be used to access the field.

   If the field describes a variable-sized object, *PMODE is set to
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.

   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
   look through nodes that serve as markers of a greater alignment than
   the one that can be deduced from the expression.  These nodes make it
   possible for front-ends to prevent temporaries from being created by
   the middle-end on alignment considerations.  For that purpose, the
   normal operating mode at high-level is to always pass FALSE so that
   the ultimate containing object is really returned; moreover, the
   associated predicate handled_component_p will always return TRUE
   on these nodes, thus indicating that they are essentially handled
   by get_inner_reference.  TRUE should only be passed when the caller
   is scanning the expression in order to build another representation
   and specifically knows how to handle these nodes; as such, this is
   the normal operating mode in the RTL expanders.  */

tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
                     HOST_WIDE_INT *pbitpos, tree *poffset,
                     enum machine_mode *pmode, int *punsignedp,
                     int *pvolatilep, bool keep_aligning)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  bool blkmode_bitfield = false;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (exp, 1);
      size_tree = DECL_SIZE (field);
      if (!DECL_BIT_FIELD (field))
        mode = DECL_MODE (field);
      else if (DECL_MODE (field) == BLKmode)
        blkmode_bitfield = true;

      *punsignedp = DECL_UNSIGNED (field);
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
                     || TYPE_UNSIGNED (TREE_TYPE (exp)));

      /* For vector types, with the correct size of access, use the mode of
         inner type.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
          && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
          && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
        mode = TYPE_MODE (TREE_TYPE (exp));
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
        size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
        *pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
        mode = BLKmode, *pbitsize = -1;
      else
        *pbitsize = tree_low_cst (size_tree, 1);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case BIT_FIELD_REF:
          bit_offset = size_binop (PLUS_EXPR, bit_offset,
                                   TREE_OPERAND (exp, 2));
          break;

        case COMPONENT_REF:
          {
            tree field = TREE_OPERAND (exp, 1);
            tree this_offset = component_ref_field_offset (exp);

            /* If this field hasn't been filled in yet, don't go past it.
               This should only happen when folding expressions made during
               type construction.  */
            if (this_offset == 0)
              break;

            offset = size_binop (PLUS_EXPR, offset, this_offset);
            bit_offset = size_binop (PLUS_EXPR, bit_offset,
                                     DECL_FIELD_BIT_OFFSET (field));

            /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
          }
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree index = TREE_OPERAND (exp, 1);
            tree low_bound = array_ref_low_bound (exp);
            tree unit_size = array_ref_element_size (exp);

            /* We assume all arrays have sizes that are a multiple of a byte.
               First subtract the lower bound, if any, in the type of the
               index, then convert to sizetype and multiply by the size of
               the array element.  */
            if (! integer_zerop (low_bound))
              index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                   index, low_bound);

            offset = size_binop (PLUS_EXPR, offset,
                                 size_binop (MULT_EXPR,
                                             fold_convert (sizetype, index),
                                             unit_size));
          }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          bit_offset = size_binop (PLUS_EXPR, bit_offset,
                                   bitsize_int (*pbitsize));
          break;

        case VIEW_CONVERT_EXPR:
          if (keep_aligning && STRICT_ALIGNMENT
              && (TYPE_ALIGN (TREE_TYPE (exp))
                  > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
              && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
                  < BIGGEST_ALIGNMENT)
              && (TYPE_ALIGN_OK (TREE_TYPE (exp))
                  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
            goto done;
          break;

        default:
          goto done;
        }

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
        *pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (host_integerp (offset, 0))
    {
      double_int tem = double_int_mul (tree_to_double_int (offset),
                                       uhwi_to_double_int (BITS_PER_UNIT));
      tem = double_int_add (tem, tree_to_double_int (bit_offset));
      if (double_int_fits_in_shwi_p (tem))
        {
          *pbitpos = double_int_to_shwi (tem);
          *poffset = offset = NULL_TREE;
        }
    }

  /* Otherwise, split it up.  */
  if (offset)
    {
      *pbitpos = tree_low_cst (bit_offset, 0);
      *poffset = offset;
    }

  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
  if (mode == VOIDmode
      && blkmode_bitfield
      && (*pbitpos % BITS_PER_UNIT) == 0
      && (*pbitsize % BITS_PER_UNIT) == 0)
    *pmode = BLKmode;
  else
    *pmode = mode;

  return exp;
}
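
/* Example (illustrative, ours): for "s.f" where F is an int at byte
   offset 4, this returns the tree for S with *PBITSIZE == 32,
   *PBITPOS == 32 and *POFFSET == NULL_TREE; for "a[i].f" the variable
   part of the address comes back in *POFFSET instead of being folded
   into *PBITPOS.  */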
/* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
   ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
   EXP is marked as PACKED.  */

bool
contains_packed_reference (const_tree exp)
{
  bool packed_p = false;

  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case COMPONENT_REF:
          {
            tree field = TREE_OPERAND (exp, 1);
            packed_p = DECL_PACKED (field)
                       || TYPE_PACKED (TREE_TYPE (field))
                       || TYPE_PACKED (TREE_TYPE (exp));
            if (packed_p)
              goto done;
          }
          break;

        case BIT_FIELD_REF:
        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;

        default:
          goto done;
        }
      exp = TREE_OPERAND (exp, 0);
    }
 done:
  return packed_p;
}
6135 /* Return a tree of sizetype representing the size, in bytes, of the element
6136 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6139 array_ref_element_size (tree exp
)
6141 tree aligned_size
= TREE_OPERAND (exp
, 3);
6142 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
6144 /* If a size was specified in the ARRAY_REF, it's the size measured
6145 in alignment units of the element type. So multiply by that value. */
6148 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6149 sizetype from another type of the same width and signedness. */
6150 if (TREE_TYPE (aligned_size
) != sizetype
)
6151 aligned_size
= fold_convert (sizetype
, aligned_size
);
6152 return size_binop (MULT_EXPR
, aligned_size
,
6153 size_int (TYPE_ALIGN_UNIT (elmt_type
)));
6156 /* Otherwise, take the size from that of the element type. Substitute
6157 any PLACEHOLDER_EXPR that we have. */
6159 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type
), exp
);
6162 /* Return a tree representing the lower bound of the array mentioned in
6163 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6166 array_ref_low_bound (tree exp
)
6168 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
6170 /* If a lower bound is specified in EXP, use it. */
6171 if (TREE_OPERAND (exp
, 2))
6172 return TREE_OPERAND (exp
, 2);
6174 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6175 substituting for a PLACEHOLDER_EXPR as needed. */
6176 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
6177 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type
), exp
);
6179 /* Otherwise, return a zero of the appropriate type. */
6180 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp
, 1)), 0);
6183 /* Return a tree representing the upper bound of the array mentioned in
6184 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6187 array_ref_up_bound (tree exp
)
6189 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
6191 /* If there is a domain type and it has an upper bound, use it, substituting
6192 for a PLACEHOLDER_EXPR as needed. */
6193 if (domain_type
&& TYPE_MAX_VALUE (domain_type
))
6194 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type
), exp
);
6196 /* Otherwise fail. */
6200 /* Return a tree representing the offset, in bytes, of the field referenced
6201 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6204 component_ref_field_offset (tree exp
)
6206 tree aligned_offset
= TREE_OPERAND (exp
, 2);
6207 tree field
= TREE_OPERAND (exp
, 1);
6209 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6210 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6214 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6215 sizetype from another type of the same width and signedness. */
6216 if (TREE_TYPE (aligned_offset
) != sizetype
)
6217 aligned_offset
= fold_convert (sizetype
, aligned_offset
);
6218 return size_binop (MULT_EXPR
, aligned_offset
,
6219 size_int (DECL_OFFSET_ALIGN (field
) / BITS_PER_UNIT
));
6222 /* Otherwise, take the offset from that of the field. Substitute
6223 any PLACEHOLDER_EXPR that we have. */
6225 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field
), exp
);
6228 /* Given an rtx VALUE that may contain additions and multiplications, return
6229 an equivalent value that just refers to a register, memory, or constant.
6230 This is done by generating instructions to perform the arithmetic and
6231 returning a pseudo-register containing the value.
6233 The returned value may be a REG, SUBREG, MEM or constant. */
6236 force_operand (rtx value
, rtx target
)
6239 /* Use subtarget as the target for operand 0 of a binary operation. */
6240 rtx subtarget
= get_subtarget (target
);
6241 enum rtx_code code
= GET_CODE (value
);
6243 /* Check for subreg applied to an expression produced by loop optimizer. */
6245 && !REG_P (SUBREG_REG (value
))
6246 && !MEM_P (SUBREG_REG (value
)))
6249 = simplify_gen_subreg (GET_MODE (value
),
6250 force_reg (GET_MODE (SUBREG_REG (value
)),
6251 force_operand (SUBREG_REG (value
),
6253 GET_MODE (SUBREG_REG (value
)),
6254 SUBREG_BYTE (value
));
6255 code
= GET_CODE (value
);
6258 /* Check for a PIC address load. */
6259 if ((code
== PLUS
|| code
== MINUS
)
6260 && XEXP (value
, 0) == pic_offset_table_rtx
6261 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
6262 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
6263 || GET_CODE (XEXP (value
, 1)) == CONST
))
6266 subtarget
= gen_reg_rtx (GET_MODE (value
));
6267 emit_move_insn (subtarget
, value
);
6271 if (ARITHMETIC_P (value
))
6273 op2
= XEXP (value
, 1);
6274 if (!CONSTANT_P (op2
) && !(REG_P (op2
) && op2
!= subtarget
))
6276 if (code
== MINUS
&& GET_CODE (op2
) == CONST_INT
)
6279 op2
= negate_rtx (GET_MODE (value
), op2
);
6282 /* Check for an addition with OP2 a constant integer and our first
6283 operand a PLUS of a virtual register and something else. In that
6284 case, we want to emit the sum of the virtual register and the
6285 constant first and then add the other value. This allows virtual
6286 register instantiation to simply modify the constant rather than
6287 creating another one around this addition. */
6288 if (code
== PLUS
&& GET_CODE (op2
) == CONST_INT
6289 && GET_CODE (XEXP (value
, 0)) == PLUS
6290 && REG_P (XEXP (XEXP (value
, 0), 0))
6291 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6292 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
6294 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
6295 XEXP (XEXP (value
, 0), 0), op2
,
6296 subtarget
, 0, OPTAB_LIB_WIDEN
);
6297 return expand_simple_binop (GET_MODE (value
), code
, temp
,
6298 force_operand (XEXP (XEXP (value
,
6300 target
, 0, OPTAB_LIB_WIDEN
);
6303 op1
= force_operand (XEXP (value
, 0), subtarget
);
6304 op2
= force_operand (op2
, NULL_RTX
);
6308 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
6310 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
6311 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
6312 target
, 1, OPTAB_LIB_WIDEN
);
6314 return expand_divmod (0,
6315 FLOAT_MODE_P (GET_MODE (value
))
6316 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
6317 GET_MODE (value
), op1
, op2
, target
, 0);
6319 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
6322 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
6325 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
6328 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
6329 target
, 0, OPTAB_LIB_WIDEN
);
6331 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
6332 target
, 1, OPTAB_LIB_WIDEN
);
6335 if (UNARY_P (value
))
6338 target
= gen_reg_rtx (GET_MODE (value
));
6339 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
6346 case FLOAT_TRUNCATE
:
6347 convert_move (target
, op1
, code
== ZERO_EXTEND
);
6352 expand_fix (target
, op1
, code
== UNSIGNED_FIX
);
6356 case UNSIGNED_FLOAT
:
6357 expand_float (target
, op1
, code
== UNSIGNED_FLOAT
);
6361 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
6365 #ifdef INSN_SCHEDULING
6366 /* On machines that have insn scheduling, we want all memory reference to be
6367 explicit, so we need to deal with such paradoxical SUBREGs. */
6368 if (GET_CODE (value
) == SUBREG
&& MEM_P (SUBREG_REG (value
))
6369 && (GET_MODE_SIZE (GET_MODE (value
))
6370 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value
)))))
6372 = simplify_gen_subreg (GET_MODE (value
),
6373 force_reg (GET_MODE (SUBREG_REG (value
)),
6374 force_operand (SUBREG_REG (value
),
6376 GET_MODE (SUBREG_REG (value
)),
6377 SUBREG_BYTE (value
));
6383 /* Subroutine of expand_expr: return nonzero iff there is no way that
6384 EXP can reference X, which is being modified. TOP_P is nonzero if this
6385 call is going to be used to determine whether we need a temporary
6386 for EXP, as opposed to a recursive call to this function.
6388 It is always safe for this routine to return zero since it merely
6389 searches for optimization opportunities. */
6392 safe_from_p (const_rtx x
, tree exp
, int top_p
)
6398 /* If EXP has varying size, we MUST use a target since we currently
6399 have no way of allocating temporaries of variable size
6400 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6401 So we assume here that something at a higher level has prevented a
6402 clash. This is somewhat bogus, but the best we can do. Only
6403 do this when X is BLKmode and when we are at the top level. */
6404 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
6405 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
6406 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
6407 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
6408 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
6410 && GET_MODE (x
) == BLKmode
)
6411 /* If X is in the outgoing argument area, it is always safe. */
6413 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
6414 || (GET_CODE (XEXP (x
, 0)) == PLUS
6415 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
6418 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6419 find the underlying pseudo. */
6420 if (GET_CODE (x
) == SUBREG
)
6423 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
6427 /* Now look at our tree code and possibly recurse. */
6428 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
6430 case tcc_declaration
:
6431 exp_rtl
= DECL_RTL_IF_SET (exp
);
6437 case tcc_exceptional
:
6438 if (TREE_CODE (exp
) == TREE_LIST
)
6442 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
6444 exp
= TREE_CHAIN (exp
);
6447 if (TREE_CODE (exp
) != TREE_LIST
)
6448 return safe_from_p (x
, exp
, 0);
6451 else if (TREE_CODE (exp
) == CONSTRUCTOR
)
6453 constructor_elt
*ce
;
6454 unsigned HOST_WIDE_INT idx
;
6457 VEC_iterate (constructor_elt
, CONSTRUCTOR_ELTS (exp
), idx
, ce
);
6459 if ((ce
->index
!= NULL_TREE
&& !safe_from_p (x
, ce
->index
, 0))
6460 || !safe_from_p (x
, ce
->value
, 0))
6464 else if (TREE_CODE (exp
) == ERROR_MARK
)
6465 return 1; /* An already-visited SAVE_EXPR? */
6470 /* The only case we look at here is the DECL_INITIAL inside a
6472 return (TREE_CODE (exp
) != DECL_EXPR
6473 || TREE_CODE (DECL_EXPR_DECL (exp
)) != VAR_DECL
6474 || !DECL_INITIAL (DECL_EXPR_DECL (exp
))
6475 || safe_from_p (x
, DECL_INITIAL (DECL_EXPR_DECL (exp
)), 0));
6478 case tcc_comparison
:
6479 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
6484 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
6486 case tcc_expression
:
6489 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6490 the expression. If it is set, we conflict iff we are that rtx or
6491 both are in memory. Otherwise, we check all operands of the
6492 expression recursively. */
6494 switch (TREE_CODE (exp
))
6497 /* If the operand is static or we are static, we can't conflict.
6498 Likewise if we don't conflict with the operand at all. */
6499 if (staticp (TREE_OPERAND (exp
, 0))
6500 || TREE_STATIC (exp
)
6501 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
6504 /* Otherwise, the only way this can conflict is if we are taking
6505 the address of a DECL a that address if part of X, which is
6507 exp
= TREE_OPERAND (exp
, 0);
6510 if (!DECL_RTL_SET_P (exp
)
6511 || !MEM_P (DECL_RTL (exp
)))
6514 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
6518 case MISALIGNED_INDIRECT_REF
:
6519 case ALIGN_INDIRECT_REF
:
6522 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
6523 get_alias_set (exp
)))
6528 /* Assume that the call will clobber all hard registers and
6530 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
6535 case WITH_CLEANUP_EXPR
:
6536 case CLEANUP_POINT_EXPR
:
6537 /* Lowered by gimplify.c. */
6541 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
6547 /* If we have an rtx, we do not need to scan our operands. */
6551 nops
= TREE_OPERAND_LENGTH (exp
);
6552 for (i
= 0; i
< nops
; i
++)
6553 if (TREE_OPERAND (exp
, i
) != 0
6554 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
6560 /* Should never get a type here. */
6564 /* If we have an rtl, find any enclosed object. Then see if we conflict
6568 if (GET_CODE (exp_rtl
) == SUBREG
)
6570 exp_rtl
= SUBREG_REG (exp_rtl
);
6572 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
6576 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6577 are memory and they conflict. */
6578 return ! (rtx_equal_p (x
, exp_rtl
)
6579 || (MEM_P (x
) && MEM_P (exp_rtl
)
6580 && true_dependence (exp_rtl
, VOIDmode
, x
,
6581 rtx_addr_varies_p
)));
6584 /* If we reach here, it is safe. */
6589 /* Return the highest power of two that EXP is known to be a multiple of.
6590 This is used in updating alignment of MEMs in array references. */
6592 unsigned HOST_WIDE_INT
6593 highest_pow2_factor (const_tree exp
)
6595 unsigned HOST_WIDE_INT c0
, c1
;
6597 switch (TREE_CODE (exp
))
6600 /* We can find the lowest bit that's a one. If the low
6601 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6602 We need to handle this case since we can find it in a COND_EXPR,
6603 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6604 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6606 if (TREE_OVERFLOW (exp
))
6607 return BIGGEST_ALIGNMENT
;
6610 /* Note: tree_low_cst is intentionally not used here,
6611 we don't care about the upper bits. */
6612 c0
= TREE_INT_CST_LOW (exp
);
6614 return c0
? c0
: BIGGEST_ALIGNMENT
;
6618 case PLUS_EXPR
: case MINUS_EXPR
: case MIN_EXPR
: case MAX_EXPR
:
6619 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6620 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6621 return MIN (c0
, c1
);
6624 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6625 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6628 case ROUND_DIV_EXPR
: case TRUNC_DIV_EXPR
: case FLOOR_DIV_EXPR
:
6630 if (integer_pow2p (TREE_OPERAND (exp
, 1))
6631 && host_integerp (TREE_OPERAND (exp
, 1), 1))
6633 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 0));
6634 c1
= tree_low_cst (TREE_OPERAND (exp
, 1), 1);
6635 return MAX (1, c0
/ c1
);
6640 /* The highest power of two of a bit-and expression is the maximum of
6641 that of its operands. We typically get here for a complex LHS and
6642 a constant negative power of two on the RHS to force an explicit
6643 alignment, so don't bother looking at the LHS. */
6644 return highest_pow2_factor (TREE_OPERAND (exp
, 1));
6648 return highest_pow2_factor (TREE_OPERAND (exp
, 0));
6651 return highest_pow2_factor (TREE_OPERAND (exp
, 1));
6654 c0
= highest_pow2_factor (TREE_OPERAND (exp
, 1));
6655 c1
= highest_pow2_factor (TREE_OPERAND (exp
, 2));
6656 return MIN (c0
, c1
);
6665 /* Similar, except that the alignment requirements of TARGET are
6666 taken into account. Assume it is at least as aligned as its
6667 type, unless it is a COMPONENT_REF in which case the layout of
6668 the structure gives the alignment. */
6670 static unsigned HOST_WIDE_INT
6671 highest_pow2_factor_for_target (const_tree target
, const_tree exp
)
6673 unsigned HOST_WIDE_INT target_align
, factor
;
6675 factor
= highest_pow2_factor (exp
);
6676 if (TREE_CODE (target
) == COMPONENT_REF
)
6677 target_align
= DECL_ALIGN_UNIT (TREE_OPERAND (target
, 1));
6679 target_align
= TYPE_ALIGN_UNIT (TREE_TYPE (target
));
6680 return MAX (factor
, target_align
);
6683 /* Return &VAR expression for emulated thread local VAR. */
6686 emutls_var_address (tree var
)
6688 tree emuvar
= emutls_decl (var
);
6689 tree fn
= built_in_decls
[BUILT_IN_EMUTLS_GET_ADDRESS
];
6690 tree arg
= build_fold_addr_expr_with_type (emuvar
, ptr_type_node
);
6691 tree arglist
= build_tree_list (NULL_TREE
, arg
);
6692 tree call
= build_function_call_expr (fn
, arglist
);
6693 return fold_convert (build_pointer_type (TREE_TYPE (var
)), call
);
6697 /* Subroutine of expand_expr. Expand the two operands of a binary
6698 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6699 The value may be stored in TARGET if TARGET is nonzero. The
6700 MODIFIER argument is as documented by expand_expr. */
6703 expand_operands (tree exp0
, tree exp1
, rtx target
, rtx
*op0
, rtx
*op1
,
6704 enum expand_modifier modifier
)
6706 if (! safe_from_p (target
, exp1
, 1))
6708 if (operand_equal_p (exp0
, exp1
, 0))
6710 *op0
= expand_expr (exp0
, target
, VOIDmode
, modifier
);
6711 *op1
= copy_rtx (*op0
);
6715 /* If we need to preserve evaluation order, copy exp0 into its own
6716 temporary variable so that it can't be clobbered by exp1. */
6717 if (flag_evaluation_order
&& TREE_SIDE_EFFECTS (exp1
))
6718 exp0
= save_expr (exp0
);
6719 *op0
= expand_expr (exp0
, target
, VOIDmode
, modifier
);
6720 *op1
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, modifier
);
6725 /* Return a MEM that contains constant EXP. DEFER is as for
6726 output_constant_def and MODIFIER is as for expand_expr. */
6729 expand_expr_constant (tree exp
, int defer
, enum expand_modifier modifier
)
6733 mem
= output_constant_def (exp
, defer
);
6734 if (modifier
!= EXPAND_INITIALIZER
)
6735 mem
= use_anchored_address (mem
);
6739 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6740 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6743 expand_expr_addr_expr_1 (tree exp
, rtx target
, enum machine_mode tmode
,
6744 enum expand_modifier modifier
)
6746 rtx result
, subtarget
;
6748 HOST_WIDE_INT bitsize
, bitpos
;
6749 int volatilep
, unsignedp
;
6750 enum machine_mode mode1
;
6752 /* If we are taking the address of a constant and are at the top level,
6753 we have to use output_constant_def since we can't call force_const_mem
6755 /* ??? This should be considered a front-end bug. We should not be
6756 generating ADDR_EXPR of something that isn't an LVALUE. The only
6757 exception here is STRING_CST. */
6758 if (CONSTANT_CLASS_P (exp
))
6759 return XEXP (expand_expr_constant (exp
, 0, modifier
), 0);
6761 /* Everything must be something allowed by is_gimple_addressable. */
6762 switch (TREE_CODE (exp
))
6765 /* This case will happen via recursion for &a->b. */
6766 return expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
6769 /* Recurse and make the output_constant_def clause above handle this. */
6770 return expand_expr_addr_expr_1 (DECL_INITIAL (exp
), target
,
6774 /* The real part of the complex number is always first, therefore
6775 the address is the same as the address of the parent object. */
6778 inner
= TREE_OPERAND (exp
, 0);
6782 /* The imaginary part of the complex number is always second.
6783 The expression is therefore always offset by the size of the
6786 bitpos
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp
)));
6787 inner
= TREE_OPERAND (exp
, 0);
6791 /* TLS emulation hook - replace __thread VAR's &VAR with
6792 __emutls_get_address (&_emutls.VAR). */
6793 if (! targetm
.have_tls
6794 && TREE_CODE (exp
) == VAR_DECL
6795 && DECL_THREAD_LOCAL_P (exp
))
6797 exp
= emutls_var_address (exp
);
6798 return expand_expr (exp
, target
, tmode
, modifier
);
6803 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6804 expand_expr, as that can have various side effects; LABEL_DECLs for
6805 example, may not have their DECL_RTL set yet. Expand the rtl of
6806 CONSTRUCTORs too, which should yield a memory reference for the
6807 constructor's contents. Assume language specific tree nodes can
6808 be expanded in some interesting way. */
6809 gcc_assert (TREE_CODE (exp
) < LAST_AND_UNUSED_TREE_CODE
);
6811 || TREE_CODE (exp
) == CONSTRUCTOR
6812 || TREE_CODE (exp
) == COMPOUND_LITERAL_EXPR
)
6814 result
= expand_expr (exp
, target
, tmode
,
6815 modifier
== EXPAND_INITIALIZER
6816 ? EXPAND_INITIALIZER
: EXPAND_CONST_ADDRESS
);
6818 /* If the DECL isn't in memory, then the DECL wasn't properly
6819 marked TREE_ADDRESSABLE, which will be either a front-end
6820 or a tree optimizer bug. */
6821 gcc_assert (MEM_P (result
));
6822 result
= XEXP (result
, 0);
6824 /* ??? Is this needed anymore? */
6825 if (DECL_P (exp
) && !TREE_USED (exp
) == 0)
6827 assemble_external (exp
);
6828 TREE_USED (exp
) = 1;
6831 if (modifier
!= EXPAND_INITIALIZER
6832 && modifier
!= EXPAND_CONST_ADDRESS
)
6833 result
= force_operand (result
, target
);
6837 /* Pass FALSE as the last argument to get_inner_reference although
6838 we are expanding to RTL. The rationale is that we know how to
6839 handle "aligning nodes" here: we can just bypass them because
6840 they won't change the final object whose address will be returned
6841 (they actually exist only for that purpose). */
6842 inner
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
6843 &mode1
, &unsignedp
, &volatilep
, false);
6847 /* We must have made progress. */
6848 gcc_assert (inner
!= exp
);
6850 subtarget
= offset
|| bitpos
? NULL_RTX
: target
;
6851 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6852 inner alignment, force the inner to be sufficiently aligned. */
6853 if (CONSTANT_CLASS_P (inner
)
6854 && TYPE_ALIGN (TREE_TYPE (inner
)) < TYPE_ALIGN (TREE_TYPE (exp
)))
6856 inner
= copy_node (inner
);
6857 TREE_TYPE (inner
) = copy_node (TREE_TYPE (inner
));
6858 TYPE_ALIGN (TREE_TYPE (inner
)) = TYPE_ALIGN (TREE_TYPE (exp
));
6859 TYPE_USER_ALIGN (TREE_TYPE (inner
)) = 1;
6861 result
= expand_expr_addr_expr_1 (inner
, subtarget
, tmode
, modifier
);
6867 if (modifier
!= EXPAND_NORMAL
)
6868 result
= force_operand (result
, NULL
);
6869 tmp
= expand_expr (offset
, NULL_RTX
, tmode
,
6870 modifier
== EXPAND_INITIALIZER
6871 ? EXPAND_INITIALIZER
: EXPAND_NORMAL
);
6873 result
= convert_memory_address (tmode
, result
);
6874 tmp
= convert_memory_address (tmode
, tmp
);
6876 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
6877 result
= gen_rtx_PLUS (tmode
, result
, tmp
);
6880 subtarget
= bitpos
? NULL_RTX
: target
;
6881 result
= expand_simple_binop (tmode
, PLUS
, result
, tmp
, subtarget
,
6882 1, OPTAB_LIB_WIDEN
);
6888 /* Someone beforehand should have rejected taking the address
6889 of such an object. */
6890 gcc_assert ((bitpos
% BITS_PER_UNIT
) == 0);
6892 result
= plus_constant (result
, bitpos
/ BITS_PER_UNIT
);
6893 if (modifier
< EXPAND_SUM
)
6894 result
= force_operand (result
, target
);
6900 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6901 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6904 expand_expr_addr_expr (tree exp
, rtx target
, enum machine_mode tmode
,
6905 enum expand_modifier modifier
)
6907 enum machine_mode rmode
;
6910 /* Target mode of VOIDmode says "whatever's natural". */
6911 if (tmode
== VOIDmode
)
6912 tmode
= TYPE_MODE (TREE_TYPE (exp
));
6914 /* We can get called with some Weird Things if the user does silliness
6915 like "(short) &a". In that case, convert_memory_address won't do
6916 the right thing, so ignore the given target mode. */
6917 if (tmode
!= Pmode
&& tmode
!= ptr_mode
)
6920 result
= expand_expr_addr_expr_1 (TREE_OPERAND (exp
, 0), target
,
6923 /* Despite expand_expr claims concerning ignoring TMODE when not
6924 strictly convenient, stuff breaks if we don't honor it. Note
6925 that combined with the above, we only do this for pointer modes. */
6926 rmode
= GET_MODE (result
);
6927 if (rmode
== VOIDmode
)
6930 result
= convert_memory_address (tmode
, result
);
6935 /* Generate code for computing CONSTRUCTOR EXP.
6936 An rtx for the computed value is returned. If AVOID_TEMP_MEM
6937 is TRUE, instead of creating a temporary variable in memory
6938 NULL is returned and the caller needs to handle it differently. */
6941 expand_constructor (tree exp
, rtx target
, enum expand_modifier modifier
,
6942 bool avoid_temp_mem
)
6944 tree type
= TREE_TYPE (exp
);
6945 enum machine_mode mode
= TYPE_MODE (type
);
6947 /* Try to avoid creating a temporary at all. This is possible
6948 if all of the initializer is zero.
6949 FIXME: try to handle all [0..255] initializers we can handle
6951 if (TREE_STATIC (exp
)
6952 && !TREE_ADDRESSABLE (exp
)
6953 && target
!= 0 && mode
== BLKmode
6954 && all_zeros_p (exp
))
6956 clear_storage (target
, expr_size (exp
), BLOCK_OP_NORMAL
);
6960 /* All elts simple constants => refer to a constant in memory. But
6961 if this is a non-BLKmode mode, let it store a field at a time
6962 since that should make a CONST_INT or CONST_DOUBLE when we
6963 fold. Likewise, if we have a target we can use, it is best to
6964 store directly into the target unless the type is large enough
6965 that memcpy will be used. If we are making an initializer and
6966 all operands are constant, put it in memory as well.
6968 FIXME: Avoid trying to fill vector constructors piece-meal.
6969 Output them with output_constant_def below unless we're sure
6970 they're zeros. This should go away when vector initializers
6971 are treated like VECTOR_CST instead of arrays. */
6972 if ((TREE_STATIC (exp
)
6973 && ((mode
== BLKmode
6974 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
6975 || TREE_ADDRESSABLE (exp
)
6976 || (host_integerp (TYPE_SIZE_UNIT (type
), 1)
6977 && (! MOVE_BY_PIECES_P
6978 (tree_low_cst (TYPE_SIZE_UNIT (type
), 1),
6980 && ! mostly_zeros_p (exp
))))
6981 || ((modifier
== EXPAND_INITIALIZER
|| modifier
== EXPAND_CONST_ADDRESS
)
6982 && TREE_CONSTANT (exp
)))
6989 constructor
= expand_expr_constant (exp
, 1, modifier
);
6991 if (modifier
!= EXPAND_CONST_ADDRESS
6992 && modifier
!= EXPAND_INITIALIZER
6993 && modifier
!= EXPAND_SUM
)
6994 constructor
= validize_mem (constructor
);
6999 /* Handle calls that pass values in multiple non-contiguous
7000 locations. The Irix 6 ABI has examples of this. */
7001 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
7002 || GET_CODE (target
) == PARALLEL
|| modifier
== EXPAND_STACK_PARM
)
7008 = assign_temp (build_qualified_type (type
, (TYPE_QUALS (type
)
7009 | (TREE_READONLY (exp
)
7010 * TYPE_QUAL_CONST
))),
7011 0, TREE_ADDRESSABLE (exp
), 1);
7014 store_constructor (exp
, target
, 0, int_expr_size (exp
));
7019 /* expand_expr: generate code for computing expression EXP.
7020 An rtx for the computed value is returned. The value is never null.
7021 In the case of a void EXP, const0_rtx is returned.
7023 The value may be stored in TARGET if TARGET is nonzero.
7024 TARGET is just a suggestion; callers must assume that
7025 the rtx returned may not be the same as TARGET.
7027 If TARGET is CONST0_RTX, it means that the value will be ignored.
7029 If TMODE is not VOIDmode, it suggests generating the
7030 result in mode TMODE. But this is done only when convenient.
7031 Otherwise, TMODE is ignored and the value generated in its natural mode.
7032 TMODE is just a suggestion; callers must assume that
7033 the rtx returned may not have mode TMODE.
7035 Note that TARGET may have neither TMODE nor MODE. In that case, it
7036 probably will not be used.
7038 If MODIFIER is EXPAND_SUM then when EXP is an addition
7039 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7040 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7041 products as above, or REG or MEM, or constant.
7042 Ordinarily in such cases we would output mul or add instructions
7043 and then return a pseudo reg containing the sum.
7045 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7046 it also marks a label as absolutely required (it can't be dead).
7047 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7048 This is used for outputting expressions used in initializers.
7050 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7051 with a constant address even if that address is not normally legitimate.
7052 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7054 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7055 a call parameter. Such targets require special care as we haven't yet
7056 marked TARGET so that it's safe from being trashed by libcalls. We
7057 don't want to use TARGET for anything but the final result;
7058 Intermediate values must go elsewhere. Additionally, calls to
7059 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7061 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7062 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7063 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7064 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7067 static rtx
expand_expr_real_1 (tree
, rtx
, enum machine_mode
,
7068 enum expand_modifier
, rtx
*);
7071 expand_expr_real (tree exp
, rtx target
, enum machine_mode tmode
,
7072 enum expand_modifier modifier
, rtx
*alt_rtl
)
7075 rtx ret
, last
= NULL
;
7077 /* Handle ERROR_MARK before anybody tries to access its type. */
7078 if (TREE_CODE (exp
) == ERROR_MARK
7079 || (TREE_CODE (TREE_TYPE (exp
)) == ERROR_MARK
))
7081 ret
= CONST0_RTX (tmode
);
7082 return ret
? ret
: const0_rtx
;
7085 if (flag_non_call_exceptions
)
7087 rn
= lookup_expr_eh_region (exp
);
7089 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
7091 last
= get_last_insn ();
7094 /* If this is an expression of some kind and it has an associated line
7095 number, then emit the line number before expanding the expression.
7097 We need to save and restore the file and line information so that
7098 errors discovered during expansion are emitted with the right
7099 information. It would be better of the diagnostic routines
7100 used the file/line information embedded in the tree nodes rather
7102 if (cfun
&& EXPR_HAS_LOCATION (exp
))
7104 location_t saved_location
= input_location
;
7105 input_location
= EXPR_LOCATION (exp
);
7106 set_curr_insn_source_location (input_location
);
7108 /* Record where the insns produced belong. */
7109 set_curr_insn_block (TREE_BLOCK (exp
));
7111 ret
= expand_expr_real_1 (exp
, target
, tmode
, modifier
, alt_rtl
);
7113 input_location
= saved_location
;
7117 ret
= expand_expr_real_1 (exp
, target
, tmode
, modifier
, alt_rtl
);
7120 /* If using non-call exceptions, mark all insns that may trap.
7121 expand_call() will mark CALL_INSNs before we get to this code,
7122 but it doesn't handle libcalls, and these may trap. */
7126 for (insn
= next_real_insn (last
); insn
;
7127 insn
= next_real_insn (insn
))
7129 if (! find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
)
7130 /* If we want exceptions for non-call insns, any
7131 may_trap_p instruction may throw. */
7132 && GET_CODE (PATTERN (insn
)) != CLOBBER
7133 && GET_CODE (PATTERN (insn
)) != USE
7134 && (CALL_P (insn
) || may_trap_p (PATTERN (insn
))))
7135 add_reg_note (insn
, REG_EH_REGION
, GEN_INT (rn
));
7143 expand_expr_real_1 (tree exp
, rtx target
, enum machine_mode tmode
,
7144 enum expand_modifier modifier
, rtx
*alt_rtl
)
7146 rtx op0
, op1
, op2
, temp
, decl_rtl
;
7149 enum machine_mode mode
;
7150 enum tree_code code
= TREE_CODE (exp
);
7152 rtx subtarget
, original_target
;
7154 tree context
, subexp0
, subexp1
;
7155 bool reduce_bit_field
;
7156 gimple subexp0_def
, subexp1_def
;
7158 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7159 ? reduce_to_bit_field_precision ((expr), \
7164 type
= TREE_TYPE (exp
);
7165 mode
= TYPE_MODE (type
);
7166 unsignedp
= TYPE_UNSIGNED (type
);
7168 ignore
= (target
== const0_rtx
7169 || ((CONVERT_EXPR_CODE_P (code
)
7170 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
7171 && TREE_CODE (type
) == VOID_TYPE
));
7173 /* An operation in what may be a bit-field type needs the
7174 result to be reduced to the precision of the bit-field type,
7175 which is narrower than that of the type's mode. */
7176 reduce_bit_field
= (!ignore
7177 && TREE_CODE (type
) == INTEGER_TYPE
7178 && GET_MODE_PRECISION (mode
) > TYPE_PRECISION (type
));
7180 /* If we are going to ignore this result, we need only do something
7181 if there is a side-effect somewhere in the expression. If there
7182 is, short-circuit the most common cases here. Note that we must
7183 not call expand_expr with anything but const0_rtx in case this
7184 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
7188 if (! TREE_SIDE_EFFECTS (exp
))
7191 /* Ensure we reference a volatile object even if value is ignored, but
7192 don't do this if all we are doing is taking its address. */
7193 if (TREE_THIS_VOLATILE (exp
)
7194 && TREE_CODE (exp
) != FUNCTION_DECL
7195 && mode
!= VOIDmode
&& mode
!= BLKmode
7196 && modifier
!= EXPAND_CONST_ADDRESS
)
7198 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
7200 temp
= copy_to_reg (temp
);
7204 if (TREE_CODE_CLASS (code
) == tcc_unary
7205 || code
== COMPONENT_REF
|| code
== INDIRECT_REF
)
7206 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
7209 else if (TREE_CODE_CLASS (code
) == tcc_binary
7210 || TREE_CODE_CLASS (code
) == tcc_comparison
7211 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
7213 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
7214 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
7217 else if (code
== BIT_FIELD_REF
)
7219 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, modifier
);
7220 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, modifier
);
7221 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
, modifier
);
7228 if (reduce_bit_field
&& modifier
== EXPAND_STACK_PARM
)
7231 /* Use subtarget as the target for operand 0 of a binary operation. */
7232 subtarget
= get_subtarget (target
);
7233 original_target
= target
;
7239 tree function
= decl_function_context (exp
);
7241 temp
= label_rtx (exp
);
7242 temp
= gen_rtx_LABEL_REF (Pmode
, temp
);
7244 if (function
!= current_function_decl
7246 LABEL_REF_NONLOCAL_P (temp
) = 1;
7248 temp
= gen_rtx_MEM (FUNCTION_MODE
, temp
);
7253 /* ??? ivopts calls expander, without any preparation from
7254 out-of-ssa. So fake instructions as if this was an access to the
7255 base variable. This unnecessarily allocates a pseudo, see how we can
7256 reuse it, if partition base vars have it set already. */
7257 if (!currently_expanding_to_rtl
)
7258 return expand_expr_real_1 (SSA_NAME_VAR (exp
), target
, tmode
, modifier
, NULL
);
7260 gimple g
= get_gimple_for_ssa_name (exp
);
7262 return expand_expr_real_1 (gimple_assign_rhs_to_tree (g
), target
,
7263 tmode
, modifier
, NULL
);
7265 decl_rtl
= get_rtx_for_ssa_name (exp
);
7266 exp
= SSA_NAME_VAR (exp
);
7267 goto expand_decl_rtl
;
7271 /* If a static var's type was incomplete when the decl was written,
7272 but the type is complete now, lay out the decl now. */
7273 if (DECL_SIZE (exp
) == 0
7274 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
7275 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
7276 layout_decl (exp
, 0);
7278 /* TLS emulation hook - replace __thread vars with
7279 *__emutls_get_address (&_emutls.var). */
7280 if (! targetm
.have_tls
7281 && TREE_CODE (exp
) == VAR_DECL
7282 && DECL_THREAD_LOCAL_P (exp
))
7284 exp
= build_fold_indirect_ref (emutls_var_address (exp
));
7285 return expand_expr_real_1 (exp
, target
, tmode
, modifier
, NULL
);
7288 /* ... fall through ... */
7292 decl_rtl
= DECL_RTL (exp
);
7294 gcc_assert (decl_rtl
);
7295 decl_rtl
= copy_rtx (decl_rtl
);
7297 /* Ensure variable marked as used even if it doesn't go through
7298 a parser. If it hasn't be used yet, write out an external
7300 if (! TREE_USED (exp
))
7302 assemble_external (exp
);
7303 TREE_USED (exp
) = 1;
7306 /* Show we haven't gotten RTL for this yet. */
7309 /* Variables inherited from containing functions should have
7310 been lowered by this point. */
7311 context
= decl_function_context (exp
);
7312 gcc_assert (!context
7313 || context
== current_function_decl
7314 || TREE_STATIC (exp
)
7315 /* ??? C++ creates functions that are not TREE_STATIC. */
7316 || TREE_CODE (exp
) == FUNCTION_DECL
);
7318 /* This is the case of an array whose size is to be determined
7319 from its initializer, while the initializer is still being parsed.
7322 if (MEM_P (decl_rtl
) && REG_P (XEXP (decl_rtl
, 0)))
7323 temp
= validize_mem (decl_rtl
);
7325 /* If DECL_RTL is memory, we are in the normal case and the
7326 address is not valid, get the address into a register. */
7328 else if (MEM_P (decl_rtl
) && modifier
!= EXPAND_INITIALIZER
)
7331 *alt_rtl
= decl_rtl
;
7332 decl_rtl
= use_anchored_address (decl_rtl
);
7333 if (modifier
!= EXPAND_CONST_ADDRESS
7334 && modifier
!= EXPAND_SUM
7335 && !memory_address_p (DECL_MODE (exp
), XEXP (decl_rtl
, 0)))
7336 temp
= replace_equiv_address (decl_rtl
,
7337 copy_rtx (XEXP (decl_rtl
, 0)));
7340 /* If we got something, return it. But first, set the alignment
7341 if the address is a register. */
7344 if (MEM_P (temp
) && REG_P (XEXP (temp
, 0)))
7345 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
7350 /* If the mode of DECL_RTL does not match that of the decl, it
7351 must be a promoted value. We return a SUBREG of the wanted mode,
7352 but mark it so that we know that it was already extended. */
7354 if (REG_P (decl_rtl
)
7355 && GET_MODE (decl_rtl
) != DECL_MODE (exp
))
7357 enum machine_mode pmode
;
7359 /* Get the signedness used for this variable. Ensure we get the
7360 same mode we got when the variable was declared. */
7361 pmode
= promote_mode (type
, DECL_MODE (exp
), &unsignedp
,
7362 (TREE_CODE (exp
) == RESULT_DECL
7363 || TREE_CODE (exp
) == PARM_DECL
) ? 1 : 0);
7364 gcc_assert (GET_MODE (decl_rtl
) == pmode
);
7366 temp
= gen_lowpart_SUBREG (mode
, decl_rtl
);
7367 SUBREG_PROMOTED_VAR_P (temp
) = 1;
7368 SUBREG_PROMOTED_UNSIGNED_SET (temp
, unsignedp
);
7375 temp
= immed_double_const (TREE_INT_CST_LOW (exp
),
7376 TREE_INT_CST_HIGH (exp
), mode
);
7382 tree tmp
= NULL_TREE
;
7383 if (GET_MODE_CLASS (mode
) == MODE_VECTOR_INT
7384 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FLOAT
7385 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FRACT
7386 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UFRACT
7387 || GET_MODE_CLASS (mode
) == MODE_VECTOR_ACCUM
7388 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UACCUM
)
7389 return const_vector_from_tree (exp
);
7390 if (GET_MODE_CLASS (mode
) == MODE_INT
)
7392 tree type_for_mode
= lang_hooks
.types
.type_for_mode (mode
, 1);
7394 tmp
= fold_unary (VIEW_CONVERT_EXPR
, type_for_mode
, exp
);
7397 tmp
= build_constructor_from_list (type
,
7398 TREE_VECTOR_CST_ELTS (exp
));
7399 return expand_expr (tmp
, ignore
? const0_rtx
: target
,
7404 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
7407 /* If optimized, generate immediate CONST_DOUBLE
7408 which will be turned into memory by reload if necessary.
7410 We used to force a register so that loop.c could see it. But
7411 this does not allow gen_* patterns to perform optimizations with
7412 the constants. It also produces two insns in cases like "x = 1.0;".
7413 On most machines, floating-point constants are not permitted in
7414 many insns, so we'd end up copying it to a register in any case.
7416 Now, we do the copying in expand_binop, if appropriate. */
7417 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
7418 TYPE_MODE (TREE_TYPE (exp
)));
7421 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp
),
7422 TYPE_MODE (TREE_TYPE (exp
)));
7425 /* Handle evaluating a complex constant in a CONCAT target. */
7426 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
7428 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
7431 rtarg
= XEXP (original_target
, 0);
7432 itarg
= XEXP (original_target
, 1);
7434 /* Move the real and imaginary parts separately. */
7435 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, EXPAND_NORMAL
);
7436 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, EXPAND_NORMAL
);
7439 emit_move_insn (rtarg
, op0
);
7441 emit_move_insn (itarg
, op1
);
7443 return original_target
;
7446 /* ... fall through ... */
7449 temp
= expand_expr_constant (exp
, 1, modifier
);
7451 /* temp contains a constant address.
7452 On RISC machines where a constant address isn't valid,
7453 make some insns to get that address into a register. */
7454 if (modifier
!= EXPAND_CONST_ADDRESS
7455 && modifier
!= EXPAND_INITIALIZER
7456 && modifier
!= EXPAND_SUM
7457 && ! memory_address_p (mode
, XEXP (temp
, 0)))
7458 return replace_equiv_address (temp
,
7459 copy_rtx (XEXP (temp
, 0)));
7464 tree val
= TREE_OPERAND (exp
, 0);
7465 rtx ret
= expand_expr_real_1 (val
, target
, tmode
, modifier
, alt_rtl
);
7467 if (!SAVE_EXPR_RESOLVED_P (exp
))
7469 /* We can indeed still hit this case, typically via builtin
7470 expanders calling save_expr immediately before expanding
7471 something. Assume this means that we only have to deal
7472 with non-BLKmode values. */
7473 gcc_assert (GET_MODE (ret
) != BLKmode
);
7475 val
= build_decl (VAR_DECL
, NULL
, TREE_TYPE (exp
));
7476 DECL_ARTIFICIAL (val
) = 1;
7477 DECL_IGNORED_P (val
) = 1;
7478 TREE_OPERAND (exp
, 0) = val
;
7479 SAVE_EXPR_RESOLVED_P (exp
) = 1;
7481 if (!CONSTANT_P (ret
))
7482 ret
= copy_to_reg (ret
);
7483 SET_DECL_RTL (val
, ret
);
7490 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
7491 expand_goto (TREE_OPERAND (exp
, 0));
7493 expand_computed_goto (TREE_OPERAND (exp
, 0));
7497 /* If we don't need the result, just ensure we evaluate any
7501 unsigned HOST_WIDE_INT idx
;
7504 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
7505 expand_expr (value
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
7510 return expand_constructor (exp
, target
, modifier
, false);
7512 case MISALIGNED_INDIRECT_REF
:
7513 case ALIGN_INDIRECT_REF
:
7516 tree exp1
= TREE_OPERAND (exp
, 0);
7518 if (modifier
!= EXPAND_WRITE
)
7522 t
= fold_read_from_constant_string (exp
);
7524 return expand_expr (t
, target
, tmode
, modifier
);
7527 op0
= expand_expr (exp1
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
7528 op0
= memory_address (mode
, op0
);
7530 if (code
== ALIGN_INDIRECT_REF
)
7532 int align
= TYPE_ALIGN_UNIT (type
);
7533 op0
= gen_rtx_AND (Pmode
, op0
, GEN_INT (-align
));
7534 op0
= memory_address (mode
, op0
);
7537 temp
= gen_rtx_MEM (mode
, op0
);
7539 set_mem_attributes (temp
, exp
, 0);
7541 /* Resolve the misalignment now, so that we don't have to remember
7542 to resolve it later. Of course, this only works for reads. */
7543 /* ??? When we get around to supporting writes, we'll have to handle
7544 this in store_expr directly. The vectorizer isn't generating
7545 those yet, however. */
7546 if (code
== MISALIGNED_INDIRECT_REF
)
7551 gcc_assert (modifier
== EXPAND_NORMAL
7552 || modifier
== EXPAND_STACK_PARM
);
7554 /* The vectorizer should have already checked the mode. */
7555 icode
= optab_handler (movmisalign_optab
, mode
)->insn_code
;
7556 gcc_assert (icode
!= CODE_FOR_nothing
);
7558 /* We've already validated the memory, and we're creating a
7559 new pseudo destination. The predicates really can't fail. */
7560 reg
= gen_reg_rtx (mode
);
7562 /* Nor can the insn generator. */
7563 insn
= GEN_FCN (icode
) (reg
, temp
);
7572 case TARGET_MEM_REF
:
7574 struct mem_address addr
;
7576 get_address_description (exp
, &addr
);
7577 op0
= addr_for_mem_ref (&addr
, true);
7578 op0
= memory_address (mode
, op0
);
7579 temp
= gen_rtx_MEM (mode
, op0
);
7580 set_mem_attributes (temp
, TMR_ORIGINAL (exp
), 0);
7587 tree array
= TREE_OPERAND (exp
, 0);
7588 tree index
= TREE_OPERAND (exp
, 1);
7590 /* Fold an expression like: "foo"[2].
7591 This is not done in fold so it won't happen inside &.
7592 Don't fold if this is for wide characters since it's too
7593 difficult to do correctly and this is a very rare case. */
7595 if (modifier
!= EXPAND_CONST_ADDRESS
7596 && modifier
!= EXPAND_INITIALIZER
7597 && modifier
!= EXPAND_MEMORY
)
7599 tree t
= fold_read_from_constant_string (exp
);
7602 return expand_expr (t
, target
, tmode
, modifier
);
7605 /* If this is a constant index into a constant array,
7606 just get the value from the array. Handle both the cases when
7607 we have an explicit constructor and when our operand is a variable
7608 that was declared const. */
7610 if (modifier
!= EXPAND_CONST_ADDRESS
7611 && modifier
!= EXPAND_INITIALIZER
7612 && modifier
!= EXPAND_MEMORY
7613 && TREE_CODE (array
) == CONSTRUCTOR
7614 && ! TREE_SIDE_EFFECTS (array
)
7615 && TREE_CODE (index
) == INTEGER_CST
)
7617 unsigned HOST_WIDE_INT ix
;
7620 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array
), ix
,
7622 if (tree_int_cst_equal (field
, index
))
7624 if (!TREE_SIDE_EFFECTS (value
))
7625 return expand_expr (fold (value
), target
, tmode
, modifier
);
7630 else if (optimize
>= 1
7631 && modifier
!= EXPAND_CONST_ADDRESS
7632 && modifier
!= EXPAND_INITIALIZER
7633 && modifier
!= EXPAND_MEMORY
7634 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
7635 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
7636 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
7637 && targetm
.binds_local_p (array
))
7639 if (TREE_CODE (index
) == INTEGER_CST
)
7641 tree init
= DECL_INITIAL (array
);
7643 if (TREE_CODE (init
) == CONSTRUCTOR
)
7645 unsigned HOST_WIDE_INT ix
;
7648 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init
), ix
,
7650 if (tree_int_cst_equal (field
, index
))
7652 if (TREE_SIDE_EFFECTS (value
))
7655 if (TREE_CODE (value
) == CONSTRUCTOR
)
7657 /* If VALUE is a CONSTRUCTOR, this
7658 optimization is only useful if
7659 this doesn't store the CONSTRUCTOR
7660 into memory. If it does, it is more
7661 efficient to just load the data from
7662 the array directly. */
7663 rtx ret
= expand_constructor (value
, target
,
7665 if (ret
== NULL_RTX
)
7669 return expand_expr (fold (value
), target
, tmode
,
7673 else if(TREE_CODE (init
) == STRING_CST
)
7675 tree index1
= index
;
7676 tree low_bound
= array_ref_low_bound (exp
);
7677 index1
= fold_convert (sizetype
, TREE_OPERAND (exp
, 1));
7679 /* Optimize the special-case of a zero lower bound.
7681 We convert the low_bound to sizetype to avoid some problems
7682 with constant folding. (E.g. suppose the lower bound is 1,
7683 and its mode is QI. Without the conversion,l (ARRAY
7684 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7685 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
7687 if (! integer_zerop (low_bound
))
7688 index1
= size_diffop (index1
, fold_convert (sizetype
,
7691 if (0 > compare_tree_int (index1
,
7692 TREE_STRING_LENGTH (init
)))
7694 tree type
= TREE_TYPE (TREE_TYPE (init
));
7695 enum machine_mode mode
= TYPE_MODE (type
);
7697 if (GET_MODE_CLASS (mode
) == MODE_INT
7698 && GET_MODE_SIZE (mode
) == 1)
7699 return gen_int_mode (TREE_STRING_POINTER (init
)
7700 [TREE_INT_CST_LOW (index1
)],
7707 goto normal_inner_ref
;
7710 /* If the operand is a CONSTRUCTOR, we can just extract the
7711 appropriate field if it is present. */
7712 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
)
7714 unsigned HOST_WIDE_INT idx
;
7717 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
7719 if (field
== TREE_OPERAND (exp
, 1)
7720 /* We can normally use the value of the field in the
7721 CONSTRUCTOR. However, if this is a bitfield in
7722 an integral mode that we can fit in a HOST_WIDE_INT,
7723 we must mask only the number of bits in the bitfield,
7724 since this is done implicitly by the constructor. If
7725 the bitfield does not meet either of those conditions,
7726 we can't do this optimization. */
7727 && (! DECL_BIT_FIELD (field
)
7728 || ((GET_MODE_CLASS (DECL_MODE (field
)) == MODE_INT
)
7729 && (GET_MODE_BITSIZE (DECL_MODE (field
))
7730 <= HOST_BITS_PER_WIDE_INT
))))
7732 if (DECL_BIT_FIELD (field
)
7733 && modifier
== EXPAND_STACK_PARM
)
7735 op0
= expand_expr (value
, target
, tmode
, modifier
);
7736 if (DECL_BIT_FIELD (field
))
7738 HOST_WIDE_INT bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
7739 enum machine_mode imode
= TYPE_MODE (TREE_TYPE (field
));
7741 if (TYPE_UNSIGNED (TREE_TYPE (field
)))
7743 op1
= GEN_INT (((HOST_WIDE_INT
) 1 << bitsize
) - 1);
7744 op0
= expand_and (imode
, op0
, op1
, target
);
7749 = build_int_cst (NULL_TREE
,
7750 GET_MODE_BITSIZE (imode
) - bitsize
);
7752 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
7754 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
7762 goto normal_inner_ref
;
7765 case ARRAY_RANGE_REF
:
7768 enum machine_mode mode1
, mode2
;
7769 HOST_WIDE_INT bitsize
, bitpos
;
7771 int volatilep
= 0, must_force_mem
;
7772 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7773 &mode1
, &unsignedp
, &volatilep
, true);
7774 rtx orig_op0
, memloc
;
7776 /* If we got back the original object, something is wrong. Perhaps
7777 we are evaluating an expression too early. In any event, don't
7778 infinitely recurse. */
7779 gcc_assert (tem
!= exp
);
7781 /* If TEM's type is a union of variable size, pass TARGET to the inner
7782 computation, since it will need a temporary and TARGET is known
7783 to have to do. This occurs in unchecked conversion in Ada. */
7786 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
7787 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
7789 && modifier
!= EXPAND_STACK_PARM
7790 ? target
: NULL_RTX
),
7792 (modifier
== EXPAND_INITIALIZER
7793 || modifier
== EXPAND_CONST_ADDRESS
7794 || modifier
== EXPAND_STACK_PARM
)
7795 ? modifier
: EXPAND_NORMAL
);
7798 = CONSTANT_P (op0
) ? TYPE_MODE (TREE_TYPE (tem
)) : GET_MODE (op0
);
7800 /* If we have either an offset, a BLKmode result, or a reference
7801 outside the underlying object, we must force it to memory.
7802 Such a case can occur in Ada if we have unchecked conversion
7803 of an expression from a scalar type to an aggregate type or
7804 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
7805 passed a partially uninitialized object or a view-conversion
7806 to a larger size. */
7807 must_force_mem
= (offset
7809 || bitpos
+ bitsize
> GET_MODE_BITSIZE (mode2
));
7811 /* If this is a constant, put it in a register if it is a legitimate
7812 constant and we don't need a memory reference. */
7813 if (CONSTANT_P (op0
)
7815 && LEGITIMATE_CONSTANT_P (op0
)
7817 op0
= force_reg (mode2
, op0
);
7819 /* Otherwise, if this is a constant, try to force it to the constant
7820 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
7821 is a legitimate constant. */
7822 else if (CONSTANT_P (op0
) && (memloc
= force_const_mem (mode2
, op0
)))
7823 op0
= validize_mem (memloc
);
7825 /* Otherwise, if this is a constant or the object is not in memory
7826 and need be, put it there. */
7827 else if (CONSTANT_P (op0
) || (!MEM_P (op0
) && must_force_mem
))
7829 tree nt
= build_qualified_type (TREE_TYPE (tem
),
7830 (TYPE_QUALS (TREE_TYPE (tem
))
7831 | TYPE_QUAL_CONST
));
7832 memloc
= assign_temp (nt
, 1, 1, 1);
7833 emit_move_insn (memloc
, op0
);
7839 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
7842 gcc_assert (MEM_P (op0
));
7844 #ifdef POINTERS_EXTEND_UNSIGNED
7845 if (GET_MODE (offset_rtx
) != Pmode
)
7846 offset_rtx
= convert_to_mode (Pmode
, offset_rtx
, 0);
7848 if (GET_MODE (offset_rtx
) != ptr_mode
)
7849 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
7852 if (GET_MODE (op0
) == BLKmode
7853 /* A constant address in OP0 can have VOIDmode, we must
7854 not try to call force_reg in that case. */
7855 && GET_MODE (XEXP (op0
, 0)) != VOIDmode
7857 && (bitpos
% bitsize
) == 0
7858 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
7859 && MEM_ALIGN (op0
) == GET_MODE_ALIGNMENT (mode1
))
7861 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
7865 op0
= offset_address (op0
, offset_rtx
,
7866 highest_pow2_factor (offset
));
7869 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7870 record its alignment as BIGGEST_ALIGNMENT. */
7871 if (MEM_P (op0
) && bitpos
== 0 && offset
!= 0
7872 && is_aligning_offset (offset
, tem
))
7873 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
7875 /* Don't forget about volatility even if this is a bitfield. */
7876 if (MEM_P (op0
) && volatilep
&& ! MEM_VOLATILE_P (op0
))
7878 if (op0
== orig_op0
)
7879 op0
= copy_rtx (op0
);
7881 MEM_VOLATILE_P (op0
) = 1;
7884 /* The following code doesn't handle CONCAT.
7885 Assume only bitpos == 0 can be used for CONCAT, due to
7886 one element arrays having the same mode as its element. */
7887 if (GET_CODE (op0
) == CONCAT
)
7889 gcc_assert (bitpos
== 0
7890 && bitsize
== GET_MODE_BITSIZE (GET_MODE (op0
)));
7894 /* In cases where an aligned union has an unaligned object
7895 as a field, we might be extracting a BLKmode value from
7896 an integer-mode (e.g., SImode) object. Handle this case
7897 by doing the extract into an object as wide as the field
7898 (which we know to be the width of a basic mode), then
7899 storing into memory, and changing the mode to BLKmode. */
7900 if (mode1
== VOIDmode
7901 || REG_P (op0
) || GET_CODE (op0
) == SUBREG
7902 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
7903 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
7904 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
7905 && modifier
!= EXPAND_CONST_ADDRESS
7906 && modifier
!= EXPAND_INITIALIZER
)
7907 /* If the field isn't aligned enough to fetch as a memref,
7908 fetch it as a bit field. */
7909 || (mode1
!= BLKmode
7910 && (((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
)
7911 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)
7913 && (MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode1
)
7914 || (bitpos
% GET_MODE_ALIGNMENT (mode1
) != 0))))
7915 && ((modifier
== EXPAND_CONST_ADDRESS
7916 || modifier
== EXPAND_INITIALIZER
)
7918 : SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))))
7919 || (bitpos
% BITS_PER_UNIT
!= 0)))
7920 /* If the type and the field are a constant size and the
7921 size of the type isn't the same size as the bitfield,
7922 we must use bitfield operations. */
7924 && TYPE_SIZE (TREE_TYPE (exp
))
7925 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
7926 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
7929 enum machine_mode ext_mode
= mode
;
7931 if (ext_mode
== BLKmode
7932 && ! (target
!= 0 && MEM_P (op0
)
7934 && bitpos
% BITS_PER_UNIT
== 0))
7935 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
7937 if (ext_mode
== BLKmode
)
7940 target
= assign_temp (type
, 0, 1, 1);
7945 /* In this case, BITPOS must start at a byte boundary and
7946 TARGET, if specified, must be a MEM. */
7947 gcc_assert (MEM_P (op0
)
7948 && (!target
|| MEM_P (target
))
7949 && !(bitpos
% BITS_PER_UNIT
));
7951 emit_block_move (target
,
7952 adjust_address (op0
, VOIDmode
,
7953 bitpos
/ BITS_PER_UNIT
),
7954 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
7956 (modifier
== EXPAND_STACK_PARM
7957 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
7962 op0
= validize_mem (op0
);
7964 if (MEM_P (op0
) && REG_P (XEXP (op0
, 0)))
7965 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
7967 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
7968 (modifier
== EXPAND_STACK_PARM
7969 ? NULL_RTX
: target
),
7970 ext_mode
, ext_mode
);
7972 /* If the result is a record type and BITSIZE is narrower than
7973 the mode of OP0, an integral mode, and this is a big endian
7974 machine, we must put the field into the high-order bits. */
7975 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
7976 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7977 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (op0
)))
7978 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
7979 size_int (GET_MODE_BITSIZE (GET_MODE (op0
))
	    /* If the result type is BLKmode, store the data into a temporary
	       of the appropriate type, but with the mode corresponding to the
	       mode for the data we have (op0's mode).  It's tempting to make
	       this a constant type, since we know it's only being stored once,
	       but that can cause problems if we are taking the address of this
	       COMPONENT_REF because the MEM of any reference via that address
	       will have flags corresponding to the type, which will not
	       necessarily be constant.  */
	    if (mode == BLKmode)
	      {
		HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
		rtx new_rtx;

		/* If the reference doesn't use the alias set of its type,
		   we cannot create the temporary using that type.  */
		if (component_uses_parent_alias_set (exp))
		  {
		    new_rtx = assign_stack_local (ext_mode, size, 0);
		    set_mem_alias_set (new_rtx, get_alias_set (exp));
		  }
		else
		  new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);

		emit_move_insn (new_rtx, op0);
		op0 = copy_rtx (new_rtx);
		PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	set_mem_attributes (op0, exp, 0);
	if (REG_P (XEXP (op0, 0)))
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }

    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
    case CALL_EXPR:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      if (CALL_EXPR_VA_ARG_PACK (exp))
	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      {
	tree fndecl = get_callee_fndecl (exp), attr;

	if (fndecl
	    && (attr = lookup_attribute ("error",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  error ("%Kcall to %qs declared with attribute error: %s",
		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
	if (fndecl
	    && (attr = lookup_attribute ("warning",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %qs declared with attribute warning: %s",
		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

	/* Check for a built-in function.  */
	if (fndecl && DECL_BUILT_IN (fndecl))
	  {
	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	  }
      }
      return expand_call (exp, target, ignore);
    case PAREN_EXPR:
    CASE_CONVERT:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attribute.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, exp, 0);
	      return result;
	    }

	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
		target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (MEM_P (target))
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			adjust_address (target, TYPE_MODE (valtype), 0),
			modifier == EXPAND_STACK_PARM,
			false);
	  else
	    {
	      gcc_assert (REG_P (target));

	      /* Store this field into a union of the proper type.  */
	      store_field (target,
			   MIN ((int_size_in_bytes (TREE_TYPE
						    (TREE_OPERAND (exp, 0)))
				 * BITS_PER_UNIT),
				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			   0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			   type, 0, false);
	    }

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return REDUCE_BIT_FIELD (op0);
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
      if (GET_MODE (op0) == mode)
	;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	  enum machine_mode inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
				       subreg_lowpart_offset (mode,
							      inner_mode));
	  else
	    op0 = convert_modes (mode, inner_mode, op0,
				 TYPE_UNSIGNED (inner_type));
	}

      else if (modifier == EXPAND_INITIALIZER)
	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
	op0 = convert_to_mode (mode, op0,
			       TYPE_UNSIGNED (TREE_TYPE
					      (TREE_OPERAND (exp, 0))));
      else
	{
	  convert_move (target, op0,
			TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
	  op0 = target;
	}

      return REDUCE_BIT_FIELD (op0);
    case VIEW_CONVERT_EXPR:
      op0 = NULL_RTX;

      /* If we are converting to BLKmode, try to avoid an intermediate
	 temporary by fetching an inner memory reference.  */
      if (mode == BLKmode
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != BLKmode
	  && handled_component_p (TREE_OPERAND (exp, 0)))
	{
	  enum machine_mode mode1;
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  int unsignedp;
	  int volatilep = 0;
	  tree tem
	    = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, &bitpos,
				   &offset, &mode1, &unsignedp, &volatilep,
				   true);
	  rtx orig_op0;

	  /* ??? We should work harder and deal with non-zero offsets.  */
	  if (!offset
	      && (bitpos % BITS_PER_UNIT) == 0
	      && bitsize >= 0
	      && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
	    {
	      /* See the normal_inner_ref case for the rationale.  */
	      orig_op0
		= expand_expr (tem,
			       (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
				&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				    != INTEGER_CST)
				&& modifier != EXPAND_STACK_PARM
				? target : NULL_RTX),
			       VOIDmode,
			       (modifier == EXPAND_INITIALIZER
				|| modifier == EXPAND_CONST_ADDRESS
				|| modifier == EXPAND_STACK_PARM)
			       ? modifier : EXPAND_NORMAL);

	      if (MEM_P (orig_op0))
		{
		  op0 = orig_op0;

		  /* Get a reference to just this component.  */
		  if (modifier == EXPAND_CONST_ADDRESS
		      || modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
		  else
		    op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);

		  if (op0 == orig_op0)
		    op0 = copy_rtx (op0);

		  set_mem_attributes (op0, TREE_OPERAND (exp, 0), 0);
		  if (REG_P (XEXP (op0, 0)))
		    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

		  MEM_VOLATILE_P (op0) |= volatilep;
		}
	    }
	}

      if (!op0)
	op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
	;
      /* If neither mode is BLKmode, and both modes are the same size
	 then we can use gen_lowpart.  */
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0)))
	{
	  if (GET_CODE (op0) == SUBREG)
	    op0 = force_reg (GET_MODE (op0), op0);
	  op0 = gen_lowpart (mode, op0);
	}
      /* If both modes are integral, then we can convert from one to the
	 other.  */
      else if (SCALAR_INT_MODE_P (GET_MODE (op0)) && SCALAR_INT_MODE_P (mode))
	op0 = convert_modes (mode, GET_MODE (op0), op0,
			     TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* At this point, OP0 is in the correct mode.  If the output type is
	 such that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (MEM_P (op0))
	{
	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (STRICT_ALIGNMENT
		   && mode != BLKmode
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (mode));
	      rtx new_rtx
		= assign_stack_temp_for_type (mode, temp_size, 0, type);
	      rtx new_with_op0_mode
		= adjust_address (new_rtx, GET_MODE (op0), 0);

	      gcc_assert (!TREE_ADDRESSABLE (exp));

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (mode)),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new_rtx;
	    }

	  op0 = adjust_address (op0, mode, 0);
	}

      return op0;
    case POINTER_PLUS_EXPR:
      /* Even though the sizetype mode and the pointer's mode can be different
	 expand is able to handle this correctly and get the correct result out
	 of the PLUS_EXPR code.  */
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
	 if sizetype precision is smaller than pointer precision.  */
      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
	exp = build2 (PLUS_EXPR, type,
		      TREE_OPERAND (exp, 0),
		      fold_convert (type,
				    fold_convert (ssizetype,
						  TREE_OPERAND (exp, 1))));
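      /* For example, with 64-bit pointers and a hypothetical 32-bit
	 sizetype, an offset of (sizetype) -4 == 0xfffffffc must reach the
	 PLUS_EXPR as the sign-extended value -4 rather than as the
	 zero-extended 0xfffffffc; the detour through ssizetype above
	 guarantees that.  */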
    case PLUS_EXPR:

      /* Check if this is a case for multiplication and addition.  */
      if ((TREE_CODE (type) == INTEGER_TYPE
	   || TREE_CODE (type) == FIXED_POINT_TYPE)
	  && (subexp0_def = get_def_for_expr (TREE_OPERAND (exp, 0),
					      MULT_EXPR)))
	{
	  tree subsubexp0, subsubexp1;
	  gimple subsubexp0_def, subsubexp1_def;
	  enum tree_code this_code;

	  this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
						       : FIXED_CONVERT_EXPR;
	  subsubexp0 = gimple_assign_rhs1 (subexp0_def);
	  subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
	  subsubexp1 = gimple_assign_rhs2 (subexp0_def);
	  subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
	  if (subsubexp0_def && subsubexp1_def
	      && (top0 = gimple_assign_rhs1 (subsubexp0_def))
	      && (top1 = gimple_assign_rhs1 (subsubexp1_def))
	      && (TYPE_PRECISION (TREE_TYPE (top0))
		  < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
	      && (TYPE_PRECISION (TREE_TYPE (top0))
		  == TYPE_PRECISION (TREE_TYPE (top1)))
	      && (TYPE_UNSIGNED (TREE_TYPE (top0))
		  == TYPE_UNSIGNED (TREE_TYPE (top1))))
	    {
	      tree op0type = TREE_TYPE (top0);
	      enum machine_mode innermode = TYPE_MODE (op0type);
	      bool zextend_p = TYPE_UNSIGNED (op0type);
	      bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
	      if (sat_p == 0)
		this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
	      else
		this_optab = zextend_p ? usmadd_widen_optab
				       : ssmadd_widen_optab;
	      if (mode == GET_MODE_2XWIDER_MODE (innermode)
		  && (optab_handler (this_optab, mode)->insn_code
		      != CODE_FOR_nothing))
		{
		  expand_operands (top0, top1, NULL_RTX, &op0, &op1,
				   EXPAND_NORMAL);
		  op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
				     VOIDmode, EXPAND_NORMAL);
		  temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
					    target, unsignedp);
		  gcc_assert (temp);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}

      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
	  && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return REDUCE_BIT_FIELD (op1);
	    }
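	  /* For example, on a 64-bit host targeting a 32-bit machine (the
	     "64x32" cross mentioned above), an SImode constant with low
	     word 0xffffffff must become the canonical sign-extended
	     CONST_INT -1; passing TREE_INT_CST_LOW straight to
	     plus_constant would build non-canonical RTL instead.  */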
	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return REDUCE_BIT_FIELD (op0);
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   subtarget, &op0, &op1, EXPAND_NORMAL);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
    case MINUS_EXPR:
      /* Check if this is a case for multiplication and subtraction.  */
      if ((TREE_CODE (type) == INTEGER_TYPE
	   || TREE_CODE (type) == FIXED_POINT_TYPE)
	  && (subexp1_def = get_def_for_expr (TREE_OPERAND (exp, 1),
					      MULT_EXPR)))
	{
	  tree subsubexp0, subsubexp1;
	  gimple subsubexp0_def, subsubexp1_def;
	  enum tree_code this_code;

	  this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
						       : FIXED_CONVERT_EXPR;
	  subsubexp0 = gimple_assign_rhs1 (subexp1_def);
	  subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
	  subsubexp1 = gimple_assign_rhs2 (subexp1_def);
	  subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
	  if (subsubexp0_def && subsubexp1_def
	      && (top0 = gimple_assign_rhs1 (subsubexp0_def))
	      && (top1 = gimple_assign_rhs1 (subsubexp1_def))
	      && (TYPE_PRECISION (TREE_TYPE (top0))
		  < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
	      && (TYPE_PRECISION (TREE_TYPE (top0))
		  == TYPE_PRECISION (TREE_TYPE (top1)))
	      && (TYPE_UNSIGNED (TREE_TYPE (top0))
		  == TYPE_UNSIGNED (TREE_TYPE (top1))))
	    {
	      tree op0type = TREE_TYPE (top0);
	      enum machine_mode innermode = TYPE_MODE (op0type);
	      bool zextend_p = TYPE_UNSIGNED (op0type);
	      bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
	      if (sat_p == 0)
		this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
	      else
		this_optab = zextend_p ? usmsub_widen_optab
				       : ssmsub_widen_optab;
	      if (mode == GET_MODE_2XWIDER_MODE (innermode)
		  && (optab_handler (this_optab, mode)->insn_code
		      != CODE_FOR_nothing))
		{
		  expand_operands (top0, top1, NULL_RTX, &op0, &op1,
				   EXPAND_NORMAL);
		  op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
				     VOIDmode, EXPAND_NORMAL);
		  temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
					    target, unsignedp);
		  gcc_assert (temp);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}

      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   NULL_RTX, &op0, &op1, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
	  else
	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
	}

      goto binop2;
    case MULT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_mult" doesn't support sat/no-sat fixed-point
	 multiplications.   */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  tree exp1 = TREE_OPERAND (exp, 1);

	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);

	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_low_cst (exp1, 0),
					     TYPE_MODE (TREE_TYPE (exp1)))));
	}

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
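      /* For example, if A and B are HImode values that were extended to
	 SImode, (int) A * (int) B can be done with a widening HImode
	 multiply producing an SImode result -- when the target provides
	 such a pattern -- instead of extending both operands and doing a
	 full SImode multiply.  (Illustrative modes only.)  */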
      subexp0 = TREE_OPERAND (exp, 0);
      subexp1 = TREE_OPERAND (exp, 1);
      subexp0_def = get_def_for_expr (subexp0, NOP_EXPR);
      subexp1_def = get_def_for_expr (subexp1, NOP_EXPR);
      top0 = top1 = NULL_TREE;

      /* First, check if we have a multiplication of one signed and one
	 unsigned operand.  */
      if (subexp0_def
	  && (top0 = gimple_assign_rhs1 (subexp0_def))
	  && subexp1_def
	  && (top1 = gimple_assign_rhs1 (subexp1_def))
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (top0))
	      < TYPE_PRECISION (TREE_TYPE (subexp0)))
	  && (TYPE_PRECISION (TREE_TYPE (top0))
	      == TYPE_PRECISION (TREE_TYPE (top1)))
	  && (TYPE_UNSIGNED (TREE_TYPE (top0))
	      != TYPE_UNSIGNED (TREE_TYPE (top1))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (top0));
	  this_optab = usmul_widen_optab;
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
		{
		  if (TYPE_UNSIGNED (TREE_TYPE (top0)))
		    expand_operands (top0, top1, NULL_RTX, &op0, &op1,
				     EXPAND_NORMAL);
		  else
		    expand_operands (top0, top1, NULL_RTX, &op1, &op0,
				     EXPAND_NORMAL);

		  goto binop3;
		}
	    }
	}
      /* Check for a multiplication with matching signedness.  If
	 valid, TOP0 and TOP1 were set in the previous if
	 condition.  */
      else if (top0
	       && TREE_CODE (type) == INTEGER_TYPE
	       && (TYPE_PRECISION (TREE_TYPE (top0))
		   < TYPE_PRECISION (TREE_TYPE (subexp0)))
	       && ((TREE_CODE (subexp1) == INTEGER_CST
		    && int_fits_type_p (subexp1, TREE_TYPE (top0))
		    /* Don't use a widening multiply if a shift will do.  */
		    && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (subexp1)))
			 > HOST_BITS_PER_WIDE_INT)
			|| exact_log2 (TREE_INT_CST_LOW (subexp1)) < 0))
		   ||
		   (top1
		    && (TYPE_PRECISION (TREE_TYPE (top1))
			== TYPE_PRECISION (TREE_TYPE (top0))
		    /* If both operands are extended, they must either both
		       be zero-extended or both be sign-extended.  */
		    && (TYPE_UNSIGNED (TREE_TYPE (top1))
			== TYPE_UNSIGNED (TREE_TYPE (top0)))))))
	{
	  tree op0type = TREE_TYPE (top0);
	  enum machine_mode innermode = TYPE_MODE (op0type);
	  bool zextend_p = TYPE_UNSIGNED (op0type);
	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

	  if (mode == GET_MODE_2XWIDER_MODE (innermode))
	    {
	      if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
		{
		  if (TREE_CODE (subexp1) == INTEGER_CST)
		    expand_operands (top0, subexp1, NULL_RTX, &op0, &op1,
				     EXPAND_NORMAL);
		  else
		    expand_operands (top0, top1, NULL_RTX, &op0, &op1,
				     EXPAND_NORMAL);
		  goto binop3;
		}
	      else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem, hipart;
		  op0 = expand_normal (top0);
		  if (TREE_CODE (subexp1) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_normal (subexp1), unsignedp);
		  else
		    op1 = expand_normal (top1);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (innermode, temp);
		  htem = expand_mult_highpart_adjust (innermode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (htem != hipart)
		    emit_move_insn (hipart, htem);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}
      expand_operands (subexp0, subexp1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
	 divisions.   */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, EXPAND_NORMAL);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, EXPAND_NORMAL);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIXED_CONVERT_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);

      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == INTEGER_TYPE
	   && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
      else
	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
      return target;

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type,
					       optab_default),
			  op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);
    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       target, &op0, &op1, EXPAND_NORMAL);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = optab_for_tree_code (code, type, optab_default);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (! REG_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}

      /* We generate better code and avoid problems with op1 mentioning
	 target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
	op1 = force_reg (mode, op1);

      {
	enum rtx_code comparison_code;
	rtx cmpop1 = op1;

	if (code == MAX_EXPR)
	  comparison_code = unsignedp ? GEU : GE;
	else
	  comparison_code = unsignedp ? LEU : LE;

	/* Canonicalize to comparisons against 0.  */
	if (op1 == const1_rtx)
	  {
	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
	       or (a != 0 ? a : 1) for unsigned.
	       For MIN we are safe converting (a <= 1 ? a : 1)
	       into (a <= 0 ? a : 1) */
	    cmpop1 = const0_rtx;
	    if (code == MAX_EXPR)
	      comparison_code = unsignedp ? NE : GT;
	  }
	if (op1 == constm1_rtx && !unsignedp)
	  {
	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
	    cmpop1 = const0_rtx;
	    if (code == MIN_EXPR)
	      comparison_code = LT;
	  }
#ifdef HAVE_conditional_move
	/* Use a conditional move if possible.  */
	if (can_conditionally_move_p (mode))
	  {
	    rtx insn;

	    /* ??? Same problem as in expmed.c: emit_conditional_move
	       forces a stack adjustment via compare_from_rtx, and we
	       lose the stack adjustment if the sequence we are about
	       to create is discarded.  */
	    do_pending_stack_adjust ();

	    start_sequence ();

	    /* Try to emit the conditional move.  */
	    insn = emit_conditional_move (target, comparison_code,
					  op0, cmpop1, mode,
					  op0, op1, mode,
					  unsignedp);

	    /* If we could do the conditional move, emit the sequence,
	       and return.  */
	    if (insn)
	      {
		rtx seq = get_insns ();
		end_sequence ();
		emit_insn (seq);
		return target;
	      }

	    /* Otherwise discard the sequence and fall back to code with
	       branches.  */
	    end_sequence ();
	  }
#endif
	if (target != op0)
	  emit_move_insn (target, op0);

	temp = gen_label_rtx ();
	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
				 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
      }
      emit_move_insn (target, op1);
      emit_label (temp);
      return target;
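      /* The branchy fallback above amounts to the following sketch for
	 MAX_EXPR (GEU instead of GE when unsigned):

	     target = op0;
	     if (target >= op1) goto done;
	     target = op1;
	   done:

	 with the comparison canonicalized against zero where possible.  */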
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
      code = BIT_AND_EXPR;
    case BIT_AND_EXPR:
      goto binop;

    case TRUTH_OR_EXPR:
      code = BIT_IOR_EXPR;
    case BIT_IOR_EXPR:
      goto binop;

    case TRUTH_XOR_EXPR:
      code = BIT_XOR_EXPR;
    case BIT_XOR_EXPR:
      goto binop;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
		  || (GET_MODE_PRECISION (TYPE_MODE (type))
		      == TYPE_PRECISION (type)));
      /* fall through */

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_shift" doesn't support sat/no-sat fixed-point
	 shifts.   */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
			 VOIDmode, EXPAND_NORMAL);
      temp = expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
      if (code == LSHIFT_EXPR)
	temp = REDUCE_BIT_FIELD (temp);
      return temp;
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (exp,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && REG_P (original_target)
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, EXPAND_NORMAL);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
	        emit_move_insn (target, const1_rtx);
	      else
	        emit_move_insn (target, const0_rtx);

	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}

      /* If no set-flag instruction, must generate a conditional store
	 into a temporary variable.  Drop through and handle this
	 like && and ||.  */
      /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
	 are occasionally created by folding during expansion.  */
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
	      || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && REG_P (target)
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target,
			 VOIDmode, EXPAND_NORMAL);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      gcc_assert (temp);
      return temp;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator iter;

	gcc_assert (ignore);

	for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;
    case COND_EXPR:
      /* A COND_EXPR with its type being VOID_TYPE represents a
	 conditional jump and is handled in
	 expand_gimple_cond_expr.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));

      /* Note that COND_EXPRs whose type is a structure or union
	 are required to be constructed to contain assignments of
	 a temporary variable, so that we can evaluate them here
	 for side effect only.  If type is void, we must do likewise.  */

      gcc_assert (!TREE_ADDRESSABLE (type)
		  && !ignore
		  && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);

      /* If we are not to produce a result, we have no target.  Otherwise,
	 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary.  */

      if (modifier != EXPAND_STACK_PARM
	  && original_target
	  && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
	  && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
	  && (! can_conditionally_move_p (mode)
	      || REG_P (original_target))
#endif
	  && !MEM_P (original_target))
	temp = original_target;
      else
	temp = assign_temp (type, 0, 0, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (TREE_OPERAND (exp, 0), op0);
      store_expr (TREE_OPERAND (exp, 1), temp,
		  modifier == EXPAND_STACK_PARM,
		  false);

      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
      store_expr (TREE_OPERAND (exp, 2), temp,
		  modifier == EXPAND_STACK_PARM,
		  false);

      emit_label (op1);
      OK_DEFER_POP;
      return temp;

    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (exp, target);
      return target;

    case MODIFY_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label);
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
			       MOVE_NONTEMPORAL (exp));
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
	return const0_rtx;
      }

    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      op1 = expand_normal (TREE_OPERAND (exp, 1));

      if (!target)
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;

    case REALPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      return read_complex_part (op0, true);

    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;
    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case CHANGE_DYNAMIC_TYPE_EXPR:
      /* This is ignored at the RTL level.  The tree level set
	 DECL_POINTER_ALIAS_SET of any variable to be 0, which is
	 overkill for the RTL layer but is all that we can
	 represent.  */
      return const0_rtx;

    case EXC_PTR_EXPR:
      return get_exception_pointer ();

    case FILTER_EXPR:
      return get_exception_filter ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case SWITCH_EXPR:
      expand_case (exp);
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
			       modifier, alt_rtl);

    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	this_optab = optab_for_tree_code (code, type, optab_default);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case DOT_PROD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (exp, op0, op1, op2,
					    target, unsignedp);
	return target;
      }

    case WIDEN_SUM_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
					    target, unsignedp);
	return target;
      }

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
	op0 = expand_normal (TREE_OPERAND (exp, 0));
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_unop (mode, this_optab, op0, target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
      {
	expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			     OPTAB_WIDEN);
	gcc_assert (temp);
	return temp;
      }

    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:
      {
	expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			     OPTAB_WIDEN);
	gcc_assert (temp);
	return temp;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	target = expand_vec_shift_expr (exp, target);
	return target;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
      {
	op0 = expand_normal (TREE_OPERAND (exp, 0));
	this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
					  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      {
	op0 = expand_normal (TREE_OPERAND (exp, 0));
	/* The signedness is determined from input operand.  */
	this_optab = optab_for_tree_code (code,
					  TREE_TYPE (TREE_OPERAND (exp, 0)),
					  optab_default);
	temp = expand_widen_pattern_expr
	  (exp, op0, NULL_RTX, NULL_RTX,
	   target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

	gcc_assert (temp);
	return temp;
      }

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
					    target, unsignedp);
	gcc_assert (target);
	return target;
      }

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      goto binop;

    case COMPOUND_LITERAL_EXPR:
      {
	/* Initialize the anonymous variable declared in the compound
	   literal, then return the variable.  */
	tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);

	/* Create RTL for this variable.  */
	if (!DECL_RTL_SET_P (decl))
	  {
	    if (DECL_HARD_REGISTER (decl))
	      /* The user specified an assembler name for this variable.
		 Set that up now.  */
	      rest_of_decl_compilation (decl, 0, 0);
	    else
	      expand_decl (decl);
	  }

	return expand_expr_real (decl, original_target, tmode,
				 modifier, alt_rtl);
      }

    default:
      gcc_unreachable ();
    }
  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, EXPAND_NORMAL);
 binop2:
  this_optab = optab_for_tree_code (code, type, optab_default);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD

/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */
static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (GET_CODE (exp) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
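/* Worked example for reduce_to_bit_field_precision: for a 3-bit unsigned
   field held in SImode, the mask path ANDs with (1 << 3) - 1 == 7; for a
   3-bit signed field it instead shifts left by 32 - 3 == 29 and then
   arithmetic-shifts right by 29, which replicates the sign bit.
   (Illustrative sizes only.)  */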
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
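/* For instance, is_aligning_offset recognizes an OFFSET tree of the shape

       BIT_AND_EXPR (NEGATE_EXPR (ADDR_EXPR EXP), MASK)

   (possibly wrapped in conversions) where MASK is 2**N - 1 and exceeds
   BIGGEST_ALIGNMENT in bytes: adding such an offset to &EXP rounds the
   address up to a 2**N boundary.  */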
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;

	  /* Check if the array has a nonzero lower bound.  */
	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
	  if (!integer_zerop (lower_bound))
	    {
	      /* If the offset and base aren't both constants, return 0.  */
	      if (TREE_CODE (lower_bound) != INTEGER_CST)
		return 0;
	      if (TREE_CODE (offset) != INTEGER_CST)
		return 0;
	      /* Adjust offset by the lower bound.  */
	      offset = size_diffop (fold_convert (sizetype, offset),
				    fold_convert (sizetype, lower_bound));
	    }
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* If they are read-only, non-volatile and bind locally.  */
      if (! TREE_READONLY (array)
	  || TREE_SIDE_EFFECTS (array)
	  || ! targetm.binds_local_p (array))
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
	 and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
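/* For example, for ARG == &"hello"[2] string_constant returns the
   STRING_CST "hello" with *PTR_OFFSET set to 2.  A read-only, locally
   bound variable such as  static const char buf[6] = "hello";  is
   handled the same way through its DECL_INITIAL.  */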
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
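/* E.g. for (X & 8) != 0 the folder produces (X >> 3) & 1, and for the EQ
   case it additionally XORs the result with 1 -- no scc instruction is
   needed.  (Illustrative constant only.)  */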
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  label = gen_label_rtx ();
  do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
			   NULL_RTX, label);

  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
	    rtx fallback_label ATTRIBUTE_UNUSED)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_size (index_bits, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_normal (minval);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_normal (range);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, !default_label
					   ? fallback_label : default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
10037 index
= gen_rtx_PLUS (Pmode
,
10038 gen_rtx_MULT (Pmode
, index
,
10039 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE
))),
10040 gen_rtx_LABEL_REF (Pmode
, table_label
));
10041 #ifdef PIC_CASE_VECTOR_ADDRESS
10043 index
= PIC_CASE_VECTOR_ADDRESS (index
);
10046 index
= memory_address (CASE_VECTOR_MODE
, index
);
10047 temp
= gen_reg_rtx (CASE_VECTOR_MODE
);
10048 vector
= gen_const_mem (CASE_VECTOR_MODE
, index
);
10049 convert_move (temp
, vector
, 0);
10051 emit_jump_insn (gen_tablejump (temp
, table_label
));
10053 /* If we are generating PIC code or if the table is PC-relative, the
10054 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10055 if (! CASE_VECTOR_PC_RELATIVE
&& ! flag_pic
)
10060 try_tablejump (tree index_type
, tree index_expr
, tree minval
, tree range
,
10061 rtx table_label
, rtx default_label
)
10065 if (! HAVE_tablejump
)
10068 index_expr
= fold_build2 (MINUS_EXPR
, index_type
,
10069 fold_convert (index_type
, index_expr
),
10070 fold_convert (index_type
, minval
));
10071 index
= expand_normal (index_expr
);
10072 do_pending_stack_adjust ();
10074 do_tablejump (index
, TYPE_MODE (index_type
),
10075 convert_modes (TYPE_MODE (index_type
),
10076 TYPE_MODE (TREE_TYPE (range
)),
10077 expand_normal (range
),
10078 TYPE_UNSIGNED (TREE_TYPE (range
))),
10079 table_label
, default_label
);
10083 /* Nonzero if the mode is a valid vector mode for this architecture.
10084 This returns nonzero even if there is no hardware support for the
10085 vector mode, but we can emulate with narrower modes. */
10088 vector_mode_valid_p (enum machine_mode mode
)
10090 enum mode_class mclass
= GET_MODE_CLASS (mode
);
10091 enum machine_mode innermode
;
10093 /* Doh! What's going on? */
10094 if (mclass
!= MODE_VECTOR_INT
10095 && mclass
!= MODE_VECTOR_FLOAT
10096 && mclass
!= MODE_VECTOR_FRACT
10097 && mclass
!= MODE_VECTOR_UFRACT
10098 && mclass
!= MODE_VECTOR_ACCUM
10099 && mclass
!= MODE_VECTOR_UACCUM
)
10102 /* Hardware support. Woo hoo! */
10103 if (targetm
.vector_mode_supported_p (mode
))
10106 innermode
= GET_MODE_INNER (mode
);
10108 /* We should probably return 1 if requesting V4DI and we have no DI,
10109 but we have V2DI, but this is probably very unlikely. */
10111 /* If we have support for the inner mode, we can safely emulate it.
10112 We may not have V2DI, but me can emulate with a pair of DIs. */
10113 return targetm
.scalar_mode_supported_p (innermode
);
10116 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10118 const_vector_from_tree (tree exp
)
10123 enum machine_mode inner
, mode
;
10125 mode
= TYPE_MODE (TREE_TYPE (exp
));
10127 if (initializer_zerop (exp
))
10128 return CONST0_RTX (mode
);
10130 units
= GET_MODE_NUNITS (mode
);
10131 inner
= GET_MODE_INNER (mode
);
10133 v
= rtvec_alloc (units
);
10135 link
= TREE_VECTOR_CST_ELTS (exp
);
10136 for (i
= 0; link
; link
= TREE_CHAIN (link
), ++i
)
10138 elt
= TREE_VALUE (link
);
10140 if (TREE_CODE (elt
) == REAL_CST
)
10141 RTVEC_ELT (v
, i
) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt
),
10143 else if (TREE_CODE (elt
) == FIXED_CST
)
10144 RTVEC_ELT (v
, i
) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt
),
10147 RTVEC_ELT (v
, i
) = immed_double_const (TREE_INT_CST_LOW (elt
),
10148 TREE_INT_CST_HIGH (elt
),
10152 /* Initialize remaining elements to 0. */
10153 for (; i
< units
; ++i
)
10154 RTVEC_ELT (v
, i
) = CONST0_RTX (inner
);
10156 return gen_rtx_CONST_VECTOR (mode
, v
);
10158 #include "gt-expr.h"