expr.c (emit_block_move_via_movmem): Use n_generator_args instead of n_operands.
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "reload.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
48 #include "tree-iterator.h"
49 #include "tree-pass.h"
50 #include "tree-flow.h"
51 #include "target.h"
52 #include "timevar.h"
53 #include "df.h"
54 #include "diagnostic.h"
55 #include "ssaexpand.h"
56 #include "target-globals.h"
57
58 /* Decide whether a function's arguments should be processed
59 from first to last or from last to first.
60
61 They should be processed from last to first if the stack and args grow
62 in opposite directions, but only if we have push insns. */
63
64 #ifdef PUSH_ROUNDING
65
66 #ifndef PUSH_ARGS_REVERSED
67 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
68 #define PUSH_ARGS_REVERSED /* If it's last to first. */
69 #endif
70 #endif
71
72 #endif
73
74 #ifndef STACK_PUSH_CODE
75 #ifdef STACK_GROWS_DOWNWARD
76 #define STACK_PUSH_CODE PRE_DEC
77 #else
78 #define STACK_PUSH_CODE PRE_INC
79 #endif
80 #endif
81
82
83 /* If this is nonzero, we do not bother generating VOLATILE
84 around volatile memory references, and we are willing to
85 output indirect addresses. If cse is to follow, we reject
86 indirect addresses so a useful potential cse is generated;
87 if it is used only once, instruction combination will produce
88 the same indirect address eventually. */
89 int cse_not_expected;
90
91 /* This structure is used by move_by_pieces to describe the move to
92 be performed. */
93 struct move_by_pieces_d
94 {
95 rtx to;			/* Destination MEM, or null when pushing to the stack.  */
96 rtx to_addr;		/* Address currently used to access TO.  */
97 int autinc_to;		/* Nonzero if TO_ADDR is (or is treated as) an auto-inc/dec address.  */
98 int explicit_inc_to;	/* +1 or -1 if we emit explicit add insns to step TO_ADDR, else 0.  */
99 rtx from;			/* Source MEM.  */
100 rtx from_addr;		/* Address currently used to access FROM.  */
101 int autinc_from;		/* Nonzero if FROM_ADDR is (or is treated as) an auto-inc/dec address.  */
102 int explicit_inc_from;	/* +1 or -1 if we emit explicit add insns to step FROM_ADDR, else 0.  */
103 unsigned HOST_WIDE_INT len;	/* Number of bytes still to be moved.  */
104 HOST_WIDE_INT offset;	/* Current byte offset into the blocks.  */
105 int reverse;		/* Nonzero if the copy runs from the end toward the start.  */
106 };
107
108 /* This structure is used by store_by_pieces to describe the clear to
109 be performed. */
110
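/* The fields mirror move_by_pieces_d above, except that the bytes to store
   come from CONSTFUN rather than from a source block.  Roughly (a sketch of
   the intended callback contract, not spelled out in this excerpt): CONSTFUN
   is called with CONSTFUNDATA, the byte offset of the current piece, and the
   mode of that piece, and returns the constant rtx to store there.  */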
111 struct store_by_pieces_d
112 {
113 rtx to;
114 rtx to_addr;
115 int autinc_to;
116 int explicit_inc_to;
117 unsigned HOST_WIDE_INT len;
118 HOST_WIDE_INT offset;
119 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
120 void *constfundata;
121 int reverse;
122 };
123
124 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
125 unsigned int,
126 unsigned int);
127 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
128 struct move_by_pieces_d *);
129 static bool block_move_libcall_safe_for_call_parm (void);
130 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
131 static tree emit_block_move_libcall_fn (int);
132 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
133 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
134 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
135 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
136 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
137 struct store_by_pieces_d *);
138 static tree clear_storage_libcall_fn (int);
139 static rtx compress_float_constant (rtx, rtx);
140 static rtx get_subtarget (rtx);
141 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, tree, int, alias_set_type);
144 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
146 tree, tree, alias_set_type, bool);
147
148 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
149
150 static int is_aligning_offset (const_tree, const_tree);
151 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
152 enum expand_modifier);
153 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
154 static rtx do_store_flag (sepops, rtx, enum machine_mode);
155 #ifdef PUSH_ROUNDING
156 static void emit_single_push_insn (enum machine_mode, rtx, tree);
157 #endif
158 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
159 static rtx const_vector_from_tree (tree);
160 static void write_complex_part (rtx, rtx, bool);
161
162 /* This macro is used to determine whether move_by_pieces should be called
163 to perform a structure copy. */
164 #ifndef MOVE_BY_PIECES_P
165 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
166 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
167 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
168 #endif
169
170 /* This macro is used to determine whether clear_by_pieces should be
171 called to clear storage. */
172 #ifndef CLEAR_BY_PIECES_P
173 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
174 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
175 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
176 #endif
177
178 /* This macro is used to determine whether store_by_pieces should be
179 called to "memset" storage with byte values other than zero. */
180 #ifndef SET_BY_PIECES_P
181 #define SET_BY_PIECES_P(SIZE, ALIGN) \
182 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
183 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
184 #endif
185
186 /* This macro is used to determine whether store_by_pieces should be
187 called to "memcpy" storage when the source is a constant string. */
188 #ifndef STORE_BY_PIECES_P
189 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
190 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
191 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
192 #endif
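/* All four heuristics above have the same shape: estimate how many move or
   store insns a piecewise expansion would take and compare that count with
   the target's cost ratio.  As a rough illustration (the real numbers depend
   on MOVE_MAX_PIECES and the target's ratios): copying 8 bytes known to be
   4-byte aligned with SImode as the widest usable piece takes two SImode
   moves, so MOVE_BY_PIECES_P holds only if MOVE_RATIO is greater than 2.  */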
193
194 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
195
196 #ifndef SLOW_UNALIGNED_ACCESS
197 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
198 #endif
199 \f
200 /* This is run to set up which modes can be used
201 directly in memory and to initialize the block move optab. It is run
202 at the beginning of compilation and when the target is reinitialized. */
203
204 void
205 init_expr_target (void)
206 {
207 rtx insn, pat;
208 enum machine_mode mode;
209 int num_clobbers;
210 rtx mem, mem1;
211 rtx reg;
212
213 /* Try indexing by frame ptr and try by stack ptr.
214 It is known that on the Convex the stack ptr isn't a valid index.
215 With luck, one or the other is valid on any machine. */
216 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
217 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
218
219 /* A scratch register we can modify in-place below to avoid
220 useless RTL allocations. */
221 reg = gen_rtx_REG (VOIDmode, -1);
222
223 insn = rtx_alloc (INSN);
224 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
225 PATTERN (insn) = pat;
226
227 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
228 mode = (enum machine_mode) ((int) mode + 1))
229 {
230 int regno;
231
232 direct_load[(int) mode] = direct_store[(int) mode] = 0;
233 PUT_MODE (mem, mode);
234 PUT_MODE (mem1, mode);
235 PUT_MODE (reg, mode);
236
237 /* See if there is some register that can be used in this mode and
238 directly loaded or stored from memory. */
239
240 if (mode != VOIDmode && mode != BLKmode)
241 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
242 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
243 regno++)
244 {
245 if (! HARD_REGNO_MODE_OK (regno, mode))
246 continue;
247
248 SET_REGNO (reg, regno);
249
250 SET_SRC (pat) = mem;
251 SET_DEST (pat) = reg;
252 if (recog (pat, insn, &num_clobbers) >= 0)
253 direct_load[(int) mode] = 1;
254
255 SET_SRC (pat) = mem1;
256 SET_DEST (pat) = reg;
257 if (recog (pat, insn, &num_clobbers) >= 0)
258 direct_load[(int) mode] = 1;
259
260 SET_SRC (pat) = reg;
261 SET_DEST (pat) = mem;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_store[(int) mode] = 1;
264
265 SET_SRC (pat) = reg;
266 SET_DEST (pat) = mem1;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_store[(int) mode] = 1;
269 }
270 }
271
272 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
273
274 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
275 mode = GET_MODE_WIDER_MODE (mode))
276 {
277 enum machine_mode srcmode;
278 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
279 srcmode = GET_MODE_WIDER_MODE (srcmode))
280 {
281 enum insn_code ic;
282
283 ic = can_extend_p (mode, srcmode, 0);
284 if (ic == CODE_FOR_nothing)
285 continue;
286
287 PUT_MODE (mem, srcmode);
288
289 if (insn_operand_matches (ic, 1, mem))
290 float_extend_from_mem[mode][srcmode] = true;
291 }
292 }
293 }
294
295 /* This is run at the start of compiling a function. */
296
297 void
298 init_expr (void)
299 {
300 memset (&crtl->expr, 0, sizeof (crtl->expr));
301 }
302 \f
303 /* Copy data from FROM to TO, where the machine modes are not the same.
304 Both modes may be integer, or both may be floating, or both may be
305 fixed-point.
306 UNSIGNEDP should be nonzero if FROM is an unsigned type.
307 This causes zero-extension instead of sign-extension. */
308
309 void
310 convert_move (rtx to, rtx from, int unsignedp)
311 {
312 enum machine_mode to_mode = GET_MODE (to);
313 enum machine_mode from_mode = GET_MODE (from);
314 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
315 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
316 enum insn_code code;
317 rtx libcall;
318
319 /* rtx code for making an equivalent value. */
320 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
321 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
322
323
324 gcc_assert (to_real == from_real);
325 gcc_assert (to_mode != BLKmode);
326 gcc_assert (from_mode != BLKmode);
327
328 /* If the source and destination are already the same, then there's
329 nothing to do. */
330 if (to == from)
331 return;
332
333 /* If FROM is a SUBREG that indicates that we have already done at least
334 the required extension, strip it. We don't handle such SUBREGs as
335 TO here. */
336
337 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
338 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
339 >= GET_MODE_SIZE (to_mode))
340 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
341 from = gen_lowpart (to_mode, from), from_mode = to_mode;
342
343 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
344
345 if (to_mode == from_mode
346 || (from_mode == VOIDmode && CONSTANT_P (from)))
347 {
348 emit_move_insn (to, from);
349 return;
350 }
351
352 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
353 {
354 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
355
356 if (VECTOR_MODE_P (to_mode))
357 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
358 else
359 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
360
361 emit_move_insn (to, from);
362 return;
363 }
364
365 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
366 {
367 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
368 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
369 return;
370 }
371
372 if (to_real)
373 {
374 rtx value, insns;
375 convert_optab tab;
376
377 gcc_assert ((GET_MODE_PRECISION (from_mode)
378 != GET_MODE_PRECISION (to_mode))
379 || (DECIMAL_FLOAT_MODE_P (from_mode)
380 != DECIMAL_FLOAT_MODE_P (to_mode)));
381
382 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
383 /* Conversion between decimal float and binary float, same size. */
384 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
385 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
386 tab = sext_optab;
387 else
388 tab = trunc_optab;
389
390 /* Try converting directly if the insn is supported. */
391
392 code = convert_optab_handler (tab, to_mode, from_mode);
393 if (code != CODE_FOR_nothing)
394 {
395 emit_unop_insn (code, to, from,
396 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
397 return;
398 }
399
400 /* Otherwise use a libcall. */
401 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
402
403 /* Is this conversion implemented yet? */
404 gcc_assert (libcall);
405
406 start_sequence ();
407 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
408 1, from, from_mode);
409 insns = get_insns ();
410 end_sequence ();
411 emit_libcall_block (insns, to, value,
412 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
413 from)
414 : gen_rtx_FLOAT_EXTEND (to_mode, from));
415 return;
416 }
417
418 /* Handle pointer conversion. */ /* SPEE 900220. */
419 /* Targets are expected to provide conversion insns between PxImode and
420 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
421 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
422 {
423 enum machine_mode full_mode
424 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
425
426 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
427 != CODE_FOR_nothing);
428
429 if (full_mode != from_mode)
430 from = convert_to_mode (full_mode, from, unsignedp);
431 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
432 to, from, UNKNOWN);
433 return;
434 }
435 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
436 {
437 rtx new_from;
438 enum machine_mode full_mode
439 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
440
441 gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)
442 != CODE_FOR_nothing);
443
444 if (to_mode == full_mode)
445 {
446 emit_unop_insn (convert_optab_handler (sext_optab, full_mode,
447 from_mode),
448 to, from, UNKNOWN);
449 return;
450 }
451
452 new_from = gen_reg_rtx (full_mode);
453 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode),
454 new_from, from, UNKNOWN);
455
456 /* else proceed to integer conversions below. */
457 from_mode = full_mode;
458 from = new_from;
459 }
460
461 /* Make sure both are fixed-point modes or both are not. */
462 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
463 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
464 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
465 {
466 /* If we widen from_mode to to_mode and they are in the same class,
467 we won't saturate the result.
468 Otherwise, always saturate the result to play safe. */
469 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
470 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
471 expand_fixed_convert (to, from, 0, 0);
472 else
473 expand_fixed_convert (to, from, 0, 1);
474 return;
475 }
476
477 /* Now both modes are integers. */
478
479 /* Handle expanding beyond a word. */
480 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
481 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
482 {
483 rtx insns;
484 rtx lowpart;
485 rtx fill_value;
486 rtx lowfrom;
487 int i;
488 enum machine_mode lowpart_mode;
489 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
490
491 /* Try converting directly if the insn is supported. */
492 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
493 != CODE_FOR_nothing)
494 {
495 /* If FROM is a SUBREG, put it into a register. Do this
496 so that we always generate the same set of insns for
497 better cse'ing; if an intermediate assignment occurred,
498 we won't be doing the operation directly on the SUBREG. */
499 if (optimize > 0 && GET_CODE (from) == SUBREG)
500 from = force_reg (from_mode, from);
501 emit_unop_insn (code, to, from, equiv_code);
502 return;
503 }
504 /* Next, try converting via full word. */
505 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
506 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
507 != CODE_FOR_nothing))
508 {
509 rtx word_to = gen_reg_rtx (word_mode);
510 if (REG_P (to))
511 {
512 if (reg_overlap_mentioned_p (to, from))
513 from = force_reg (from_mode, from);
514 emit_clobber (to);
515 }
516 convert_move (word_to, from, unsignedp);
517 emit_unop_insn (code, to, word_to, equiv_code);
518 return;
519 }
520
521 /* No special multiword conversion insn; do it by hand. */
522 start_sequence ();
523
524 /* Since we will turn this into a no conflict block, we must ensure
525 that the source does not overlap the target. */
526
527 if (reg_overlap_mentioned_p (to, from))
528 from = force_reg (from_mode, from);
529
530 /* Get a copy of FROM widened to a word, if necessary. */
531 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
532 lowpart_mode = word_mode;
533 else
534 lowpart_mode = from_mode;
535
536 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
537
538 lowpart = gen_lowpart (lowpart_mode, to);
539 emit_move_insn (lowpart, lowfrom);
540
541 /* Compute the value to put in each remaining word. */
542 if (unsignedp)
543 fill_value = const0_rtx;
544 else
545 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
546 LT, lowfrom, const0_rtx,
547 VOIDmode, 0, -1);
548
549 /* Fill the remaining words. */
550 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
551 {
552 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
553 rtx subword = operand_subword (to, index, 1, to_mode);
554
555 gcc_assert (subword);
556
557 if (fill_value != subword)
558 emit_move_insn (subword, fill_value);
559 }
560
561 insns = get_insns ();
562 end_sequence ();
563
564 emit_insn (insns);
565 return;
566 }
567
568 /* Truncating multi-word to a word or less. */
569 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
570 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
571 {
572 if (!((MEM_P (from)
573 && ! MEM_VOLATILE_P (from)
574 && direct_load[(int) to_mode]
575 && ! mode_dependent_address_p (XEXP (from, 0)))
576 || REG_P (from)
577 || GET_CODE (from) == SUBREG))
578 from = force_reg (from_mode, from);
579 convert_move (to, gen_lowpart (word_mode, from), 0);
580 return;
581 }
582
583 /* Now follow all the conversions between integers
584 no more than a word long. */
585
586 /* For truncation, usually we can just refer to FROM in a narrower mode. */
587 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
588 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
589 GET_MODE_BITSIZE (from_mode)))
590 {
591 if (!((MEM_P (from)
592 && ! MEM_VOLATILE_P (from)
593 && direct_load[(int) to_mode]
594 && ! mode_dependent_address_p (XEXP (from, 0)))
595 || REG_P (from)
596 || GET_CODE (from) == SUBREG))
597 from = force_reg (from_mode, from);
598 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
599 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
600 from = copy_to_reg (from);
601 emit_move_insn (to, gen_lowpart (to_mode, from));
602 return;
603 }
604
605 /* Handle extension. */
606 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
607 {
608 /* Convert directly if that works. */
609 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
610 != CODE_FOR_nothing)
611 {
612 emit_unop_insn (code, to, from, equiv_code);
613 return;
614 }
615 else
616 {
617 enum machine_mode intermediate;
618 rtx tmp;
619 tree shift_amount;
620
621 /* Search for a mode to convert via. */
622 for (intermediate = from_mode; intermediate != VOIDmode;
623 intermediate = GET_MODE_WIDER_MODE (intermediate))
624 if (((can_extend_p (to_mode, intermediate, unsignedp)
625 != CODE_FOR_nothing)
626 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
627 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
628 GET_MODE_BITSIZE (intermediate))))
629 && (can_extend_p (intermediate, from_mode, unsignedp)
630 != CODE_FOR_nothing))
631 {
632 convert_move (to, convert_to_mode (intermediate, from,
633 unsignedp), unsignedp);
634 return;
635 }
636
637 /* No suitable intermediate mode.
638 Generate what we need with shifts. */
639 shift_amount = build_int_cst (NULL_TREE,
640 GET_MODE_BITSIZE (to_mode)
641 - GET_MODE_BITSIZE (from_mode));
642 from = gen_lowpart (to_mode, force_reg (from_mode, from));
643 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
644 to, unsignedp);
645 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
646 to, unsignedp);
647 if (tmp != to)
648 emit_move_insn (to, tmp);
649 return;
650 }
651 }
652
653 /* Support special truncate insns for certain modes. */
654 if (convert_optab_handler (trunc_optab, to_mode,
655 from_mode) != CODE_FOR_nothing)
656 {
657 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
658 to, from, UNKNOWN);
659 return;
660 }
661
662 /* Handle truncation of volatile memrefs, and so on;
663 the things that couldn't be truncated directly,
664 and for which there was no special instruction.
665
666 ??? Code above formerly short-circuited this, for most integer
667 mode pairs, with a force_reg in from_mode followed by a recursive
668 call to this routine. Appears always to have been wrong. */
669 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
670 {
671 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
672 emit_move_insn (to, temp);
673 return;
674 }
675
676 /* Mode combination is not recognized. */
677 gcc_unreachable ();
678 }
679
680 /* Return an rtx for a value that would result
681 from converting X to mode MODE.
682 Both X and MODE may be floating, or both integer.
683 UNSIGNEDP is nonzero if X is an unsigned value.
684 This can be done by referring to a part of X in place
685 or by copying to a new temporary with conversion. */
686
687 rtx
688 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
689 {
690 return convert_modes (mode, VOIDmode, x, unsignedp);
691 }
692
693 /* Return an rtx for a value that would result
694 from converting X from mode OLDMODE to mode MODE.
695 Both modes may be floating, or both integer.
696 UNSIGNEDP is nonzero if X is an unsigned value.
697
698 This can be done by referring to a part of X in place
699 or by copying to a new temporary with conversion.
700
701 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
702
703 rtx
704 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
705 {
706 rtx temp;
707
708 /* If FROM is a SUBREG that indicates that we have already done at least
709 the required extension, strip it. */
710
711 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
712 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
713 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
714 x = gen_lowpart (mode, x);
715
716 if (GET_MODE (x) != VOIDmode)
717 oldmode = GET_MODE (x);
718
719 if (mode == oldmode)
720 return x;
721
722 /* There is one case that we must handle specially: If we are converting
723 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
724 we are to interpret the constant as unsigned, gen_lowpart will do
725 the wrong thing if the constant appears negative. What we want to do is
726 make the high-order word of the constant zero, not all ones. */
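/* For instance, (const_int -1) interpreted as unsigned and widened to a mode
   of 2 * HOST_BITS_PER_WIDE_INT bits must become a value whose low word is
   all ones and whose high word is zero; a plain gen_lowpart would yield all
   ones in both words.  */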
727
728 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
729 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
730 && CONST_INT_P (x) && INTVAL (x) < 0)
731 {
732 double_int val = uhwi_to_double_int (INTVAL (x));
733
734 /* We need to zero extend VAL. */
735 if (oldmode != VOIDmode)
736 val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));
737
738 return immed_double_int_const (val, mode);
739 }
740
741 /* We can do this with a gen_lowpart if both desired and current modes
742 are integer, and this is either a constant integer, a register, or a
743 non-volatile MEM. Except for the constant case where MODE is no
744 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
745
746 if ((CONST_INT_P (x)
747 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
748 || (GET_MODE_CLASS (mode) == MODE_INT
749 && GET_MODE_CLASS (oldmode) == MODE_INT
750 && (GET_CODE (x) == CONST_DOUBLE
751 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
752 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
753 && direct_load[(int) mode])
754 || (REG_P (x)
755 && (! HARD_REGISTER_P (x)
756 || HARD_REGNO_MODE_OK (REGNO (x), mode))
757 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
758 GET_MODE_BITSIZE (GET_MODE (x)))))))))
759 {
760 /* ?? If we don't know OLDMODE, we have to assume here that
761 X does not need sign- or zero-extension. This may not be
762 the case, but it's the best we can do. */
763 if (CONST_INT_P (x) && oldmode != VOIDmode
764 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
765 {
766 HOST_WIDE_INT val = INTVAL (x);
767 int width = GET_MODE_BITSIZE (oldmode);
768
769 /* We must sign or zero-extend in this case. Start by
770 zero-extending, then sign extend if we need to. */
771 val &= ((HOST_WIDE_INT) 1 << width) - 1;
772 if (! unsignedp
773 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
774 val |= (HOST_WIDE_INT) (-1) << width;
775
776 return gen_int_mode (val, mode);
777 }
778
779 return gen_lowpart (mode, x);
780 }
781
782 /* Converting from an integer constant into MODE is always equivalent to a
783 subreg operation. */
784 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
785 {
786 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
787 return simplify_gen_subreg (mode, x, oldmode, 0);
788 }
789
790 temp = gen_reg_rtx (mode);
791 convert_move (temp, x, unsignedp);
792 return temp;
793 }
794 \f
795 /* Return the largest alignment we can use for doing a move (or store)
796 of MAX_PIECES. ALIGN is the largest alignment we could use. */
797
798 static unsigned int
799 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
800 {
801 enum machine_mode tmode;
802
803 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
804 if (align >= GET_MODE_ALIGNMENT (tmode))
805 align = GET_MODE_ALIGNMENT (tmode);
806 else
807 {
808 enum machine_mode tmode, xmode;
809
810 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
811 tmode != VOIDmode;
812 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
813 if (GET_MODE_SIZE (tmode) > max_pieces
814 || SLOW_UNALIGNED_ACCESS (tmode, align))
815 break;
816
817 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
818 }
819
820 return align;
821 }
822
823 /* Return the widest integer mode strictly narrower than SIZE. If no such
824 mode can be found, return VOIDmode. */
825
826 static enum machine_mode
827 widest_int_mode_for_size (unsigned int size)
828 {
829 enum machine_mode tmode, mode = VOIDmode;
830
831 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
832 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
833 if (GET_MODE_SIZE (tmode) < size)
834 mode = tmode;
835
836 return mode;
837 }
838
839 /* STORE_MAX_PIECES is the number of bytes at a time that we can
840 store efficiently. Due to internal GCC limitations, this is
841 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
842 for an immediate constant. */
843
844 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
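/* For example, assuming a typical 64-bit HOST_WIDE_INT this caps stores at
   16 bytes per piece (8 bytes when HOST_WIDE_INT is 32 bits), further limited
   by the target's MOVE_MAX_PIECES.  */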
845
846 /* Determine whether the LEN bytes can be moved by using several move
847 instructions. Return nonzero if a call to move_by_pieces should
848 succeed. */
849
850 int
851 can_move_by_pieces (unsigned HOST_WIDE_INT len,
852 unsigned int align ATTRIBUTE_UNUSED)
853 {
854 return MOVE_BY_PIECES_P (len, align);
855 }
856
857 /* Generate several move instructions to copy LEN bytes from block FROM to
858 block TO. (These are MEM rtx's with BLKmode).
859
860 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
861 used to push FROM to the stack.
862
863 ALIGN is maximum stack alignment we can assume.
864
865 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
866 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
867 stpcpy. */
868
869 rtx
870 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
871 unsigned int align, int endp)
872 {
873 struct move_by_pieces_d data;
874 enum machine_mode to_addr_mode, from_addr_mode
875 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
876 rtx to_addr, from_addr = XEXP (from, 0);
877 unsigned int max_size = MOVE_MAX_PIECES + 1;
878 enum insn_code icode;
879
880 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
881
882 data.offset = 0;
883 data.from_addr = from_addr;
884 if (to)
885 {
886 to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
887 to_addr = XEXP (to, 0);
888 data.to = to;
889 data.autinc_to
890 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
891 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
892 data.reverse
893 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
894 }
895 else
896 {
897 to_addr_mode = VOIDmode;
898 to_addr = NULL_RTX;
899 data.to = NULL_RTX;
900 data.autinc_to = 1;
901 #ifdef STACK_GROWS_DOWNWARD
902 data.reverse = 1;
903 #else
904 data.reverse = 0;
905 #endif
906 }
907 data.to_addr = to_addr;
908 data.from = from;
909 data.autinc_from
910 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
911 || GET_CODE (from_addr) == POST_INC
912 || GET_CODE (from_addr) == POST_DEC);
913
914 data.explicit_inc_from = 0;
915 data.explicit_inc_to = 0;
916 if (data.reverse) data.offset = len;
917 data.len = len;
918
919 /* If copying requires more than two move insns,
920 copy addresses to registers (to make displacements shorter)
921 and use post-increment if available. */
922 if (!(data.autinc_from && data.autinc_to)
923 && move_by_pieces_ninsns (len, align, max_size) > 2)
924 {
925 /* Find the mode of the largest move...
926 MODE might not be used depending on the definitions of the
927 USE_* macros below. */
928 enum machine_mode mode ATTRIBUTE_UNUSED
929 = widest_int_mode_for_size (max_size);
930
931 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
932 {
933 data.from_addr = copy_to_mode_reg (from_addr_mode,
934 plus_constant (from_addr, len));
935 data.autinc_from = 1;
936 data.explicit_inc_from = -1;
937 }
938 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
939 {
940 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
941 data.autinc_from = 1;
942 data.explicit_inc_from = 1;
943 }
944 if (!data.autinc_from && CONSTANT_P (from_addr))
945 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
946 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
947 {
948 data.to_addr = copy_to_mode_reg (to_addr_mode,
949 plus_constant (to_addr, len));
950 data.autinc_to = 1;
951 data.explicit_inc_to = -1;
952 }
953 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
954 {
955 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
956 data.autinc_to = 1;
957 data.explicit_inc_to = 1;
958 }
959 if (!data.autinc_to && CONSTANT_P (to_addr))
960 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
961 }
962
963 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
964
965 /* First move what we can in the largest integer mode, then go to
966 successively smaller modes. */
967
968 while (max_size > 1)
969 {
970 enum machine_mode mode = widest_int_mode_for_size (max_size);
971
972 if (mode == VOIDmode)
973 break;
974
975 icode = optab_handler (mov_optab, mode);
976 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
977 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
978
979 max_size = GET_MODE_SIZE (mode);
980 }
981
982 /* The code above should have handled everything. */
983 gcc_assert (!data.len);
984
985 if (endp)
986 {
987 rtx to1;
988
989 gcc_assert (!data.reverse);
990 if (data.autinc_to)
991 {
992 if (endp == 2)
993 {
994 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
995 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
996 else
997 data.to_addr = copy_to_mode_reg (to_addr_mode,
998 plus_constant (data.to_addr,
999 -1));
1000 }
1001 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1002 data.offset);
1003 }
1004 else
1005 {
1006 if (endp == 2)
1007 --data.offset;
1008 to1 = adjust_address (data.to, QImode, data.offset);
1009 }
1010 return to1;
1011 }
1012 else
1013 return data.to;
1014 }
1015
1016 /* Return number of insns required to move L bytes by pieces.
1017 ALIGN (in bits) is maximum alignment we can assume. */
1018
1019 static unsigned HOST_WIDE_INT
1020 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1021 unsigned int max_size)
1022 {
1023 unsigned HOST_WIDE_INT n_insns = 0;
1024
1025 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1026
1027 while (max_size > 1)
1028 {
1029 enum machine_mode mode;
1030 enum insn_code icode;
1031
1032 mode = widest_int_mode_for_size (max_size);
1033
1034 if (mode == VOIDmode)
1035 break;
1036
1037 icode = optab_handler (mov_optab, mode);
1038 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1039 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1040
1041 max_size = GET_MODE_SIZE (mode);
1042 }
1043
1044 gcc_assert (!l);
1045 return n_insns;
1046 }
1047
1048 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1049 with move instructions for mode MODE. GENFUN is the gen_... function
1050 to make a move insn for that mode. DATA has all the other info. */
1051
1052 static void
1053 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1054 struct move_by_pieces_d *data)
1055 {
1056 unsigned int size = GET_MODE_SIZE (mode);
1057 rtx to1 = NULL_RTX, from1;
1058
1059 while (data->len >= size)
1060 {
1061 if (data->reverse)
1062 data->offset -= size;
1063
1064 if (data->to)
1065 {
1066 if (data->autinc_to)
1067 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1068 data->offset);
1069 else
1070 to1 = adjust_address (data->to, mode, data->offset);
1071 }
1072
1073 if (data->autinc_from)
1074 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1075 data->offset);
1076 else
1077 from1 = adjust_address (data->from, mode, data->offset);
1078
1079 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1080 emit_insn (gen_add2_insn (data->to_addr,
1081 GEN_INT (-(HOST_WIDE_INT)size)));
1082 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1083 emit_insn (gen_add2_insn (data->from_addr,
1084 GEN_INT (-(HOST_WIDE_INT)size)));
1085
1086 if (data->to)
1087 emit_insn ((*genfun) (to1, from1));
1088 else
1089 {
1090 #ifdef PUSH_ROUNDING
1091 emit_single_push_insn (mode, from1, NULL);
1092 #else
1093 gcc_unreachable ();
1094 #endif
1095 }
1096
1097 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1098 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1099 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1100 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1101
1102 if (! data->reverse)
1103 data->offset += size;
1104
1105 data->len -= size;
1106 }
1107 }
1108 \f
1109 /* Emit code to move a block Y to a block X. This may be done with
1110 string-move instructions, with multiple scalar move instructions,
1111 or with a library call.
1112
1113 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1114 SIZE is an rtx that says how long they are.
1115 ALIGN is the maximum alignment we can assume they have.
1116 METHOD describes what kind of copy this is, and what mechanisms may be used.
1117
1118 Return the address of the new block, if memcpy is called and returns it,
1119 0 otherwise. */
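/* The strategies below are tried in order: a constant-size copy satisfying
   MOVE_BY_PIECES_P is expanded inline piece by piece; otherwise a target
   movmem pattern is tried; otherwise, when METHOD and the address spaces
   permit it, the memcpy libcall is used; and as a last resort an explicit
   byte-copy loop is emitted.  */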
1120
1121 rtx
1122 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1123 unsigned int expected_align, HOST_WIDE_INT expected_size)
1124 {
1125 bool may_use_call;
1126 rtx retval = 0;
1127 unsigned int align;
1128
1129 gcc_assert (size);
1130 if (CONST_INT_P (size)
1131 && INTVAL (size) == 0)
1132 return 0;
1133
1134 switch (method)
1135 {
1136 case BLOCK_OP_NORMAL:
1137 case BLOCK_OP_TAILCALL:
1138 may_use_call = true;
1139 break;
1140
1141 case BLOCK_OP_CALL_PARM:
1142 may_use_call = block_move_libcall_safe_for_call_parm ();
1143
1144 /* Make inhibit_defer_pop nonzero around the library call
1145 to force it to pop the arguments right away. */
1146 NO_DEFER_POP;
1147 break;
1148
1149 case BLOCK_OP_NO_LIBCALL:
1150 may_use_call = false;
1151 break;
1152
1153 default:
1154 gcc_unreachable ();
1155 }
1156
1157 gcc_assert (MEM_P (x) && MEM_P (y));
1158 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1159 gcc_assert (align >= BITS_PER_UNIT);
1160
1161 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1162 block copy is more efficient for other large modes, e.g. DCmode. */
1163 x = adjust_address (x, BLKmode, 0);
1164 y = adjust_address (y, BLKmode, 0);
1165
1166 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1167 can be incorrect is coming from __builtin_memcpy. */
1168 if (CONST_INT_P (size))
1169 {
1170 x = shallow_copy_rtx (x);
1171 y = shallow_copy_rtx (y);
1172 set_mem_size (x, size);
1173 set_mem_size (y, size);
1174 }
1175
1176 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1177 move_by_pieces (x, y, INTVAL (size), align, 0);
1178 else if (emit_block_move_via_movmem (x, y, size, align,
1179 expected_align, expected_size))
1180 ;
1181 else if (may_use_call
1182 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1183 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1184 retval = emit_block_move_via_libcall (x, y, size,
1185 method == BLOCK_OP_TAILCALL);
1186 else
1187 emit_block_move_via_loop (x, y, size, align);
1188
1189 if (method == BLOCK_OP_CALL_PARM)
1190 OK_DEFER_POP;
1191
1192 return retval;
1193 }
1194
1195 rtx
1196 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1197 {
1198 return emit_block_move_hints (x, y, size, method, 0, -1);
1199 }
1200
1201 /* A subroutine of emit_block_move. Returns true if calling the
1202 block move libcall will not clobber any parameters which may have
1203 already been placed on the stack. */
1204
1205 static bool
1206 block_move_libcall_safe_for_call_parm (void)
1207 {
1208 #if defined (REG_PARM_STACK_SPACE)
1209 tree fn;
1210 #endif
1211
1212 /* If arguments are pushed on the stack, then they're safe. */
1213 if (PUSH_ARGS)
1214 return true;
1215
1216 /* If registers go on the stack anyway, any argument is sure to clobber
1217 an outgoing argument. */
1218 #if defined (REG_PARM_STACK_SPACE)
1219 fn = emit_block_move_libcall_fn (false);
1220 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1221 depend on its argument. */
1222 (void) fn;
1223 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1224 && REG_PARM_STACK_SPACE (fn) != 0)
1225 return false;
1226 #endif
1227
1228 /* If any argument goes in memory, then it might clobber an outgoing
1229 argument. */
1230 {
1231 CUMULATIVE_ARGS args_so_far;
1232 tree fn, arg;
1233
1234 fn = emit_block_move_libcall_fn (false);
1235 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1236
1237 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1238 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1239 {
1240 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1241 rtx tmp = targetm.calls.function_arg (&args_so_far, mode,
1242 NULL_TREE, true);
1243 if (!tmp || !REG_P (tmp))
1244 return false;
1245 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1246 return false;
1247 targetm.calls.function_arg_advance (&args_so_far, mode,
1248 NULL_TREE, true);
1249 }
1250 }
1251 return true;
1252 }
1253
1254 /* A subroutine of emit_block_move. Expand a movmem pattern;
1255 return true if successful. */
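/* The code below assumes a movmem pattern supplies either four generator
   operands (destination, source, size, alignment in bytes) or six (those
   four plus expected-alignment and expected-size hints); the
   nops == 4 || nops == 6 assertion checks this via n_generator_args.  */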
1256
1257 static bool
1258 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1259 unsigned int expected_align, HOST_WIDE_INT expected_size)
1260 {
1261 int save_volatile_ok = volatile_ok;
1262 enum machine_mode mode;
1263
1264 if (expected_align < align)
1265 expected_align = align;
1266
1267 /* Since this is a move insn, we don't care about volatility. */
1268 volatile_ok = 1;
1269
1270 /* Try the most limited insn first, because there's no point
1271 including more than one in the machine description unless
1272 the more limited one has some advantage. */
1273
1274 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1275 mode = GET_MODE_WIDER_MODE (mode))
1276 {
1277 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1278
1279 if (code != CODE_FOR_nothing
1280 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1281 here because if SIZE is less than the mode mask, as it is
1282 returned by the macro, it will definitely be less than the
1283 actual mode mask. */
1284 && ((CONST_INT_P (size)
1285 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1286 <= (GET_MODE_MASK (mode) >> 1)))
1287 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
1288 {
1289 struct expand_operand ops[6];
1290 unsigned int nops;
1291
1292 /* ??? When called via emit_block_move_for_call, it'd be
1293 nice if there were some way to inform the backend, so
1294 that it doesn't fail the expansion because it thinks
1295 emitting the libcall would be more efficient. */
1296 nops = insn_data[(int) code].n_generator_args;
1297 gcc_assert (nops == 4 || nops == 6);
1298
1299 create_fixed_operand (&ops[0], x);
1300 create_fixed_operand (&ops[1], y);
1301 /* The check above guarantees that this size conversion is valid. */
1302 create_convert_operand_to (&ops[2], size, mode, true);
1303 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1304 if (nops == 6)
1305 {
1306 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1307 create_integer_operand (&ops[5], expected_size);
1308 }
1309 if (maybe_expand_insn (code, nops, ops))
1310 {
1311 volatile_ok = save_volatile_ok;
1312 return true;
1313 }
1314 }
1315 }
1316
1317 volatile_ok = save_volatile_ok;
1318 return false;
1319 }
1320
1321 /* A subroutine of emit_block_move. Expand a call to memcpy.
1322 Return the return value from memcpy, 0 otherwise. */
1323
1324 rtx
1325 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1326 {
1327 rtx dst_addr, src_addr;
1328 tree call_expr, fn, src_tree, dst_tree, size_tree;
1329 enum machine_mode size_mode;
1330 rtx retval;
1331
1332 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1333 pseudos. We can then place those new pseudos into a VAR_DECL and
1334 use them later. */
1335
1336 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1337 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1338
1339 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1340 src_addr = convert_memory_address (ptr_mode, src_addr);
1341
1342 dst_tree = make_tree (ptr_type_node, dst_addr);
1343 src_tree = make_tree (ptr_type_node, src_addr);
1344
1345 size_mode = TYPE_MODE (sizetype);
1346
1347 size = convert_to_mode (size_mode, size, 1);
1348 size = copy_to_mode_reg (size_mode, size);
1349
1350 /* It is incorrect to use the libcall calling conventions to call
1351 memcpy in this context. This could be a user call to memcpy and
1352 the user may wish to examine the return value from memcpy. For
1353 targets where libcalls and normal calls have different conventions
1354 for returning pointers, we could end up generating incorrect code. */
1355
1356 size_tree = make_tree (sizetype, size);
1357
1358 fn = emit_block_move_libcall_fn (true);
1359 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1360 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1361
1362 retval = expand_normal (call_expr);
1363
1364 return retval;
1365 }
1366
1367 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1368 for the function we use for block copies. The first time FOR_CALL
1369 is true, we call assemble_external. */
1370
1371 static GTY(()) tree block_move_fn;
1372
1373 void
1374 init_block_move_fn (const char *asmspec)
1375 {
1376 if (!block_move_fn)
1377 {
1378 tree args, fn;
1379
1380 fn = get_identifier ("memcpy");
1381 args = build_function_type_list (ptr_type_node, ptr_type_node,
1382 const_ptr_type_node, sizetype,
1383 NULL_TREE);
1384
1385 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1386 DECL_EXTERNAL (fn) = 1;
1387 TREE_PUBLIC (fn) = 1;
1388 DECL_ARTIFICIAL (fn) = 1;
1389 TREE_NOTHROW (fn) = 1;
1390 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1391 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1392
1393 block_move_fn = fn;
1394 }
1395
1396 if (asmspec)
1397 set_user_assembler_name (block_move_fn, asmspec);
1398 }
1399
1400 static tree
1401 emit_block_move_libcall_fn (int for_call)
1402 {
1403 static bool emitted_extern;
1404
1405 if (!block_move_fn)
1406 init_block_move_fn (NULL);
1407
1408 if (for_call && !emitted_extern)
1409 {
1410 emitted_extern = true;
1411 make_decl_rtl (block_move_fn);
1412 assemble_external (block_move_fn);
1413 }
1414
1415 return block_move_fn;
1416 }
1417
1418 /* A subroutine of emit_block_move. Copy the data via an explicit
1419 loop. This is used only when libcalls are forbidden. */
1420 /* ??? It'd be nice to copy in hunks larger than QImode. */
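/* Roughly, the loop emitted below is:

     iter = 0;
     goto cmp;
   top:
     *(x + iter) = *(y + iter);   // one QImode byte per iteration
     iter += 1;
   cmp:
     if (iter < size)
       goto top;  */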
1421
1422 static void
1423 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1424 unsigned int align ATTRIBUTE_UNUSED)
1425 {
1426 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1427 enum machine_mode x_addr_mode
1428 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
1429 enum machine_mode y_addr_mode
1430 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
1431 enum machine_mode iter_mode;
1432
1433 iter_mode = GET_MODE (size);
1434 if (iter_mode == VOIDmode)
1435 iter_mode = word_mode;
1436
1437 top_label = gen_label_rtx ();
1438 cmp_label = gen_label_rtx ();
1439 iter = gen_reg_rtx (iter_mode);
1440
1441 emit_move_insn (iter, const0_rtx);
1442
1443 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1444 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1445 do_pending_stack_adjust ();
1446
1447 emit_jump (cmp_label);
1448 emit_label (top_label);
1449
1450 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1451 x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);
1452
1453 if (x_addr_mode != y_addr_mode)
1454 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1455 y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);
1456
1457 x = change_address (x, QImode, x_addr);
1458 y = change_address (y, QImode, y_addr);
1459
1460 emit_move_insn (x, y);
1461
1462 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1463 true, OPTAB_LIB_WIDEN);
1464 if (tmp != iter)
1465 emit_move_insn (iter, tmp);
1466
1467 emit_label (cmp_label);
1468
1469 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1470 true, top_label);
1471 }
1472 \f
1473 /* Copy all or part of a value X into registers starting at REGNO.
1474 The number of registers to be filled is NREGS. */
1475
1476 void
1477 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1478 {
1479 int i;
1480 #ifdef HAVE_load_multiple
1481 rtx pat;
1482 rtx last;
1483 #endif
1484
1485 if (nregs == 0)
1486 return;
1487
1488 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1489 x = validize_mem (force_const_mem (mode, x));
1490
1491 /* See if the machine can do this with a load multiple insn. */
1492 #ifdef HAVE_load_multiple
1493 if (HAVE_load_multiple)
1494 {
1495 last = get_last_insn ();
1496 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1497 GEN_INT (nregs));
1498 if (pat)
1499 {
1500 emit_insn (pat);
1501 return;
1502 }
1503 else
1504 delete_insns_since (last);
1505 }
1506 #endif
1507
1508 for (i = 0; i < nregs; i++)
1509 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1510 operand_subword_force (x, i, mode));
1511 }
1512
1513 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1514 The number of registers to be filled is NREGS. */
1515
1516 void
1517 move_block_from_reg (int regno, rtx x, int nregs)
1518 {
1519 int i;
1520
1521 if (nregs == 0)
1522 return;
1523
1524 /* See if the machine can do this with a store multiple insn. */
1525 #ifdef HAVE_store_multiple
1526 if (HAVE_store_multiple)
1527 {
1528 rtx last = get_last_insn ();
1529 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1530 GEN_INT (nregs));
1531 if (pat)
1532 {
1533 emit_insn (pat);
1534 return;
1535 }
1536 else
1537 delete_insns_since (last);
1538 }
1539 #endif
1540
1541 for (i = 0; i < nregs; i++)
1542 {
1543 rtx tem = operand_subword (x, i, 1, BLKmode);
1544
1545 gcc_assert (tem);
1546
1547 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1548 }
1549 }
1550
1551 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1552 ORIG, where ORIG is a non-consecutive group of registers represented by
1553 a PARALLEL. The clone is identical to the original except in that the
1554 original set of registers is replaced by a new set of pseudo registers.
1555 The new set has the same modes as the original set. */
1556
1557 rtx
1558 gen_group_rtx (rtx orig)
1559 {
1560 int i, length;
1561 rtx *tmps;
1562
1563 gcc_assert (GET_CODE (orig) == PARALLEL);
1564
1565 length = XVECLEN (orig, 0);
1566 tmps = XALLOCAVEC (rtx, length);
1567
1568 /* Skip a NULL entry in first slot. */
1569 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1570
1571 if (i)
1572 tmps[0] = 0;
1573
1574 for (; i < length; i++)
1575 {
1576 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1577 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1578
1579 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1580 }
1581
1582 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1583 }
1584
1585 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1586 except that values are placed in TMPS[i], and must later be moved
1587 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1588
1589 static void
1590 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1591 {
1592 rtx src;
1593 int start, i;
1594 enum machine_mode m = GET_MODE (orig_src);
1595
1596 gcc_assert (GET_CODE (dst) == PARALLEL);
1597
1598 if (m != VOIDmode
1599 && !SCALAR_INT_MODE_P (m)
1600 && !MEM_P (orig_src)
1601 && GET_CODE (orig_src) != CONCAT)
1602 {
1603 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1604 if (imode == BLKmode)
1605 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1606 else
1607 src = gen_reg_rtx (imode);
1608 if (imode != BLKmode)
1609 src = gen_lowpart (GET_MODE (orig_src), src);
1610 emit_move_insn (src, orig_src);
1611 /* ...and back again. */
1612 if (imode != BLKmode)
1613 src = gen_lowpart (imode, src);
1614 emit_group_load_1 (tmps, dst, src, type, ssize);
1615 return;
1616 }
1617
1618 /* Check for a NULL entry, used to indicate that the parameter goes
1619 both on the stack and in registers. */
1620 if (XEXP (XVECEXP (dst, 0, 0), 0))
1621 start = 0;
1622 else
1623 start = 1;
1624
1625 /* Process the pieces. */
1626 for (i = start; i < XVECLEN (dst, 0); i++)
1627 {
1628 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1629 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1630 unsigned int bytelen = GET_MODE_SIZE (mode);
1631 int shift = 0;
1632
1633 /* Handle trailing fragments that run over the size of the struct. */
1634 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1635 {
1636 /* Arrange to shift the fragment to where it belongs.
1637 extract_bit_field loads to the lsb of the reg. */
1638 if (
1639 #ifdef BLOCK_REG_PADDING
1640 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1641 == (BYTES_BIG_ENDIAN ? upward : downward)
1642 #else
1643 BYTES_BIG_ENDIAN
1644 #endif
1645 )
1646 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1647 bytelen = ssize - bytepos;
1648 gcc_assert (bytelen > 0);
1649 }
1650
1651 /* If we won't be loading directly from memory, protect the real source
1652 from strange tricks we might play; but make sure that the source can
1653 be loaded directly into the destination. */
1654 src = orig_src;
1655 if (!MEM_P (orig_src)
1656 && (!CONSTANT_P (orig_src)
1657 || (GET_MODE (orig_src) != mode
1658 && GET_MODE (orig_src) != VOIDmode)))
1659 {
1660 if (GET_MODE (orig_src) == VOIDmode)
1661 src = gen_reg_rtx (mode);
1662 else
1663 src = gen_reg_rtx (GET_MODE (orig_src));
1664
1665 emit_move_insn (src, orig_src);
1666 }
1667
1668 /* Optimize the access just a bit. */
1669 if (MEM_P (src)
1670 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1671 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1672 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1673 && bytelen == GET_MODE_SIZE (mode))
1674 {
1675 tmps[i] = gen_reg_rtx (mode);
1676 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1677 }
1678 else if (COMPLEX_MODE_P (mode)
1679 && GET_MODE (src) == mode
1680 && bytelen == GET_MODE_SIZE (mode))
1681 /* Let emit_move_complex do the bulk of the work. */
1682 tmps[i] = src;
1683 else if (GET_CODE (src) == CONCAT)
1684 {
1685 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1686 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1687
1688 if ((bytepos == 0 && bytelen == slen0)
1689 || (bytepos != 0 && bytepos + bytelen <= slen))
1690 {
1691 /* The following assumes that the concatenated objects all
1692 have the same size. In this case, a simple calculation
1693 can be used to determine the object and the bit field
1694 to be extracted. */
1695 tmps[i] = XEXP (src, bytepos / slen0);
1696 if (! CONSTANT_P (tmps[i])
1697 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1698 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1699 (bytepos % slen0) * BITS_PER_UNIT,
1700 1, false, NULL_RTX, mode, mode);
1701 }
1702 else
1703 {
1704 rtx mem;
1705
1706 gcc_assert (!bytepos);
1707 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1708 emit_move_insn (mem, src);
1709 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1710 0, 1, false, NULL_RTX, mode, mode);
1711 }
1712 }
1713 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1714 SIMD register, which is currently broken. Until we get GCC
1715 to emit proper RTL for these cases, let's dump to memory. */
1716 else if (VECTOR_MODE_P (GET_MODE (dst))
1717 && REG_P (src))
1718 {
1719 int slen = GET_MODE_SIZE (GET_MODE (src));
1720 rtx mem;
1721
1722 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1723 emit_move_insn (mem, src);
1724 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1725 }
1726 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1727 && XVECLEN (dst, 0) > 1)
1728 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1729 else if (CONSTANT_P (src))
1730 {
1731 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1732
1733 if (len == ssize)
1734 tmps[i] = src;
1735 else
1736 {
1737 rtx first, second;
1738
1739 gcc_assert (2 * len == ssize);
1740 split_double (src, &first, &second);
1741 if (i)
1742 tmps[i] = second;
1743 else
1744 tmps[i] = first;
1745 }
1746 }
1747 else if (REG_P (src) && GET_MODE (src) == mode)
1748 tmps[i] = src;
1749 else
1750 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1751 bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
1752 mode, mode);
1753
1754 if (shift)
1755 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1756 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1757 }
1758 }
1759
1760 /* Emit code to move a block SRC of type TYPE to a block DST,
1761 where DST is non-consecutive registers represented by a PARALLEL.
1762 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1763 if not known. */
1764
1765 void
1766 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1767 {
1768 rtx *tmps;
1769 int i;
1770
1771 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1772 emit_group_load_1 (tmps, dst, src, type, ssize);
1773
1774 /* Copy the extracted pieces into the proper (probable) hard regs. */
1775 for (i = 0; i < XVECLEN (dst, 0); i++)
1776 {
1777 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1778 if (d == NULL)
1779 continue;
1780 emit_move_insn (d, tmps[i]);
1781 }
1782 }
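/* Editorial sketch, not part of the original sources: a hypothetical
   caller holding a two-element PARALLEL descriptor (the exact layout is
   target-dependent and purely illustrative here) could load a 16-byte
   BLKmode source into it roughly like this:

       rtx par
         = gen_rtx_PARALLEL (BLKmode,
                             gen_rtvec (2,
                                        gen_rtx_EXPR_LIST (VOIDmode,
                                                           gen_reg_rtx (DImode),
                                                           GEN_INT (0)),
                                        gen_rtx_EXPR_LIST (VOIDmode,
                                                           gen_reg_rtx (DImode),
                                                           GEN_INT (8))));
       emit_group_load (par, src_mem, type, 16);

   Each EXPR_LIST pairs a destination register with its byte offset into
   the source block; emit_group_load extracts the matching piece of
   SRC_MEM into each register.  SRC_MEM and TYPE are hypothetical names
   for the source MEM and its tree type.  */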
1783
1784 /* Similar, but load SRC into new pseudos in a format that looks like
1785 PARALLEL. This can later be fed to emit_group_move to get things
1786 in the right place. */
1787
1788 rtx
1789 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1790 {
1791 rtvec vec;
1792 int i;
1793
1794 vec = rtvec_alloc (XVECLEN (parallel, 0));
1795 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1796
1797 /* Convert the vector to look just like the original PARALLEL, except
1798 with the computed values. */
1799 for (i = 0; i < XVECLEN (parallel, 0); i++)
1800 {
1801 rtx e = XVECEXP (parallel, 0, i);
1802 rtx d = XEXP (e, 0);
1803
1804 if (d)
1805 {
1806 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1807 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1808 }
1809 RTVEC_ELT (vec, i) = e;
1810 }
1811
1812 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1813 }
1814
1815 /* Emit code to move a block SRC to block DST, where SRC and DST are
1816 non-consecutive groups of registers, each represented by a PARALLEL. */
1817
1818 void
1819 emit_group_move (rtx dst, rtx src)
1820 {
1821 int i;
1822
1823 gcc_assert (GET_CODE (src) == PARALLEL
1824 && GET_CODE (dst) == PARALLEL
1825 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1826
1827 /* Skip first entry if NULL. */
1828 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1829 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1830 XEXP (XVECEXP (src, 0, i), 0));
1831 }
1832
1833 /* Move a group of registers represented by a PARALLEL into pseudos. */
1834
1835 rtx
1836 emit_group_move_into_temps (rtx src)
1837 {
1838 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1839 int i;
1840
1841 for (i = 0; i < XVECLEN (src, 0); i++)
1842 {
1843 rtx e = XVECEXP (src, 0, i);
1844 rtx d = XEXP (e, 0);
1845
1846 if (d)
1847 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1848 RTVEC_ELT (vec, i) = e;
1849 }
1850
1851 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1852 }
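/* Editorial note: emit_group_load_into_temps and emit_group_move_into_temps
   are typically paired with emit_group_move, e.g. in a hypothetical caller:

       rtx temps = emit_group_load_into_temps (par, src_mem, type, ssize);
       ... other expansion that must not clobber the hard registers ...
       emit_group_move (par, temps);

   so the values are first extracted into fresh pseudos and only copied
   into the (probable) hard registers at the very end.  PAR, SRC_MEM, TYPE
   and SSIZE are illustrative names, not fixed identifiers.  */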
1853
1854 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1855 where SRC is non-consecutive registers represented by a PARALLEL.
1856 SSIZE represents the total size of block ORIG_DST, or -1 if not
1857 known. */
1858
1859 void
1860 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1861 {
1862 rtx *tmps, dst;
1863 int start, finish, i;
1864 enum machine_mode m = GET_MODE (orig_dst);
1865
1866 gcc_assert (GET_CODE (src) == PARALLEL);
1867
1868 if (!SCALAR_INT_MODE_P (m)
1869 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1870 {
1871 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1872 if (imode == BLKmode)
1873 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1874 else
1875 dst = gen_reg_rtx (imode);
1876 emit_group_store (dst, src, type, ssize);
1877 if (imode != BLKmode)
1878 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1879 emit_move_insn (orig_dst, dst);
1880 return;
1881 }
1882
1883 /* Check for a NULL entry, used to indicate that the parameter goes
1884 both on the stack and in registers. */
1885 if (XEXP (XVECEXP (src, 0, 0), 0))
1886 start = 0;
1887 else
1888 start = 1;
1889 finish = XVECLEN (src, 0);
1890
1891 tmps = XALLOCAVEC (rtx, finish);
1892
1893 /* Copy the (probable) hard regs into pseudos. */
1894 for (i = start; i < finish; i++)
1895 {
1896 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1897 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1898 {
1899 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1900 emit_move_insn (tmps[i], reg);
1901 }
1902 else
1903 tmps[i] = reg;
1904 }
1905
1906 /* If we won't be storing directly into memory, protect the real destination
1907 from strange tricks we might play. */
1908 dst = orig_dst;
1909 if (GET_CODE (dst) == PARALLEL)
1910 {
1911 rtx temp;
1912
1913 /* We can get a PARALLEL dst if there is a conditional expression in
1914 a return statement. In that case, the dst and src are the same,
1915 so no action is necessary. */
1916 if (rtx_equal_p (dst, src))
1917 return;
1918
1919 /* It is unclear if we can ever reach here, but we may as well handle
1920 it. Allocate a temporary, and split this into a store/load to/from
1921 the temporary. */
1922
1923 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1924 emit_group_store (temp, src, type, ssize);
1925 emit_group_load (dst, temp, type, ssize);
1926 return;
1927 }
1928 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1929 {
1930 enum machine_mode outer = GET_MODE (dst);
1931 enum machine_mode inner;
1932 HOST_WIDE_INT bytepos;
1933 bool done = false;
1934 rtx temp;
1935
1936 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1937 dst = gen_reg_rtx (outer);
1938
1939 /* Make life a bit easier for combine. */
1940 /* If the first element of the vector is the low part
1941 of the destination mode, use a paradoxical subreg to
1942 initialize the destination. */
1943 if (start < finish)
1944 {
1945 inner = GET_MODE (tmps[start]);
1946 bytepos = subreg_lowpart_offset (inner, outer);
1947 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1948 {
1949 temp = simplify_gen_subreg (outer, tmps[start],
1950 inner, 0);
1951 if (temp)
1952 {
1953 emit_move_insn (dst, temp);
1954 done = true;
1955 start++;
1956 }
1957 }
1958 }
1959
1960 /* If the first element wasn't the low part, try the last. */
1961 if (!done
1962 && start < finish - 1)
1963 {
1964 inner = GET_MODE (tmps[finish - 1]);
1965 bytepos = subreg_lowpart_offset (inner, outer);
1966 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1967 {
1968 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1969 inner, 0);
1970 if (temp)
1971 {
1972 emit_move_insn (dst, temp);
1973 done = true;
1974 finish--;
1975 }
1976 }
1977 }
1978
1979 /* Otherwise, simply initialize the result to zero. */
1980 if (!done)
1981 emit_move_insn (dst, CONST0_RTX (outer));
1982 }
1983
1984 /* Process the pieces. */
1985 for (i = start; i < finish; i++)
1986 {
1987 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1988 enum machine_mode mode = GET_MODE (tmps[i]);
1989 unsigned int bytelen = GET_MODE_SIZE (mode);
1990 unsigned int adj_bytelen = bytelen;
1991 rtx dest = dst;
1992
1993 /* Handle trailing fragments that run over the size of the struct. */
1994 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1995 adj_bytelen = ssize - bytepos;
1996
1997 if (GET_CODE (dst) == CONCAT)
1998 {
1999 if (bytepos + adj_bytelen
2000 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2001 dest = XEXP (dst, 0);
2002 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2003 {
2004 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2005 dest = XEXP (dst, 1);
2006 }
2007 else
2008 {
2009 enum machine_mode dest_mode = GET_MODE (dest);
2010 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2011
2012 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2013
2014 if (GET_MODE_ALIGNMENT (dest_mode)
2015 >= GET_MODE_ALIGNMENT (tmp_mode))
2016 {
2017 dest = assign_stack_temp (dest_mode,
2018 GET_MODE_SIZE (dest_mode),
2019 0);
2020 emit_move_insn (adjust_address (dest,
2021 tmp_mode,
2022 bytepos),
2023 tmps[i]);
2024 dst = dest;
2025 }
2026 else
2027 {
2028 dest = assign_stack_temp (tmp_mode,
2029 GET_MODE_SIZE (tmp_mode),
2030 0);
2031 emit_move_insn (dest, tmps[i]);
2032 dst = adjust_address (dest, dest_mode, bytepos);
2033 }
2034 break;
2035 }
2036 }
2037
2038 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2039 {
2040 /* store_bit_field always takes its value from the lsb.
2041 Move the fragment to the lsb if it's not already there. */
2042 if (
2043 #ifdef BLOCK_REG_PADDING
2044 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2045 == (BYTES_BIG_ENDIAN ? upward : downward)
2046 #else
2047 BYTES_BIG_ENDIAN
2048 #endif
2049 )
2050 {
2051 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2052 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2053 build_int_cst (NULL_TREE, shift),
2054 tmps[i], 0);
2055 }
2056 bytelen = adj_bytelen;
2057 }
2058
2059 /* Optimize the access just a bit. */
2060 if (MEM_P (dest)
2061 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2062 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2063 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2064 && bytelen == GET_MODE_SIZE (mode))
2065 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2066 else
2067 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2068 mode, tmps[i]);
2069 }
2070
2071 /* Copy from the pseudo into the (probable) hard reg. */
2072 if (orig_dst != dst)
2073 emit_move_insn (orig_dst, dst);
2074 }
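/* Editorial sketch (hypothetical values): the inverse direction.  Given
   the same kind of PARALLEL descriptor, a caller can scatter the register
   pieces back into a BLKmode destination:

       rtx dst_mem = assign_stack_temp (BLKmode, 16, 0);
       emit_group_store (dst_mem, par, type, 16);

   ORIG_DST may also be a register or a CONCAT; the code above then goes
   through an integer-mode temporary or a stack temporary as needed.  */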
2075
2076 /* Generate code to copy a BLKmode object of TYPE out of a
2077 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2078 is null, a stack temporary is created. TGTBLK is returned.
2079
2080 The purpose of this routine is to handle functions that return
2081 BLKmode structures in registers. Some machines (the PA for example)
2082 want to return all small structures in registers regardless of the
2083 structure's alignment. */
2084
2085 rtx
2086 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2087 {
2088 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2089 rtx src = NULL, dst = NULL;
2090 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2091 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2092 enum machine_mode copy_mode;
2093
2094 if (tgtblk == 0)
2095 {
2096 tgtblk = assign_temp (build_qualified_type (type,
2097 (TYPE_QUALS (type)
2098 | TYPE_QUAL_CONST)),
2099 0, 1, 1);
2100 preserve_temp_slots (tgtblk);
2101 }
2102
2103 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2104 into a new pseudo which is a full word. */
2105
2106 if (GET_MODE (srcreg) != BLKmode
2107 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2108 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2109
2110 /* If the structure doesn't take up a whole number of words, see whether
2111 SRCREG is padded on the left or on the right. If it's on the left,
2112 set PADDING_CORRECTION to the number of bits to skip.
2113
2114 In most ABIs, the structure will be returned at the least significant end of
2115 the register, which translates to right padding on little-endian
2116 targets and left padding on big-endian targets. The opposite
2117 holds if the structure is returned at the most significant
2118 end of the register. */
2119 if (bytes % UNITS_PER_WORD != 0
2120 && (targetm.calls.return_in_msb (type)
2121 ? !BYTES_BIG_ENDIAN
2122 : BYTES_BIG_ENDIAN))
2123 padding_correction
2124 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2125
2126 /* Copy the structure BITSIZE bits at a time. If the target lives in
2127 memory, take care of not reading/writing past its end by selecting
2128 a copy mode suited to BITSIZE. This should always be possible given
2129 how it is computed.
2130
2131 We could probably emit more efficient code for machines which do not use
2132 strict alignment, but it doesn't seem worth the effort at the current
2133 time. */
2134
2135 copy_mode = word_mode;
2136 if (MEM_P (tgtblk))
2137 {
2138 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2139 if (mem_mode != BLKmode)
2140 copy_mode = mem_mode;
2141 }
2142
2143 for (bitpos = 0, xbitpos = padding_correction;
2144 bitpos < bytes * BITS_PER_UNIT;
2145 bitpos += bitsize, xbitpos += bitsize)
2146 {
2147 /* We need a new source operand each time xbitpos is on a
2148 word boundary and when xbitpos == padding_correction
2149 (the first time through). */
2150 if (xbitpos % BITS_PER_WORD == 0
2151 || xbitpos == padding_correction)
2152 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2153 GET_MODE (srcreg));
2154
2155 /* We need a new destination operand each time bitpos is on
2156 a word boundary. */
2157 if (bitpos % BITS_PER_WORD == 0)
2158 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2159
2160 /* Use xbitpos for the source extraction (right justified) and
2161 bitpos for the destination store (left justified). */
2162 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2163 extract_bit_field (src, bitsize,
2164 xbitpos % BITS_PER_WORD, 1, false,
2165 NULL_RTX, copy_mode, copy_mode));
2166 }
2167
2168 return tgtblk;
2169 }
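/* Editorial sketch: for a function whose BLKmode result comes back in
   registers, call-expansion code could unpack it roughly like this
   (SRCREG and TYPE are whatever the call machinery produced; the null
   target requests a fresh stack temporary):

       rtx blk = copy_blkmode_from_reg (NULL_RTX, srcreg, type);

   BLK is then an ordinary BLKmode MEM holding the returned structure.  */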
2170
2171 /* Add a USE expression for REG to the (possibly empty) list pointed
2172 to by CALL_FUSAGE. REG must denote a hard register. */
2173
2174 void
2175 use_reg (rtx *call_fusage, rtx reg)
2176 {
2177 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2178
2179 *call_fusage
2180 = gen_rtx_EXPR_LIST (VOIDmode,
2181 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2182 }
2183
2184 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2185 starting at REGNO. All of these registers must be hard registers. */
2186
2187 void
2188 use_regs (rtx *call_fusage, int regno, int nregs)
2189 {
2190 int i;
2191
2192 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2193
2194 for (i = 0; i < nregs; i++)
2195 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2196 }
2197
2198 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2199 PARALLEL REGS. This is for calls that pass values in multiple
2200 non-contiguous locations. The Irix 6 ABI has examples of this. */
2201
2202 void
2203 use_group_regs (rtx *call_fusage, rtx regs)
2204 {
2205 int i;
2206
2207 for (i = 0; i < XVECLEN (regs, 0); i++)
2208 {
2209 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2210
2211 /* A NULL entry means the parameter goes both on the stack and in
2212 registers. This can also be a MEM for targets that pass values
2213 partially on the stack and partially in registers. */
2214 if (reg != 0 && REG_P (reg))
2215 use_reg (call_fusage, reg);
2216 }
2217 }
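/* Editorial sketch: these helpers build up the CALL_INSN_FUNCTION_USAGE
   list for a call.  A hypothetical caller passing one argument in a
   single hard register and another in a PARALLEL might do:

       rtx call_fusage = NULL_RTX;
       use_reg (&call_fusage, gen_rtx_REG (SImode, 0));
       use_group_regs (&call_fusage, parallel_arg);

   and later attach CALL_FUSAGE to the emitted call insn.  The register
   number 0 and PARALLEL_ARG are purely illustrative.  */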
2218
2219 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2220 assignment and the code of the expression on the RHS is CODE. Return
2221 NULL otherwise. */
2222
2223 static gimple
2224 get_def_for_expr (tree name, enum tree_code code)
2225 {
2226 gimple def_stmt;
2227
2228 if (TREE_CODE (name) != SSA_NAME)
2229 return NULL;
2230
2231 def_stmt = get_gimple_for_ssa_name (name);
2232 if (!def_stmt
2233 || gimple_assign_rhs_code (def_stmt) != code)
2234 return NULL;
2235
2236 return def_stmt;
2237 }
2238 \f
2239
2240 /* Determine whether the LEN bytes generated by CONSTFUN can be
2241 stored to memory using several move instructions. CONSTFUNDATA is
2242 a pointer which will be passed as argument in every CONSTFUN call.
2243 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2244 a memset operation and false if it's a copy of a constant string.
2245 Return nonzero if a call to store_by_pieces should succeed. */
2246
2247 int
2248 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2249 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2250 void *constfundata, unsigned int align, bool memsetp)
2251 {
2252 unsigned HOST_WIDE_INT l;
2253 unsigned int max_size;
2254 HOST_WIDE_INT offset = 0;
2255 enum machine_mode mode;
2256 enum insn_code icode;
2257 int reverse;
2258 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2259 rtx cst ATTRIBUTE_UNUSED;
2260
2261 if (len == 0)
2262 return 1;
2263
2264 if (! (memsetp
2265 ? SET_BY_PIECES_P (len, align)
2266 : STORE_BY_PIECES_P (len, align)))
2267 return 0;
2268
2269 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2270
2271 /* We would first store what we can in the largest integer mode, then go to
2272 successively smaller modes. */
2273
2274 for (reverse = 0;
2275 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2276 reverse++)
2277 {
2278 l = len;
2279 max_size = STORE_MAX_PIECES + 1;
2280 while (max_size > 1)
2281 {
2282 mode = widest_int_mode_for_size (max_size);
2283
2284 if (mode == VOIDmode)
2285 break;
2286
2287 icode = optab_handler (mov_optab, mode);
2288 if (icode != CODE_FOR_nothing
2289 && align >= GET_MODE_ALIGNMENT (mode))
2290 {
2291 unsigned int size = GET_MODE_SIZE (mode);
2292
2293 while (l >= size)
2294 {
2295 if (reverse)
2296 offset -= size;
2297
2298 cst = (*constfun) (constfundata, offset, mode);
2299 if (!LEGITIMATE_CONSTANT_P (cst))
2300 return 0;
2301
2302 if (!reverse)
2303 offset += size;
2304
2305 l -= size;
2306 }
2307 }
2308
2309 max_size = GET_MODE_SIZE (mode);
2310 }
2311
2312 /* The code above should have handled everything. */
2313 gcc_assert (!l);
2314 }
2315
2316 return 1;
2317 }
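/* Editorial sketch: CONSTFUN is queried once per piece and must return
   the constant to store at the given byte OFFSET in the given MODE.  A
   hypothetical callback that materializes bytes of a constant string
   could look like the following (builtin_memcpy_read_str in builtins.c
   plays this role for the real string expanders):

       static rtx
       example_read_str (void *data, HOST_WIDE_INT offset,
                         enum machine_mode mode)
       {
         char *str = (char *) data;
         return c_readstr (str + offset, mode);
       }

       if (can_store_by_pieces (len, example_read_str, (void *) str,
                                align, false))
         store_by_pieces (to, len, example_read_str, (void *) str,
                          align, false, 0);

   MEMSETP is false here because the source is a string rather than a
   repeated byte.  EXAMPLE_READ_STR, STR, LEN, TO and ALIGN are
   illustrative names.  */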
2318
2319 /* Generate several move instructions to store LEN bytes generated by
2320 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2321 pointer which will be passed as argument in every CONSTFUN call.
2322 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2323 a memset operation and false if it's a copy of a constant string.
2324 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2325 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2326 stpcpy. */
2327
2328 rtx
2329 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2330 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2331 void *constfundata, unsigned int align, bool memsetp, int endp)
2332 {
2333 enum machine_mode to_addr_mode
2334 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2335 struct store_by_pieces_d data;
2336
2337 if (len == 0)
2338 {
2339 gcc_assert (endp != 2);
2340 return to;
2341 }
2342
2343 gcc_assert (memsetp
2344 ? SET_BY_PIECES_P (len, align)
2345 : STORE_BY_PIECES_P (len, align));
2346 data.constfun = constfun;
2347 data.constfundata = constfundata;
2348 data.len = len;
2349 data.to = to;
2350 store_by_pieces_1 (&data, align);
2351 if (endp)
2352 {
2353 rtx to1;
2354
2355 gcc_assert (!data.reverse);
2356 if (data.autinc_to)
2357 {
2358 if (endp == 2)
2359 {
2360 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2361 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2362 else
2363 data.to_addr = copy_to_mode_reg (to_addr_mode,
2364 plus_constant (data.to_addr,
2365 -1));
2366 }
2367 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2368 data.offset);
2369 }
2370 else
2371 {
2372 if (endp == 2)
2373 --data.offset;
2374 to1 = adjust_address (data.to, QImode, data.offset);
2375 }
2376 return to1;
2377 }
2378 else
2379 return data.to;
2380 }
2381
2382 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2383 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2384
2385 static void
2386 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2387 {
2388 struct store_by_pieces_d data;
2389
2390 if (len == 0)
2391 return;
2392
2393 data.constfun = clear_by_pieces_1;
2394 data.constfundata = NULL;
2395 data.len = len;
2396 data.to = to;
2397 store_by_pieces_1 (&data, align);
2398 }
2399
2400 /* Callback routine for clear_by_pieces.
2401 Return const0_rtx unconditionally. */
2402
2403 static rtx
2404 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2405 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2406 enum machine_mode mode ATTRIBUTE_UNUSED)
2407 {
2408 return const0_rtx;
2409 }
2410
2411 /* Subroutine of clear_by_pieces and store_by_pieces.
2412 Generate several move instructions to store LEN bytes of block TO. (A MEM
2413 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2414
2415 static void
2416 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2417 unsigned int align ATTRIBUTE_UNUSED)
2418 {
2419 enum machine_mode to_addr_mode
2420 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2421 rtx to_addr = XEXP (data->to, 0);
2422 unsigned int max_size = STORE_MAX_PIECES + 1;
2423 enum insn_code icode;
2424
2425 data->offset = 0;
2426 data->to_addr = to_addr;
2427 data->autinc_to
2428 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2429 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2430
2431 data->explicit_inc_to = 0;
2432 data->reverse
2433 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2434 if (data->reverse)
2435 data->offset = data->len;
2436
2437 /* If storing requires more than two move insns,
2438 copy addresses to registers (to make displacements shorter)
2439 and use post-increment if available. */
2440 if (!data->autinc_to
2441 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2442 {
2443 /* Determine the main mode we'll be using.
2444 MODE might not be used depending on the definitions of the
2445 USE_* macros below. */
2446 enum machine_mode mode ATTRIBUTE_UNUSED
2447 = widest_int_mode_for_size (max_size);
2448
2449 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2450 {
2451 data->to_addr = copy_to_mode_reg (to_addr_mode,
2452 plus_constant (to_addr, data->len));
2453 data->autinc_to = 1;
2454 data->explicit_inc_to = -1;
2455 }
2456
2457 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2458 && ! data->autinc_to)
2459 {
2460 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2461 data->autinc_to = 1;
2462 data->explicit_inc_to = 1;
2463 }
2464
2465 if (!data->autinc_to && CONSTANT_P (to_addr))
2466 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2467 }
2468
2469 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2470
2471 /* First store what we can in the largest integer mode, then go to
2472 successively smaller modes. */
2473
2474 while (max_size > 1)
2475 {
2476 enum machine_mode mode = widest_int_mode_for_size (max_size);
2477
2478 if (mode == VOIDmode)
2479 break;
2480
2481 icode = optab_handler (mov_optab, mode);
2482 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2483 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2484
2485 max_size = GET_MODE_SIZE (mode);
2486 }
2487
2488 /* The code above should have handled everything. */
2489 gcc_assert (!data->len);
2490 }
2491
2492 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2493 with move instructions for mode MODE. GENFUN is the gen_... function
2494 to make a move insn for that mode. DATA has all the other info. */
2495
2496 static void
2497 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2498 struct store_by_pieces_d *data)
2499 {
2500 unsigned int size = GET_MODE_SIZE (mode);
2501 rtx to1, cst;
2502
2503 while (data->len >= size)
2504 {
2505 if (data->reverse)
2506 data->offset -= size;
2507
2508 if (data->autinc_to)
2509 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2510 data->offset);
2511 else
2512 to1 = adjust_address (data->to, mode, data->offset);
2513
2514 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2515 emit_insn (gen_add2_insn (data->to_addr,
2516 GEN_INT (-(HOST_WIDE_INT) size)));
2517
2518 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2519 emit_insn ((*genfun) (to1, cst));
2520
2521 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2522 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2523
2524 if (! data->reverse)
2525 data->offset += size;
2526
2527 data->len -= size;
2528 }
2529 }
2530 \f
2531 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2532 its length in bytes. */
2533
2534 rtx
2535 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2536 unsigned int expected_align, HOST_WIDE_INT expected_size)
2537 {
2538 enum machine_mode mode = GET_MODE (object);
2539 unsigned int align;
2540
2541 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2542
2543 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2544 just move a zero. Otherwise, do this a piece at a time. */
2545 if (mode != BLKmode
2546 && CONST_INT_P (size)
2547 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2548 {
2549 rtx zero = CONST0_RTX (mode);
2550 if (zero != NULL)
2551 {
2552 emit_move_insn (object, zero);
2553 return NULL;
2554 }
2555
2556 if (COMPLEX_MODE_P (mode))
2557 {
2558 zero = CONST0_RTX (GET_MODE_INNER (mode));
2559 if (zero != NULL)
2560 {
2561 write_complex_part (object, zero, 0);
2562 write_complex_part (object, zero, 1);
2563 return NULL;
2564 }
2565 }
2566 }
2567
2568 if (size == const0_rtx)
2569 return NULL;
2570
2571 align = MEM_ALIGN (object);
2572
2573 if (CONST_INT_P (size)
2574 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2575 clear_by_pieces (object, INTVAL (size), align);
2576 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2577 expected_align, expected_size))
2578 ;
2579 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2580 return set_storage_via_libcall (object, size, const0_rtx,
2581 method == BLOCK_OP_TAILCALL);
2582 else
2583 gcc_unreachable ();
2584
2585 return NULL;
2586 }
2587
2588 rtx
2589 clear_storage (rtx object, rtx size, enum block_op_methods method)
2590 {
2591 return clear_storage_hints (object, size, method, 0, -1);
2592 }
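/* Editorial sketch: zeroing a 32-byte BLKmode object, letting
   clear_storage choose between by-pieces clearing, a setmem pattern and
   a memset libcall (the size is hypothetical):

       clear_storage (dst_mem, GEN_INT (32), BLOCK_OP_NORMAL);

   clear_storage_hints additionally lets the caller pass profile-derived
   expected alignment and size down to the setmem expander.  */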
2593
2594
2595 /* A subroutine of clear_storage. Expand a call to memset.
2596 Return the return value of memset, 0 otherwise. */
2597
2598 rtx
2599 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2600 {
2601 tree call_expr, fn, object_tree, size_tree, val_tree;
2602 enum machine_mode size_mode;
2603 rtx retval;
2604
2605 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2606 place those pseudos into a VAR_DECL and use them later. */
2607
2608 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2609
2610 size_mode = TYPE_MODE (sizetype);
2611 size = convert_to_mode (size_mode, size, 1);
2612 size = copy_to_mode_reg (size_mode, size);
2613
2614 /* It is incorrect to use the libcall calling conventions to call
2615 memset in this context. This could be a user call to memset and
2616 the user may wish to examine the return value from memset. For
2617 targets where libcalls and normal calls have different conventions
2618 for returning pointers, we could end up generating incorrect code. */
2619
2620 object_tree = make_tree (ptr_type_node, object);
2621 if (!CONST_INT_P (val))
2622 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2623 size_tree = make_tree (sizetype, size);
2624 val_tree = make_tree (integer_type_node, val);
2625
2626 fn = clear_storage_libcall_fn (true);
2627 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2628 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2629
2630 retval = expand_normal (call_expr);
2631
2632 return retval;
2633 }
2634
2635 /* A subroutine of set_storage_via_libcall. Create the tree node
2636 for the function we use for block clears. The first time FOR_CALL
2637 is true, we call assemble_external. */
2638
2639 tree block_clear_fn;
2640
2641 void
2642 init_block_clear_fn (const char *asmspec)
2643 {
2644 if (!block_clear_fn)
2645 {
2646 tree fn, args;
2647
2648 fn = get_identifier ("memset");
2649 args = build_function_type_list (ptr_type_node, ptr_type_node,
2650 integer_type_node, sizetype,
2651 NULL_TREE);
2652
2653 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2654 DECL_EXTERNAL (fn) = 1;
2655 TREE_PUBLIC (fn) = 1;
2656 DECL_ARTIFICIAL (fn) = 1;
2657 TREE_NOTHROW (fn) = 1;
2658 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2659 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2660
2661 block_clear_fn = fn;
2662 }
2663
2664 if (asmspec)
2665 set_user_assembler_name (block_clear_fn, asmspec);
2666 }
2667
2668 static tree
2669 clear_storage_libcall_fn (int for_call)
2670 {
2671 static bool emitted_extern;
2672
2673 if (!block_clear_fn)
2674 init_block_clear_fn (NULL);
2675
2676 if (for_call && !emitted_extern)
2677 {
2678 emitted_extern = true;
2679 make_decl_rtl (block_clear_fn);
2680 assemble_external (block_clear_fn);
2681 }
2682
2683 return block_clear_fn;
2684 }
2685 \f
2686 /* Expand a setmem pattern; return true if successful. */
2687
2688 bool
2689 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2690 unsigned int expected_align, HOST_WIDE_INT expected_size)
2691 {
2692 /* Try the most limited insn first, because there's no point
2693 including more than one in the machine description unless
2694 the more limited one has some advantage. */
2695
2696 enum machine_mode mode;
2697
2698 if (expected_align < align)
2699 expected_align = align;
2700
2701 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2702 mode = GET_MODE_WIDER_MODE (mode))
2703 {
2704 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2705
2706 if (code != CODE_FOR_nothing
2707 /* We don't need MODE to be narrower than
2708 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2709 the mode mask, as it is returned by the macro, it will
2710 definitely be less than the actual mode mask. */
2711 && ((CONST_INT_P (size)
2712 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2713 <= (GET_MODE_MASK (mode) >> 1)))
2714 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
2715 {
2716 struct expand_operand ops[6];
2717 unsigned int nops;
2718
2719 nops = insn_data[(int) code].n_generator_args;
2720 gcc_assert (nops == 4 || nops == 6);
2721
2722 create_fixed_operand (&ops[0], object);
2723 /* The check above guarantees that this size conversion is valid. */
2724 create_convert_operand_to (&ops[1], size, mode, true);
2725 create_convert_operand_from (&ops[2], val, byte_mode, true);
2726 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2727 if (nops == 6)
2728 {
2729 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2730 create_integer_operand (&ops[5], expected_size);
2731 }
2732 if (maybe_expand_insn (code, nops, ops))
2733 return true;
2734 }
2735 }
2736
2737 return false;
2738 }
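/* Editorial note on the operand layout used above: a target's setmemM
   expander receives, in order, the destination MEM, the length, the fill
   value and the static alignment in bytes, plus the two optional hint
   operands when it declares six operands.  A minimal hypothetical caller
   mirrors clear_storage_hints above:

       set_storage_via_setmem (dst_mem, GEN_INT (64), const0_rtx,
                               MEM_ALIGN (dst_mem), 0, -1);

   where 0 and -1 mean no expected-alignment and no expected-size hints.  */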
2739
2740 \f
2741 /* Write to one of the components of the complex value CPLX. Write VAL to
2742 the real part if IMAG_P is false, and the imaginary part if it's true. */
2743
2744 static void
2745 write_complex_part (rtx cplx, rtx val, bool imag_p)
2746 {
2747 enum machine_mode cmode;
2748 enum machine_mode imode;
2749 unsigned ibitsize;
2750
2751 if (GET_CODE (cplx) == CONCAT)
2752 {
2753 emit_move_insn (XEXP (cplx, imag_p), val);
2754 return;
2755 }
2756
2757 cmode = GET_MODE (cplx);
2758 imode = GET_MODE_INNER (cmode);
2759 ibitsize = GET_MODE_BITSIZE (imode);
2760
2761 /* For MEMs simplify_gen_subreg may generate an invalid new address
2762 because, e.g., the original address is considered mode-dependent
2763 by the target, which restricts simplify_subreg from invoking
2764 adjust_address_nv. Instead of preparing fallback support for an
2765 invalid address, we call adjust_address_nv directly. */
2766 if (MEM_P (cplx))
2767 {
2768 emit_move_insn (adjust_address_nv (cplx, imode,
2769 imag_p ? GET_MODE_SIZE (imode) : 0),
2770 val);
2771 return;
2772 }
2773
2774 /* If the sub-object is at least word sized, then we know that subregging
2775 will work. This special case is important, since store_bit_field
2776 wants to operate on integer modes, and there's rarely an OImode to
2777 correspond to TCmode. */
2778 if (ibitsize >= BITS_PER_WORD
2779 /* For hard regs we have exact predicates. Assume we can split
2780 the original object if it spans an even number of hard regs.
2781 This special case is important for SCmode on 64-bit platforms
2782 where the natural size of floating-point regs is 32-bit. */
2783 || (REG_P (cplx)
2784 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2785 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2786 {
2787 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2788 imag_p ? GET_MODE_SIZE (imode) : 0);
2789 if (part)
2790 {
2791 emit_move_insn (part, val);
2792 return;
2793 }
2794 else
2795 /* simplify_gen_subreg may fail for sub-word MEMs. */
2796 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2797 }
2798
2799 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2800 }
2801
2802 /* Extract one of the components of the complex value CPLX. Extract the
2803 real part if IMAG_P is false, and the imaginary part if it's true. */
2804
2805 static rtx
2806 read_complex_part (rtx cplx, bool imag_p)
2807 {
2808 enum machine_mode cmode, imode;
2809 unsigned ibitsize;
2810
2811 if (GET_CODE (cplx) == CONCAT)
2812 return XEXP (cplx, imag_p);
2813
2814 cmode = GET_MODE (cplx);
2815 imode = GET_MODE_INNER (cmode);
2816 ibitsize = GET_MODE_BITSIZE (imode);
2817
2818 /* Special case reads from complex constants that got spilled to memory. */
2819 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2820 {
2821 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2822 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2823 {
2824 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2825 if (CONSTANT_CLASS_P (part))
2826 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2827 }
2828 }
2829
2830 /* For MEMs simplify_gen_subreg may generate an invalid new address
2831 because, e.g., the original address is considered mode-dependent
2832 by the target, which restricts simplify_subreg from invoking
2833 adjust_address_nv. Instead of preparing fallback support for an
2834 invalid address, we call adjust_address_nv directly. */
2835 if (MEM_P (cplx))
2836 return adjust_address_nv (cplx, imode,
2837 imag_p ? GET_MODE_SIZE (imode) : 0);
2838
2839 /* If the sub-object is at least word sized, then we know that subregging
2840 will work. This special case is important, since extract_bit_field
2841 wants to operate on integer modes, and there's rarely an OImode to
2842 correspond to TCmode. */
2843 if (ibitsize >= BITS_PER_WORD
2844 /* For hard regs we have exact predicates. Assume we can split
2845 the original object if it spans an even number of hard regs.
2846 This special case is important for SCmode on 64-bit platforms
2847 where the natural size of floating-point regs is 32-bit. */
2848 || (REG_P (cplx)
2849 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2850 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2851 {
2852 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2853 imag_p ? GET_MODE_SIZE (imode) : 0);
2854 if (ret)
2855 return ret;
2856 else
2857 /* simplify_gen_subreg may fail for sub-word MEMs. */
2858 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2859 }
2860
2861 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2862 true, false, NULL_RTX, imode, imode);
2863 }
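/* Editorial sketch: these two routines let the expanders manipulate a
   complex value without caring whether CPLX is a CONCAT, a MEM or a
   (possibly hard) register.  Assembling a complex TARGET from two scalar
   rtxes, and reading one part back, looks like this (the names are
   hypothetical):

       write_complex_part (target, real_rtx, false);
       write_complex_part (target, imag_rtx, true);
       rtx re = read_complex_part (target, false);

   Both routines are static, so any such use lives inside this file.  */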
2864 \f
2865 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2866 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2867 represented in NEW_MODE. If FORCE is true, this will never happen, as
2868 we'll force-create a SUBREG if needed. */
2869
2870 static rtx
2871 emit_move_change_mode (enum machine_mode new_mode,
2872 enum machine_mode old_mode, rtx x, bool force)
2873 {
2874 rtx ret;
2875
2876 if (push_operand (x, GET_MODE (x)))
2877 {
2878 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2879 MEM_COPY_ATTRIBUTES (ret, x);
2880 }
2881 else if (MEM_P (x))
2882 {
2883 /* We don't have to worry about changing the address since the
2884 size in bytes is supposed to be the same. */
2885 if (reload_in_progress)
2886 {
2887 /* Copy the MEM to change the mode and move any
2888 substitutions from the old MEM to the new one. */
2889 ret = adjust_address_nv (x, new_mode, 0);
2890 copy_replacements (x, ret);
2891 }
2892 else
2893 ret = adjust_address (x, new_mode, 0);
2894 }
2895 else
2896 {
2897 /* Note that we do want simplify_subreg's behavior of validating
2898 that the new mode is ok for a hard register. If we were to use
2899 simplify_gen_subreg, we would create the subreg, but would
2900 probably run into the target not being able to implement it. */
2901 /* Except, of course, when FORCE is true, when this is exactly what
2902 we want. Which is needed for CCmodes on some targets. */
2903 if (force)
2904 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2905 else
2906 ret = simplify_subreg (new_mode, x, old_mode, 0);
2907 }
2908
2909 return ret;
2910 }
2911
2912 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2913 an integer mode of the same size as MODE. Returns the instruction
2914 emitted, or NULL if such a move could not be generated. */
2915
2916 static rtx
2917 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2918 {
2919 enum machine_mode imode;
2920 enum insn_code code;
2921
2922 /* There must exist a mode of the exact size we require. */
2923 imode = int_mode_for_mode (mode);
2924 if (imode == BLKmode)
2925 return NULL_RTX;
2926
2927 /* The target must support moves in this mode. */
2928 code = optab_handler (mov_optab, imode);
2929 if (code == CODE_FOR_nothing)
2930 return NULL_RTX;
2931
2932 x = emit_move_change_mode (imode, mode, x, force);
2933 if (x == NULL_RTX)
2934 return NULL_RTX;
2935 y = emit_move_change_mode (imode, mode, y, force);
2936 if (y == NULL_RTX)
2937 return NULL_RTX;
2938 return emit_insn (GEN_FCN (code) (x, y));
2939 }
2940
2941 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2942 Return an equivalent MEM that does not use an auto-increment. */
2943
2944 static rtx
2945 emit_move_resolve_push (enum machine_mode mode, rtx x)
2946 {
2947 enum rtx_code code = GET_CODE (XEXP (x, 0));
2948 HOST_WIDE_INT adjust;
2949 rtx temp;
2950
2951 adjust = GET_MODE_SIZE (mode);
2952 #ifdef PUSH_ROUNDING
2953 adjust = PUSH_ROUNDING (adjust);
2954 #endif
2955 if (code == PRE_DEC || code == POST_DEC)
2956 adjust = -adjust;
2957 else if (code == PRE_MODIFY || code == POST_MODIFY)
2958 {
2959 rtx expr = XEXP (XEXP (x, 0), 1);
2960 HOST_WIDE_INT val;
2961
2962 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2963 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
2964 val = INTVAL (XEXP (expr, 1));
2965 if (GET_CODE (expr) == MINUS)
2966 val = -val;
2967 gcc_assert (adjust == val || adjust == -val);
2968 adjust = val;
2969 }
2970
2971 /* Do not use anti_adjust_stack, since we don't want to update
2972 stack_pointer_delta. */
2973 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2974 GEN_INT (adjust), stack_pointer_rtx,
2975 0, OPTAB_LIB_WIDEN);
2976 if (temp != stack_pointer_rtx)
2977 emit_move_insn (stack_pointer_rtx, temp);
2978
2979 switch (code)
2980 {
2981 case PRE_INC:
2982 case PRE_DEC:
2983 case PRE_MODIFY:
2984 temp = stack_pointer_rtx;
2985 break;
2986 case POST_INC:
2987 case POST_DEC:
2988 case POST_MODIFY:
2989 temp = plus_constant (stack_pointer_rtx, -adjust);
2990 break;
2991 default:
2992 gcc_unreachable ();
2993 }
2994
2995 return replace_equiv_address (x, temp);
2996 }
2997
2998 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2999 X is known to satisfy push_operand, and MODE is known to be complex.
3000 Returns the last instruction emitted. */
3001
3002 rtx
3003 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3004 {
3005 enum machine_mode submode = GET_MODE_INNER (mode);
3006 bool imag_first;
3007
3008 #ifdef PUSH_ROUNDING
3009 unsigned int submodesize = GET_MODE_SIZE (submode);
3010
3011 /* In case we output to the stack, but the size is smaller than the
3012 machine can push exactly, we need to use move instructions. */
3013 if (PUSH_ROUNDING (submodesize) != submodesize)
3014 {
3015 x = emit_move_resolve_push (mode, x);
3016 return emit_move_insn (x, y);
3017 }
3018 #endif
3019
3020 /* Note that the real part always precedes the imag part in memory
3021 regardless of machine's endianness. */
3022 switch (GET_CODE (XEXP (x, 0)))
3023 {
3024 case PRE_DEC:
3025 case POST_DEC:
3026 imag_first = true;
3027 break;
3028 case PRE_INC:
3029 case POST_INC:
3030 imag_first = false;
3031 break;
3032 default:
3033 gcc_unreachable ();
3034 }
3035
3036 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3037 read_complex_part (y, imag_first));
3038 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3039 read_complex_part (y, !imag_first));
3040 }
3041
3042 /* A subroutine of emit_move_complex. Perform the move from Y to X
3043 via two moves of the parts. Returns the last instruction emitted. */
3044
3045 rtx
3046 emit_move_complex_parts (rtx x, rtx y)
3047 {
3048 /* Show the output dies here. This is necessary for SUBREGs
3049 of pseudos since we cannot track their lifetimes correctly;
3050 hard regs shouldn't appear here except as return values. */
3051 if (!reload_completed && !reload_in_progress
3052 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3053 emit_clobber (x);
3054
3055 write_complex_part (x, read_complex_part (y, false), false);
3056 write_complex_part (x, read_complex_part (y, true), true);
3057
3058 return get_last_insn ();
3059 }
3060
3061 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3062 MODE is known to be complex. Returns the last instruction emitted. */
3063
3064 static rtx
3065 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3066 {
3067 bool try_int;
3068
3069 /* Need to take special care for pushes, to maintain proper ordering
3070 of the data, and possibly extra padding. */
3071 if (push_operand (x, mode))
3072 return emit_move_complex_push (mode, x, y);
3073
3074 /* See if we can coerce the target into moving both values at once. */
3075
3076 /* Move floating point as parts. */
3077 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3078 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3079 try_int = false;
3080 /* Not possible if the values are inherently not adjacent. */
3081 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3082 try_int = false;
3083 /* Is possible if both are registers (or subregs of registers). */
3084 else if (register_operand (x, mode) && register_operand (y, mode))
3085 try_int = true;
3086 /* If one of the operands is a memory, and alignment constraints
3087 are friendly enough, we may be able to do combined memory operations.
3088 We do not attempt this if Y is a constant because that combination is
3089 usually better with the by-parts thing below. */
3090 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3091 && (!STRICT_ALIGNMENT
3092 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3093 try_int = true;
3094 else
3095 try_int = false;
3096
3097 if (try_int)
3098 {
3099 rtx ret;
3100
3101 /* For memory to memory moves, optimal behavior can be had with the
3102 existing block move logic. */
3103 if (MEM_P (x) && MEM_P (y))
3104 {
3105 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3106 BLOCK_OP_NO_LIBCALL);
3107 return get_last_insn ();
3108 }
3109
3110 ret = emit_move_via_integer (mode, x, y, true);
3111 if (ret)
3112 return ret;
3113 }
3114
3115 return emit_move_complex_parts (x, y);
3116 }
3117
3118 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3119 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3120
3121 static rtx
3122 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3123 {
3124 rtx ret;
3125
3126 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3127 if (mode != CCmode)
3128 {
3129 enum insn_code code = optab_handler (mov_optab, CCmode);
3130 if (code != CODE_FOR_nothing)
3131 {
3132 x = emit_move_change_mode (CCmode, mode, x, true);
3133 y = emit_move_change_mode (CCmode, mode, y, true);
3134 return emit_insn (GEN_FCN (code) (x, y));
3135 }
3136 }
3137
3138 /* Otherwise, find the MODE_INT mode of the same width. */
3139 ret = emit_move_via_integer (mode, x, y, false);
3140 gcc_assert (ret != NULL);
3141 return ret;
3142 }
3143
3144 /* Return true if word I of OP lies entirely in the
3145 undefined bits of a paradoxical subreg. */
3146
3147 static bool
3148 undefined_operand_subword_p (const_rtx op, int i)
3149 {
3150 enum machine_mode innermode, innermostmode;
3151 int offset;
3152 if (GET_CODE (op) != SUBREG)
3153 return false;
3154 innermode = GET_MODE (op);
3155 innermostmode = GET_MODE (SUBREG_REG (op));
3156 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3157 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3158 memory, except for a paradoxical subreg where we define
3159 SUBREG_BYTE to be 0; undo this exception as in
3160 simplify_subreg. */
3161 if (SUBREG_BYTE (op) == 0
3162 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3163 {
3164 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3165 if (WORDS_BIG_ENDIAN)
3166 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3167 if (BYTES_BIG_ENDIAN)
3168 offset += difference % UNITS_PER_WORD;
3169 }
3170 if (offset >= GET_MODE_SIZE (innermostmode)
3171 || offset <= -GET_MODE_SIZE (word_mode))
3172 return true;
3173 return false;
3174 }
3175
3176 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3177 MODE is any multi-word or full-word mode that lacks a move_insn
3178 pattern. Note that you will get better code if you define such
3179 patterns, even if they must turn into multiple assembler instructions. */
3180
3181 static rtx
3182 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3183 {
3184 rtx last_insn = 0;
3185 rtx seq, inner;
3186 bool need_clobber;
3187 int i;
3188
3189 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3190
3191 /* If X is a push on the stack, do the push now and replace
3192 X with a reference to the stack pointer. */
3193 if (push_operand (x, mode))
3194 x = emit_move_resolve_push (mode, x);
3195
3196 /* If we are in reload, see if either operand is a MEM whose address
3197 is scheduled for replacement. */
3198 if (reload_in_progress && MEM_P (x)
3199 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3200 x = replace_equiv_address_nv (x, inner);
3201 if (reload_in_progress && MEM_P (y)
3202 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3203 y = replace_equiv_address_nv (y, inner);
3204
3205 start_sequence ();
3206
3207 need_clobber = false;
3208 for (i = 0;
3209 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3210 i++)
3211 {
3212 rtx xpart = operand_subword (x, i, 1, mode);
3213 rtx ypart;
3214
3215 /* Do not generate code for a move if it would come entirely
3216 from the undefined bits of a paradoxical subreg. */
3217 if (undefined_operand_subword_p (y, i))
3218 continue;
3219
3220 ypart = operand_subword (y, i, 1, mode);
3221
3222 /* If we can't get a part of Y, put Y into memory if it is a
3223 constant. Otherwise, force it into a register. Then we must
3224 be able to get a part of Y. */
3225 if (ypart == 0 && CONSTANT_P (y))
3226 {
3227 y = use_anchored_address (force_const_mem (mode, y));
3228 ypart = operand_subword (y, i, 1, mode);
3229 }
3230 else if (ypart == 0)
3231 ypart = operand_subword_force (y, i, mode);
3232
3233 gcc_assert (xpart && ypart);
3234
3235 need_clobber |= (GET_CODE (xpart) == SUBREG);
3236
3237 last_insn = emit_move_insn (xpart, ypart);
3238 }
3239
3240 seq = get_insns ();
3241 end_sequence ();
3242
3243 /* Show the output dies here. This is necessary for SUBREGs
3244 of pseudos since we cannot track their lifetimes correctly;
3245 hard regs shouldn't appear here except as return values.
3246 We never want to emit such a clobber after reload. */
3247 if (x != y
3248 && ! (reload_in_progress || reload_completed)
3249 && need_clobber != 0)
3250 emit_clobber (x);
3251
3252 emit_insn (seq);
3253
3254 return last_insn;
3255 }
3256
3257 /* Low level part of emit_move_insn.
3258 Called just like emit_move_insn, but assumes X and Y
3259 are basically valid. */
3260
3261 rtx
3262 emit_move_insn_1 (rtx x, rtx y)
3263 {
3264 enum machine_mode mode = GET_MODE (x);
3265 enum insn_code code;
3266
3267 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3268
3269 code = optab_handler (mov_optab, mode);
3270 if (code != CODE_FOR_nothing)
3271 return emit_insn (GEN_FCN (code) (x, y));
3272
3273 /* Expand complex moves by moving real part and imag part. */
3274 if (COMPLEX_MODE_P (mode))
3275 return emit_move_complex (mode, x, y);
3276
3277 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3278 || ALL_FIXED_POINT_MODE_P (mode))
3279 {
3280 rtx result = emit_move_via_integer (mode, x, y, true);
3281
3282 /* If we can't find an integer mode, use multi words. */
3283 if (result)
3284 return result;
3285 else
3286 return emit_move_multi_word (mode, x, y);
3287 }
3288
3289 if (GET_MODE_CLASS (mode) == MODE_CC)
3290 return emit_move_ccmode (mode, x, y);
3291
3292 /* Try using a move pattern for the corresponding integer mode. This is
3293 only safe when simplify_subreg can convert MODE constants into integer
3294 constants. At present, it can only do this reliably if the value
3295 fits within a HOST_WIDE_INT. */
3296 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3297 {
3298 rtx ret = emit_move_via_integer (mode, x, y, false);
3299 if (ret)
3300 return ret;
3301 }
3302
3303 return emit_move_multi_word (mode, x, y);
3304 }
3305
3306 /* Generate code to copy Y into X.
3307 Both Y and X must have the same mode, except that
3308 Y can be a constant with VOIDmode.
3309 This mode cannot be BLKmode; use emit_block_move for that.
3310
3311 Return the last instruction emitted. */
3312
3313 rtx
3314 emit_move_insn (rtx x, rtx y)
3315 {
3316 enum machine_mode mode = GET_MODE (x);
3317 rtx y_cst = NULL_RTX;
3318 rtx last_insn, set;
3319
3320 gcc_assert (mode != BLKmode
3321 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3322
3323 if (CONSTANT_P (y))
3324 {
3325 if (optimize
3326 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3327 && (last_insn = compress_float_constant (x, y)))
3328 return last_insn;
3329
3330 y_cst = y;
3331
3332 if (!LEGITIMATE_CONSTANT_P (y))
3333 {
3334 y = force_const_mem (mode, y);
3335
3336 /* If the target's cannot_force_const_mem prevented the spill,
3337 assume that the target's move expanders will also take care
3338 of the non-legitimate constant. */
3339 if (!y)
3340 y = y_cst;
3341 else
3342 y = use_anchored_address (y);
3343 }
3344 }
3345
3346 /* If X or Y are memory references, verify that their addresses are valid
3347 for the machine. */
3348 if (MEM_P (x)
3349 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3350 MEM_ADDR_SPACE (x))
3351 && ! push_operand (x, GET_MODE (x))))
3352 x = validize_mem (x);
3353
3354 if (MEM_P (y)
3355 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3356 MEM_ADDR_SPACE (y)))
3357 y = validize_mem (y);
3358
3359 gcc_assert (mode != BLKmode);
3360
3361 last_insn = emit_move_insn_1 (x, y);
3362
3363 if (y_cst && REG_P (x)
3364 && (set = single_set (last_insn)) != NULL_RTX
3365 && SET_DEST (set) == x
3366 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3367 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3368
3369 return last_insn;
3370 }
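/* Editorial sketch: emit_move_insn is the workhorse used throughout the
   expanders.  A typical, purely illustrative use moves an integer
   constant into a fresh pseudo; the legitimization above spills the
   constant to the constant pool if the target cannot encode it directly:

       rtx tmp = gen_reg_rtx (DImode);
       emit_move_insn (tmp, GEN_INT (42));

   For BLKmode copies emit_block_move must be used instead, as the assert
   above enforces.  */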
3371
3372 /* If Y is representable exactly in a narrower mode, and the target can
3373 perform the extension directly from constant or memory, then emit the
3374 move as an extension. */
3375
3376 static rtx
3377 compress_float_constant (rtx x, rtx y)
3378 {
3379 enum machine_mode dstmode = GET_MODE (x);
3380 enum machine_mode orig_srcmode = GET_MODE (y);
3381 enum machine_mode srcmode;
3382 REAL_VALUE_TYPE r;
3383 int oldcost, newcost;
3384 bool speed = optimize_insn_for_speed_p ();
3385
3386 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3387
3388 if (LEGITIMATE_CONSTANT_P (y))
3389 oldcost = rtx_cost (y, SET, speed);
3390 else
3391 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3392
3393 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3394 srcmode != orig_srcmode;
3395 srcmode = GET_MODE_WIDER_MODE (srcmode))
3396 {
3397 enum insn_code ic;
3398 rtx trunc_y, last_insn;
3399
3400 /* Skip if the target can't extend this way. */
3401 ic = can_extend_p (dstmode, srcmode, 0);
3402 if (ic == CODE_FOR_nothing)
3403 continue;
3404
3405 /* Skip if the narrowed value isn't exact. */
3406 if (! exact_real_truncate (srcmode, &r))
3407 continue;
3408
3409 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3410
3411 if (LEGITIMATE_CONSTANT_P (trunc_y))
3412 {
3413 /* Skip if the target needs extra instructions to perform
3414 the extension. */
3415 if (!insn_operand_matches (ic, 1, trunc_y))
3416 continue;
3417 /* This is valid, but may not be cheaper than the original. */
3418 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3419 if (oldcost < newcost)
3420 continue;
3421 }
3422 else if (float_extend_from_mem[dstmode][srcmode])
3423 {
3424 trunc_y = force_const_mem (srcmode, trunc_y);
3425 /* This is valid, but may not be cheaper than the original. */
3426 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3427 if (oldcost < newcost)
3428 continue;
3429 trunc_y = validize_mem (trunc_y);
3430 }
3431 else
3432 continue;
3433
3434 /* For CSE's benefit, force the compressed constant pool entry
3435 into a new pseudo. This constant may be used in different modes,
3436 and if not, combine will put things back together for us. */
3437 trunc_y = force_reg (srcmode, trunc_y);
3438 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3439 last_insn = get_last_insn ();
3440
3441 if (REG_P (x))
3442 set_unique_reg_note (last_insn, REG_EQUAL, y);
3443
3444 return last_insn;
3445 }
3446
3447 return NULL_RTX;
3448 }
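/* Editorial example of the transformation above (illustrative RTL, not a
   guaranteed expansion): on a target with an extendsfdf2 pattern, moving
   the DFmode constant 1.0 can be emitted as a narrower load followed by
   a float extension when the cost machinery finds it no more expensive
   than materializing the DFmode constant directly, roughly

       (set (reg:SF tmp) (const_double:SF 1.0))
       (set (reg:DF dst) (float_extend:DF (reg:SF tmp)))

   since 1.0 is exactly representable in SFmode.  */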
3449 \f
3450 /* Pushing data onto the stack. */
3451
3452 /* Push a block of length SIZE (perhaps variable)
3453 and return an rtx to address the beginning of the block.
3454 The value may be virtual_outgoing_args_rtx.
3455
3456 EXTRA is the number of bytes of padding to push in addition to SIZE.
3457 BELOW nonzero means this padding comes at low addresses;
3458 otherwise, the padding comes at high addresses. */
3459
3460 rtx
3461 push_block (rtx size, int extra, int below)
3462 {
3463 rtx temp;
3464
3465 size = convert_modes (Pmode, ptr_mode, size, 1);
3466 if (CONSTANT_P (size))
3467 anti_adjust_stack (plus_constant (size, extra));
3468 else if (REG_P (size) && extra == 0)
3469 anti_adjust_stack (size);
3470 else
3471 {
3472 temp = copy_to_mode_reg (Pmode, size);
3473 if (extra != 0)
3474 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3475 temp, 0, OPTAB_LIB_WIDEN);
3476 anti_adjust_stack (temp);
3477 }
3478
3479 #ifndef STACK_GROWS_DOWNWARD
3480 if (0)
3481 #else
3482 if (1)
3483 #endif
3484 {
3485 temp = virtual_outgoing_args_rtx;
3486 if (extra != 0 && below)
3487 temp = plus_constant (temp, extra);
3488 }
3489 else
3490 {
3491 if (CONST_INT_P (size))
3492 temp = plus_constant (virtual_outgoing_args_rtx,
3493 -INTVAL (size) - (below ? 0 : extra));
3494 else if (extra != 0 && !below)
3495 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3496 negate_rtx (Pmode, plus_constant (size, extra)));
3497 else
3498 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3499 negate_rtx (Pmode, size));
3500 }
3501
3502 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3503 }
3504
3505 #ifdef PUSH_ROUNDING
3506
3507 /* Emit single push insn. */
3508
3509 static void
3510 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3511 {
3512 rtx dest_addr;
3513 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3514 rtx dest;
3515 enum insn_code icode;
3516
3517 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3518 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3519 a MEM representing the push operation to the move expander. */
3520 icode = optab_handler (push_optab, mode);
3521 if (icode != CODE_FOR_nothing)
3522 {
3523 struct expand_operand ops[1];
3524
3525 create_input_operand (&ops[0], x, mode);
3526 if (maybe_expand_insn (icode, 1, ops))
3527 return;
3528 }
3529 if (GET_MODE_SIZE (mode) == rounded_size)
3530 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3531 /* If we are to pad downward, adjust the stack pointer first and
3532 then store X into the stack location using an offset. This is
3533 because emit_move_insn does not know how to pad; it does not have
3534 access to type. */
3535 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3536 {
3537 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3538 HOST_WIDE_INT offset;
3539
3540 emit_move_insn (stack_pointer_rtx,
3541 expand_binop (Pmode,
3542 #ifdef STACK_GROWS_DOWNWARD
3543 sub_optab,
3544 #else
3545 add_optab,
3546 #endif
3547 stack_pointer_rtx,
3548 GEN_INT (rounded_size),
3549 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3550
3551 offset = (HOST_WIDE_INT) padding_size;
3552 #ifdef STACK_GROWS_DOWNWARD
3553 if (STACK_PUSH_CODE == POST_DEC)
3554 /* We have already decremented the stack pointer, so get the
3555 previous value. */
3556 offset += (HOST_WIDE_INT) rounded_size;
3557 #else
3558 if (STACK_PUSH_CODE == POST_INC)
3559 /* We have already incremented the stack pointer, so get the
3560 previous value. */
3561 offset -= (HOST_WIDE_INT) rounded_size;
3562 #endif
3563 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3564 }
3565 else
3566 {
3567 #ifdef STACK_GROWS_DOWNWARD
3568 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3569 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3570 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3571 #else
3572 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3573 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3574 GEN_INT (rounded_size));
3575 #endif
3576 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3577 }
3578
3579 dest = gen_rtx_MEM (mode, dest_addr);
3580
3581 if (type != 0)
3582 {
3583 set_mem_attributes (dest, type, 1);
3584
3585 if (flag_optimize_sibling_calls)
3586 /* Function incoming arguments may overlap with sibling call
3587 outgoing arguments and we cannot allow reordering of reads
3588 from function arguments with stores to outgoing arguments
3589 of sibling calls. */
3590 set_mem_alias_set (dest, 0);
3591 }
3592 emit_move_insn (dest, x);
3593 }
3594 #endif
3595
3596 /* Generate code to push X onto the stack, assuming it has mode MODE and
3597 type TYPE.
3598 MODE is redundant except when X is a CONST_INT (since they don't
3599 carry mode info).
3600 SIZE is an rtx for the size of data to be copied (in bytes),
3601 needed only if X is BLKmode.
3602
3603 ALIGN (in bits) is the maximum alignment we can assume.
3604
3605 If PARTIAL and REG are both nonzero, then copy that many of the first
3606 bytes of X into registers starting with REG, and push the rest of X.
3607 The amount of space pushed is decreased by PARTIAL bytes.
3608 REG must be a hard register in this case.
3609 If REG is zero but PARTIAL is not, take all other actions for an
3610 argument partially in registers, but do not actually load any
3611 registers.
3612
3613 EXTRA is the amount in bytes of extra space to leave next to this arg.
3614 This is ignored if an argument block has already been allocated.
3615
3616 On a machine that lacks real push insns, ARGS_ADDR is the address of
3617 the bottom of the argument block for this call. We use indexing off there
3618 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3619 argument block has not been preallocated.
3620
3621 ARGS_SO_FAR is the size of args previously pushed for this call.
3622
3623 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3624 for arguments passed in registers. If nonzero, it will be the number
3625 of bytes required. */
3626
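/* Roughly, the body below distinguishes three cases: BLKmode values (and
   scalars that are not sufficiently aligned), which are block-copied onto
   the stack; scalars passed partly in registers, which are pushed a word
   at a time; and ordinary scalars, which become a single push insn or a
   store relative to ARGS_ADDR.  The register part, if any, is loaded last,
   after any memory-to-memory copies that might involve function calls.  */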
3627 void
3628 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3629 unsigned int align, int partial, rtx reg, int extra,
3630 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3631 rtx alignment_pad)
3632 {
3633 rtx xinner;
3634 enum direction stack_direction
3635 #ifdef STACK_GROWS_DOWNWARD
3636 = downward;
3637 #else
3638 = upward;
3639 #endif
3640
3641 /* Decide where to pad the argument: `downward' for below,
3642 `upward' for above, or `none' for don't pad it.
3643 Default is below for small data on big-endian machines; else above. */
3644 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3645
3646 /* Invert direction if stack is post-decrement.
3647 FIXME: why? */
3648 if (STACK_PUSH_CODE == POST_DEC)
3649 if (where_pad != none)
3650 where_pad = (where_pad == downward ? upward : downward);
3651
3652 xinner = x;
3653
3654 if (mode == BLKmode
3655 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3656 {
3657 /* Copy a block into the stack, entirely or partially. */
3658
3659 rtx temp;
3660 int used;
3661 int offset;
3662 int skip;
3663
3664 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3665 used = partial - offset;
3666
3667 if (mode != BLKmode)
3668 {
3669 /* A value is to be stored in an insufficiently aligned
3670 stack slot; copy via a suitably aligned slot if
3671 necessary. */
3672 size = GEN_INT (GET_MODE_SIZE (mode));
3673 if (!MEM_P (xinner))
3674 {
3675 temp = assign_temp (type, 0, 1, 1);
3676 emit_move_insn (temp, xinner);
3677 xinner = temp;
3678 }
3679 }
3680
3681 gcc_assert (size);
3682
3683 /* USED is now the # of bytes we need not copy to the stack
3684 because registers will take care of them. */
3685
3686 if (partial != 0)
3687 xinner = adjust_address (xinner, BLKmode, used);
3688
3689 /* If the partial register-part of the arg counts in its stack size,
3690 skip the part of stack space corresponding to the registers.
3691 Otherwise, start copying to the beginning of the stack space,
3692 by setting SKIP to 0. */
3693 skip = (reg_parm_stack_space == 0) ? 0 : used;
3694
3695 #ifdef PUSH_ROUNDING
3696 /* Do it with several push insns if that doesn't take lots of insns
3697 and if there is no difficulty with push insns that skip bytes
3698 on the stack for alignment purposes. */
3699 if (args_addr == 0
3700 && PUSH_ARGS
3701 && CONST_INT_P (size)
3702 && skip == 0
3703 && MEM_ALIGN (xinner) >= align
3704 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3705 /* Here we avoid the case of a structure whose weak alignment
3706 forces many pushes of a small amount of data,
3707 where such small pushes are rounded up in a way that causes trouble. */
3708 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3709 || align >= BIGGEST_ALIGNMENT
3710 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3711 == (align / BITS_PER_UNIT)))
3712 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3713 {
3714 /* Push padding now if padding above and stack grows down,
3715 or if padding below and stack grows up.
3716 But if space already allocated, this has already been done. */
3717 if (extra && args_addr == 0
3718 && where_pad != none && where_pad != stack_direction)
3719 anti_adjust_stack (GEN_INT (extra));
3720
3721 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3722 }
3723 else
3724 #endif /* PUSH_ROUNDING */
3725 {
3726 rtx target;
3727
3728 /* Otherwise make space on the stack and copy the data
3729 to the address of that space. */
3730
3731 /* Deduct words put into registers from the size we must copy. */
3732 if (partial != 0)
3733 {
3734 if (CONST_INT_P (size))
3735 size = GEN_INT (INTVAL (size) - used);
3736 else
3737 size = expand_binop (GET_MODE (size), sub_optab, size,
3738 GEN_INT (used), NULL_RTX, 0,
3739 OPTAB_LIB_WIDEN);
3740 }
3741
3742 /* Get the address of the stack space.
3743 In this case, we do not deal with EXTRA separately.
3744 A single stack adjust will do. */
3745 if (! args_addr)
3746 {
3747 temp = push_block (size, extra, where_pad == downward);
3748 extra = 0;
3749 }
3750 else if (CONST_INT_P (args_so_far))
3751 temp = memory_address (BLKmode,
3752 plus_constant (args_addr,
3753 skip + INTVAL (args_so_far)));
3754 else
3755 temp = memory_address (BLKmode,
3756 plus_constant (gen_rtx_PLUS (Pmode,
3757 args_addr,
3758 args_so_far),
3759 skip));
3760
3761 if (!ACCUMULATE_OUTGOING_ARGS)
3762 {
3763 /* If the source is referenced relative to the stack pointer,
3764 copy it to another register to stabilize it. We do not need
3765 to do this if we know that we won't be changing sp. */
3766
3767 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3768 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3769 temp = copy_to_reg (temp);
3770 }
3771
3772 target = gen_rtx_MEM (BLKmode, temp);
3773
3774 /* We do *not* set_mem_attributes here, because incoming arguments
3775 may overlap with sibling call outgoing arguments and we cannot
3776 allow reordering of reads from function arguments with stores
3777 to outgoing arguments of sibling calls. We do, however, want
3778 to record the alignment of the stack slot. */
3779 /* ALIGN may well be better aligned than TYPE, e.g. due to
3780 PARM_BOUNDARY. Assume the caller isn't lying. */
3781 set_mem_align (target, align);
3782
3783 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3784 }
3785 }
3786 else if (partial > 0)
3787 {
3788 /* Scalar partly in registers. */
3789
3790 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3791 int i;
3792 int not_stack;
3793 /* # bytes of start of argument
3794 that we must make space for but need not store. */
3795 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3796 int args_offset = INTVAL (args_so_far);
3797 int skip;
3798
3799 /* Push padding now if padding above and stack grows down,
3800 or if padding below and stack grows up.
3801 But if space already allocated, this has already been done. */
3802 if (extra && args_addr == 0
3803 && where_pad != none && where_pad != stack_direction)
3804 anti_adjust_stack (GEN_INT (extra));
3805
3806 /* If we make space by pushing it, we might as well push
3807 the real data. Otherwise, we can leave OFFSET nonzero
3808 and leave the space uninitialized. */
3809 if (args_addr == 0)
3810 offset = 0;
3811
3812 /* Now NOT_STACK gets the number of words that we don't need to
3813 allocate on the stack. Convert OFFSET to words too. */
3814 not_stack = (partial - offset) / UNITS_PER_WORD;
3815 offset /= UNITS_PER_WORD;
3816
3817 /* If the partial register-part of the arg counts in its stack size,
3818 skip the part of stack space corresponding to the registers.
3819 Otherwise, start copying to the beginning of the stack space,
3820 by setting SKIP to 0. */
3821 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3822
3823 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3824 x = validize_mem (force_const_mem (mode, x));
3825
3826 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3827 SUBREGs of such registers are not allowed. */
3828 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3829 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3830 x = copy_to_reg (x);
3831
3832 /* Loop over all the words allocated on the stack for this arg. */
3833 /* We can do it by words, because any scalar bigger than a word
3834 has a size that is a multiple of a word. */
3835 #ifndef PUSH_ARGS_REVERSED
3836 for (i = not_stack; i < size; i++)
3837 #else
3838 for (i = size - 1; i >= not_stack; i--)
3839 #endif
3840 if (i >= not_stack + offset)
3841 emit_push_insn (operand_subword_force (x, i, mode),
3842 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3843 0, args_addr,
3844 GEN_INT (args_offset + ((i - not_stack + skip)
3845 * UNITS_PER_WORD)),
3846 reg_parm_stack_space, alignment_pad);
3847 }
3848 else
3849 {
3850 rtx addr;
3851 rtx dest;
3852
3853 /* Push padding now if padding above and stack grows down,
3854 or if padding below and stack grows up.
3855 But if space already allocated, this has already been done. */
3856 if (extra && args_addr == 0
3857 && where_pad != none && where_pad != stack_direction)
3858 anti_adjust_stack (GEN_INT (extra));
3859
3860 #ifdef PUSH_ROUNDING
3861 if (args_addr == 0 && PUSH_ARGS)
3862 emit_single_push_insn (mode, x, type);
3863 else
3864 #endif
3865 {
3866 if (CONST_INT_P (args_so_far))
3867 addr
3868 = memory_address (mode,
3869 plus_constant (args_addr,
3870 INTVAL (args_so_far)));
3871 else
3872 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3873 args_so_far));
3874 dest = gen_rtx_MEM (mode, addr);
3875
3876 /* We do *not* set_mem_attributes here, because incoming arguments
3877 may overlap with sibling call outgoing arguments and we cannot
3878 allow reordering of reads from function arguments with stores
3879 to outgoing arguments of sibling calls. We do, however, want
3880 to record the alignment of the stack slot. */
3881 /* ALIGN may well be better aligned than TYPE, e.g. due to
3882 PARM_BOUNDARY. Assume the caller isn't lying. */
3883 set_mem_align (dest, align);
3884
3885 emit_move_insn (dest, x);
3886 }
3887 }
3888
3889 /* If part should go in registers, copy that part
3890 into the appropriate registers. Do this now, at the end,
3891 since mem-to-mem copies above may do function calls. */
3892 if (partial > 0 && reg != 0)
3893 {
3894 /* Handle calls that pass values in multiple non-contiguous locations.
3895 The Irix 6 ABI has examples of this. */
3896 if (GET_CODE (reg) == PARALLEL)
3897 emit_group_load (reg, x, type, -1);
3898 else
3899 {
3900 gcc_assert (partial % UNITS_PER_WORD == 0);
3901 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3902 }
3903 }
3904
3905 if (extra && args_addr == 0 && where_pad == stack_direction)
3906 anti_adjust_stack (GEN_INT (extra));
3907
3908 if (alignment_pad && args_addr == 0)
3909 anti_adjust_stack (alignment_pad);
3910 }
3911 \f
3912 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3913 operations. */
3914
3915 static rtx
3916 get_subtarget (rtx x)
3917 {
3918 return (optimize
3919 || x == 0
3920 /* Only registers can be subtargets. */
3921 || !REG_P (x)
3922 /* Don't use hard regs to avoid extending their life. */
3923 || REGNO (x) < FIRST_PSEUDO_REGISTER
3924 ? 0 : x);
3925 }
3926
3927 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3928 FIELD is a bitfield. Returns true if the optimization was successful,
3929 and there's nothing else to do. */
3930
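/* For instance, given a hypothetical bitfield member B of a struct S, an
   assignment such as

       s.b |= 0x4;    or    s.b += 1;   (when B occupies the topmost bits)

   can be expanded as a single IOR/XOR/PLUS on the word containing the
   field, rather than an extract-modify-insert sequence.  */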
3931 static bool
3932 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3933 unsigned HOST_WIDE_INT bitpos,
3934 enum machine_mode mode1, rtx str_rtx,
3935 tree to, tree src)
3936 {
3937 enum machine_mode str_mode = GET_MODE (str_rtx);
3938 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3939 tree op0, op1;
3940 rtx value, result;
3941 optab binop;
3942 gimple srcstmt;
3943 enum tree_code code;
3944
3945 if (mode1 != VOIDmode
3946 || bitsize >= BITS_PER_WORD
3947 || str_bitsize > BITS_PER_WORD
3948 || TREE_SIDE_EFFECTS (to)
3949 || TREE_THIS_VOLATILE (to))
3950 return false;
3951
3952 STRIP_NOPS (src);
3953 if (TREE_CODE (src) != SSA_NAME)
3954 return false;
3955 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3956 return false;
3957
3958 srcstmt = get_gimple_for_ssa_name (src);
3959 if (!srcstmt
3960 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
3961 return false;
3962
3963 code = gimple_assign_rhs_code (srcstmt);
3964
3965 op0 = gimple_assign_rhs1 (srcstmt);
3966
3967 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
3968 to find its initialization. Hopefully the initialization will
3969 be from a bitfield load. */
3970 if (TREE_CODE (op0) == SSA_NAME)
3971 {
3972 gimple op0stmt = get_gimple_for_ssa_name (op0);
3973
3974 /* We want to eventually have OP0 be the same as TO, which
3975 should be a bitfield. */
3976 if (!op0stmt
3977 || !is_gimple_assign (op0stmt)
3978 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
3979 return false;
3980 op0 = gimple_assign_rhs1 (op0stmt);
3981 }
3982
3983 op1 = gimple_assign_rhs2 (srcstmt);
3984
3985 if (!operand_equal_p (to, op0, 0))
3986 return false;
3987
3988 if (MEM_P (str_rtx))
3989 {
3990 unsigned HOST_WIDE_INT offset1;
3991
3992 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3993 str_mode = word_mode;
3994 str_mode = get_best_mode (bitsize, bitpos,
3995 MEM_ALIGN (str_rtx), str_mode, 0);
3996 if (str_mode == VOIDmode)
3997 return false;
3998 str_bitsize = GET_MODE_BITSIZE (str_mode);
3999
4000 offset1 = bitpos;
4001 bitpos %= str_bitsize;
4002 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4003 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4004 }
4005 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4006 return false;
4007
4008 /* If the bit field covers the whole REG/MEM, store_field
4009 will likely generate better code. */
4010 if (bitsize >= str_bitsize)
4011 return false;
4012
4013 /* We can't handle fields split across multiple entities. */
4014 if (bitpos + bitsize > str_bitsize)
4015 return false;
4016
4017 if (BYTES_BIG_ENDIAN)
4018 bitpos = str_bitsize - bitpos - bitsize;
4019
4020 switch (code)
4021 {
4022 case PLUS_EXPR:
4023 case MINUS_EXPR:
4024 /* For now, just optimize the case of the topmost bitfield,
4025 where we don't need to do any masking, and of
4026 1-bit bitfields, where xor can be used.
4027 We might save one instruction for the other bitfields
4028 too when insv/extv instructions aren't used; that
4029 can be added later. */
4030 if (bitpos + bitsize != str_bitsize
4031 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4032 break;
4033
4034 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4035 value = convert_modes (str_mode,
4036 TYPE_MODE (TREE_TYPE (op1)), value,
4037 TYPE_UNSIGNED (TREE_TYPE (op1)));
4038
4039 /* We may be accessing data outside the field, which means
4040 we can alias adjacent data. */
4041 if (MEM_P (str_rtx))
4042 {
4043 str_rtx = shallow_copy_rtx (str_rtx);
4044 set_mem_alias_set (str_rtx, 0);
4045 set_mem_expr (str_rtx, 0);
4046 }
4047
4048 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4049 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4050 {
4051 value = expand_and (str_mode, value, const1_rtx, NULL);
4052 binop = xor_optab;
4053 }
4054 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4055 build_int_cst (NULL_TREE, bitpos),
4056 NULL_RTX, 1);
4057 result = expand_binop (str_mode, binop, str_rtx,
4058 value, str_rtx, 1, OPTAB_WIDEN);
4059 if (result != str_rtx)
4060 emit_move_insn (str_rtx, result);
4061 return true;
4062
4063 case BIT_IOR_EXPR:
4064 case BIT_XOR_EXPR:
4065 if (TREE_CODE (op1) != INTEGER_CST)
4066 break;
4067 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4068 value = convert_modes (GET_MODE (str_rtx),
4069 TYPE_MODE (TREE_TYPE (op1)), value,
4070 TYPE_UNSIGNED (TREE_TYPE (op1)));
4071
4072 /* We may be accessing data outside the field, which means
4073 we can alias adjacent data. */
4074 if (MEM_P (str_rtx))
4075 {
4076 str_rtx = shallow_copy_rtx (str_rtx);
4077 set_mem_alias_set (str_rtx, 0);
4078 set_mem_expr (str_rtx, 0);
4079 }
4080
4081 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4082 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4083 {
4084 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4085 - 1);
4086 value = expand_and (GET_MODE (str_rtx), value, mask,
4087 NULL_RTX);
4088 }
4089 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4090 build_int_cst (NULL_TREE, bitpos),
4091 NULL_RTX, 1);
4092 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4093 value, str_rtx, 1, OPTAB_WIDEN);
4094 if (result != str_rtx)
4095 emit_move_insn (str_rtx, result);
4096 return true;
4097
4098 default:
4099 break;
4100 }
4101
4102 return false;
4103 }
4104
4105
4106 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4107 is true, try generating a nontemporal store. */
4108
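/* In outline, the cases handled below are: a misaligned MEM_REF or
   TARGET_MEM_REF store via the movmisalign optab; stores into components,
   bitfields and array elements via store_field (or the bitfield shortcut
   above); calls on the RHS that are expanded before the LHS; direct stores
   into a RESULT_DECL (including the overlapping-return case, which goes
   through memmove); and, finally, the ordinary store_expr path.  */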
4109 void
4110 expand_assignment (tree to, tree from, bool nontemporal)
4111 {
4112 rtx to_rtx = 0;
4113 rtx result;
4114 enum machine_mode mode;
4115 int align;
4116 enum insn_code icode;
4117
4118 /* Don't crash if the lhs of the assignment was erroneous. */
4119 if (TREE_CODE (to) == ERROR_MARK)
4120 {
4121 expand_normal (from);
4122 return;
4123 }
4124
4125 /* Optimize away no-op moves without side-effects. */
4126 if (operand_equal_p (to, from, 0))
4127 return;
4128
4129 mode = TYPE_MODE (TREE_TYPE (to));
4130 if ((TREE_CODE (to) == MEM_REF
4131 || TREE_CODE (to) == TARGET_MEM_REF)
4132 && mode != BLKmode
4133 && ((align = MAX (TYPE_ALIGN (TREE_TYPE (to)),
4134 get_object_alignment (to, BIGGEST_ALIGNMENT)))
4135 < (signed) GET_MODE_ALIGNMENT (mode))
4136 && ((icode = optab_handler (movmisalign_optab, mode))
4137 != CODE_FOR_nothing))
4138 {
4139 struct expand_operand ops[2];
4140 enum machine_mode address_mode;
4141 rtx reg, op0, mem;
4142
4143 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4144 reg = force_not_mem (reg);
4145
4146 if (TREE_CODE (to) == MEM_REF)
4147 {
4148 addr_space_t as
4149 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 1))));
4150 tree base = TREE_OPERAND (to, 0);
4151 address_mode = targetm.addr_space.address_mode (as);
4152 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4153 op0 = convert_memory_address_addr_space (address_mode, op0, as);
4154 if (!integer_zerop (TREE_OPERAND (to, 1)))
4155 {
4156 rtx off
4157 = immed_double_int_const (mem_ref_offset (to), address_mode);
4158 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
4159 }
4160 op0 = memory_address_addr_space (mode, op0, as);
4161 mem = gen_rtx_MEM (mode, op0);
4162 set_mem_attributes (mem, to, 0);
4163 set_mem_addr_space (mem, as);
4164 }
4165 else if (TREE_CODE (to) == TARGET_MEM_REF)
4166 {
4167 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (to));
4168 struct mem_address addr;
4169
4170 get_address_description (to, &addr);
4171 op0 = addr_for_mem_ref (&addr, as, true);
4172 op0 = memory_address_addr_space (mode, op0, as);
4173 mem = gen_rtx_MEM (mode, op0);
4174 set_mem_attributes (mem, to, 0);
4175 set_mem_addr_space (mem, as);
4176 }
4177 else
4178 gcc_unreachable ();
4179 if (TREE_THIS_VOLATILE (to))
4180 MEM_VOLATILE_P (mem) = 1;
4181
4182 create_fixed_operand (&ops[0], mem);
4183 create_input_operand (&ops[1], reg, mode);
4184 /* The movmisalign<mode> pattern cannot fail, else the assignment would
4185 silently be omitted. */
4186 expand_insn (icode, 2, ops);
4187 return;
4188 }
4189
4190 /* Assignment of a structure component needs special treatment
4191 if the structure component's rtx is not simply a MEM.
4192 Assignment of an array element at a constant index, and assignment of
4193 an array element in an unaligned packed structure field, has the same
4194 problem. */
4195 if (handled_component_p (to)
4196 /* ??? We only need to handle MEM_REF here if the access is not
4197 a full access of the base object. */
4198 || (TREE_CODE (to) == MEM_REF
4199 && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
4200 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4201 {
4202 enum machine_mode mode1;
4203 HOST_WIDE_INT bitsize, bitpos;
4204 tree offset;
4205 int unsignedp;
4206 int volatilep = 0;
4207 tree tem;
4208
4209 push_temp_slots ();
4210 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4211 &unsignedp, &volatilep, true);
4212
4213 /* If we are going to use store_bit_field and extract_bit_field,
4214 make sure to_rtx will be safe for multiple use. */
4215
4216 to_rtx = expand_normal (tem);
4217
4218 /* If the bitfield is volatile, we want to access it in the
4219 field's mode, not the computed mode.
4220 If a MEM has VOIDmode (external with incomplete type),
4221 use BLKmode for it instead. */
4222 if (MEM_P (to_rtx))
4223 {
4224 if (volatilep && flag_strict_volatile_bitfields > 0)
4225 to_rtx = adjust_address (to_rtx, mode1, 0);
4226 else if (GET_MODE (to_rtx) == VOIDmode)
4227 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4228 }
4229
4230 if (offset != 0)
4231 {
4232 enum machine_mode address_mode;
4233 rtx offset_rtx;
4234
4235 if (!MEM_P (to_rtx))
4236 {
4237 /* We can get constant negative offsets into arrays with broken
4238 user code. Translate this to a trap instead of ICEing. */
4239 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4240 expand_builtin_trap ();
4241 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4242 }
4243
4244 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4245 address_mode
4246 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4247 if (GET_MODE (offset_rtx) != address_mode)
4248 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4249
4250 /* A constant address in TO_RTX can have VOIDmode; we must not
4251 call force_reg in that case, so avoid it here. */
4252 if (MEM_P (to_rtx)
4253 && GET_MODE (to_rtx) == BLKmode
4254 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4255 && bitsize > 0
4256 && (bitpos % bitsize) == 0
4257 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4258 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4259 {
4260 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4261 bitpos = 0;
4262 }
4263
4264 to_rtx = offset_address (to_rtx, offset_rtx,
4265 highest_pow2_factor_for_target (to,
4266 offset));
4267 }
4268
4269 /* No action is needed if the target is not a memory and the field
4270 lies completely outside that target. This can occur if the source
4271 code contains an out-of-bounds access to a small array. */
4272 if (!MEM_P (to_rtx)
4273 && GET_MODE (to_rtx) != BLKmode
4274 && (unsigned HOST_WIDE_INT) bitpos
4275 >= GET_MODE_BITSIZE (GET_MODE (to_rtx)))
4276 {
4277 expand_normal (from);
4278 result = NULL;
4279 }
4280 /* Handle expand_expr of a complex value returning a CONCAT. */
4281 else if (GET_CODE (to_rtx) == CONCAT)
4282 {
4283 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from))))
4284 {
4285 gcc_assert (bitpos == 0);
4286 result = store_expr (from, to_rtx, false, nontemporal);
4287 }
4288 else
4289 {
4290 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4291 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4292 nontemporal);
4293 }
4294 }
4295 else
4296 {
4297 if (MEM_P (to_rtx))
4298 {
4299 /* If the field is at offset zero, we could have been given the
4300 DECL_RTX of the parent struct. Don't munge it. */
4301 to_rtx = shallow_copy_rtx (to_rtx);
4302
4303 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4304
4305 /* Deal with volatile and readonly fields. The former is only
4306 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4307 if (volatilep)
4308 MEM_VOLATILE_P (to_rtx) = 1;
4309 if (component_uses_parent_alias_set (to))
4310 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4311 }
4312
4313 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4314 to_rtx, to, from))
4315 result = NULL;
4316 else
4317 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4318 TREE_TYPE (tem), get_alias_set (to),
4319 nontemporal);
4320 }
4321
4322 if (result)
4323 preserve_temp_slots (result);
4324 free_temp_slots ();
4325 pop_temp_slots ();
4326 return;
4327 }
4328
4329 /* If the rhs is a function call and its value is not an aggregate,
4330 call the function before we start to compute the lhs.
4331 This is needed for correct code for cases such as
4332 val = setjmp (buf) on machines where reference to val
4333 requires loading up part of an address in a separate insn.
4334
4335 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4336 since it might be a promoted variable where the zero- or sign-extension
4337 needs to be done. Handling this in the normal way is safe because no
4338 computation is done before the call. The same is true for SSA names. */
4339 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4340 && COMPLETE_TYPE_P (TREE_TYPE (from))
4341 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4342 && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4343 && REG_P (DECL_RTL (to)))
4344 || TREE_CODE (to) == SSA_NAME))
4345 {
4346 rtx value;
4347
4348 push_temp_slots ();
4349 value = expand_normal (from);
4350 if (to_rtx == 0)
4351 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4352
4353 /* Handle calls that return values in multiple non-contiguous locations.
4354 The Irix 6 ABI has examples of this. */
4355 if (GET_CODE (to_rtx) == PARALLEL)
4356 emit_group_load (to_rtx, value, TREE_TYPE (from),
4357 int_size_in_bytes (TREE_TYPE (from)));
4358 else if (GET_MODE (to_rtx) == BLKmode)
4359 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4360 else
4361 {
4362 if (POINTER_TYPE_P (TREE_TYPE (to)))
4363 value = convert_memory_address_addr_space
4364 (GET_MODE (to_rtx), value,
4365 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4366
4367 emit_move_insn (to_rtx, value);
4368 }
4369 preserve_temp_slots (to_rtx);
4370 free_temp_slots ();
4371 pop_temp_slots ();
4372 return;
4373 }
4374
4375 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4376 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4377
4378 if (to_rtx == 0)
4379 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4380
4381 /* Don't move directly into a return register. */
4382 if (TREE_CODE (to) == RESULT_DECL
4383 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4384 {
4385 rtx temp;
4386
4387 push_temp_slots ();
4388 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4389
4390 if (GET_CODE (to_rtx) == PARALLEL)
4391 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4392 int_size_in_bytes (TREE_TYPE (from)));
4393 else
4394 emit_move_insn (to_rtx, temp);
4395
4396 preserve_temp_slots (to_rtx);
4397 free_temp_slots ();
4398 pop_temp_slots ();
4399 return;
4400 }
4401
4402 /* In case we are returning the contents of an object which overlaps
4403 the place the value is being stored, use a safe function when copying
4404 a value through a pointer into a structure value return block. */
4405 if (TREE_CODE (to) == RESULT_DECL
4406 && TREE_CODE (from) == INDIRECT_REF
4407 && ADDR_SPACE_GENERIC_P
4408 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4409 && refs_may_alias_p (to, from)
4410 && cfun->returns_struct
4411 && !cfun->returns_pcc_struct)
4412 {
4413 rtx from_rtx, size;
4414
4415 push_temp_slots ();
4416 size = expr_size (from);
4417 from_rtx = expand_normal (from);
4418
4419 emit_library_call (memmove_libfunc, LCT_NORMAL,
4420 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4421 XEXP (from_rtx, 0), Pmode,
4422 convert_to_mode (TYPE_MODE (sizetype),
4423 size, TYPE_UNSIGNED (sizetype)),
4424 TYPE_MODE (sizetype));
4425
4426 preserve_temp_slots (to_rtx);
4427 free_temp_slots ();
4428 pop_temp_slots ();
4429 return;
4430 }
4431
4432 /* Compute FROM and store the value in the rtx we got. */
4433
4434 push_temp_slots ();
4435 result = store_expr (from, to_rtx, 0, nontemporal);
4436 preserve_temp_slots (result);
4437 free_temp_slots ();
4438 pop_temp_slots ();
4439 return;
4440 }
4441
4442 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4443 succeeded, false otherwise. */
4444
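/* store_expr below uses this when its NONTEMPORAL argument is true; if no
   storent<mode> pattern exists for the mode, the caller simply falls back
   to an ordinary move.  */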
4445 bool
4446 emit_storent_insn (rtx to, rtx from)
4447 {
4448 struct expand_operand ops[2];
4449 enum machine_mode mode = GET_MODE (to);
4450 enum insn_code code = optab_handler (storent_optab, mode);
4451
4452 if (code == CODE_FOR_nothing)
4453 return false;
4454
4455 create_fixed_operand (&ops[0], to);
4456 create_input_operand (&ops[1], from, mode);
4457 return maybe_expand_insn (code, 2, ops);
4458 }
4459
4460 /* Generate code for computing expression EXP,
4461 and storing the value into TARGET.
4462
4463 If the mode is BLKmode then we may return TARGET itself.
4464 It turns out that in BLKmode this doesn't cause a problem,
4465 because C has no operators that could combine two different
4466 assignments into the same BLKmode object with different values
4467 with no sequence point. Will other languages need this to
4468 be more thorough?
4469
4470 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4471 stack, and block moves may need to be treated specially.
4472
4473 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4474
4475 rtx
4476 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4477 {
4478 rtx temp;
4479 rtx alt_rtl = NULL_RTX;
4480 location_t loc = EXPR_LOCATION (exp);
4481
4482 if (VOID_TYPE_P (TREE_TYPE (exp)))
4483 {
4484 /* C++ can generate ?: expressions with a throw expression in one
4485 branch and an rvalue in the other. Here, we resolve attempts to
4486 store the throw expression's nonexistent result. */
4487 gcc_assert (!call_param_p);
4488 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4489 return NULL_RTX;
4490 }
4491 if (TREE_CODE (exp) == COMPOUND_EXPR)
4492 {
4493 /* Perform first part of compound expression, then assign from second
4494 part. */
4495 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4496 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4497 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4498 nontemporal);
4499 }
4500 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4501 {
4502 /* For a conditional expression, get a safe form of the target. Then
4503 test the condition, doing the appropriate assignment on either
4504 side. This avoids the creation of unnecessary temporaries.
4505 For non-BLKmode, it is more efficient not to do this. */
4506
4507 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4508
4509 do_pending_stack_adjust ();
4510 NO_DEFER_POP;
4511 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
4512 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4513 nontemporal);
4514 emit_jump_insn (gen_jump (lab2));
4515 emit_barrier ();
4516 emit_label (lab1);
4517 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4518 nontemporal);
4519 emit_label (lab2);
4520 OK_DEFER_POP;
4521
4522 return NULL_RTX;
4523 }
4524 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4525 /* If this is a scalar in a register that is stored in a wider mode
4526 than the declared mode, compute the result into its declared mode
4527 and then convert to the wider mode. Our value is the computed
4528 expression. */
4529 {
4530 rtx inner_target = 0;
4531
4532 /* We can do the conversion inside EXP, which will often result
4533 in some optimizations. Do the conversion in two steps: first
4534 change the signedness, if needed, then do the extension. But don't
4535 do this if the type of EXP is a subtype of something else
4536 since then the conversion might involve more than just
4537 converting modes. */
4538 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4539 && TREE_TYPE (TREE_TYPE (exp)) == 0
4540 && GET_MODE_PRECISION (GET_MODE (target))
4541 == TYPE_PRECISION (TREE_TYPE (exp)))
4542 {
4543 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4544 != SUBREG_PROMOTED_UNSIGNED_P (target))
4545 {
4546 /* Some types, e.g. Fortran's logical*4, won't have a signed
4547 version, so use the mode instead. */
4548 tree ntype
4549 = (signed_or_unsigned_type_for
4550 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4551 if (ntype == NULL)
4552 ntype = lang_hooks.types.type_for_mode
4553 (TYPE_MODE (TREE_TYPE (exp)),
4554 SUBREG_PROMOTED_UNSIGNED_P (target));
4555
4556 exp = fold_convert_loc (loc, ntype, exp);
4557 }
4558
4559 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
4560 (GET_MODE (SUBREG_REG (target)),
4561 SUBREG_PROMOTED_UNSIGNED_P (target)),
4562 exp);
4563
4564 inner_target = SUBREG_REG (target);
4565 }
4566
4567 temp = expand_expr (exp, inner_target, VOIDmode,
4568 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4569
4570 /* If TEMP is a VOIDmode constant, use convert_modes to make
4571 sure that we properly convert it. */
4572 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4573 {
4574 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4575 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4576 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4577 GET_MODE (target), temp,
4578 SUBREG_PROMOTED_UNSIGNED_P (target));
4579 }
4580
4581 convert_move (SUBREG_REG (target), temp,
4582 SUBREG_PROMOTED_UNSIGNED_P (target));
4583
4584 return NULL_RTX;
4585 }
4586 else if ((TREE_CODE (exp) == STRING_CST
4587 || (TREE_CODE (exp) == MEM_REF
4588 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4589 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4590 == STRING_CST
4591 && integer_zerop (TREE_OPERAND (exp, 1))))
4592 && !nontemporal && !call_param_p
4593 && MEM_P (target))
4594 {
4595 /* Optimize initialization of an array with a STRING_CST. */
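/* E.g. for something like `char buf[16] = "abc";' (the sizes here are
   purely illustrative), the string bytes are written with
   store_by_pieces and the remaining tail of the array is cleared
   with clear_storage below.  */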
4596 HOST_WIDE_INT exp_len, str_copy_len;
4597 rtx dest_mem;
4598 tree str = TREE_CODE (exp) == STRING_CST
4599 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4600
4601 exp_len = int_expr_size (exp);
4602 if (exp_len <= 0)
4603 goto normal_expr;
4604
4605 if (TREE_STRING_LENGTH (str) <= 0)
4606 goto normal_expr;
4607
4608 str_copy_len = strlen (TREE_STRING_POINTER (str));
4609 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
4610 goto normal_expr;
4611
4612 str_copy_len = TREE_STRING_LENGTH (str);
4613 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
4614 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
4615 {
4616 str_copy_len += STORE_MAX_PIECES - 1;
4617 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4618 }
4619 str_copy_len = MIN (str_copy_len, exp_len);
4620 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4621 CONST_CAST (char *, TREE_STRING_POINTER (str)),
4622 MEM_ALIGN (target), false))
4623 goto normal_expr;
4624
4625 dest_mem = target;
4626
4627 dest_mem = store_by_pieces (dest_mem,
4628 str_copy_len, builtin_strncpy_read_str,
4629 CONST_CAST (char *,
4630 TREE_STRING_POINTER (str)),
4631 MEM_ALIGN (target), false,
4632 exp_len > str_copy_len ? 1 : 0);
4633 if (exp_len > str_copy_len)
4634 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4635 GEN_INT (exp_len - str_copy_len),
4636 BLOCK_OP_NORMAL);
4637 return NULL_RTX;
4638 }
4639 else
4640 {
4641 rtx tmp_target;
4642
4643 normal_expr:
4644 /* If we want to use a nontemporal store, force the value to
4645 register first. */
4646 tmp_target = nontemporal ? NULL_RTX : target;
4647 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4648 (call_param_p
4649 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4650 &alt_rtl);
4651 }
4652
4653 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4654 the same as that of TARGET, adjust the constant. This is needed, for
4655 example, in case it is a CONST_DOUBLE and we want only a word-sized
4656 value. */
4657 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4658 && TREE_CODE (exp) != ERROR_MARK
4659 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4660 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4661 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4662
4663 /* If value was not generated in the target, store it there.
4664 Convert the value to TARGET's type first if necessary and emit the
4665 pending incrementations that have been queued when expanding EXP.
4666 Note that we cannot emit the whole queue blindly because this will
4667 effectively disable the POST_INC optimization later.
4668
4669 If TEMP and TARGET compare equal according to rtx_equal_p, but
4670 one or both of them are volatile memory refs, we have to distinguish
4671 two cases:
4672 - expand_expr has used TARGET. In this case, we must not generate
4673 another copy. This can be detected by TARGET being equal according
4674 to == .
4675 - expand_expr has not used TARGET - that means that the source just
4676 happens to have the same RTX form. Since temp will have been created
4677 by expand_expr, it will compare unequal according to == .
4678 We must generate a copy in this case, to reach the correct number
4679 of volatile memory references. */
4680
4681 if ((! rtx_equal_p (temp, target)
4682 || (temp != target && (side_effects_p (temp)
4683 || side_effects_p (target))))
4684 && TREE_CODE (exp) != ERROR_MARK
4685 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4686 but TARGET is not valid memory reference, TEMP will differ
4687 from TARGET although it is really the same location. */
4688 && !(alt_rtl
4689 && rtx_equal_p (alt_rtl, target)
4690 && !side_effects_p (alt_rtl)
4691 && !side_effects_p (target))
4692 /* If there's nothing to copy, don't bother. Don't call
4693 expr_size unless necessary, because some front ends' (e.g. C++)
4694 expr_size hook must not be given objects that are not
4695 supposed to be bit-copied or bit-initialized. */
4696 && expr_size (exp) != const0_rtx)
4697 {
4698 if (GET_MODE (temp) != GET_MODE (target)
4699 && GET_MODE (temp) != VOIDmode)
4700 {
4701 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4702 if (GET_MODE (target) == BLKmode
4703 && GET_MODE (temp) == BLKmode)
4704 emit_block_move (target, temp, expr_size (exp),
4705 (call_param_p
4706 ? BLOCK_OP_CALL_PARM
4707 : BLOCK_OP_NORMAL));
4708 else if (GET_MODE (target) == BLKmode)
4709 store_bit_field (target, INTVAL (expr_size (exp)) * BITS_PER_UNIT,
4710 0, GET_MODE (temp), temp);
4711 else
4712 convert_move (target, temp, unsignedp);
4713 }
4714
4715 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4716 {
4717 /* Handle copying a string constant into an array. The string
4718 constant may be shorter than the array. So copy just the string's
4719 actual length, and clear the rest. First get the size of the data
4720 type of the string, which is actually the size of the target. */
4721 rtx size = expr_size (exp);
4722
4723 if (CONST_INT_P (size)
4724 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4725 emit_block_move (target, temp, size,
4726 (call_param_p
4727 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4728 else
4729 {
4730 enum machine_mode pointer_mode
4731 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
4732 enum machine_mode address_mode
4733 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
4734
4735 /* Compute the size of the data to copy from the string. */
4736 tree copy_size
4737 = size_binop_loc (loc, MIN_EXPR,
4738 make_tree (sizetype, size),
4739 size_int (TREE_STRING_LENGTH (exp)));
4740 rtx copy_size_rtx
4741 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4742 (call_param_p
4743 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4744 rtx label = 0;
4745
4746 /* Copy that much. */
4747 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
4748 TYPE_UNSIGNED (sizetype));
4749 emit_block_move (target, temp, copy_size_rtx,
4750 (call_param_p
4751 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4752
4753 /* Figure out how much is left in TARGET that we have to clear.
4754 Do all calculations in pointer_mode. */
4755 if (CONST_INT_P (copy_size_rtx))
4756 {
4757 size = plus_constant (size, -INTVAL (copy_size_rtx));
4758 target = adjust_address (target, BLKmode,
4759 INTVAL (copy_size_rtx));
4760 }
4761 else
4762 {
4763 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4764 copy_size_rtx, NULL_RTX, 0,
4765 OPTAB_LIB_WIDEN);
4766
4767 if (GET_MODE (copy_size_rtx) != address_mode)
4768 copy_size_rtx = convert_to_mode (address_mode,
4769 copy_size_rtx,
4770 TYPE_UNSIGNED (sizetype));
4771
4772 target = offset_address (target, copy_size_rtx,
4773 highest_pow2_factor (copy_size));
4774 label = gen_label_rtx ();
4775 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4776 GET_MODE (size), 0, label);
4777 }
4778
4779 if (size != const0_rtx)
4780 clear_storage (target, size, BLOCK_OP_NORMAL);
4781
4782 if (label)
4783 emit_label (label);
4784 }
4785 }
4786 /* Handle calls that return values in multiple non-contiguous locations.
4787 The Irix 6 ABI has examples of this. */
4788 else if (GET_CODE (target) == PARALLEL)
4789 emit_group_load (target, temp, TREE_TYPE (exp),
4790 int_size_in_bytes (TREE_TYPE (exp)));
4791 else if (GET_MODE (temp) == BLKmode)
4792 emit_block_move (target, temp, expr_size (exp),
4793 (call_param_p
4794 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4795 else if (nontemporal
4796 && emit_storent_insn (target, temp))
4797 /* If we managed to emit a nontemporal store, there is nothing else to
4798 do. */
4799 ;
4800 else
4801 {
4802 temp = force_operand (temp, target);
4803 if (temp != target)
4804 emit_move_insn (target, temp);
4805 }
4806 }
4807
4808 return NULL_RTX;
4809 }
4810 \f
4811 /* Helper for categorize_ctor_elements. Identical interface. */
4812
4813 static bool
4814 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4815 HOST_WIDE_INT *p_elt_count,
4816 bool *p_must_clear)
4817 {
4818 unsigned HOST_WIDE_INT idx;
4819 HOST_WIDE_INT nz_elts, elt_count;
4820 tree value, purpose;
4821
4822 /* Whether CTOR is a valid constant initializer, in accordance with what
4823 initializer_constant_valid_p does. If inferred from the constructor
4824 elements, true until proven otherwise. */
4825 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4826 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4827
4828 nz_elts = 0;
4829 elt_count = 0;
4830
4831 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4832 {
4833 HOST_WIDE_INT mult = 1;
4834
4835 if (TREE_CODE (purpose) == RANGE_EXPR)
4836 {
4837 tree lo_index = TREE_OPERAND (purpose, 0);
4838 tree hi_index = TREE_OPERAND (purpose, 1);
4839
4840 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4841 mult = (tree_low_cst (hi_index, 1)
4842 - tree_low_cst (lo_index, 1) + 1);
4843 }
4844
4845 switch (TREE_CODE (value))
4846 {
4847 case CONSTRUCTOR:
4848 {
4849 HOST_WIDE_INT nz = 0, ic = 0;
4850
4851 bool const_elt_p
4852 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4853
4854 nz_elts += mult * nz;
4855 elt_count += mult * ic;
4856
4857 if (const_from_elts_p && const_p)
4858 const_p = const_elt_p;
4859 }
4860 break;
4861
4862 case INTEGER_CST:
4863 case REAL_CST:
4864 case FIXED_CST:
4865 if (!initializer_zerop (value))
4866 nz_elts += mult;
4867 elt_count += mult;
4868 break;
4869
4870 case STRING_CST:
4871 nz_elts += mult * TREE_STRING_LENGTH (value);
4872 elt_count += mult * TREE_STRING_LENGTH (value);
4873 break;
4874
4875 case COMPLEX_CST:
4876 if (!initializer_zerop (TREE_REALPART (value)))
4877 nz_elts += mult;
4878 if (!initializer_zerop (TREE_IMAGPART (value)))
4879 nz_elts += mult;
4880 elt_count += mult;
4881 break;
4882
4883 case VECTOR_CST:
4884 {
4885 tree v;
4886 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4887 {
4888 if (!initializer_zerop (TREE_VALUE (v)))
4889 nz_elts += mult;
4890 elt_count += mult;
4891 }
4892 }
4893 break;
4894
4895 default:
4896 {
4897 HOST_WIDE_INT tc = count_type_elements (TREE_TYPE (value), true);
4898 if (tc < 1)
4899 tc = 1;
4900 nz_elts += mult * tc;
4901 elt_count += mult * tc;
4902
4903 if (const_from_elts_p && const_p)
4904 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4905 != NULL_TREE;
4906 }
4907 break;
4908 }
4909 }
4910
4911 if (!*p_must_clear
4912 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4913 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4914 {
4915 tree init_sub_type;
4916 bool clear_this = true;
4917
4918 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4919 {
4920 /* We don't expect more than one element of the union to be
4921 initialized. Not sure what we should do otherwise... */
4922 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4923 == 1);
4924
4925 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4926 CONSTRUCTOR_ELTS (ctor),
4927 0)->value);
4928
4929 /* ??? We could look at each element of the union, and find the
4930 largest element. That would avoid comparing the size of the
4931 initialized element against any tail padding in the union.
4932 Doesn't seem worth the effort... */
4933 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4934 TYPE_SIZE (init_sub_type)) == 1)
4935 {
4936 /* And now we have to find out if the element itself is fully
4937 constructed. E.g. for union { struct { int a, b; } s; } u
4938 = { .s = { .a = 1 } }. */
4939 if (elt_count == count_type_elements (init_sub_type, false))
4940 clear_this = false;
4941 }
4942 }
4943
4944 *p_must_clear = clear_this;
4945 }
4946
4947 *p_nz_elts += nz_elts;
4948 *p_elt_count += elt_count;
4949
4950 return const_p;
4951 }
4952
4953 /* Examine CTOR to discover:
4954 * how many scalar fields are set to nonzero values,
4955 and place it in *P_NZ_ELTS;
4956 * how many scalar fields in total are in CTOR,
4957 and place it in *P_ELT_COUNT;
4958 * if a type is a union, and the initializer from the constructor
4959 is not the largest element in the union, then set *P_MUST_CLEAR.
4960
4961 Return whether or not CTOR is a valid static constant initializer, the same
4962 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4963
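/* For example, for a constructor like `{ 1, 0, 2 }' initializing a struct
   of three ints, *P_NZ_ELTS becomes 2 and *P_ELT_COUNT becomes 3 (an
   illustrative case, not an exhaustive description).  */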
4964 bool
4965 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4966 HOST_WIDE_INT *p_elt_count,
4967 bool *p_must_clear)
4968 {
4969 *p_nz_elts = 0;
4970 *p_elt_count = 0;
4971 *p_must_clear = false;
4972
4973 return
4974 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4975 }
4976
4977 /* Count the number of scalars in TYPE. Return -1 on overflow or
4978 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
4979 flexible array member at the end of the structure. */
4980
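/* For instance, a struct containing an int and a 4-element array of
   doubles counts as 1 + 4 = 5 scalars, a COMPLEX_TYPE counts as 2, and a
   union or variable-sized type yields -1.  */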
4981 HOST_WIDE_INT
4982 count_type_elements (const_tree type, bool allow_flexarr)
4983 {
4984 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4985 switch (TREE_CODE (type))
4986 {
4987 case ARRAY_TYPE:
4988 {
4989 tree telts = array_type_nelts (type);
4990 if (telts && host_integerp (telts, 1))
4991 {
4992 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4993 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4994 if (n == 0)
4995 return 0;
4996 else if (max / n > m)
4997 return n * m;
4998 }
4999 return -1;
5000 }
5001
5002 case RECORD_TYPE:
5003 {
5004 HOST_WIDE_INT n = 0, t;
5005 tree f;
5006
5007 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5008 if (TREE_CODE (f) == FIELD_DECL)
5009 {
5010 t = count_type_elements (TREE_TYPE (f), false);
5011 if (t < 0)
5012 {
5013 /* Check for structures with flexible array member. */
5014 tree tf = TREE_TYPE (f);
5015 if (allow_flexarr
5016 && DECL_CHAIN (f) == NULL
5017 && TREE_CODE (tf) == ARRAY_TYPE
5018 && TYPE_DOMAIN (tf)
5019 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5020 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5021 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5022 && int_size_in_bytes (type) >= 0)
5023 break;
5024
5025 return -1;
5026 }
5027 n += t;
5028 }
5029
5030 return n;
5031 }
5032
5033 case UNION_TYPE:
5034 case QUAL_UNION_TYPE:
5035 return -1;
5036
5037 case COMPLEX_TYPE:
5038 return 2;
5039
5040 case VECTOR_TYPE:
5041 return TYPE_VECTOR_SUBPARTS (type);
5042
5043 case INTEGER_TYPE:
5044 case REAL_TYPE:
5045 case FIXED_POINT_TYPE:
5046 case ENUMERAL_TYPE:
5047 case BOOLEAN_TYPE:
5048 case POINTER_TYPE:
5049 case OFFSET_TYPE:
5050 case REFERENCE_TYPE:
5051 return 1;
5052
5053 case ERROR_MARK:
5054 return 0;
5055
5056 case VOID_TYPE:
5057 case METHOD_TYPE:
5058 case FUNCTION_TYPE:
5059 case LANG_TYPE:
5060 default:
5061 gcc_unreachable ();
5062 }
5063 }
5064
5065 /* Return 1 if EXP consists mostly (at least 3/4) of zeros. */
5066
5067 static int
5068 mostly_zeros_p (const_tree exp)
5069 {
5070 if (TREE_CODE (exp) == CONSTRUCTOR)
5071
5072 {
5073 HOST_WIDE_INT nz_elts, count, elts;
5074 bool must_clear;
5075
5076 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5077 if (must_clear)
5078 return 1;
5079
5080 elts = count_type_elements (TREE_TYPE (exp), false);
5081
5082 return nz_elts < elts / 4;
5083 }
5084
5085 return initializer_zerop (exp);
5086 }
5087
5088 /* Return 1 if EXP contains all zeros. */
5089
5090 static int
5091 all_zeros_p (const_tree exp)
5092 {
5093 if (TREE_CODE (exp) == CONSTRUCTOR)
5094
5095 {
5096 HOST_WIDE_INT nz_elts, count;
5097 bool must_clear;
5098
5099 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5100 return nz_elts == 0;
5101 }
5102
5103 return initializer_zerop (exp);
5104 }
5105 \f
5106 /* Helper function for store_constructor.
5107 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5108 TYPE is the type of the CONSTRUCTOR, not the element type.
5109 CLEARED is as for store_constructor.
5110 ALIAS_SET is the alias set to use for any stores.
5111
5112 This provides a recursive shortcut back to store_constructor when it isn't
5113 necessary to go through store_field. This is so that we can pass through
5114 the cleared field to let store_constructor know that we may not have to
5115 clear a substructure if the outer structure has already been cleared. */
5116
5117 static void
5118 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5119 HOST_WIDE_INT bitpos, enum machine_mode mode,
5120 tree exp, tree type, int cleared,
5121 alias_set_type alias_set)
5122 {
5123 if (TREE_CODE (exp) == CONSTRUCTOR
5124 /* We can only call store_constructor recursively if the size and
5125 bit position are on a byte boundary. */
5126 && bitpos % BITS_PER_UNIT == 0
5127 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5128 /* If we have a nonzero bitpos for a register target, then we just
5129 let store_field do the bitfield handling. This is unlikely to
5130 generate unnecessary clear instructions anyway. */
5131 && (bitpos == 0 || MEM_P (target)))
5132 {
5133 if (MEM_P (target))
5134 target
5135 = adjust_address (target,
5136 GET_MODE (target) == BLKmode
5137 || 0 != (bitpos
5138 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5139 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5140
5141
5142 /* Update the alias set, if required. */
5143 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5144 && MEM_ALIAS_SET (target) != 0)
5145 {
5146 target = copy_rtx (target);
5147 set_mem_alias_set (target, alias_set);
5148 }
5149
5150 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5151 }
5152 else
5153 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5154 }
5155
5156 /* Store the value of constructor EXP into the rtx TARGET.
5157 TARGET is either a REG or a MEM; we know it cannot conflict, since
5158 safe_from_p has been called.
5159 CLEARED is true if TARGET is known to have been zero'd.
5160 SIZE is the number of bytes of TARGET we are allowed to modify: this
5161 may not be the same as the size of EXP if we are assigning to a field
5162 which has been packed to exclude padding bits. */
5163
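/* As a rough example, an aggregate initializer in which well under a
   quarter of the fields are nonzero is typically expanded as a clear of
   the whole object (clear_storage) followed by individual stores of just
   the nonzero fields, since mostly_zeros_p holds and zero-initialized
   fields are skipped once the target has been cleared.  */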
5164 static void
5165 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5166 {
5167 tree type = TREE_TYPE (exp);
5168 #ifdef WORD_REGISTER_OPERATIONS
5169 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5170 #endif
5171
5172 switch (TREE_CODE (type))
5173 {
5174 case RECORD_TYPE:
5175 case UNION_TYPE:
5176 case QUAL_UNION_TYPE:
5177 {
5178 unsigned HOST_WIDE_INT idx;
5179 tree field, value;
5180
5181 /* If size is zero or the target is already cleared, do nothing. */
5182 if (size == 0 || cleared)
5183 cleared = 1;
5184 /* We either clear the aggregate or indicate the value is dead. */
5185 else if ((TREE_CODE (type) == UNION_TYPE
5186 || TREE_CODE (type) == QUAL_UNION_TYPE)
5187 && ! CONSTRUCTOR_ELTS (exp))
5188 /* If the constructor is empty, clear the union. */
5189 {
5190 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5191 cleared = 1;
5192 }
5193
5194 /* If we are building a static constructor into a register,
5195 set the initial value as zero so we can fold the value into
5196 a constant. But if more than one register is involved,
5197 this probably loses. */
5198 else if (REG_P (target) && TREE_STATIC (exp)
5199 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5200 {
5201 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5202 cleared = 1;
5203 }
5204
5205 /* If the constructor has fewer fields than the structure or
5206 if we are initializing the structure to mostly zeros, clear
5207 the whole structure first. Don't do this if TARGET is a
5208 register whose mode size isn't equal to SIZE since
5209 clear_storage can't handle this case. */
5210 else if (size > 0
5211 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5212 != fields_length (type))
5213 || mostly_zeros_p (exp))
5214 && (!REG_P (target)
5215 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5216 == size)))
5217 {
5218 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5219 cleared = 1;
5220 }
5221
5222 if (REG_P (target) && !cleared)
5223 emit_clobber (target);
5224
5225 /* Store each element of the constructor into the
5226 corresponding field of TARGET. */
5227 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5228 {
5229 enum machine_mode mode;
5230 HOST_WIDE_INT bitsize;
5231 HOST_WIDE_INT bitpos = 0;
5232 tree offset;
5233 rtx to_rtx = target;
5234
5235 /* Just ignore missing fields. We cleared the whole
5236 structure, above, if any fields are missing. */
5237 if (field == 0)
5238 continue;
5239
5240 if (cleared && initializer_zerop (value))
5241 continue;
5242
5243 if (host_integerp (DECL_SIZE (field), 1))
5244 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5245 else
5246 bitsize = -1;
5247
5248 mode = DECL_MODE (field);
5249 if (DECL_BIT_FIELD (field))
5250 mode = VOIDmode;
5251
5252 offset = DECL_FIELD_OFFSET (field);
5253 if (host_integerp (offset, 0)
5254 && host_integerp (bit_position (field), 0))
5255 {
5256 bitpos = int_bit_position (field);
5257 offset = 0;
5258 }
5259 else
5260 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5261
5262 if (offset)
5263 {
5264 enum machine_mode address_mode;
5265 rtx offset_rtx;
5266
5267 offset
5268 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5269 make_tree (TREE_TYPE (exp),
5270 target));
5271
5272 offset_rtx = expand_normal (offset);
5273 gcc_assert (MEM_P (to_rtx));
5274
5275 address_mode
5276 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5277 if (GET_MODE (offset_rtx) != address_mode)
5278 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5279
5280 to_rtx = offset_address (to_rtx, offset_rtx,
5281 highest_pow2_factor (offset));
5282 }
5283
5284 #ifdef WORD_REGISTER_OPERATIONS
5285 /* If this initializes a field that is smaller than a
5286 word, at the start of a word, try to widen it to a full
5287 word. This special case allows us to output C++ member
5288 function initializations in a form that the optimizers
5289 can understand. */
5290 if (REG_P (target)
5291 && bitsize < BITS_PER_WORD
5292 && bitpos % BITS_PER_WORD == 0
5293 && GET_MODE_CLASS (mode) == MODE_INT
5294 && TREE_CODE (value) == INTEGER_CST
5295 && exp_size >= 0
5296 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5297 {
5298 tree type = TREE_TYPE (value);
5299
5300 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5301 {
5302 type = lang_hooks.types.type_for_size
5303 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5304 value = fold_convert (type, value);
5305 }
5306
5307 if (BYTES_BIG_ENDIAN)
5308 value
5309 = fold_build2 (LSHIFT_EXPR, type, value,
5310 build_int_cst (type,
5311 BITS_PER_WORD - bitsize));
5312 bitsize = BITS_PER_WORD;
5313 mode = word_mode;
5314 }
5315 #endif
5316
5317 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5318 && DECL_NONADDRESSABLE_P (field))
5319 {
5320 to_rtx = copy_rtx (to_rtx);
5321 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5322 }
5323
5324 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5325 value, type, cleared,
5326 get_alias_set (TREE_TYPE (field)));
5327 }
5328 break;
5329 }
5330 case ARRAY_TYPE:
5331 {
5332 tree value, index;
5333 unsigned HOST_WIDE_INT i;
5334 int need_to_clear;
5335 tree domain;
5336 tree elttype = TREE_TYPE (type);
5337 int const_bounds_p;
5338 HOST_WIDE_INT minelt = 0;
5339 HOST_WIDE_INT maxelt = 0;
5340
5341 domain = TYPE_DOMAIN (type);
5342 const_bounds_p = (TYPE_MIN_VALUE (domain)
5343 && TYPE_MAX_VALUE (domain)
5344 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5345 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5346
5347 /* If we have constant bounds for the range of the type, get them. */
5348 if (const_bounds_p)
5349 {
5350 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5351 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5352 }
5353
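        /* Illustration (added): for an array declared as "int a[5]" the
           domain is [0, 4], so MINELT becomes 0 and MAXELT becomes 4; these
           bounds feed the missing-element check below.  */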
5354 /* If the constructor has fewer elements than the array, clear
 5356 	   the whole array first.  Similarly if this is a static
5356 constructor of a non-BLKmode object. */
5357 if (cleared)
5358 need_to_clear = 0;
5359 else if (REG_P (target) && TREE_STATIC (exp))
5360 need_to_clear = 1;
5361 else
5362 {
5363 unsigned HOST_WIDE_INT idx;
5364 tree index, value;
5365 HOST_WIDE_INT count = 0, zero_count = 0;
5366 need_to_clear = ! const_bounds_p;
5367
5368 /* This loop is a more accurate version of the loop in
5369 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5370 is also needed to check for missing elements. */
5371 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5372 {
5373 HOST_WIDE_INT this_node_count;
5374
5375 if (need_to_clear)
5376 break;
5377
5378 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5379 {
5380 tree lo_index = TREE_OPERAND (index, 0);
5381 tree hi_index = TREE_OPERAND (index, 1);
5382
5383 if (! host_integerp (lo_index, 1)
5384 || ! host_integerp (hi_index, 1))
5385 {
5386 need_to_clear = 1;
5387 break;
5388 }
5389
5390 this_node_count = (tree_low_cst (hi_index, 1)
5391 - tree_low_cst (lo_index, 1) + 1);
5392 }
5393 else
5394 this_node_count = 1;
5395
5396 count += this_node_count;
5397 if (mostly_zeros_p (value))
5398 zero_count += this_node_count;
5399 }
5400
5401 /* Clear the entire array first if there are any missing
5402 elements, or if the incidence of zero elements is >=
5403 75%. */
5404 if (! need_to_clear
5405 && (count < maxelt - minelt + 1
5406 || 4 * zero_count >= 3 * count))
5407 need_to_clear = 1;
5408 }
5409
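        /* Worked instance of the heuristic above (added for illustration):
           with COUNT == 8 initialized elements of which ZERO_COUNT == 6 are
           zero, 4 * 6 >= 3 * 8 holds, so the whole array is cleared first
           and only the two nonzero elements are stored individually.  */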
5410 if (need_to_clear && size > 0)
5411 {
5412 if (REG_P (target))
5413 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5414 else
5415 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5416 cleared = 1;
5417 }
5418
5419 if (!cleared && REG_P (target))
5420 /* Inform later passes that the old value is dead. */
5421 emit_clobber (target);
5422
5423 /* Store each element of the constructor into the
5424 corresponding element of TARGET, determined by counting the
5425 elements. */
5426 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5427 {
5428 enum machine_mode mode;
5429 HOST_WIDE_INT bitsize;
5430 HOST_WIDE_INT bitpos;
5431 rtx xtarget = target;
5432
5433 if (cleared && initializer_zerop (value))
5434 continue;
5435
5436 mode = TYPE_MODE (elttype);
5437 if (mode == BLKmode)
5438 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5439 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5440 : -1);
5441 else
5442 bitsize = GET_MODE_BITSIZE (mode);
5443
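            /* Illustrative note (added): a RANGE_EXPR index comes from a GNU
               designated-range initializer such as

                   int a[10] = { [2 ... 4] = 7 };

               Small constant ranges are unrolled into individual stores
               below; otherwise a runtime loop over the range is emitted.  */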
5444 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5445 {
5446 tree lo_index = TREE_OPERAND (index, 0);
5447 tree hi_index = TREE_OPERAND (index, 1);
5448 rtx index_r, pos_rtx;
5449 HOST_WIDE_INT lo, hi, count;
5450 tree position;
5451
5452 /* If the range is constant and "small", unroll the loop. */
5453 if (const_bounds_p
5454 && host_integerp (lo_index, 0)
5455 && host_integerp (hi_index, 0)
5456 && (lo = tree_low_cst (lo_index, 0),
5457 hi = tree_low_cst (hi_index, 0),
5458 count = hi - lo + 1,
5459 (!MEM_P (target)
5460 || count <= 2
5461 || (host_integerp (TYPE_SIZE (elttype), 1)
5462 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5463 <= 40 * 8)))))
5464 {
5465 lo -= minelt; hi -= minelt;
5466 for (; lo <= hi; lo++)
5467 {
5468 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5469
5470 if (MEM_P (target)
5471 && !MEM_KEEP_ALIAS_SET_P (target)
5472 && TREE_CODE (type) == ARRAY_TYPE
5473 && TYPE_NONALIASED_COMPONENT (type))
5474 {
5475 target = copy_rtx (target);
5476 MEM_KEEP_ALIAS_SET_P (target) = 1;
5477 }
5478
5479 store_constructor_field
5480 (target, bitsize, bitpos, mode, value, type, cleared,
5481 get_alias_set (elttype));
5482 }
5483 }
5484 else
5485 {
5486 rtx loop_start = gen_label_rtx ();
5487 rtx loop_end = gen_label_rtx ();
5488 tree exit_cond;
5489
5490 expand_normal (hi_index);
5491
5492 index = build_decl (EXPR_LOCATION (exp),
5493 VAR_DECL, NULL_TREE, domain);
5494 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
5495 SET_DECL_RTL (index, index_r);
5496 store_expr (lo_index, index_r, 0, false);
5497
5498 /* Build the head of the loop. */
5499 do_pending_stack_adjust ();
5500 emit_label (loop_start);
5501
5502 /* Assign value to element index. */
5503 position =
5504 fold_convert (ssizetype,
5505 fold_build2 (MINUS_EXPR,
5506 TREE_TYPE (index),
5507 index,
5508 TYPE_MIN_VALUE (domain)));
5509
5510 position =
5511 size_binop (MULT_EXPR, position,
5512 fold_convert (ssizetype,
5513 TYPE_SIZE_UNIT (elttype)));
5514
5515 pos_rtx = expand_normal (position);
5516 xtarget = offset_address (target, pos_rtx,
5517 highest_pow2_factor (position));
5518 xtarget = adjust_address (xtarget, mode, 0);
5519 if (TREE_CODE (value) == CONSTRUCTOR)
5520 store_constructor (value, xtarget, cleared,
5521 bitsize / BITS_PER_UNIT);
5522 else
5523 store_expr (value, xtarget, 0, false);
5524
5525 /* Generate a conditional jump to exit the loop. */
5526 exit_cond = build2 (LT_EXPR, integer_type_node,
5527 index, hi_index);
5528 jumpif (exit_cond, loop_end, -1);
5529
5530 /* Update the loop counter, and jump to the head of
5531 the loop. */
5532 expand_assignment (index,
5533 build2 (PLUS_EXPR, TREE_TYPE (index),
5534 index, integer_one_node),
5535 false);
5536
5537 emit_jump (loop_start);
5538
5539 /* Build the end of the loop. */
5540 emit_label (loop_end);
5541 }
5542 }
5543 else if ((index != 0 && ! host_integerp (index, 0))
5544 || ! host_integerp (TYPE_SIZE (elttype), 1))
5545 {
5546 tree position;
5547
5548 if (index == 0)
5549 index = ssize_int (1);
5550
5551 if (minelt)
5552 index = fold_convert (ssizetype,
5553 fold_build2 (MINUS_EXPR,
5554 TREE_TYPE (index),
5555 index,
5556 TYPE_MIN_VALUE (domain)));
5557
5558 position =
5559 size_binop (MULT_EXPR, index,
5560 fold_convert (ssizetype,
5561 TYPE_SIZE_UNIT (elttype)));
5562 xtarget = offset_address (target,
5563 expand_normal (position),
5564 highest_pow2_factor (position));
5565 xtarget = adjust_address (xtarget, mode, 0);
5566 store_expr (value, xtarget, 0, false);
5567 }
5568 else
5569 {
5570 if (index != 0)
5571 bitpos = ((tree_low_cst (index, 0) - minelt)
5572 * tree_low_cst (TYPE_SIZE (elttype), 1));
5573 else
5574 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5575
5576 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5577 && TREE_CODE (type) == ARRAY_TYPE
5578 && TYPE_NONALIASED_COMPONENT (type))
5579 {
5580 target = copy_rtx (target);
5581 MEM_KEEP_ALIAS_SET_P (target) = 1;
5582 }
5583 store_constructor_field (target, bitsize, bitpos, mode, value,
5584 type, cleared, get_alias_set (elttype));
5585 }
5586 }
5587 break;
5588 }
5589
5590 case VECTOR_TYPE:
5591 {
5592 unsigned HOST_WIDE_INT idx;
5593 constructor_elt *ce;
5594 int i;
5595 int need_to_clear;
5596 int icode = 0;
5597 tree elttype = TREE_TYPE (type);
5598 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5599 enum machine_mode eltmode = TYPE_MODE (elttype);
5600 HOST_WIDE_INT bitsize;
5601 HOST_WIDE_INT bitpos;
5602 rtvec vector = NULL;
5603 unsigned n_elts;
5604 alias_set_type alias;
5605
5606 gcc_assert (eltmode != BLKmode);
5607
5608 n_elts = TYPE_VECTOR_SUBPARTS (type);
5609 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5610 {
5611 enum machine_mode mode = GET_MODE (target);
5612
5613 icode = (int) optab_handler (vec_init_optab, mode);
5614 if (icode != CODE_FOR_nothing)
5615 {
5616 unsigned int i;
5617
5618 vector = rtvec_alloc (n_elts);
5619 for (i = 0; i < n_elts; i++)
5620 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5621 }
5622 }
5623
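        /* Illustration (added, hypothetical typedef): for

               typedef int v4si __attribute__ ((vector_size (16)));
               v4si x = { 1, 2, 3, 4 };

           a target providing a vec_init pattern for V4SImode takes this
           path: the element rtxes are collected in VECTOR and emitted as a
           single insn at the end of this case.  */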
5624 /* If the constructor has fewer elements than the vector,
 5625 	 clear the whole vector first.  Similarly if this is a static
5626 constructor of a non-BLKmode object. */
5627 if (cleared)
5628 need_to_clear = 0;
5629 else if (REG_P (target) && TREE_STATIC (exp))
5630 need_to_clear = 1;
5631 else
5632 {
5633 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5634 tree value;
5635
5636 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5637 {
5638 int n_elts_here = tree_low_cst
5639 (int_const_binop (TRUNC_DIV_EXPR,
5640 TYPE_SIZE (TREE_TYPE (value)),
5641 TYPE_SIZE (elttype), 0), 1);
5642
5643 count += n_elts_here;
5644 if (mostly_zeros_p (value))
5645 zero_count += n_elts_here;
5646 }
5647
5648 /* Clear the entire vector first if there are any missing elements,
5649 or if the incidence of zero elements is >= 75%. */
5650 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5651 }
5652
5653 if (need_to_clear && size > 0 && !vector)
5654 {
5655 if (REG_P (target))
5656 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5657 else
5658 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5659 cleared = 1;
5660 }
5661
5662 /* Inform later passes that the old value is dead. */
5663 if (!cleared && !vector && REG_P (target))
5664 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5665
5666 if (MEM_P (target))
5667 alias = MEM_ALIAS_SET (target);
5668 else
5669 alias = get_alias_set (elttype);
5670
5671 /* Store each element of the constructor into the corresponding
5672 element of TARGET, determined by counting the elements. */
5673 for (idx = 0, i = 0;
5674 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5675 idx++, i += bitsize / elt_size)
5676 {
5677 HOST_WIDE_INT eltpos;
5678 tree value = ce->value;
5679
5680 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5681 if (cleared && initializer_zerop (value))
5682 continue;
5683
5684 if (ce->index)
5685 eltpos = tree_low_cst (ce->index, 1);
5686 else
5687 eltpos = i;
5688
5689 if (vector)
5690 {
5691 /* Vector CONSTRUCTORs should only be built from smaller
5692 vectors in the case of BLKmode vectors. */
5693 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5694 RTVEC_ELT (vector, eltpos)
5695 = expand_normal (value);
5696 }
5697 else
5698 {
5699 enum machine_mode value_mode =
5700 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5701 ? TYPE_MODE (TREE_TYPE (value))
5702 : eltmode;
5703 bitpos = eltpos * elt_size;
5704 store_constructor_field (target, bitsize, bitpos,
5705 value_mode, value, type,
5706 cleared, alias);
5707 }
5708 }
5709
5710 if (vector)
5711 emit_insn (GEN_FCN (icode)
5712 (target,
5713 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5714 break;
5715 }
5716
5717 default:
5718 gcc_unreachable ();
5719 }
5720 }
5721
5722 /* Store the value of EXP (an expression tree)
5723 into a subfield of TARGET which has mode MODE and occupies
5724 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5725 If MODE is VOIDmode, it means that we are storing into a bit-field.
5726
5727 Always return const0_rtx unless we have something particular to
5728 return.
5729
5730 TYPE is the type of the underlying object,
5731
5732 ALIAS_SET is the alias set for the destination. This value will
5733 (in general) be different from that for TARGET, since TARGET is a
5734 reference to the containing structure.
5735
5736 If NONTEMPORAL is true, try generating a nontemporal store. */
5737
5738 static rtx
5739 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5740 enum machine_mode mode, tree exp, tree type,
5741 alias_set_type alias_set, bool nontemporal)
5742 {
5743 if (TREE_CODE (exp) == ERROR_MARK)
5744 return const0_rtx;
5745
5746 /* If we have nothing to store, do nothing unless the expression has
5747 side-effects. */
5748 if (bitsize == 0)
5749 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5750
5751 /* If we are storing into an unaligned field of an aligned union that is
5752 in a register, we may have the mode of TARGET being an integer mode but
5753 MODE == BLKmode. In that case, get an aligned object whose size and
5754 alignment are the same as TARGET and store TARGET into it (we can avoid
5755 the store if the field being stored is the entire width of TARGET). Then
5756 call ourselves recursively to store the field into a BLKmode version of
5757 that object. Finally, load from the object into TARGET. This is not
5758 very efficient in general, but should only be slightly more expensive
5759 than the otherwise-required unaligned accesses. Perhaps this can be
5760 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5761 twice, once with emit_move_insn and once via store_field. */
5762
5763 if (mode == BLKmode
5764 && (REG_P (target) || GET_CODE (target) == SUBREG))
5765 {
5766 rtx object = assign_temp (type, 0, 1, 1);
5767 rtx blk_object = adjust_address (object, BLKmode, 0);
5768
5769 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5770 emit_move_insn (object, target);
5771
5772 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5773 nontemporal);
5774
5775 emit_move_insn (target, object);
5776
5777 /* We want to return the BLKmode version of the data. */
5778 return blk_object;
5779 }
5780
5781 if (GET_CODE (target) == CONCAT)
5782 {
5783 /* We're storing into a struct containing a single __complex. */
5784
5785 gcc_assert (!bitpos);
5786 return store_expr (exp, target, 0, nontemporal);
5787 }
5788
5789 /* If the structure is in a register or if the component
5790 is a bit field, we cannot use addressing to access it.
5791 Use bit-field techniques or SUBREG to store in it. */
5792
5793 if (mode == VOIDmode
5794 || (mode != BLKmode && ! direct_store[(int) mode]
5795 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5796 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5797 || REG_P (target)
5798 || GET_CODE (target) == SUBREG
5799 /* If the field isn't aligned enough to store as an ordinary memref,
5800 store it as a bit field. */
5801 || (mode != BLKmode
5802 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5803 || bitpos % GET_MODE_ALIGNMENT (mode))
5804 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5805 || (bitpos % BITS_PER_UNIT != 0)))
5806 /* If the RHS and field are a constant size and the size of the
5807 RHS isn't the same size as the bitfield, we must use bitfield
5808 operations. */
5809 || (bitsize >= 0
5810 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5811 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
5812 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
5813 decl we must use bitfield operations. */
5814 || (bitsize >= 0
5815 && TREE_CODE (exp) == MEM_REF
5816 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5817 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
 5818 	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5819 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
5820 {
5821 rtx temp;
5822 gimple nop_def;
5823
5824 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5825 implies a mask operation. If the precision is the same size as
5826 the field we're storing into, that mask is redundant. This is
5827 particularly common with bit field assignments generated by the
5828 C front end. */
5829 nop_def = get_def_for_expr (exp, NOP_EXPR);
5830 if (nop_def)
5831 {
5832 tree type = TREE_TYPE (exp);
5833 if (INTEGRAL_TYPE_P (type)
5834 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5835 && bitsize == TYPE_PRECISION (type))
5836 {
5837 tree op = gimple_assign_rhs1 (nop_def);
5838 type = TREE_TYPE (op);
5839 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5840 exp = op;
5841 }
5842 }
5843
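      /* Example of the mask elision above (added for illustration): for

             struct { unsigned f : 3; } s;
             s.f = x;

         the front end converts X to the 3-bit bit-field type, a NOP_EXPR
         that implies masking X down to 3 bits.  Since exactly BITSIZE == 3
         bits are stored anyway, the mask is redundant and X itself is
         expanded instead of the conversion.  */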
5844 temp = expand_normal (exp);
5845
5846 /* If BITSIZE is narrower than the size of the type of EXP
5847 we will be narrowing TEMP. Normally, what's wanted are the
5848 low-order bits. However, if EXP's type is a record and this is
 5849 	 a big-endian machine, we want the upper BITSIZE bits.  */
5850 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5851 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5852 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5853 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5854 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5855 - bitsize),
5856 NULL_RTX, 1);
5857
5858 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5859 MODE. */
5860 if (mode != VOIDmode && mode != BLKmode
5861 && mode != TYPE_MODE (TREE_TYPE (exp)))
5862 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5863
5864 /* If the modes of TEMP and TARGET are both BLKmode, both
5865 must be in memory and BITPOS must be aligned on a byte
5866 boundary. If so, we simply do a block copy. Likewise
5867 for a BLKmode-like TARGET. */
5868 if (GET_MODE (temp) == BLKmode
5869 && (GET_MODE (target) == BLKmode
5870 || (MEM_P (target)
5871 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5872 && (bitpos % BITS_PER_UNIT) == 0
5873 && (bitsize % BITS_PER_UNIT) == 0)))
5874 {
5875 gcc_assert (MEM_P (target) && MEM_P (temp)
5876 && (bitpos % BITS_PER_UNIT) == 0);
5877
5878 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5879 emit_block_move (target, temp,
5880 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5881 / BITS_PER_UNIT),
5882 BLOCK_OP_NORMAL);
5883
5884 return const0_rtx;
5885 }
5886
5887 /* Store the value in the bitfield. */
5888 store_bit_field (target, bitsize, bitpos, mode, temp);
5889
5890 return const0_rtx;
5891 }
5892 else
5893 {
5894 /* Now build a reference to just the desired component. */
5895 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5896
5897 if (to_rtx == target)
5898 to_rtx = copy_rtx (to_rtx);
5899
5900 if (!MEM_SCALAR_P (to_rtx))
5901 MEM_IN_STRUCT_P (to_rtx) = 1;
5902 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5903 set_mem_alias_set (to_rtx, alias_set);
5904
5905 return store_expr (exp, to_rtx, 0, nontemporal);
5906 }
5907 }
5908 \f
5909 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5910 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5911 codes and find the ultimate containing object, which we return.
5912
5913 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5914 bit position, and *PUNSIGNEDP to the signedness of the field.
5915 If the position of the field is variable, we store a tree
5916 giving the variable offset (in units) in *POFFSET.
5917 This offset is in addition to the bit position.
5918 If the position is not variable, we store 0 in *POFFSET.
5919
5920 If any of the extraction expressions is volatile,
5921 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5922
5923 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5924 Otherwise, it is a mode that can be used to access the field.
5925
5926 If the field describes a variable-sized object, *PMODE is set to
5927 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5928 this case, but the address of the object can be found.
5929
5930 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5931 look through nodes that serve as markers of a greater alignment than
5932 the one that can be deduced from the expression. These nodes make it
5933 possible for front-ends to prevent temporaries from being created by
5934 the middle-end on alignment considerations. For that purpose, the
5935 normal operating mode at high-level is to always pass FALSE so that
5936 the ultimate containing object is really returned; moreover, the
5937 associated predicate handled_component_p will always return TRUE
5938 on these nodes, thus indicating that they are essentially handled
5939 by get_inner_reference. TRUE should only be passed when the caller
5940 is scanning the expression in order to build another representation
5941 and specifically knows how to handle these nodes; as such, this is
5942 the normal operating mode in the RTL expanders. */
5943
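/* Illustration (added, hypothetical declaration): for

       struct S { int x; int a[10]; } s;

   and the reference s.a[i], this routine walks the COMPONENT_REF and
   ARRAY_REF, splits the position into a constant bit offset returned in
   *PBITPOS and a variable byte offset tree involving I returned in
   *POFFSET, and hands back the underlying object S.  */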
5944 tree
5945 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5946 HOST_WIDE_INT *pbitpos, tree *poffset,
5947 enum machine_mode *pmode, int *punsignedp,
5948 int *pvolatilep, bool keep_aligning)
5949 {
5950 tree size_tree = 0;
5951 enum machine_mode mode = VOIDmode;
5952 bool blkmode_bitfield = false;
5953 tree offset = size_zero_node;
5954 double_int bit_offset = double_int_zero;
5955
5956 /* First get the mode, signedness, and size. We do this from just the
5957 outermost expression. */
5958 *pbitsize = -1;
5959 if (TREE_CODE (exp) == COMPONENT_REF)
5960 {
5961 tree field = TREE_OPERAND (exp, 1);
5962 size_tree = DECL_SIZE (field);
5963 if (!DECL_BIT_FIELD (field))
5964 mode = DECL_MODE (field);
5965 else if (DECL_MODE (field) == BLKmode)
5966 blkmode_bitfield = true;
5967 else if (TREE_THIS_VOLATILE (exp)
5968 && flag_strict_volatile_bitfields > 0)
5969 /* Volatile bitfields should be accessed in the mode of the
5970 field's type, not the mode computed based on the bit
5971 size. */
5972 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
5973
5974 *punsignedp = DECL_UNSIGNED (field);
5975 }
5976 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5977 {
5978 size_tree = TREE_OPERAND (exp, 1);
5979 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
5980 || TYPE_UNSIGNED (TREE_TYPE (exp)));
5981
 5982       /* For vector types, if the access has the correct size, use the mode
 5983 	 of the inner type.  */
5984 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5985 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5986 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5987 mode = TYPE_MODE (TREE_TYPE (exp));
5988 }
5989 else
5990 {
5991 mode = TYPE_MODE (TREE_TYPE (exp));
5992 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5993
5994 if (mode == BLKmode)
5995 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5996 else
5997 *pbitsize = GET_MODE_BITSIZE (mode);
5998 }
5999
6000 if (size_tree != 0)
6001 {
6002 if (! host_integerp (size_tree, 1))
6003 mode = BLKmode, *pbitsize = -1;
6004 else
6005 *pbitsize = tree_low_cst (size_tree, 1);
6006 }
6007
6008 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6009 and find the ultimate containing object. */
6010 while (1)
6011 {
6012 switch (TREE_CODE (exp))
6013 {
6014 case BIT_FIELD_REF:
6015 bit_offset
6016 = double_int_add (bit_offset,
6017 tree_to_double_int (TREE_OPERAND (exp, 2)));
6018 break;
6019
6020 case COMPONENT_REF:
6021 {
6022 tree field = TREE_OPERAND (exp, 1);
6023 tree this_offset = component_ref_field_offset (exp);
6024
6025 /* If this field hasn't been filled in yet, don't go past it.
6026 This should only happen when folding expressions made during
6027 type construction. */
6028 if (this_offset == 0)
6029 break;
6030
6031 offset = size_binop (PLUS_EXPR, offset, this_offset);
6032 bit_offset = double_int_add (bit_offset,
6033 tree_to_double_int
6034 (DECL_FIELD_BIT_OFFSET (field)));
6035
6036 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6037 }
6038 break;
6039
6040 case ARRAY_REF:
6041 case ARRAY_RANGE_REF:
6042 {
6043 tree index = TREE_OPERAND (exp, 1);
6044 tree low_bound = array_ref_low_bound (exp);
6045 tree unit_size = array_ref_element_size (exp);
6046
6047 /* We assume all arrays have sizes that are a multiple of a byte.
6048 First subtract the lower bound, if any, in the type of the
6049 index, then convert to sizetype and multiply by the size of
6050 the array element. */
6051 if (! integer_zerop (low_bound))
6052 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6053 index, low_bound);
6054
6055 offset = size_binop (PLUS_EXPR, offset,
6056 size_binop (MULT_EXPR,
6057 fold_convert (sizetype, index),
6058 unit_size));
6059 }
6060 break;
6061
6062 case REALPART_EXPR:
6063 break;
6064
6065 case IMAGPART_EXPR:
6066 bit_offset = double_int_add (bit_offset,
6067 uhwi_to_double_int (*pbitsize));
6068 break;
6069
6070 case VIEW_CONVERT_EXPR:
6071 if (keep_aligning && STRICT_ALIGNMENT
6072 && (TYPE_ALIGN (TREE_TYPE (exp))
6073 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6074 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6075 < BIGGEST_ALIGNMENT)
6076 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6077 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6078 goto done;
6079 break;
6080
6081 case MEM_REF:
6082 /* Hand back the decl for MEM[&decl, off]. */
6083 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6084 {
6085 tree off = TREE_OPERAND (exp, 1);
6086 if (!integer_zerop (off))
6087 {
6088 double_int boff, coff = mem_ref_offset (exp);
6089 boff = double_int_lshift (coff,
6090 BITS_PER_UNIT == 8
6091 ? 3 : exact_log2 (BITS_PER_UNIT),
6092 HOST_BITS_PER_DOUBLE_INT, true);
6093 bit_offset = double_int_add (bit_offset, boff);
6094 }
6095 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6096 }
6097 goto done;
6098
6099 default:
6100 goto done;
6101 }
6102
6103 /* If any reference in the chain is volatile, the effect is volatile. */
6104 if (TREE_THIS_VOLATILE (exp))
6105 *pvolatilep = 1;
6106
6107 exp = TREE_OPERAND (exp, 0);
6108 }
6109 done:
6110
6111 /* If OFFSET is constant, see if we can return the whole thing as a
6112 constant bit position. Make sure to handle overflow during
6113 this conversion. */
6114 if (host_integerp (offset, 0))
6115 {
6116 double_int tem = double_int_lshift (tree_to_double_int (offset),
6117 BITS_PER_UNIT == 8
6118 ? 3 : exact_log2 (BITS_PER_UNIT),
6119 HOST_BITS_PER_DOUBLE_INT, true);
6120 tem = double_int_add (tem, bit_offset);
6121 if (double_int_fits_in_shwi_p (tem))
6122 {
6123 *pbitpos = double_int_to_shwi (tem);
6124 *poffset = offset = NULL_TREE;
6125 }
6126 }
6127
6128 /* Otherwise, split it up. */
6129 if (offset)
6130 {
6131 *pbitpos = double_int_to_shwi (bit_offset);
6132 *poffset = offset;
6133 }
6134
6135 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6136 if (mode == VOIDmode
6137 && blkmode_bitfield
6138 && (*pbitpos % BITS_PER_UNIT) == 0
6139 && (*pbitsize % BITS_PER_UNIT) == 0)
6140 *pmode = BLKmode;
6141 else
6142 *pmode = mode;
6143
6144 return exp;
6145 }
6146
6147 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6148 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6149 EXP is marked as PACKED. */
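/* For illustration (added): with

       struct __attribute__ ((packed)) P { char c; int i; } p;

   a COMPONENT_REF of p.i is reported as containing a packed reference,
   whereas a reference into an ordinary, unpacked struct is not.  */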
6150
6151 bool
6152 contains_packed_reference (const_tree exp)
6153 {
6154 bool packed_p = false;
6155
6156 while (1)
6157 {
6158 switch (TREE_CODE (exp))
6159 {
6160 case COMPONENT_REF:
6161 {
6162 tree field = TREE_OPERAND (exp, 1);
6163 packed_p = DECL_PACKED (field)
6164 || TYPE_PACKED (TREE_TYPE (field))
6165 || TYPE_PACKED (TREE_TYPE (exp));
6166 if (packed_p)
6167 goto done;
6168 }
6169 break;
6170
6171 case BIT_FIELD_REF:
6172 case ARRAY_REF:
6173 case ARRAY_RANGE_REF:
6174 case REALPART_EXPR:
6175 case IMAGPART_EXPR:
6176 case VIEW_CONVERT_EXPR:
6177 break;
6178
6179 default:
6180 goto done;
6181 }
6182 exp = TREE_OPERAND (exp, 0);
6183 }
6184 done:
6185 return packed_p;
6186 }
6187
6188 /* Return a tree of sizetype representing the size, in bytes, of the element
6189 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6190
6191 tree
6192 array_ref_element_size (tree exp)
6193 {
6194 tree aligned_size = TREE_OPERAND (exp, 3);
6195 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6196 location_t loc = EXPR_LOCATION (exp);
6197
6198 /* If a size was specified in the ARRAY_REF, it's the size measured
6199 in alignment units of the element type. So multiply by that value. */
6200 if (aligned_size)
6201 {
6202 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6203 sizetype from another type of the same width and signedness. */
6204 if (TREE_TYPE (aligned_size) != sizetype)
6205 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6206 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6207 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6208 }
6209
6210 /* Otherwise, take the size from that of the element type. Substitute
6211 any PLACEHOLDER_EXPR that we have. */
6212 else
6213 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6214 }
6215
6216 /* Return a tree representing the lower bound of the array mentioned in
6217 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6218
6219 tree
6220 array_ref_low_bound (tree exp)
6221 {
6222 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6223
6224 /* If a lower bound is specified in EXP, use it. */
6225 if (TREE_OPERAND (exp, 2))
6226 return TREE_OPERAND (exp, 2);
6227
6228 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6229 substituting for a PLACEHOLDER_EXPR as needed. */
6230 if (domain_type && TYPE_MIN_VALUE (domain_type))
6231 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6232
6233 /* Otherwise, return a zero of the appropriate type. */
6234 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6235 }
6236
6237 /* Return a tree representing the upper bound of the array mentioned in
6238 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6239
6240 tree
6241 array_ref_up_bound (tree exp)
6242 {
6243 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6244
6245 /* If there is a domain type and it has an upper bound, use it, substituting
6246 for a PLACEHOLDER_EXPR as needed. */
6247 if (domain_type && TYPE_MAX_VALUE (domain_type))
6248 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6249
6250 /* Otherwise fail. */
6251 return NULL_TREE;
6252 }
6253
6254 /* Return a tree representing the offset, in bytes, of the field referenced
6255 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6256
6257 tree
6258 component_ref_field_offset (tree exp)
6259 {
6260 tree aligned_offset = TREE_OPERAND (exp, 2);
6261 tree field = TREE_OPERAND (exp, 1);
6262 location_t loc = EXPR_LOCATION (exp);
6263
6264 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6265 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6266 value. */
6267 if (aligned_offset)
6268 {
6269 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6270 sizetype from another type of the same width and signedness. */
6271 if (TREE_TYPE (aligned_offset) != sizetype)
6272 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6273 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6274 size_int (DECL_OFFSET_ALIGN (field)
6275 / BITS_PER_UNIT));
6276 }
6277
6278 /* Otherwise, take the offset from that of the field. Substitute
6279 any PLACEHOLDER_EXPR that we have. */
6280 else
6281 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6282 }
6283
6284 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6285
6286 static unsigned HOST_WIDE_INT
6287 target_align (const_tree target)
6288 {
6289 /* We might have a chain of nested references with intermediate misaligning
 6290    bitfield components, so we need to recurse to find out.  */
6291
6292 unsigned HOST_WIDE_INT this_align, outer_align;
6293
6294 switch (TREE_CODE (target))
6295 {
6296 case BIT_FIELD_REF:
6297 return 1;
6298
6299 case COMPONENT_REF:
6300 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6301 outer_align = target_align (TREE_OPERAND (target, 0));
6302 return MIN (this_align, outer_align);
6303
6304 case ARRAY_REF:
6305 case ARRAY_RANGE_REF:
6306 this_align = TYPE_ALIGN (TREE_TYPE (target));
6307 outer_align = target_align (TREE_OPERAND (target, 0));
6308 return MIN (this_align, outer_align);
6309
6310 CASE_CONVERT:
6311 case NON_LVALUE_EXPR:
6312 case VIEW_CONVERT_EXPR:
6313 this_align = TYPE_ALIGN (TREE_TYPE (target));
6314 outer_align = target_align (TREE_OPERAND (target, 0));
6315 return MAX (this_align, outer_align);
6316
6317 default:
6318 return TYPE_ALIGN (TREE_TYPE (target));
6319 }
6320 }
6321
6322 \f
6323 /* Given an rtx VALUE that may contain additions and multiplications, return
6324 an equivalent value that just refers to a register, memory, or constant.
6325 This is done by generating instructions to perform the arithmetic and
6326 returning a pseudo-register containing the value.
6327
6328 The returned value may be a REG, SUBREG, MEM or constant. */
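/* A worked example (added, with pseudo register names): called on

       (plus:SI (mult:SI (reg:SI i) (const_int 4)) (reg:SI base))

   force_operand expands the multiplication and the addition through
   expand_mult and expand_simple_binop and returns a pseudo register
   holding the sum, suitable for use as a plain operand.  */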
6329
6330 rtx
6331 force_operand (rtx value, rtx target)
6332 {
6333 rtx op1, op2;
6334 /* Use subtarget as the target for operand 0 of a binary operation. */
6335 rtx subtarget = get_subtarget (target);
6336 enum rtx_code code = GET_CODE (value);
6337
6338 /* Check for subreg applied to an expression produced by loop optimizer. */
6339 if (code == SUBREG
6340 && !REG_P (SUBREG_REG (value))
6341 && !MEM_P (SUBREG_REG (value)))
6342 {
6343 value
6344 = simplify_gen_subreg (GET_MODE (value),
6345 force_reg (GET_MODE (SUBREG_REG (value)),
6346 force_operand (SUBREG_REG (value),
6347 NULL_RTX)),
6348 GET_MODE (SUBREG_REG (value)),
6349 SUBREG_BYTE (value));
6350 code = GET_CODE (value);
6351 }
6352
6353 /* Check for a PIC address load. */
6354 if ((code == PLUS || code == MINUS)
6355 && XEXP (value, 0) == pic_offset_table_rtx
6356 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6357 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6358 || GET_CODE (XEXP (value, 1)) == CONST))
6359 {
6360 if (!subtarget)
6361 subtarget = gen_reg_rtx (GET_MODE (value));
6362 emit_move_insn (subtarget, value);
6363 return subtarget;
6364 }
6365
6366 if (ARITHMETIC_P (value))
6367 {
6368 op2 = XEXP (value, 1);
6369 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6370 subtarget = 0;
6371 if (code == MINUS && CONST_INT_P (op2))
6372 {
6373 code = PLUS;
6374 op2 = negate_rtx (GET_MODE (value), op2);
6375 }
6376
6377 /* Check for an addition with OP2 a constant integer and our first
6378 operand a PLUS of a virtual register and something else. In that
6379 case, we want to emit the sum of the virtual register and the
6380 constant first and then add the other value. This allows virtual
6381 register instantiation to simply modify the constant rather than
6382 creating another one around this addition. */
6383 if (code == PLUS && CONST_INT_P (op2)
6384 && GET_CODE (XEXP (value, 0)) == PLUS
6385 && REG_P (XEXP (XEXP (value, 0), 0))
6386 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6387 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6388 {
6389 rtx temp = expand_simple_binop (GET_MODE (value), code,
6390 XEXP (XEXP (value, 0), 0), op2,
6391 subtarget, 0, OPTAB_LIB_WIDEN);
6392 return expand_simple_binop (GET_MODE (value), code, temp,
6393 force_operand (XEXP (XEXP (value,
6394 0), 1), 0),
6395 target, 0, OPTAB_LIB_WIDEN);
6396 }
6397
6398 op1 = force_operand (XEXP (value, 0), subtarget);
6399 op2 = force_operand (op2, NULL_RTX);
6400 switch (code)
6401 {
6402 case MULT:
6403 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6404 case DIV:
6405 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6406 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6407 target, 1, OPTAB_LIB_WIDEN);
6408 else
6409 return expand_divmod (0,
6410 FLOAT_MODE_P (GET_MODE (value))
6411 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6412 GET_MODE (value), op1, op2, target, 0);
6413 case MOD:
6414 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6415 target, 0);
6416 case UDIV:
6417 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6418 target, 1);
6419 case UMOD:
6420 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6421 target, 1);
6422 case ASHIFTRT:
6423 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6424 target, 0, OPTAB_LIB_WIDEN);
6425 default:
6426 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6427 target, 1, OPTAB_LIB_WIDEN);
6428 }
6429 }
6430 if (UNARY_P (value))
6431 {
6432 if (!target)
6433 target = gen_reg_rtx (GET_MODE (value));
6434 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6435 switch (code)
6436 {
6437 case ZERO_EXTEND:
6438 case SIGN_EXTEND:
6439 case TRUNCATE:
6440 case FLOAT_EXTEND:
6441 case FLOAT_TRUNCATE:
6442 convert_move (target, op1, code == ZERO_EXTEND);
6443 return target;
6444
6445 case FIX:
6446 case UNSIGNED_FIX:
6447 expand_fix (target, op1, code == UNSIGNED_FIX);
6448 return target;
6449
6450 case FLOAT:
6451 case UNSIGNED_FLOAT:
6452 expand_float (target, op1, code == UNSIGNED_FLOAT);
6453 return target;
6454
6455 default:
6456 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6457 }
6458 }
6459
6460 #ifdef INSN_SCHEDULING
 6461   /* On machines that have insn scheduling, we want all memory references to be
6462 explicit, so we need to deal with such paradoxical SUBREGs. */
6463 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6464 && (GET_MODE_SIZE (GET_MODE (value))
6465 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6466 value
6467 = simplify_gen_subreg (GET_MODE (value),
6468 force_reg (GET_MODE (SUBREG_REG (value)),
6469 force_operand (SUBREG_REG (value),
6470 NULL_RTX)),
6471 GET_MODE (SUBREG_REG (value)),
6472 SUBREG_BYTE (value));
6473 #endif
6474
6475 return value;
6476 }
6477 \f
6478 /* Subroutine of expand_expr: return nonzero iff there is no way that
6479 EXP can reference X, which is being modified. TOP_P is nonzero if this
6480 call is going to be used to determine whether we need a temporary
6481 for EXP, as opposed to a recursive call to this function.
6482
6483 It is always safe for this routine to return zero since it merely
6484 searches for optimization opportunities. */
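/* Usage sketch (added): expand_operands below asks
   safe_from_p (target, exp1, 1) before reusing TARGET for the first
   operand; if EXP1 is, say, a CALL_EXPR and TARGET is a MEM, the answer
   is 0, TARGET is dropped and expand_expr chooses its own temporary
   instead.  */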
6485
6486 int
6487 safe_from_p (const_rtx x, tree exp, int top_p)
6488 {
6489 rtx exp_rtl = 0;
6490 int i, nops;
6491
6492 if (x == 0
6493 /* If EXP has varying size, we MUST use a target since we currently
6494 have no way of allocating temporaries of variable size
6495 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6496 So we assume here that something at a higher level has prevented a
6497 clash. This is somewhat bogus, but the best we can do. Only
6498 do this when X is BLKmode and when we are at the top level. */
6499 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6500 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6501 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6502 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6503 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6504 != INTEGER_CST)
6505 && GET_MODE (x) == BLKmode)
6506 /* If X is in the outgoing argument area, it is always safe. */
6507 || (MEM_P (x)
6508 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6509 || (GET_CODE (XEXP (x, 0)) == PLUS
6510 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6511 return 1;
6512
6513 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6514 find the underlying pseudo. */
6515 if (GET_CODE (x) == SUBREG)
6516 {
6517 x = SUBREG_REG (x);
6518 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6519 return 0;
6520 }
6521
6522 /* Now look at our tree code and possibly recurse. */
6523 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6524 {
6525 case tcc_declaration:
6526 exp_rtl = DECL_RTL_IF_SET (exp);
6527 break;
6528
6529 case tcc_constant:
6530 return 1;
6531
6532 case tcc_exceptional:
6533 if (TREE_CODE (exp) == TREE_LIST)
6534 {
6535 while (1)
6536 {
6537 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6538 return 0;
6539 exp = TREE_CHAIN (exp);
6540 if (!exp)
6541 return 1;
6542 if (TREE_CODE (exp) != TREE_LIST)
6543 return safe_from_p (x, exp, 0);
6544 }
6545 }
6546 else if (TREE_CODE (exp) == CONSTRUCTOR)
6547 {
6548 constructor_elt *ce;
6549 unsigned HOST_WIDE_INT idx;
6550
6551 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce)
6552 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6553 || !safe_from_p (x, ce->value, 0))
6554 return 0;
6555 return 1;
6556 }
6557 else if (TREE_CODE (exp) == ERROR_MARK)
6558 return 1; /* An already-visited SAVE_EXPR? */
6559 else
6560 return 0;
6561
6562 case tcc_statement:
6563 /* The only case we look at here is the DECL_INITIAL inside a
6564 DECL_EXPR. */
6565 return (TREE_CODE (exp) != DECL_EXPR
6566 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6567 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6568 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6569
6570 case tcc_binary:
6571 case tcc_comparison:
6572 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6573 return 0;
6574 /* Fall through. */
6575
6576 case tcc_unary:
6577 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6578
6579 case tcc_expression:
6580 case tcc_reference:
6581 case tcc_vl_exp:
6582 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6583 the expression. If it is set, we conflict iff we are that rtx or
6584 both are in memory. Otherwise, we check all operands of the
6585 expression recursively. */
6586
6587 switch (TREE_CODE (exp))
6588 {
6589 case ADDR_EXPR:
6590 /* If the operand is static or we are static, we can't conflict.
6591 Likewise if we don't conflict with the operand at all. */
6592 if (staticp (TREE_OPERAND (exp, 0))
6593 || TREE_STATIC (exp)
6594 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6595 return 1;
6596
6597 /* Otherwise, the only way this can conflict is if we are taking
 6598 	 the address of a DECL whose address is part of X, which is
6599 very rare. */
6600 exp = TREE_OPERAND (exp, 0);
6601 if (DECL_P (exp))
6602 {
6603 if (!DECL_RTL_SET_P (exp)
6604 || !MEM_P (DECL_RTL (exp)))
6605 return 0;
6606 else
6607 exp_rtl = XEXP (DECL_RTL (exp), 0);
6608 }
6609 break;
6610
6611 case MEM_REF:
6612 if (MEM_P (x)
6613 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6614 get_alias_set (exp)))
6615 return 0;
6616 break;
6617
6618 case CALL_EXPR:
6619 /* Assume that the call will clobber all hard registers and
6620 all of memory. */
6621 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6622 || MEM_P (x))
6623 return 0;
6624 break;
6625
6626 case WITH_CLEANUP_EXPR:
6627 case CLEANUP_POINT_EXPR:
6628 /* Lowered by gimplify.c. */
6629 gcc_unreachable ();
6630
6631 case SAVE_EXPR:
6632 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6633
6634 default:
6635 break;
6636 }
6637
6638 /* If we have an rtx, we do not need to scan our operands. */
6639 if (exp_rtl)
6640 break;
6641
6642 nops = TREE_OPERAND_LENGTH (exp);
6643 for (i = 0; i < nops; i++)
6644 if (TREE_OPERAND (exp, i) != 0
6645 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6646 return 0;
6647
6648 break;
6649
6650 case tcc_type:
6651 /* Should never get a type here. */
6652 gcc_unreachable ();
6653 }
6654
6655 /* If we have an rtl, find any enclosed object. Then see if we conflict
6656 with it. */
6657 if (exp_rtl)
6658 {
6659 if (GET_CODE (exp_rtl) == SUBREG)
6660 {
6661 exp_rtl = SUBREG_REG (exp_rtl);
6662 if (REG_P (exp_rtl)
6663 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6664 return 0;
6665 }
6666
6667 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6668 are memory and they conflict. */
6669 return ! (rtx_equal_p (x, exp_rtl)
6670 || (MEM_P (x) && MEM_P (exp_rtl)
6671 && true_dependence (exp_rtl, VOIDmode, x,
6672 rtx_addr_varies_p)));
6673 }
6674
6675 /* If we reach here, it is safe. */
6676 return 1;
6677 }
6678
6679 \f
6680 /* Return the highest power of two that EXP is known to be a multiple of.
6681 This is used in updating alignment of MEMs in array references. */
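/* Worked values (added for illustration): for the constant 48 the result
   is 16, its lowest set bit; for "i * 8" it is 1 * 8 == 8; and for
   "n * 12" it is 1 * 4 == 4, since 4 is the largest power of two
   dividing 12.  */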
6682
6683 unsigned HOST_WIDE_INT
6684 highest_pow2_factor (const_tree exp)
6685 {
6686 unsigned HOST_WIDE_INT c0, c1;
6687
6688 switch (TREE_CODE (exp))
6689 {
6690 case INTEGER_CST:
6691 /* We can find the lowest bit that's a one. If the low
6692 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6693 We need to handle this case since we can find it in a COND_EXPR,
6694 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6695 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6696 later ICE. */
6697 if (TREE_OVERFLOW (exp))
6698 return BIGGEST_ALIGNMENT;
6699 else
6700 {
6701 /* Note: tree_low_cst is intentionally not used here,
6702 we don't care about the upper bits. */
6703 c0 = TREE_INT_CST_LOW (exp);
6704 c0 &= -c0;
6705 return c0 ? c0 : BIGGEST_ALIGNMENT;
6706 }
6707 break;
6708
6709 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6710 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6711 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6712 return MIN (c0, c1);
6713
6714 case MULT_EXPR:
6715 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6716 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6717 return c0 * c1;
6718
6719 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6720 case CEIL_DIV_EXPR:
6721 if (integer_pow2p (TREE_OPERAND (exp, 1))
6722 && host_integerp (TREE_OPERAND (exp, 1), 1))
6723 {
6724 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6725 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6726 return MAX (1, c0 / c1);
6727 }
6728 break;
6729
6730 case BIT_AND_EXPR:
6731 /* The highest power of two of a bit-and expression is the maximum of
6732 that of its operands. We typically get here for a complex LHS and
6733 a constant negative power of two on the RHS to force an explicit
6734 alignment, so don't bother looking at the LHS. */
6735 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6736
6737 CASE_CONVERT:
6738 case SAVE_EXPR:
6739 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6740
6741 case COMPOUND_EXPR:
6742 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6743
6744 case COND_EXPR:
6745 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6746 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6747 return MIN (c0, c1);
6748
6749 default:
6750 break;
6751 }
6752
6753 return 1;
6754 }
6755
6756 /* Similar, except that the alignment requirements of TARGET are
6757 taken into account. Assume it is at least as aligned as its
6758 type, unless it is a COMPONENT_REF in which case the layout of
6759 the structure gives the alignment. */
6760
6761 static unsigned HOST_WIDE_INT
6762 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6763 {
6764 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
6765 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
6766
6767 return MAX (factor, talign);
6768 }
6769 \f
6770 /* Subroutine of expand_expr. Expand the two operands of a binary
6771 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6772 The value may be stored in TARGET if TARGET is nonzero. The
6773 MODIFIER argument is as documented by expand_expr. */
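/* Brief example (added): for an expression such as "a + a" the two
   operands compare equal under operand_equal_p, so the body below
   expands A once and returns a copy_rtx of that result as the second
   operand rather than expanding it twice.  */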
6774
6775 static void
6776 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6777 enum expand_modifier modifier)
6778 {
6779 if (! safe_from_p (target, exp1, 1))
6780 target = 0;
6781 if (operand_equal_p (exp0, exp1, 0))
6782 {
6783 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6784 *op1 = copy_rtx (*op0);
6785 }
6786 else
6787 {
6788 /* If we need to preserve evaluation order, copy exp0 into its own
6789 temporary variable so that it can't be clobbered by exp1. */
6790 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6791 exp0 = save_expr (exp0);
6792 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6793 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6794 }
6795 }
6796
6797 \f
6798 /* Return a MEM that contains constant EXP. DEFER is as for
6799 output_constant_def and MODIFIER is as for expand_expr. */
6800
6801 static rtx
6802 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6803 {
6804 rtx mem;
6805
6806 mem = output_constant_def (exp, defer);
6807 if (modifier != EXPAND_INITIALIZER)
6808 mem = use_anchored_address (mem);
6809 return mem;
6810 }
6811
6812 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6813 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6814
6815 static rtx
6816 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6817 enum expand_modifier modifier, addr_space_t as)
6818 {
6819 rtx result, subtarget;
6820 tree inner, offset;
6821 HOST_WIDE_INT bitsize, bitpos;
6822 int volatilep, unsignedp;
6823 enum machine_mode mode1;
6824
6825 /* If we are taking the address of a constant and are at the top level,
6826 we have to use output_constant_def since we can't call force_const_mem
6827 at top level. */
6828 /* ??? This should be considered a front-end bug. We should not be
6829 generating ADDR_EXPR of something that isn't an LVALUE. The only
6830 exception here is STRING_CST. */
6831 if (CONSTANT_CLASS_P (exp))
6832 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6833
6834 /* Everything must be something allowed by is_gimple_addressable. */
6835 switch (TREE_CODE (exp))
6836 {
6837 case INDIRECT_REF:
6838 /* This case will happen via recursion for &a->b. */
6839 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6840
6841 case MEM_REF:
6842 {
6843 tree tem = TREE_OPERAND (exp, 0);
6844 if (!integer_zerop (TREE_OPERAND (exp, 1)))
6845 tem = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6846 tem,
6847 double_int_to_tree (sizetype, mem_ref_offset (exp)));
6848 return expand_expr (tem, target, tmode, modifier);
6849 }
6850
6851 case CONST_DECL:
6852 /* Expand the initializer like constants above. */
6853 return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
6854
6855 case REALPART_EXPR:
6856 /* The real part of the complex number is always first, therefore
6857 the address is the same as the address of the parent object. */
6858 offset = 0;
6859 bitpos = 0;
6860 inner = TREE_OPERAND (exp, 0);
6861 break;
6862
6863 case IMAGPART_EXPR:
6864 /* The imaginary part of the complex number is always second.
6865 The expression is therefore always offset by the size of the
6866 scalar type. */
6867 offset = 0;
6868 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6869 inner = TREE_OPERAND (exp, 0);
6870 break;
6871
6872 default:
6873 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6874 expand_expr, as that can have various side effects; LABEL_DECLs for
6875 example, may not have their DECL_RTL set yet. Expand the rtl of
6876 CONSTRUCTORs too, which should yield a memory reference for the
6877 constructor's contents. Assume language specific tree nodes can
6878 be expanded in some interesting way. */
6879 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6880 if (DECL_P (exp)
6881 || TREE_CODE (exp) == CONSTRUCTOR
6882 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6883 {
6884 result = expand_expr (exp, target, tmode,
6885 modifier == EXPAND_INITIALIZER
6886 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6887
6888 /* If the DECL isn't in memory, then the DECL wasn't properly
6889 marked TREE_ADDRESSABLE, which will be either a front-end
6890 or a tree optimizer bug. */
6891 gcc_assert (MEM_P (result));
6892 result = XEXP (result, 0);
6893
6894 /* ??? Is this needed anymore? */
6895 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6896 {
6897 assemble_external (exp);
6898 TREE_USED (exp) = 1;
6899 }
6900
6901 if (modifier != EXPAND_INITIALIZER
6902 && modifier != EXPAND_CONST_ADDRESS)
6903 result = force_operand (result, target);
6904 return result;
6905 }
6906
6907 /* Pass FALSE as the last argument to get_inner_reference although
6908 we are expanding to RTL. The rationale is that we know how to
6909 handle "aligning nodes" here: we can just bypass them because
6910 they won't change the final object whose address will be returned
6911 (they actually exist only for that purpose). */
6912 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6913 &mode1, &unsignedp, &volatilep, false);
6914 break;
6915 }
6916
6917 /* We must have made progress. */
6918 gcc_assert (inner != exp);
6919
6920 subtarget = offset || bitpos ? NULL_RTX : target;
6921 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6922 inner alignment, force the inner to be sufficiently aligned. */
6923 if (CONSTANT_CLASS_P (inner)
6924 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
6925 {
6926 inner = copy_node (inner);
6927 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
6928 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
6929 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
6930 }
6931 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
6932
6933 if (offset)
6934 {
6935 rtx tmp;
6936
6937 if (modifier != EXPAND_NORMAL)
6938 result = force_operand (result, NULL);
6939 tmp = expand_expr (offset, NULL_RTX, tmode,
6940 modifier == EXPAND_INITIALIZER
6941 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6942
6943 result = convert_memory_address_addr_space (tmode, result, as);
6944 tmp = convert_memory_address_addr_space (tmode, tmp, as);
6945
6946 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6947 result = simplify_gen_binary (PLUS, tmode, result, tmp);
6948 else
6949 {
6950 subtarget = bitpos ? NULL_RTX : target;
6951 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6952 1, OPTAB_LIB_WIDEN);
6953 }
6954 }
6955
6956 if (bitpos)
6957 {
6958 /* Someone beforehand should have rejected taking the address
6959 of such an object. */
6960 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6961
6962 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6963 if (modifier < EXPAND_SUM)
6964 result = force_operand (result, target);
6965 }
6966
6967 return result;
6968 }
6969
6970 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6971 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6972
6973 static rtx
6974 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6975 enum expand_modifier modifier)
6976 {
6977 addr_space_t as = ADDR_SPACE_GENERIC;
6978 enum machine_mode address_mode = Pmode;
6979 enum machine_mode pointer_mode = ptr_mode;
6980 enum machine_mode rmode;
6981 rtx result;
6982
6983 /* Target mode of VOIDmode says "whatever's natural". */
6984 if (tmode == VOIDmode)
6985 tmode = TYPE_MODE (TREE_TYPE (exp));
6986
6987 if (POINTER_TYPE_P (TREE_TYPE (exp)))
6988 {
6989 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
6990 address_mode = targetm.addr_space.address_mode (as);
6991 pointer_mode = targetm.addr_space.pointer_mode (as);
6992 }
6993
6994 /* We can get called with some Weird Things if the user does silliness
6995 like "(short) &a". In that case, convert_memory_address won't do
6996 the right thing, so ignore the given target mode. */
6997 if (tmode != address_mode && tmode != pointer_mode)
6998 tmode = address_mode;
6999
7000 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7001 tmode, modifier, as);
7002
 7003   /* Despite expand_expr's claims about ignoring TMODE when not
 7004      strictly convenient, things break if we don't honor it.  Note
7005 that combined with the above, we only do this for pointer modes. */
7006 rmode = GET_MODE (result);
7007 if (rmode == VOIDmode)
7008 rmode = tmode;
7009 if (rmode != tmode)
7010 result = convert_memory_address_addr_space (tmode, result, as);
7011
7012 return result;
7013 }
7014
7015 /* Generate code for computing CONSTRUCTOR EXP.
7016 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7017 is TRUE, instead of creating a temporary variable in memory
7018 NULL is returned and the caller needs to handle it differently. */
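/* Illustration (added): a block-mode aggregate whose initializer is
   entirely zero, e.g. "struct S s = { 0 };" for an automatic S, can
   satisfy the first test below and is then expanded as a single
   clear_storage call instead of per-field stores.  */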
7019
7020 static rtx
7021 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7022 bool avoid_temp_mem)
7023 {
7024 tree type = TREE_TYPE (exp);
7025 enum machine_mode mode = TYPE_MODE (type);
7026
7027 /* Try to avoid creating a temporary at all. This is possible
7028 if all of the initializer is zero.
7029 FIXME: try to handle all [0..255] initializers we can handle
7030 with memset. */
7031 if (TREE_STATIC (exp)
7032 && !TREE_ADDRESSABLE (exp)
7033 && target != 0 && mode == BLKmode
7034 && all_zeros_p (exp))
7035 {
7036 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7037 return target;
7038 }
7039
7040 /* All elts simple constants => refer to a constant in memory. But
7041 if this is a non-BLKmode mode, let it store a field at a time
7042 since that should make a CONST_INT or CONST_DOUBLE when we
7043 fold. Likewise, if we have a target we can use, it is best to
7044 store directly into the target unless the type is large enough
7045 that memcpy will be used. If we are making an initializer and
7046 all operands are constant, put it in memory as well.
7047
7048 FIXME: Avoid trying to fill vector constructors piecemeal.
7049 Output them with output_constant_def below unless we're sure
7050 they're zeros. This should go away when vector initializers
7051 are treated like VECTOR_CST instead of arrays. */
7052 if ((TREE_STATIC (exp)
7053 && ((mode == BLKmode
7054 && ! (target != 0 && safe_from_p (target, exp, 1)))
7055 || TREE_ADDRESSABLE (exp)
7056 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7057 && (! MOVE_BY_PIECES_P
7058 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7059 TYPE_ALIGN (type)))
7060 && ! mostly_zeros_p (exp))))
7061 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7062 && TREE_CONSTANT (exp)))
7063 {
7064 rtx constructor;
7065
7066 if (avoid_temp_mem)
7067 return NULL_RTX;
7068
7069 constructor = expand_expr_constant (exp, 1, modifier);
7070
7071 if (modifier != EXPAND_CONST_ADDRESS
7072 && modifier != EXPAND_INITIALIZER
7073 && modifier != EXPAND_SUM)
7074 constructor = validize_mem (constructor);
7075
7076 return constructor;
7077 }
7078
7079 /* Handle calls that pass values in multiple non-contiguous
7080 locations. The Irix 6 ABI has examples of this. */
7081 if (target == 0 || ! safe_from_p (target, exp, 1)
7082 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7083 {
7084 if (avoid_temp_mem)
7085 return NULL_RTX;
7086
7087 target
7088 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7089 | (TREE_READONLY (exp)
7090 * TYPE_QUAL_CONST))),
7091 0, TREE_ADDRESSABLE (exp), 1);
7092 }
7093
7094 store_constructor (exp, target, 0, int_expr_size (exp));
7095 return target;
7096 }
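/* Illustrative sketch, assuming a hypothetical aggregate: a BLKmode
   CONSTRUCTOR whose elements are all constant zeros, e.g. the
   initializer of

     struct big { char buf[64]; int n; } b = { { 0 }, 0 };

   satisfies all_zeros_p and, given a usable TARGET, is expanded by the
   fast path above as a single clear_storage call rather than a
   field-by-field store_constructor.  */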
7097
7098
7099 /* expand_expr: generate code for computing expression EXP.
7100 An rtx for the computed value is returned. The value is never null.
7101 In the case of a void EXP, const0_rtx is returned.
7102
7103 The value may be stored in TARGET if TARGET is nonzero.
7104 TARGET is just a suggestion; callers must assume that
7105 the rtx returned may not be the same as TARGET.
7106
7107 If TARGET is CONST0_RTX, it means that the value will be ignored.
7108
7109 If TMODE is not VOIDmode, it suggests generating the
7110 result in mode TMODE. But this is done only when convenient.
7111 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7112 TMODE is just a suggestion; callers must assume that
7113 the rtx returned may not have mode TMODE.
7114
7115 Note that TARGET may have neither TMODE nor MODE. In that case, it
7116 probably will not be used.
7117
7118 If MODIFIER is EXPAND_SUM then when EXP is an addition
7119 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7120 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7121 products as above, or REG or MEM, or constant.
7122 Ordinarily in such cases we would output mul or add instructions
7123 and then return a pseudo reg containing the sum.
7124
7125 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7126 it also marks a label as absolutely required (it can't be dead).
7127 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7128 This is used for outputting expressions used in initializers.
7129
7130 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7131 with a constant address even if that address is not normally legitimate.
7132 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7133
7134 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7135 a call parameter. Such targets require special care as we haven't yet
7136 marked TARGET so that it's safe from being trashed by libcalls. We
7137 don't want to use TARGET for anything but the final result;
7138 intermediate values must go elsewhere.  Additionally, calls to
7139 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7140
7141 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7142 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7143 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7144 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7145 recursively. */
7146
7147 rtx
7148 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7149 enum expand_modifier modifier, rtx *alt_rtl)
7150 {
7151 rtx ret;
7152
7153 /* Handle ERROR_MARK before anybody tries to access its type. */
7154 if (TREE_CODE (exp) == ERROR_MARK
7155 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7156 {
7157 ret = CONST0_RTX (tmode);
7158 return ret ? ret : const0_rtx;
7159 }
7160
7161 /* If this is an expression of some kind and it has an associated line
7162 number, then emit the line number before expanding the expression.
7163
7164 We need to save and restore the file and line information so that
7165 errors discovered during expansion are emitted with the right
7166 information.  It would be better if the diagnostic routines
7167 used the file/line information embedded in the tree nodes rather
7168 than globals. */
7169 if (cfun && EXPR_HAS_LOCATION (exp))
7170 {
7171 location_t saved_location = input_location;
7172 location_t saved_curr_loc = get_curr_insn_source_location ();
7173 tree saved_block = get_curr_insn_block ();
7174 input_location = EXPR_LOCATION (exp);
7175 set_curr_insn_source_location (input_location);
7176
7177 /* Record where the insns produced belong. */
7178 set_curr_insn_block (TREE_BLOCK (exp));
7179
7180 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7181
7182 input_location = saved_location;
7183 set_curr_insn_block (saved_block);
7184 set_curr_insn_source_location (saved_curr_loc);
7185 }
7186 else
7187 {
7188 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7189 }
7190
7191 return ret;
7192 }
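/* Illustrative sketch of the EXPAND_SUM contract described above, with
   a hypothetical ADDR_EXPR standing for something like &arr[i]:

     rtx x = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
     x = memory_address (mode, x);

   Here X may come back from expand_expr as an un-forced nest such as
   (plus (mult (reg:P i) (const_int 4)) (symbol_ref:P "arr")), so a
   caller asking for EXPAND_SUM must legitimize the result itself
   (memory_address, force_operand, ...) before using it as an address
   or as an ordinary operand.  */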
7193
7194 rtx
7195 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7196 enum expand_modifier modifier)
7197 {
7198 rtx op0, op1, op2, temp;
7199 tree type;
7200 int unsignedp;
7201 enum machine_mode mode;
7202 enum tree_code code = ops->code;
7203 optab this_optab;
7204 rtx subtarget, original_target;
7205 int ignore;
7206 bool reduce_bit_field;
7207 location_t loc = ops->location;
7208 tree treeop0, treeop1, treeop2;
7209 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7210 ? reduce_to_bit_field_precision ((expr), \
7211 target, \
7212 type) \
7213 : (expr))
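/* Illustrative note: for a result type such as an unsigned 3-bit
   INTEGER_TYPE carried in SImode (TYPE_PRECISION == 3 but
   GET_MODE_PRECISION == 32), REDUCE_BIT_FIELD routes the expansion
   through reduce_to_bit_field_precision, so that e.g. 7 + 1 is reduced
   back to 0 rather than left as the full-width value 8.  */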
7214
7215 type = ops->type;
7216 mode = TYPE_MODE (type);
7217 unsignedp = TYPE_UNSIGNED (type);
7218
7219 treeop0 = ops->op0;
7220 treeop1 = ops->op1;
7221 treeop2 = ops->op2;
7222
7223 /* We should be called only on simple (binary or unary) expressions,
7224 exactly those that are valid in gimple expressions that aren't
7225 GIMPLE_SINGLE_RHS (or invalid). */
7226 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7227 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7228 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7229
7230 ignore = (target == const0_rtx
7231 || ((CONVERT_EXPR_CODE_P (code)
7232 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7233 && TREE_CODE (type) == VOID_TYPE));
7234
7235 /* We should be called only if we need the result. */
7236 gcc_assert (!ignore);
7237
7238 /* An operation in what may be a bit-field type needs the
7239 result to be reduced to the precision of the bit-field type,
7240 which is narrower than that of the type's mode. */
7241 reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
7242 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7243
7244 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7245 target = 0;
7246
7247 /* Use subtarget as the target for operand 0 of a binary operation. */
7248 subtarget = get_subtarget (target);
7249 original_target = target;
7250
7251 switch (code)
7252 {
7253 case NON_LVALUE_EXPR:
7254 case PAREN_EXPR:
7255 CASE_CONVERT:
7256 if (treeop0 == error_mark_node)
7257 return const0_rtx;
7258
7259 if (TREE_CODE (type) == UNION_TYPE)
7260 {
7261 tree valtype = TREE_TYPE (treeop0);
7262
7263 /* If both input and output are BLKmode, this conversion isn't doing
7264 anything except possibly changing memory attribute. */
7265 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7266 {
7267 rtx result = expand_expr (treeop0, target, tmode,
7268 modifier);
7269
7270 result = copy_rtx (result);
7271 set_mem_attributes (result, type, 0);
7272 return result;
7273 }
7274
7275 if (target == 0)
7276 {
7277 if (TYPE_MODE (type) != BLKmode)
7278 target = gen_reg_rtx (TYPE_MODE (type));
7279 else
7280 target = assign_temp (type, 0, 1, 1);
7281 }
7282
7283 if (MEM_P (target))
7284 /* Store data into beginning of memory target. */
7285 store_expr (treeop0,
7286 adjust_address (target, TYPE_MODE (valtype), 0),
7287 modifier == EXPAND_STACK_PARM,
7288 false);
7289
7290 else
7291 {
7292 gcc_assert (REG_P (target));
7293
7294 /* Store this field into a union of the proper type. */
7295 store_field (target,
7296 MIN ((int_size_in_bytes (TREE_TYPE
7297 (treeop0))
7298 * BITS_PER_UNIT),
7299 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7300 0, TYPE_MODE (valtype), treeop0,
7301 type, 0, false);
7302 }
7303
7304 /* Return the entire union. */
7305 return target;
7306 }
7307
7308 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7309 {
7310 op0 = expand_expr (treeop0, target, VOIDmode,
7311 modifier);
7312
7313 /* If the signedness of the conversion differs and OP0 is
7314 a promoted SUBREG, clear that indication since we now
7315 have to do the proper extension. */
7316 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7317 && GET_CODE (op0) == SUBREG)
7318 SUBREG_PROMOTED_VAR_P (op0) = 0;
7319
7320 return REDUCE_BIT_FIELD (op0);
7321 }
7322
7323 op0 = expand_expr (treeop0, NULL_RTX, mode,
7324 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7325 if (GET_MODE (op0) == mode)
7326 ;
7327
7328 /* If OP0 is a constant, just convert it into the proper mode. */
7329 else if (CONSTANT_P (op0))
7330 {
7331 tree inner_type = TREE_TYPE (treeop0);
7332 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7333
7334 if (modifier == EXPAND_INITIALIZER)
7335 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7336 subreg_lowpart_offset (mode,
7337 inner_mode));
7338 else
7339 op0 = convert_modes (mode, inner_mode, op0,
7340 TYPE_UNSIGNED (inner_type));
7341 }
7342
7343 else if (modifier == EXPAND_INITIALIZER)
7344 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7345
7346 else if (target == 0)
7347 op0 = convert_to_mode (mode, op0,
7348 TYPE_UNSIGNED (TREE_TYPE
7349 (treeop0)));
7350 else
7351 {
7352 convert_move (target, op0,
7353 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7354 op0 = target;
7355 }
7356
7357 return REDUCE_BIT_FIELD (op0);
7358
7359 case ADDR_SPACE_CONVERT_EXPR:
7360 {
7361 tree treeop0_type = TREE_TYPE (treeop0);
7362 addr_space_t as_to;
7363 addr_space_t as_from;
7364
7365 gcc_assert (POINTER_TYPE_P (type));
7366 gcc_assert (POINTER_TYPE_P (treeop0_type));
7367
7368 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7369 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7370
7371 /* Conversions between pointers to the same address space should
7372 have been implemented via CONVERT_EXPR / NOP_EXPR. */
7373 gcc_assert (as_to != as_from);
7374
7375 /* Ask target code to handle conversion between pointers
7376 to overlapping address spaces. */
7377 if (targetm.addr_space.subset_p (as_to, as_from)
7378 || targetm.addr_space.subset_p (as_from, as_to))
7379 {
7380 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7381 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7382 gcc_assert (op0);
7383 return op0;
7384 }
7385
7386 /* For disjoint address spaces, converting anything but
7387 a null pointer invokes undefined behaviour. We simply
7388 always return a null pointer here. */
7389 return CONST0_RTX (mode);
7390 }
7391
7392 case POINTER_PLUS_EXPR:
7393 /* Even though the sizetype mode and the pointer's mode can be different,
7394 expand is able to handle this correctly and get the correct result out
7395 of the PLUS_EXPR code. */
7396 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7397 if sizetype precision is smaller than pointer precision. */
7398 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7399 treeop1 = fold_convert_loc (loc, type,
7400 fold_convert_loc (loc, ssizetype,
7401 treeop1));
7402 case PLUS_EXPR:
7403 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7404 something else, make sure we add the register to the constant and
7405 then to the other thing. This case can occur during strength
7406 reduction and doing it this way will produce better code if the
7407 frame pointer or argument pointer is eliminated.
7408
7409 fold-const.c will ensure that the constant is always in the inner
7410 PLUS_EXPR, so the only case we need to do anything about is if
7411 sp, ap, or fp is our second argument, in which case we must swap
7412 the innermost first argument and our second argument. */
7413
7414 if (TREE_CODE (treeop0) == PLUS_EXPR
7415 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7416 && TREE_CODE (treeop1) == VAR_DECL
7417 && (DECL_RTL (treeop1) == frame_pointer_rtx
7418 || DECL_RTL (treeop1) == stack_pointer_rtx
7419 || DECL_RTL (treeop1) == arg_pointer_rtx))
7420 {
7421 tree t = treeop1;
7422
7423 treeop1 = TREE_OPERAND (treeop0, 0);
7424 TREE_OPERAND (treeop0, 0) = t;
7425 }
7426
7427 /* If the result is to be ptr_mode and we are adding an integer to
7428 something, we might be forming a constant. So try to use
7429 plus_constant. If it produces a sum and we can't accept it,
7430 use force_operand. This allows P = &ARR[const] to generate
7431 efficient code on machines where a SYMBOL_REF is not a valid
7432 address.
7433
7434 If this is an EXPAND_SUM call, always return the sum. */
7435 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7436 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7437 {
7438 if (modifier == EXPAND_STACK_PARM)
7439 target = 0;
7440 if (TREE_CODE (treeop0) == INTEGER_CST
7441 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7442 && TREE_CONSTANT (treeop1))
7443 {
7444 rtx constant_part;
7445
7446 op1 = expand_expr (treeop1, subtarget, VOIDmode,
7447 EXPAND_SUM);
7448 /* Use immed_double_const to ensure that the constant is
7449 truncated according to the mode of OP1, then sign extended
7450 to a HOST_WIDE_INT. Using the constant directly can result
7451 in non-canonical RTL in a 64x32 cross compile. */
7452 constant_part
7453 = immed_double_const (TREE_INT_CST_LOW (treeop0),
7454 (HOST_WIDE_INT) 0,
7455 TYPE_MODE (TREE_TYPE (treeop1)));
7456 op1 = plus_constant (op1, INTVAL (constant_part));
7457 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7458 op1 = force_operand (op1, target);
7459 return REDUCE_BIT_FIELD (op1);
7460 }
7461
7462 else if (TREE_CODE (treeop1) == INTEGER_CST
7463 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7464 && TREE_CONSTANT (treeop0))
7465 {
7466 rtx constant_part;
7467
7468 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7469 (modifier == EXPAND_INITIALIZER
7470 ? EXPAND_INITIALIZER : EXPAND_SUM));
7471 if (! CONSTANT_P (op0))
7472 {
7473 op1 = expand_expr (treeop1, NULL_RTX,
7474 VOIDmode, modifier);
7475 /* Return a PLUS if modifier says it's OK. */
7476 if (modifier == EXPAND_SUM
7477 || modifier == EXPAND_INITIALIZER)
7478 return simplify_gen_binary (PLUS, mode, op0, op1);
7479 goto binop2;
7480 }
7481 /* Use immed_double_const to ensure that the constant is
7482 truncated according to the mode of OP0, then sign extended
7483 to a HOST_WIDE_INT. Using the constant directly can result
7484 in non-canonical RTL in a 64x32 cross compile. */
7485 constant_part
7486 = immed_double_const (TREE_INT_CST_LOW (treeop1),
7487 (HOST_WIDE_INT) 0,
7488 TYPE_MODE (TREE_TYPE (treeop0)));
7489 op0 = plus_constant (op0, INTVAL (constant_part));
7490 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7491 op0 = force_operand (op0, target);
7492 return REDUCE_BIT_FIELD (op0);
7493 }
7494 }
7495
7496 /* Use TER to expand pointer addition of a negated value
7497 as pointer subtraction. */
7498 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
7499 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
7500 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
7501 && TREE_CODE (treeop1) == SSA_NAME
7502 && TYPE_MODE (TREE_TYPE (treeop0))
7503 == TYPE_MODE (TREE_TYPE (treeop1)))
7504 {
7505 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
7506 if (def)
7507 {
7508 treeop1 = gimple_assign_rhs1 (def);
7509 code = MINUS_EXPR;
7510 goto do_minus;
7511 }
7512 }
7513
7514 /* No sense saving up arithmetic to be done
7515 if it's all in the wrong mode to form part of an address.
7516 And force_operand won't know whether to sign-extend or
7517 zero-extend. */
7518 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7519 || mode != ptr_mode)
7520 {
7521 expand_operands (treeop0, treeop1,
7522 subtarget, &op0, &op1, EXPAND_NORMAL);
7523 if (op0 == const0_rtx)
7524 return op1;
7525 if (op1 == const0_rtx)
7526 return op0;
7527 goto binop2;
7528 }
7529
7530 expand_operands (treeop0, treeop1,
7531 subtarget, &op0, &op1, modifier);
7532 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
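      /* Illustrative sketch, assuming a hypothetical global array of
	 4-byte ints: when expanding an initializer such as

	   int *p = &arr[3];

	 the constant-folding paths above can combine the symbol and the
	 byte offset with plus_constant, producing constant RTL along
	 the lines of (const:P (plus:P (symbol_ref:P "arr") (const_int 12)))
	 without emitting any addition instructions.  */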
7533
7534 case MINUS_EXPR:
7535 do_minus:
7536 /* For initializers, we are allowed to return a MINUS of two
7537 symbolic constants. Here we handle all cases when both operands
7538 are constant. */
7539 /* Handle difference of two symbolic constants,
7540 for the sake of an initializer. */
7541 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7542 && really_constant_p (treeop0)
7543 && really_constant_p (treeop1))
7544 {
7545 expand_operands (treeop0, treeop1,
7546 NULL_RTX, &op0, &op1, modifier);
7547
7548 /* If the last operand is a CONST_INT, use plus_constant of
7549 the negated constant. Else make the MINUS. */
7550 if (CONST_INT_P (op1))
7551 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7552 else
7553 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7554 }
7555
7556 /* No sense saving up arithmetic to be done
7557 if it's all in the wrong mode to form part of an address.
7558 And force_operand won't know whether to sign-extend or
7559 zero-extend. */
7560 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7561 || mode != ptr_mode)
7562 goto binop;
7563
7564 expand_operands (treeop0, treeop1,
7565 subtarget, &op0, &op1, modifier);
7566
7567 /* Convert A - const to A + (-const). */
7568 if (CONST_INT_P (op1))
7569 {
7570 op1 = negate_rtx (mode, op1);
7571 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7572 }
7573
7574 goto binop2;
7575
7576 case WIDEN_MULT_PLUS_EXPR:
7577 case WIDEN_MULT_MINUS_EXPR:
7578 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
7579 op2 = expand_normal (treeop2);
7580 target = expand_widen_pattern_expr (ops, op0, op1, op2,
7581 target, unsignedp);
7582 return target;
7583
7584 case WIDEN_MULT_EXPR:
7585 /* If first operand is constant, swap them.
7586 Thus the following special case checks need only
7587 check the second operand. */
7588 if (TREE_CODE (treeop0) == INTEGER_CST)
7589 {
7590 tree t1 = treeop0;
7591 treeop0 = treeop1;
7592 treeop1 = t1;
7593 }
7594
7595 /* First, check if we have a multiplication of one signed and one
7596 unsigned operand. */
7597 if (TREE_CODE (treeop1) != INTEGER_CST
7598 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
7599 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
7600 {
7601 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
7602 this_optab = usmul_widen_optab;
7603 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7604 {
7605 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7606 {
7607 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7608 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7609 EXPAND_NORMAL);
7610 else
7611 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
7612 EXPAND_NORMAL);
7613 goto binop3;
7614 }
7615 }
7616 }
7617 /* Check for a multiplication with matching signedness. */
7618 else if ((TREE_CODE (treeop1) == INTEGER_CST
7619 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
7620 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
7621 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
7622 {
7623 tree op0type = TREE_TYPE (treeop0);
7624 enum machine_mode innermode = TYPE_MODE (op0type);
7625 bool zextend_p = TYPE_UNSIGNED (op0type);
7626 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7627 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7628
7629 if (mode == GET_MODE_2XWIDER_MODE (innermode)
7630 && TREE_CODE (treeop0) != INTEGER_CST)
7631 {
7632 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7633 {
7634 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7635 EXPAND_NORMAL);
7636 temp = expand_widening_mult (mode, op0, op1, target,
7637 unsignedp, this_optab);
7638 return REDUCE_BIT_FIELD (temp);
7639 }
7640 if (optab_handler (other_optab, mode) != CODE_FOR_nothing
7641 && innermode == word_mode)
7642 {
7643 rtx htem, hipart;
7644 op0 = expand_normal (treeop0);
7645 if (TREE_CODE (treeop1) == INTEGER_CST)
7646 op1 = convert_modes (innermode, mode,
7647 expand_normal (treeop1), unsignedp);
7648 else
7649 op1 = expand_normal (treeop1);
7650 temp = expand_binop (mode, other_optab, op0, op1, target,
7651 unsignedp, OPTAB_LIB_WIDEN);
7652 hipart = gen_highpart (innermode, temp);
7653 htem = expand_mult_highpart_adjust (innermode, hipart,
7654 op0, op1, hipart,
7655 zextend_p);
7656 if (htem != hipart)
7657 emit_move_insn (hipart, htem);
7658 return REDUCE_BIT_FIELD (temp);
7659 }
7660 }
7661 }
7662 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
7663 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
7664 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7665 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7666
7667 case FMA_EXPR:
7668 {
7669 optab opt = fma_optab;
7670 gimple def0, def2;
7671
7672 /* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
7673 call. */
7674 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
7675 {
7676 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
7677 tree call_expr;
7678
7679 gcc_assert (fn != NULL_TREE);
7680 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
7681 return expand_builtin (call_expr, target, subtarget, mode, false);
7682 }
7683
7684 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
7685 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
7686
7687 op0 = op2 = NULL;
7688
7689 if (def0 && def2
7690 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
7691 {
7692 opt = fnms_optab;
7693 op0 = expand_normal (gimple_assign_rhs1 (def0));
7694 op2 = expand_normal (gimple_assign_rhs1 (def2));
7695 }
7696 else if (def0
7697 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
7698 {
7699 opt = fnma_optab;
7700 op0 = expand_normal (gimple_assign_rhs1 (def0));
7701 }
7702 else if (def2
7703 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
7704 {
7705 opt = fms_optab;
7706 op2 = expand_normal (gimple_assign_rhs1 (def2));
7707 }
7708
7709 if (op0 == NULL)
7710 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
7711 if (op2 == NULL)
7712 op2 = expand_normal (treeop2);
7713 op1 = expand_normal (treeop1);
7714
7715 return expand_ternary_op (TYPE_MODE (type), opt,
7716 op0, op1, op2, target, 0);
7717 }
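    /* Illustrative sketch, with hypothetical SSA names: given GIMPLE
       along the lines of

	 t_1 = -b_2;
	 x_3 = FMA <t_1, c_4, d_5>;

       the NEGATE_EXPR feeding operand 0 is recognized above and, when
       the target provides fnma_optab, a single fused
       negate-multiply-add is emitted instead of a separate negation
       followed by an fma.  */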
7718
7719 case MULT_EXPR:
7720 /* If this is a fixed-point operation, then we cannot use the code
7721 below because "expand_mult" doesn't support sat/no-sat fixed-point
7722 multiplications. */
7723 if (ALL_FIXED_POINT_MODE_P (mode))
7724 goto binop;
7725
7726 /* If first operand is constant, swap them.
7727 Thus the following special case checks need only
7728 check the second operand. */
7729 if (TREE_CODE (treeop0) == INTEGER_CST)
7730 {
7731 tree t1 = treeop0;
7732 treeop0 = treeop1;
7733 treeop1 = t1;
7734 }
7735
7736 /* Attempt to return something suitable for generating an
7737 indexed address, for machines that support that. */
7738
7739 if (modifier == EXPAND_SUM && mode == ptr_mode
7740 && host_integerp (treeop1, 0))
7741 {
7742 tree exp1 = treeop1;
7743
7744 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7745 EXPAND_SUM);
7746
7747 if (!REG_P (op0))
7748 op0 = force_operand (op0, NULL_RTX);
7749 if (!REG_P (op0))
7750 op0 = copy_to_mode_reg (mode, op0);
7751
7752 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7753 gen_int_mode (tree_low_cst (exp1, 0),
7754 TYPE_MODE (TREE_TYPE (exp1)))));
7755 }
7756
7757 if (modifier == EXPAND_STACK_PARM)
7758 target = 0;
7759
7760 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7761 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7762
7763 case TRUNC_DIV_EXPR:
7764 case FLOOR_DIV_EXPR:
7765 case CEIL_DIV_EXPR:
7766 case ROUND_DIV_EXPR:
7767 case EXACT_DIV_EXPR:
7768 /* If this is a fixed-point operation, then we cannot use the code
7769 below because "expand_divmod" doesn't support sat/no-sat fixed-point
7770 divisions. */
7771 if (ALL_FIXED_POINT_MODE_P (mode))
7772 goto binop;
7773
7774 if (modifier == EXPAND_STACK_PARM)
7775 target = 0;
7776 /* Possible optimization: compute the dividend with EXPAND_SUM
7777 then if the divisor is constant can optimize the case
7778 where some terms of the dividend have coeffs divisible by it. */
7779 expand_operands (treeop0, treeop1,
7780 subtarget, &op0, &op1, EXPAND_NORMAL);
7781 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7782
7783 case RDIV_EXPR:
7784 goto binop;
7785
7786 case TRUNC_MOD_EXPR:
7787 case FLOOR_MOD_EXPR:
7788 case CEIL_MOD_EXPR:
7789 case ROUND_MOD_EXPR:
7790 if (modifier == EXPAND_STACK_PARM)
7791 target = 0;
7792 expand_operands (treeop0, treeop1,
7793 subtarget, &op0, &op1, EXPAND_NORMAL);
7794 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7795
7796 case FIXED_CONVERT_EXPR:
7797 op0 = expand_normal (treeop0);
7798 if (target == 0 || modifier == EXPAND_STACK_PARM)
7799 target = gen_reg_rtx (mode);
7800
7801 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
7802 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7803 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
7804 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
7805 else
7806 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
7807 return target;
7808
7809 case FIX_TRUNC_EXPR:
7810 op0 = expand_normal (treeop0);
7811 if (target == 0 || modifier == EXPAND_STACK_PARM)
7812 target = gen_reg_rtx (mode);
7813 expand_fix (target, op0, unsignedp);
7814 return target;
7815
7816 case FLOAT_EXPR:
7817 op0 = expand_normal (treeop0);
7818 if (target == 0 || modifier == EXPAND_STACK_PARM)
7819 target = gen_reg_rtx (mode);
7820 /* expand_float can't figure out what to do if FROM has VOIDmode.
7821 So give it the correct mode. With -O, cse will optimize this. */
7822 if (GET_MODE (op0) == VOIDmode)
7823 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
7824 op0);
7825 expand_float (target, op0,
7826 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7827 return target;
7828
7829 case NEGATE_EXPR:
7830 op0 = expand_expr (treeop0, subtarget,
7831 VOIDmode, EXPAND_NORMAL);
7832 if (modifier == EXPAND_STACK_PARM)
7833 target = 0;
7834 temp = expand_unop (mode,
7835 optab_for_tree_code (NEGATE_EXPR, type,
7836 optab_default),
7837 op0, target, 0);
7838 gcc_assert (temp);
7839 return REDUCE_BIT_FIELD (temp);
7840
7841 case ABS_EXPR:
7842 op0 = expand_expr (treeop0, subtarget,
7843 VOIDmode, EXPAND_NORMAL);
7844 if (modifier == EXPAND_STACK_PARM)
7845 target = 0;
7846
7847 /* ABS_EXPR is not valid for complex arguments. */
7848 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7849 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7850
7851 /* Unsigned abs is simply the operand. Testing here means we don't
7852 risk generating incorrect code below. */
7853 if (TYPE_UNSIGNED (type))
7854 return op0;
7855
7856 return expand_abs (mode, op0, target, unsignedp,
7857 safe_from_p (target, treeop0, 1));
7858
7859 case MAX_EXPR:
7860 case MIN_EXPR:
7861 target = original_target;
7862 if (target == 0
7863 || modifier == EXPAND_STACK_PARM
7864 || (MEM_P (target) && MEM_VOLATILE_P (target))
7865 || GET_MODE (target) != mode
7866 || (REG_P (target)
7867 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7868 target = gen_reg_rtx (mode);
7869 expand_operands (treeop0, treeop1,
7870 target, &op0, &op1, EXPAND_NORMAL);
7871
7872 /* First try to do it with a special MIN or MAX instruction.
7873 If that does not win, use a conditional jump to select the proper
7874 value. */
7875 this_optab = optab_for_tree_code (code, type, optab_default);
7876 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7877 OPTAB_WIDEN);
7878 if (temp != 0)
7879 return temp;
7880
7881 /* At this point, a MEM target is no longer useful; we will get better
7882 code without it. */
7883
7884 if (! REG_P (target))
7885 target = gen_reg_rtx (mode);
7886
7887 /* If op1 was placed in target, swap op0 and op1. */
7888 if (target != op0 && target == op1)
7889 {
7890 temp = op0;
7891 op0 = op1;
7892 op1 = temp;
7893 }
7894
7895 /* We generate better code and avoid problems with op1 mentioning
7896 target by forcing op1 into a pseudo if it isn't a constant. */
7897 if (! CONSTANT_P (op1))
7898 op1 = force_reg (mode, op1);
7899
7900 {
7901 enum rtx_code comparison_code;
7902 rtx cmpop1 = op1;
7903
7904 if (code == MAX_EXPR)
7905 comparison_code = unsignedp ? GEU : GE;
7906 else
7907 comparison_code = unsignedp ? LEU : LE;
7908
7909 /* Canonicalize to comparisons against 0. */
7910 if (op1 == const1_rtx)
7911 {
7912 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
7913 or (a != 0 ? a : 1) for unsigned.
7914 For MIN we are safe converting (a <= 1 ? a : 1)
7915 into (a <= 0 ? a : 1) */
7916 cmpop1 = const0_rtx;
7917 if (code == MAX_EXPR)
7918 comparison_code = unsignedp ? NE : GT;
7919 }
7920 if (op1 == constm1_rtx && !unsignedp)
7921 {
7922 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
7923 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
7924 cmpop1 = const0_rtx;
7925 if (code == MIN_EXPR)
7926 comparison_code = LT;
7927 }
7928 #ifdef HAVE_conditional_move
7929 /* Use a conditional move if possible. */
7930 if (can_conditionally_move_p (mode))
7931 {
7932 rtx insn;
7933
7934 /* ??? Same problem as in expmed.c: emit_conditional_move
7935 forces a stack adjustment via compare_from_rtx, and we
7936 lose the stack adjustment if the sequence we are about
7937 to create is discarded. */
7938 do_pending_stack_adjust ();
7939
7940 start_sequence ();
7941
7942 /* Try to emit the conditional move. */
7943 insn = emit_conditional_move (target, comparison_code,
7944 op0, cmpop1, mode,
7945 op0, op1, mode,
7946 unsignedp);
7947
7948 /* If we could do the conditional move, emit the sequence,
7949 and return. */
7950 if (insn)
7951 {
7952 rtx seq = get_insns ();
7953 end_sequence ();
7954 emit_insn (seq);
7955 return target;
7956 }
7957
7958 /* Otherwise discard the sequence and fall back to code with
7959 branches. */
7960 end_sequence ();
7961 }
7962 #endif
7963 if (target != op0)
7964 emit_move_insn (target, op0);
7965
7966 temp = gen_label_rtx ();
7967 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
7968 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
7969 -1);
7970 }
7971 emit_move_insn (target, op1);
7972 emit_label (temp);
7973 return target;
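      /* Illustrative note on the canonicalization above: for an
	 unsigned MAX_EXPR against the constant 1, the comparison
	 a >= 1 is rewritten as a != 0 against const0_rtx, which most
	 targets can test more cheaply than a compare against 1.  */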
7974
7975 case BIT_NOT_EXPR:
7976 op0 = expand_expr (treeop0, subtarget,
7977 VOIDmode, EXPAND_NORMAL);
7978 if (modifier == EXPAND_STACK_PARM)
7979 target = 0;
7980 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7981 gcc_assert (temp);
7982 return temp;
7983
7984 /* ??? Can optimize bitwise operations with one arg constant.
7985 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7986 and (a bitwise1 b) bitwise2 b (etc)
7987 but that is probably not worthwhile.  */
7988
7989 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7990 boolean values when we want in all cases to compute both of them. In
7991 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7992 as actual zero-or-1 values and then bitwise anding. In cases where
7993 there cannot be any side effects, better code would be made by
7994 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7995 how to recognize those cases. */
7996
7997 case TRUTH_AND_EXPR:
7998 code = BIT_AND_EXPR;
7999 case BIT_AND_EXPR:
8000 goto binop;
8001
8002 case TRUTH_OR_EXPR:
8003 code = BIT_IOR_EXPR;
8004 case BIT_IOR_EXPR:
8005 goto binop;
8006
8007 case TRUTH_XOR_EXPR:
8008 code = BIT_XOR_EXPR;
8009 case BIT_XOR_EXPR:
8010 goto binop;
8011
8012 case LROTATE_EXPR:
8013 case RROTATE_EXPR:
8014 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8015 || (GET_MODE_PRECISION (TYPE_MODE (type))
8016 == TYPE_PRECISION (type)));
8017 /* fall through */
8018
8019 case LSHIFT_EXPR:
8020 case RSHIFT_EXPR:
8021 /* If this is a fixed-point operation, then we cannot use the code
8022 below because "expand_shift" doesn't support sat/no-sat fixed-point
8023 shifts. */
8024 if (ALL_FIXED_POINT_MODE_P (mode))
8025 goto binop;
8026
8027 if (! safe_from_p (subtarget, treeop1, 1))
8028 subtarget = 0;
8029 if (modifier == EXPAND_STACK_PARM)
8030 target = 0;
8031 op0 = expand_expr (treeop0, subtarget,
8032 VOIDmode, EXPAND_NORMAL);
8033 temp = expand_shift (code, mode, op0, treeop1, target,
8034 unsignedp);
8035 if (code == LSHIFT_EXPR)
8036 temp = REDUCE_BIT_FIELD (temp);
8037 return temp;
8038
8039 /* Could determine the answer when only additive constants differ. Also,
8040 the addition of one can be handled by changing the condition. */
8041 case LT_EXPR:
8042 case LE_EXPR:
8043 case GT_EXPR:
8044 case GE_EXPR:
8045 case EQ_EXPR:
8046 case NE_EXPR:
8047 case UNORDERED_EXPR:
8048 case ORDERED_EXPR:
8049 case UNLT_EXPR:
8050 case UNLE_EXPR:
8051 case UNGT_EXPR:
8052 case UNGE_EXPR:
8053 case UNEQ_EXPR:
8054 case LTGT_EXPR:
8055 temp = do_store_flag (ops,
8056 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8057 tmode != VOIDmode ? tmode : mode);
8058 if (temp)
8059 return temp;
8060
8061 /* Use a compare and a jump for BLKmode comparisons, or for function
8062 type comparisons if HAVE_canonicalize_funcptr_for_compare.  */
8063
8064 if ((target == 0
8065 || modifier == EXPAND_STACK_PARM
8066 || ! safe_from_p (target, treeop0, 1)
8067 || ! safe_from_p (target, treeop1, 1)
8068 /* Make sure we don't have a hard reg (such as the function's return
8069 value) live across basic blocks, if not optimizing. */
8070 || (!optimize && REG_P (target)
8071 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8072 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8073
8074 emit_move_insn (target, const0_rtx);
8075
8076 op1 = gen_label_rtx ();
8077 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8078
8079 emit_move_insn (target, const1_rtx);
8080
8081 emit_label (op1);
8082 return target;
8083
8084 case TRUTH_NOT_EXPR:
8085 if (modifier == EXPAND_STACK_PARM)
8086 target = 0;
8087 op0 = expand_expr (treeop0, target,
8088 VOIDmode, EXPAND_NORMAL);
8089 /* The parser is careful to generate TRUTH_NOT_EXPR
8090 only with operands that are always zero or one. */
8091 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8092 target, 1, OPTAB_LIB_WIDEN);
8093 gcc_assert (temp);
8094 return temp;
8095
8096 case COMPLEX_EXPR:
8097 /* Get the rtx code of the operands. */
8098 op0 = expand_normal (treeop0);
8099 op1 = expand_normal (treeop1);
8100
8101 if (!target)
8102 target = gen_reg_rtx (TYPE_MODE (type));
8103
8104 /* Move the real (op0) and imaginary (op1) parts to their location. */
8105 write_complex_part (target, op0, false);
8106 write_complex_part (target, op1, true);
8107
8108 return target;
8109
8110 case WIDEN_SUM_EXPR:
8111 {
8112 tree oprnd0 = treeop0;
8113 tree oprnd1 = treeop1;
8114
8115 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8116 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8117 target, unsignedp);
8118 return target;
8119 }
8120
8121 case REDUC_MAX_EXPR:
8122 case REDUC_MIN_EXPR:
8123 case REDUC_PLUS_EXPR:
8124 {
8125 op0 = expand_normal (treeop0);
8126 this_optab = optab_for_tree_code (code, type, optab_default);
8127 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8128 gcc_assert (temp);
8129 return temp;
8130 }
8131
8132 case VEC_EXTRACT_EVEN_EXPR:
8133 case VEC_EXTRACT_ODD_EXPR:
8134 {
8135 expand_operands (treeop0, treeop1,
8136 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8137 this_optab = optab_for_tree_code (code, type, optab_default);
8138 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8139 OPTAB_WIDEN);
8140 gcc_assert (temp);
8141 return temp;
8142 }
8143
8144 case VEC_INTERLEAVE_HIGH_EXPR:
8145 case VEC_INTERLEAVE_LOW_EXPR:
8146 {
8147 expand_operands (treeop0, treeop1,
8148 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8149 this_optab = optab_for_tree_code (code, type, optab_default);
8150 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8151 OPTAB_WIDEN);
8152 gcc_assert (temp);
8153 return temp;
8154 }
8155
8156 case VEC_LSHIFT_EXPR:
8157 case VEC_RSHIFT_EXPR:
8158 {
8159 target = expand_vec_shift_expr (ops, target);
8160 return target;
8161 }
8162
8163 case VEC_UNPACK_HI_EXPR:
8164 case VEC_UNPACK_LO_EXPR:
8165 {
8166 op0 = expand_normal (treeop0);
8167 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8168 target, unsignedp);
8169 gcc_assert (temp);
8170 return temp;
8171 }
8172
8173 case VEC_UNPACK_FLOAT_HI_EXPR:
8174 case VEC_UNPACK_FLOAT_LO_EXPR:
8175 {
8176 op0 = expand_normal (treeop0);
8177 /* The signedness is determined from the input operand.  */
8178 temp = expand_widen_pattern_expr
8179 (ops, op0, NULL_RTX, NULL_RTX,
8180 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8181
8182 gcc_assert (temp);
8183 return temp;
8184 }
8185
8186 case VEC_WIDEN_MULT_HI_EXPR:
8187 case VEC_WIDEN_MULT_LO_EXPR:
8188 {
8189 tree oprnd0 = treeop0;
8190 tree oprnd1 = treeop1;
8191
8192 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8193 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8194 target, unsignedp);
8195 gcc_assert (target);
8196 return target;
8197 }
8198
8199 case VEC_PACK_TRUNC_EXPR:
8200 case VEC_PACK_SAT_EXPR:
8201 case VEC_PACK_FIX_TRUNC_EXPR:
8202 mode = TYPE_MODE (TREE_TYPE (treeop0));
8203 goto binop;
8204
8205 case DOT_PROD_EXPR:
8206 {
8207 tree oprnd0 = treeop0;
8208 tree oprnd1 = treeop1;
8209 tree oprnd2 = treeop2;
8210 rtx op2;
8211
8212 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8213 op2 = expand_normal (oprnd2);
8214 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8215 target, unsignedp);
8216 return target;
8217 }
8218
8219 case REALIGN_LOAD_EXPR:
8220 {
8221 tree oprnd0 = treeop0;
8222 tree oprnd1 = treeop1;
8223 tree oprnd2 = treeop2;
8224 rtx op2;
8225
8226 this_optab = optab_for_tree_code (code, type, optab_default);
8227 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8228 op2 = expand_normal (oprnd2);
8229 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8230 target, unsignedp);
8231 gcc_assert (temp);
8232 return temp;
8233 }
8234
8235 default:
8236 gcc_unreachable ();
8237 }
8238
8239 /* Here to do an ordinary binary operator. */
8240 binop:
8241 expand_operands (treeop0, treeop1,
8242 subtarget, &op0, &op1, EXPAND_NORMAL);
8243 binop2:
8244 this_optab = optab_for_tree_code (code, type, optab_default);
8245 binop3:
8246 if (modifier == EXPAND_STACK_PARM)
8247 target = 0;
8248 temp = expand_binop (mode, this_optab, op0, op1, target,
8249 unsignedp, OPTAB_LIB_WIDEN);
8250 gcc_assert (temp);
8251 return REDUCE_BIT_FIELD (temp);
8252 }
8253 #undef REDUCE_BIT_FIELD
8254
8255 rtx
8256 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8257 enum expand_modifier modifier, rtx *alt_rtl)
8258 {
8259 rtx op0, op1, temp, decl_rtl;
8260 tree type;
8261 int unsignedp;
8262 enum machine_mode mode;
8263 enum tree_code code = TREE_CODE (exp);
8264 rtx subtarget, original_target;
8265 int ignore;
8266 tree context;
8267 bool reduce_bit_field;
8268 location_t loc = EXPR_LOCATION (exp);
8269 struct separate_ops ops;
8270 tree treeop0, treeop1, treeop2;
8271 tree ssa_name = NULL_TREE;
8272 gimple g;
8273
8274 type = TREE_TYPE (exp);
8275 mode = TYPE_MODE (type);
8276 unsignedp = TYPE_UNSIGNED (type);
8277
8278 treeop0 = treeop1 = treeop2 = NULL_TREE;
8279 if (!VL_EXP_CLASS_P (exp))
8280 switch (TREE_CODE_LENGTH (code))
8281 {
8282 default:
8283 case 3: treeop2 = TREE_OPERAND (exp, 2);
8284 case 2: treeop1 = TREE_OPERAND (exp, 1);
8285 case 1: treeop0 = TREE_OPERAND (exp, 0);
8286 case 0: break;
8287 }
8288 ops.code = code;
8289 ops.type = type;
8290 ops.op0 = treeop0;
8291 ops.op1 = treeop1;
8292 ops.op2 = treeop2;
8293 ops.location = loc;
8294
8295 ignore = (target == const0_rtx
8296 || ((CONVERT_EXPR_CODE_P (code)
8297 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8298 && TREE_CODE (type) == VOID_TYPE));
8299
8300 /* An operation in what may be a bit-field type needs the
8301 result to be reduced to the precision of the bit-field type,
8302 which is narrower than that of the type's mode. */
8303 reduce_bit_field = (!ignore
8304 && TREE_CODE (type) == INTEGER_TYPE
8305 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8306
8307 /* If we are going to ignore this result, we need only do something
8308 if there is a side-effect somewhere in the expression. If there
8309 is, short-circuit the most common cases here. Note that we must
8310 not call expand_expr with anything but const0_rtx in case this
8311 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
8312
8313 if (ignore)
8314 {
8315 if (! TREE_SIDE_EFFECTS (exp))
8316 return const0_rtx;
8317
8318 /* Ensure we reference a volatile object even if value is ignored, but
8319 don't do this if all we are doing is taking its address. */
8320 if (TREE_THIS_VOLATILE (exp)
8321 && TREE_CODE (exp) != FUNCTION_DECL
8322 && mode != VOIDmode && mode != BLKmode
8323 && modifier != EXPAND_CONST_ADDRESS)
8324 {
8325 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8326 if (MEM_P (temp))
8327 copy_to_reg (temp);
8328 return const0_rtx;
8329 }
8330
8331 if (TREE_CODE_CLASS (code) == tcc_unary
8332 || code == COMPONENT_REF || code == INDIRECT_REF)
8333 return expand_expr (treeop0, const0_rtx, VOIDmode,
8334 modifier);
8335
8336 else if (TREE_CODE_CLASS (code) == tcc_binary
8337 || TREE_CODE_CLASS (code) == tcc_comparison
8338 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8339 {
8340 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8341 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8342 return const0_rtx;
8343 }
8344 else if (code == BIT_FIELD_REF)
8345 {
8346 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8347 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8348 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
8349 return const0_rtx;
8350 }
8351
8352 target = 0;
8353 }
8354
8355 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8356 target = 0;
8357
8358 /* Use subtarget as the target for operand 0 of a binary operation. */
8359 subtarget = get_subtarget (target);
8360 original_target = target;
8361
8362 switch (code)
8363 {
8364 case LABEL_DECL:
8365 {
8366 tree function = decl_function_context (exp);
8367
8368 temp = label_rtx (exp);
8369 temp = gen_rtx_LABEL_REF (Pmode, temp);
8370
8371 if (function != current_function_decl
8372 && function != 0)
8373 LABEL_REF_NONLOCAL_P (temp) = 1;
8374
8375 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8376 return temp;
8377 }
8378
8379 case SSA_NAME:
8380 /* ??? ivopts calls expander, without any preparation from
8381 out-of-ssa. So fake instructions as if this was an access to the
8382 base variable.  This unnecessarily allocates a pseudo; see how we can
8383 reuse it, if partition base vars have it set already. */
8384 if (!currently_expanding_to_rtl)
8385 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
8386 NULL);
8387
8388 g = get_gimple_for_ssa_name (exp);
8389 /* For EXPAND_INITIALIZER try harder to get something simpler. */
8390 if (g == NULL
8391 && modifier == EXPAND_INITIALIZER
8392 && !SSA_NAME_IS_DEFAULT_DEF (exp)
8393 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
8394 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
8395 g = SSA_NAME_DEF_STMT (exp);
8396 if (g)
8397 return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,
8398 modifier, NULL);
8399
8400 ssa_name = exp;
8401 decl_rtl = get_rtx_for_ssa_name (ssa_name);
8402 exp = SSA_NAME_VAR (ssa_name);
8403 goto expand_decl_rtl;
8404
8405 case PARM_DECL:
8406 case VAR_DECL:
8407 /* If a static var's type was incomplete when the decl was written,
8408 but the type is complete now, lay out the decl now. */
8409 if (DECL_SIZE (exp) == 0
8410 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
8411 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
8412 layout_decl (exp, 0);
8413
8414 /* ... fall through ... */
8415
8416 case FUNCTION_DECL:
8417 case RESULT_DECL:
8418 decl_rtl = DECL_RTL (exp);
8419 expand_decl_rtl:
8420 gcc_assert (decl_rtl);
8421 decl_rtl = copy_rtx (decl_rtl);
8422 /* Record writes to register variables. */
8423 if (modifier == EXPAND_WRITE && REG_P (decl_rtl)
8424 && REGNO (decl_rtl) < FIRST_PSEUDO_REGISTER)
8425 {
8426 int i = REGNO (decl_rtl);
8427 int nregs = hard_regno_nregs[i][GET_MODE (decl_rtl)];
8428 while (nregs)
8429 {
8430 SET_HARD_REG_BIT (crtl->asm_clobbers, i);
8431 i++;
8432 nregs--;
8433 }
8434 }
8435
8436 /* Ensure variable marked as used even if it doesn't go through
8437 a parser.  If it hasn't been used yet, write out an external
8438 definition. */
8439 if (! TREE_USED (exp))
8440 {
8441 assemble_external (exp);
8442 TREE_USED (exp) = 1;
8443 }
8444
8445 /* Show we haven't gotten RTL for this yet. */
8446 temp = 0;
8447
8448 /* Variables inherited from containing functions should have
8449 been lowered by this point. */
8450 context = decl_function_context (exp);
8451 gcc_assert (!context
8452 || context == current_function_decl
8453 || TREE_STATIC (exp)
8454 || DECL_EXTERNAL (exp)
8455 /* ??? C++ creates functions that are not TREE_STATIC. */
8456 || TREE_CODE (exp) == FUNCTION_DECL);
8457
8458 /* This is the case of an array whose size is to be determined
8459 from its initializer, while the initializer is still being parsed.
8460 See expand_decl. */
8461
8462 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
8463 temp = validize_mem (decl_rtl);
8464
8465 /* If DECL_RTL is memory, we are in the normal case and the
8466 address is not valid, get the address into a register. */
8467
8468 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
8469 {
8470 if (alt_rtl)
8471 *alt_rtl = decl_rtl;
8472 decl_rtl = use_anchored_address (decl_rtl);
8473 if (modifier != EXPAND_CONST_ADDRESS
8474 && modifier != EXPAND_SUM
8475 && !memory_address_addr_space_p (DECL_MODE (exp),
8476 XEXP (decl_rtl, 0),
8477 MEM_ADDR_SPACE (decl_rtl)))
8478 temp = replace_equiv_address (decl_rtl,
8479 copy_rtx (XEXP (decl_rtl, 0)));
8480 }
8481
8482 /* If we got something, return it. But first, set the alignment
8483 if the address is a register. */
8484 if (temp != 0)
8485 {
8486 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
8487 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
8488
8489 return temp;
8490 }
8491
8492 /* If the mode of DECL_RTL does not match that of the decl, it
8493 must be a promoted value. We return a SUBREG of the wanted mode,
8494 but mark it so that we know that it was already extended. */
8495 if (REG_P (decl_rtl) && GET_MODE (decl_rtl) != DECL_MODE (exp))
8496 {
8497 enum machine_mode pmode;
8498
8499 /* Get the signedness to be used for this variable. Ensure we get
8500 the same mode we got when the variable was declared. */
8501 if (code == SSA_NAME
8502 && (g = SSA_NAME_DEF_STMT (ssa_name))
8503 && gimple_code (g) == GIMPLE_CALL)
8504 pmode = promote_function_mode (type, mode, &unsignedp,
8505 TREE_TYPE
8506 (TREE_TYPE (gimple_call_fn (g))),
8507 2);
8508 else
8509 pmode = promote_decl_mode (exp, &unsignedp);
8510 gcc_assert (GET_MODE (decl_rtl) == pmode);
8511
8512 temp = gen_lowpart_SUBREG (mode, decl_rtl);
8513 SUBREG_PROMOTED_VAR_P (temp) = 1;
8514 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
8515 return temp;
8516 }
8517
8518 return decl_rtl;
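      /* Illustrative note, assuming a hypothetical char parameter whose
	 value arrives promoted in an SImode register: the path above
	 returns (subreg:QI (reg:SI ...) 0) with SUBREG_PROMOTED_VAR_P
	 set, so later users know the extension has already been done
	 and need not redo it.  */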
8519
8520 case INTEGER_CST:
8521 temp = immed_double_const (TREE_INT_CST_LOW (exp),
8522 TREE_INT_CST_HIGH (exp), mode);
8523
8524 return temp;
8525
8526 case VECTOR_CST:
8527 {
8528 tree tmp = NULL_TREE;
8529 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
8530 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
8531 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
8532 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
8533 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
8534 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
8535 return const_vector_from_tree (exp);
8536 if (GET_MODE_CLASS (mode) == MODE_INT)
8537 {
8538 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
8539 if (type_for_mode)
8540 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
8541 }
8542 if (!tmp)
8543 tmp = build_constructor_from_list (type,
8544 TREE_VECTOR_CST_ELTS (exp));
8545 return expand_expr (tmp, ignore ? const0_rtx : target,
8546 tmode, modifier);
8547 }
8548
8549 case CONST_DECL:
8550 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
8551
8552 case REAL_CST:
8553 /* If optimized, generate immediate CONST_DOUBLE
8554 which will be turned into memory by reload if necessary.
8555
8556 We used to force a register so that loop.c could see it. But
8557 this does not allow gen_* patterns to perform optimizations with
8558 the constants. It also produces two insns in cases like "x = 1.0;".
8559 On most machines, floating-point constants are not permitted in
8560 many insns, so we'd end up copying it to a register in any case.
8561
8562 Now, we do the copying in expand_binop, if appropriate. */
8563 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
8564 TYPE_MODE (TREE_TYPE (exp)));
8565
8566 case FIXED_CST:
8567 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
8568 TYPE_MODE (TREE_TYPE (exp)));
8569
8570 case COMPLEX_CST:
8571 /* Handle evaluating a complex constant in a CONCAT target. */
8572 if (original_target && GET_CODE (original_target) == CONCAT)
8573 {
8574 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8575 rtx rtarg, itarg;
8576
8577 rtarg = XEXP (original_target, 0);
8578 itarg = XEXP (original_target, 1);
8579
8580 /* Move the real and imaginary parts separately. */
8581 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
8582 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
8583
8584 if (op0 != rtarg)
8585 emit_move_insn (rtarg, op0);
8586 if (op1 != itarg)
8587 emit_move_insn (itarg, op1);
8588
8589 return original_target;
8590 }
8591
8592 /* ... fall through ... */
8593
8594 case STRING_CST:
8595 temp = expand_expr_constant (exp, 1, modifier);
8596
8597 /* temp contains a constant address.
8598 On RISC machines where a constant address isn't valid,
8599 make some insns to get that address into a register. */
8600 if (modifier != EXPAND_CONST_ADDRESS
8601 && modifier != EXPAND_INITIALIZER
8602 && modifier != EXPAND_SUM
8603 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
8604 MEM_ADDR_SPACE (temp)))
8605 return replace_equiv_address (temp,
8606 copy_rtx (XEXP (temp, 0)));
8607 return temp;
8608
8609 case SAVE_EXPR:
8610 {
8611 tree val = treeop0;
8612 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
8613
8614 if (!SAVE_EXPR_RESOLVED_P (exp))
8615 {
8616 /* We can indeed still hit this case, typically via builtin
8617 expanders calling save_expr immediately before expanding
8618 something. Assume this means that we only have to deal
8619 with non-BLKmode values. */
8620 gcc_assert (GET_MODE (ret) != BLKmode);
8621
8622 val = build_decl (EXPR_LOCATION (exp),
8623 VAR_DECL, NULL, TREE_TYPE (exp));
8624 DECL_ARTIFICIAL (val) = 1;
8625 DECL_IGNORED_P (val) = 1;
8626 treeop0 = val;
8627 TREE_OPERAND (exp, 0) = treeop0;
8628 SAVE_EXPR_RESOLVED_P (exp) = 1;
8629
8630 if (!CONSTANT_P (ret))
8631 ret = copy_to_reg (ret);
8632 SET_DECL_RTL (val, ret);
8633 }
8634
8635 return ret;
8636 }
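      /* Illustrative note: the first expansion of a SAVE_EXPR above
	 copies the value into a pseudo (unless it is already constant),
	 wraps that pseudo in an artificial VAR_DECL, and rewrites
	 operand 0 of the SAVE_EXPR to point at it; any later expansion
	 of the same SAVE_EXPR then simply re-reads the pseudo instead
	 of re-evaluating the saved expression.  */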
8637
8638
8639 case CONSTRUCTOR:
8640 /* If we don't need the result, just ensure we evaluate any
8641 subexpressions. */
8642 if (ignore)
8643 {
8644 unsigned HOST_WIDE_INT idx;
8645 tree value;
8646
8647 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
8648 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
8649
8650 return const0_rtx;
8651 }
8652
8653 return expand_constructor (exp, target, modifier, false);
8654
8655 case TARGET_MEM_REF:
8656 {
8657 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
8658 struct mem_address addr;
8659 int icode, align;
8660
8661 get_address_description (exp, &addr);
8662 op0 = addr_for_mem_ref (&addr, as, true);
8663 op0 = memory_address_addr_space (mode, op0, as);
8664 temp = gen_rtx_MEM (mode, op0);
8665 set_mem_attributes (temp, exp, 0);
8666 set_mem_addr_space (temp, as);
8667 align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
8668 get_object_alignment (exp, BIGGEST_ALIGNMENT));
8669 if (mode != BLKmode
8670 && (unsigned) align < GET_MODE_ALIGNMENT (mode)
8671 /* If the target does not have special handling for unaligned
8672 loads of this mode, then it can use regular moves for them.  */
8673 && ((icode = optab_handler (movmisalign_optab, mode))
8674 != CODE_FOR_nothing))
8675 {
8676 rtx reg, insn;
8677
8678 /* We've already validated the memory, and we're creating a
8679 new pseudo destination. The predicates really can't fail. */
8680 reg = gen_reg_rtx (mode);
8681
8682 /* Nor can the insn generator. */
8683 insn = GEN_FCN (icode) (reg, temp);
8684 gcc_assert (insn != NULL_RTX);
8685 emit_insn (insn);
8686
8687 return reg;
8688 }
8689 return temp;
8690 }
8691
8692 case MEM_REF:
8693 {
8694 addr_space_t as
8695 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8696 enum machine_mode address_mode;
8697 tree base = TREE_OPERAND (exp, 0);
8698 gimple def_stmt;
8699 int icode, align;
8700 /* Handle expansion of non-aliased memory with non-BLKmode. That
8701 might end up in a register. */
8702 if (TREE_CODE (base) == ADDR_EXPR)
8703 {
8704 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
8705 tree bit_offset;
8706 base = TREE_OPERAND (base, 0);
8707 if (!DECL_P (base))
8708 {
8709 HOST_WIDE_INT off;
8710 base = get_addr_base_and_unit_offset (base, &off);
8711 gcc_assert (base);
8712 offset += off;
8713 }
8714 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
8715 decl we must use bitfield operations. */
8716 if (DECL_P (base)
8717 && !TREE_ADDRESSABLE (base)
8718 && DECL_MODE (base) != BLKmode
8719 && DECL_RTL_SET_P (base)
8720 && !MEM_P (DECL_RTL (base)))
8721 {
8722 tree bftype;
8723 if (offset == 0
8724 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
8725 && (GET_MODE_BITSIZE (DECL_MODE (base))
8726 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
8727 return expand_expr (build1 (VIEW_CONVERT_EXPR,
8728 TREE_TYPE (exp), base),
8729 target, tmode, modifier);
8730 bit_offset = bitsize_int (offset * BITS_PER_UNIT);
8731 bftype = TREE_TYPE (base);
8732 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
8733 bftype = TREE_TYPE (exp);
8734 return expand_expr (build3 (BIT_FIELD_REF, bftype,
8735 base,
8736 TYPE_SIZE (TREE_TYPE (exp)),
8737 bit_offset),
8738 target, tmode, modifier);
8739 }
8740 }
8741 address_mode = targetm.addr_space.address_mode (as);
8742 base = TREE_OPERAND (exp, 0);
8743 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
8744 {
8745 tree mask = gimple_assign_rhs2 (def_stmt);
8746 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
8747 gimple_assign_rhs1 (def_stmt), mask);
8748 TREE_OPERAND (exp, 0) = base;
8749 }
8750 align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
8751 get_object_alignment (exp, BIGGEST_ALIGNMENT));
8752 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
8753 op0 = memory_address_addr_space (address_mode, op0, as);
8754 if (!integer_zerop (TREE_OPERAND (exp, 1)))
8755 {
8756 rtx off
8757 = immed_double_int_const (mem_ref_offset (exp), address_mode);
8758 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
8759 }
8760 op0 = memory_address_addr_space (mode, op0, as);
8761 temp = gen_rtx_MEM (mode, op0);
8762 set_mem_attributes (temp, exp, 0);
8763 set_mem_addr_space (temp, as);
8764 if (TREE_THIS_VOLATILE (exp))
8765 MEM_VOLATILE_P (temp) = 1;
8766 if (mode != BLKmode
8767 && (unsigned) align < GET_MODE_ALIGNMENT (mode)
8768 /* If the target does not have special handling for unaligned
8769 loads of mode then it can use regular moves for them. */
8770 && ((icode = optab_handler (movmisalign_optab, mode))
8771 != CODE_FOR_nothing))
8772 {
8773 rtx reg, insn;
8774
8775 /* We've already validated the memory, and we're creating a
8776 new pseudo destination. The predicates really can't fail. */
8777 reg = gen_reg_rtx (mode);
8778
8779 /* Nor can the insn generator. */
8780 insn = GEN_FCN (icode) (reg, temp);
8781 emit_insn (insn);
8782
8783 return reg;
8784 }
8785 return temp;
8786 }
8787
8788 case ARRAY_REF:
8789
8790 {
8791 tree array = treeop0;
8792 tree index = treeop1;
8793
8794 /* Fold an expression like: "foo"[2].
8795 This is not done in fold so it won't happen inside &.
8796 Don't fold if this is for wide characters since it's too
8797 difficult to do correctly and this is a very rare case. */
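/* For instance, "foo"[2] is replaced here by the character constant 'o'
   via fold_read_from_constant_string below, assuming a narrow character
   element type.  */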
8798
8799 if (modifier != EXPAND_CONST_ADDRESS
8800 && modifier != EXPAND_INITIALIZER
8801 && modifier != EXPAND_MEMORY)
8802 {
8803 tree t = fold_read_from_constant_string (exp);
8804
8805 if (t)
8806 return expand_expr (t, target, tmode, modifier);
8807 }
8808
8809 /* If this is a constant index into a constant array,
8810 just get the value from the array. Handle both the cases when
8811 we have an explicit constructor and when our operand is a variable
8812 that was declared const. */
8813
8814 if (modifier != EXPAND_CONST_ADDRESS
8815 && modifier != EXPAND_INITIALIZER
8816 && modifier != EXPAND_MEMORY
8817 && TREE_CODE (array) == CONSTRUCTOR
8818 && ! TREE_SIDE_EFFECTS (array)
8819 && TREE_CODE (index) == INTEGER_CST)
8820 {
8821 unsigned HOST_WIDE_INT ix;
8822 tree field, value;
8823
8824 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
8825 field, value)
8826 if (tree_int_cst_equal (field, index))
8827 {
8828 if (!TREE_SIDE_EFFECTS (value))
8829 return expand_expr (fold (value), target, tmode, modifier);
8830 break;
8831 }
8832 }
8833
8834 else if (optimize >= 1
8835 && modifier != EXPAND_CONST_ADDRESS
8836 && modifier != EXPAND_INITIALIZER
8837 && modifier != EXPAND_MEMORY
8838 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8839 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8840 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
8841 && const_value_known_p (array))
8842 {
8843 if (TREE_CODE (index) == INTEGER_CST)
8844 {
8845 tree init = DECL_INITIAL (array);
8846
8847 if (TREE_CODE (init) == CONSTRUCTOR)
8848 {
8849 unsigned HOST_WIDE_INT ix;
8850 tree field, value;
8851
8852 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
8853 field, value)
8854 if (tree_int_cst_equal (field, index))
8855 {
8856 if (TREE_SIDE_EFFECTS (value))
8857 break;
8858
8859 if (TREE_CODE (value) == CONSTRUCTOR)
8860 {
8861 /* If VALUE is a CONSTRUCTOR, this
8862 optimization is only useful if
8863 this doesn't store the CONSTRUCTOR
8864 into memory. If it does, it is more
8865 efficient to just load the data from
8866 the array directly. */
8867 rtx ret = expand_constructor (value, target,
8868 modifier, true);
8869 if (ret == NULL_RTX)
8870 break;
8871 }
8872
8873 return expand_expr (fold (value), target, tmode,
8874 modifier);
8875 }
8876 }
8877 else if (TREE_CODE (init) == STRING_CST)
8878 {
8879 tree index1 = index;
8880 tree low_bound = array_ref_low_bound (exp);
8881 index1 = fold_convert_loc (loc, sizetype,
8882 treeop1);
8883
8884 /* Optimize the special case of a zero lower bound.
8885
8886 We convert the low_bound to sizetype to avoid some problems
8887 with constant folding. (E.g. suppose the lower bound is 1,
8888 and its mode is QI. Without the conversion, (ARRAY
8889 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8890 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8891
8892 if (! integer_zerop (low_bound))
8893 index1 = size_diffop_loc (loc, index1,
8894 fold_convert_loc (loc, sizetype,
8895 low_bound));
8896
8897 if (0 > compare_tree_int (index1,
8898 TREE_STRING_LENGTH (init)))
8899 {
8900 tree type = TREE_TYPE (TREE_TYPE (init));
8901 enum machine_mode mode = TYPE_MODE (type);
8902
8903 if (GET_MODE_CLASS (mode) == MODE_INT
8904 && GET_MODE_SIZE (mode) == 1)
8905 return gen_int_mode (TREE_STRING_POINTER (init)
8906 [TREE_INT_CST_LOW (index1)],
8907 mode);
8908 }
8909 }
8910 }
8911 }
8912 }
8913 goto normal_inner_ref;
8914
8915 case COMPONENT_REF:
8916 /* If the operand is a CONSTRUCTOR, we can just extract the
8917 appropriate field if it is present. */
8918 if (TREE_CODE (treeop0) == CONSTRUCTOR)
8919 {
8920 unsigned HOST_WIDE_INT idx;
8921 tree field, value;
8922
8923 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
8924 idx, field, value)
8925 if (field == treeop1
8926 /* We can normally use the value of the field in the
8927 CONSTRUCTOR. However, if this is a bitfield in
8928 an integral mode that we can fit in a HOST_WIDE_INT,
8929 we must mask only the number of bits in the bitfield,
8930 since this is done implicitly by the constructor. If
8931 the bitfield does not meet either of those conditions,
8932 we can't do this optimization. */
8933 && (! DECL_BIT_FIELD (field)
8934 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
8935 && (GET_MODE_BITSIZE (DECL_MODE (field))
8936 <= HOST_BITS_PER_WIDE_INT))))
8937 {
8938 if (DECL_BIT_FIELD (field)
8939 && modifier == EXPAND_STACK_PARM)
8940 target = 0;
8941 op0 = expand_expr (value, target, tmode, modifier);
8942 if (DECL_BIT_FIELD (field))
8943 {
8944 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
8945 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
8946
8947 if (TYPE_UNSIGNED (TREE_TYPE (field)))
8948 {
8949 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
8950 op0 = expand_and (imode, op0, op1, target);
8951 }
8952 else
8953 {
8954 tree count
8955 = build_int_cst (NULL_TREE,
8956 GET_MODE_BITSIZE (imode) - bitsize);
8957
8958 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
8959 target, 0);
8960 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
8961 target, 0);
8962 }
8963 }
8964
8965 return op0;
8966 }
8967 }
8968 goto normal_inner_ref;
8969
8970 case BIT_FIELD_REF:
8971 case ARRAY_RANGE_REF:
8972 normal_inner_ref:
8973 {
8974 enum machine_mode mode1, mode2;
8975 HOST_WIDE_INT bitsize, bitpos;
8976 tree offset;
8977 int volatilep = 0, must_force_mem;
8978 bool packedp = false;
8979 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8980 &mode1, &unsignedp, &volatilep, true);
8981 rtx orig_op0, memloc;
8982
8983 /* If we got back the original object, something is wrong. Perhaps
8984 we are evaluating an expression too early. In any event, don't
8985 infinitely recurse. */
8986 gcc_assert (tem != exp);
8987
8988 if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
8989 || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
8990 && DECL_PACKED (TREE_OPERAND (exp, 1))))
8991 packedp = true;
8992
8993 /* If TEM's type is a union of variable size, pass TARGET to the inner
8994 computation, since it will need a temporary and TARGET is known
8995 to suffice. This occurs in unchecked conversion in Ada. */
8996 orig_op0 = op0
8997 = expand_expr (tem,
8998 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
8999 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9000 != INTEGER_CST)
9001 && modifier != EXPAND_STACK_PARM
9002 ? target : NULL_RTX),
9003 VOIDmode,
9004 (modifier == EXPAND_INITIALIZER
9005 || modifier == EXPAND_CONST_ADDRESS
9006 || modifier == EXPAND_STACK_PARM)
9007 ? modifier : EXPAND_NORMAL);
9008
9009
9010 /* If the bitfield is volatile, we want to access it in the
9011 field's mode, not the computed mode.
9012 If a MEM has VOIDmode (external with incomplete type),
9013 use BLKmode for it instead. */
9014 if (MEM_P (op0))
9015 {
9016 if (volatilep && flag_strict_volatile_bitfields > 0)
9017 op0 = adjust_address (op0, mode1, 0);
9018 else if (GET_MODE (op0) == VOIDmode)
9019 op0 = adjust_address (op0, BLKmode, 0);
9020 }
9021
9022 mode2
9023 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9024
9025 /* If we have either an offset, a BLKmode result, or a reference
9026 outside the underlying object, we must force it to memory.
9027 Such a case can occur in Ada if we have unchecked conversion
9028 of an expression from a scalar type to an aggregate type or
9029 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9030 passed a partially uninitialized object or a view-conversion
9031 to a larger size. */
9032 must_force_mem = (offset
9033 || mode1 == BLKmode
9034 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9035
9036 /* Handle CONCAT first. */
9037 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9038 {
9039 if (bitpos == 0
9040 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9041 return op0;
9042 if (bitpos == 0
9043 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9044 && bitsize)
9045 {
9046 op0 = XEXP (op0, 0);
9047 mode2 = GET_MODE (op0);
9048 }
9049 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9050 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9051 && bitpos
9052 && bitsize)
9053 {
9054 op0 = XEXP (op0, 1);
9055 bitpos = 0;
9056 mode2 = GET_MODE (op0);
9057 }
9058 else
9059 /* Otherwise force into memory. */
9060 must_force_mem = 1;
9061 }
9062
9063 /* If this is a constant, put it in a register if it is a legitimate
9064 constant and we don't need a memory reference. */
9065 if (CONSTANT_P (op0)
9066 && mode2 != BLKmode
9067 && LEGITIMATE_CONSTANT_P (op0)
9068 && !must_force_mem)
9069 op0 = force_reg (mode2, op0);
9070
9071 /* Otherwise, if this is a constant, try to force it to the constant
9072 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9073 is a legitimate constant. */
9074 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9075 op0 = validize_mem (memloc);
9076
9077 /* Otherwise, if this is a constant or the object is not in memory
9078 and needs to be, put it there. */
9079 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9080 {
9081 tree nt = build_qualified_type (TREE_TYPE (tem),
9082 (TYPE_QUALS (TREE_TYPE (tem))
9083 | TYPE_QUAL_CONST));
9084 memloc = assign_temp (nt, 1, 1, 1);
9085 emit_move_insn (memloc, op0);
9086 op0 = memloc;
9087 }
9088
9089 if (offset)
9090 {
9091 enum machine_mode address_mode;
9092 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9093 EXPAND_SUM);
9094
9095 gcc_assert (MEM_P (op0));
9096
9097 address_mode
9098 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9099 if (GET_MODE (offset_rtx) != address_mode)
9100 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9101
9102 if (GET_MODE (op0) == BLKmode
9103 /* A constant address in OP0 can have VOIDmode, we must
9104 not try to call force_reg in that case. */
9105 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9106 && bitsize != 0
9107 && (bitpos % bitsize) == 0
9108 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9109 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9110 {
9111 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9112 bitpos = 0;
9113 }
9114
9115 op0 = offset_address (op0, offset_rtx,
9116 highest_pow2_factor (offset));
9117 }
9118
9119 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9120 record its alignment as BIGGEST_ALIGNMENT. */
9121 if (MEM_P (op0) && bitpos == 0 && offset != 0
9122 && is_aligning_offset (offset, tem))
9123 set_mem_align (op0, BIGGEST_ALIGNMENT);
9124
9125 /* Don't forget about volatility even if this is a bitfield. */
9126 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9127 {
9128 if (op0 == orig_op0)
9129 op0 = copy_rtx (op0);
9130
9131 MEM_VOLATILE_P (op0) = 1;
9132 }
9133
9134 /* In cases where an aligned union has an unaligned object
9135 as a field, we might be extracting a BLKmode value from
9136 an integer-mode (e.g., SImode) object. Handle this case
9137 by doing the extract into an object as wide as the field
9138 (which we know to be the width of a basic mode), then
9139 storing into memory, and changing the mode to BLKmode. */
9140 if (mode1 == VOIDmode
9141 || REG_P (op0) || GET_CODE (op0) == SUBREG
9142 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9143 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9144 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9145 && modifier != EXPAND_CONST_ADDRESS
9146 && modifier != EXPAND_INITIALIZER)
9147 /* If the field is volatile, we always want an aligned
9148 access. Only do this if the access is not already naturally
9149 aligned, otherwise "normal" (non-bitfield) volatile fields
9150 become non-addressable. */
9151 || (volatilep && flag_strict_volatile_bitfields > 0
9152 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
9153 /* If the field isn't aligned enough to fetch as a memref,
9154 fetch it as a bit field. */
9155 || (mode1 != BLKmode
9156 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9157 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9158 || (MEM_P (op0)
9159 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9160 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9161 && ((modifier == EXPAND_CONST_ADDRESS
9162 || modifier == EXPAND_INITIALIZER)
9163 ? STRICT_ALIGNMENT
9164 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9165 || (bitpos % BITS_PER_UNIT != 0)))
9166 /* If the type and the field are a constant size and the
9167 size of the type isn't the same size as the bitfield,
9168 we must use bitfield operations. */
9169 || (bitsize >= 0
9170 && TYPE_SIZE (TREE_TYPE (exp))
9171 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9172 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9173 bitsize)))
9174 {
9175 enum machine_mode ext_mode = mode;
9176
9177 if (ext_mode == BLKmode
9178 && ! (target != 0 && MEM_P (op0)
9179 && MEM_P (target)
9180 && bitpos % BITS_PER_UNIT == 0))
9181 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9182
9183 if (ext_mode == BLKmode)
9184 {
9185 if (target == 0)
9186 target = assign_temp (type, 0, 1, 1);
9187
9188 if (bitsize == 0)
9189 return target;
9190
9191 /* In this case, BITPOS must start at a byte boundary and
9192 TARGET, if specified, must be a MEM. */
9193 gcc_assert (MEM_P (op0)
9194 && (!target || MEM_P (target))
9195 && !(bitpos % BITS_PER_UNIT));
9196
9197 emit_block_move (target,
9198 adjust_address (op0, VOIDmode,
9199 bitpos / BITS_PER_UNIT),
9200 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9201 / BITS_PER_UNIT),
9202 (modifier == EXPAND_STACK_PARM
9203 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9204
9205 return target;
9206 }
9207
9208 op0 = validize_mem (op0);
9209
9210 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9211 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9212
9213 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
9214 (modifier == EXPAND_STACK_PARM
9215 ? NULL_RTX : target),
9216 ext_mode, ext_mode);
9217
9218 /* If the result is a record type and BITSIZE is narrower than
9219 the mode of OP0, an integral mode, and this is a big endian
9220 machine, we must put the field into the high-order bits. */
9221 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9222 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9223 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9224 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9225 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
9226 - bitsize),
9227 op0, 1);
9228
9229 /* If the result type is BLKmode, store the data into a temporary
9230 of the appropriate type, but with the mode corresponding to the
9231 mode for the data we have (op0's mode). It's tempting to make
9232 this a constant type, since we know it's only being stored once,
9233 but that can cause problems if we are taking the address of this
9234 COMPONENT_REF because the MEM of any reference via that address
9235 will have flags corresponding to the type, which will not
9236 necessarily be constant. */
9237 if (mode == BLKmode)
9238 {
9239 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9240 rtx new_rtx;
9241
9242 /* If the reference doesn't use the alias set of its type,
9243 we cannot create the temporary using that type. */
9244 if (component_uses_parent_alias_set (exp))
9245 {
9246 new_rtx = assign_stack_local (ext_mode, size, 0);
9247 set_mem_alias_set (new_rtx, get_alias_set (exp));
9248 }
9249 else
9250 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9251
9252 emit_move_insn (new_rtx, op0);
9253 op0 = copy_rtx (new_rtx);
9254 PUT_MODE (op0, BLKmode);
9255 set_mem_attributes (op0, exp, 1);
9256 }
9257
9258 return op0;
9259 }
9260
9261 /* If the result is BLKmode, use that to access the object
9262 now as well. */
9263 if (mode == BLKmode)
9264 mode1 = BLKmode;
9265
9266 /* Get a reference to just this component. */
9267 if (modifier == EXPAND_CONST_ADDRESS
9268 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9269 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9270 else
9271 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9272
9273 if (op0 == orig_op0)
9274 op0 = copy_rtx (op0);
9275
9276 set_mem_attributes (op0, exp, 0);
9277 if (REG_P (XEXP (op0, 0)))
9278 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9279
9280 MEM_VOLATILE_P (op0) |= volatilep;
9281 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9282 || modifier == EXPAND_CONST_ADDRESS
9283 || modifier == EXPAND_INITIALIZER)
9284 return op0;
9285 else if (target == 0)
9286 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9287
9288 convert_move (target, op0, unsignedp);
9289 return target;
9290 }
9291
9292 case OBJ_TYPE_REF:
9293 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9294
9295 case CALL_EXPR:
9296 /* All valid uses of __builtin_va_arg_pack () are removed during
9297 inlining. */
9298 if (CALL_EXPR_VA_ARG_PACK (exp))
9299 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9300 {
9301 tree fndecl = get_callee_fndecl (exp), attr;
9302
9303 if (fndecl
9304 && (attr = lookup_attribute ("error",
9305 DECL_ATTRIBUTES (fndecl))) != NULL)
9306 error ("%Kcall to %qs declared with attribute error: %s",
9307 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9308 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9309 if (fndecl
9310 && (attr = lookup_attribute ("warning",
9311 DECL_ATTRIBUTES (fndecl))) != NULL)
9312 warning_at (tree_nonartificial_location (exp),
9313 0, "%Kcall to %qs declared with attribute warning: %s",
9314 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9315 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9316
9317 /* Check for a built-in function. */
9318 if (fndecl && DECL_BUILT_IN (fndecl))
9319 {
9320 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9321 return expand_builtin (exp, target, subtarget, tmode, ignore);
9322 }
9323 }
9324 return expand_call (exp, target, ignore);
9325
9326 case VIEW_CONVERT_EXPR:
9327 op0 = NULL_RTX;
9328
9329 /* If we are converting to BLKmode, try to avoid an intermediate
9330 temporary by fetching an inner memory reference. */
9331 if (mode == BLKmode
9332 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9333 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9334 && handled_component_p (treeop0))
9335 {
9336 enum machine_mode mode1;
9337 HOST_WIDE_INT bitsize, bitpos;
9338 tree offset;
9339 int unsignedp;
9340 int volatilep = 0;
9341 tree tem
9342 = get_inner_reference (treeop0, &bitsize, &bitpos,
9343 &offset, &mode1, &unsignedp, &volatilep,
9344 true);
9345 rtx orig_op0;
9346
9347 /* ??? We should work harder and deal with non-zero offsets. */
9348 if (!offset
9349 && (bitpos % BITS_PER_UNIT) == 0
9350 && bitsize >= 0
9351 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9352 {
9353 /* See the normal_inner_ref case for the rationale. */
9354 orig_op0
9355 = expand_expr (tem,
9356 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9357 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9358 != INTEGER_CST)
9359 && modifier != EXPAND_STACK_PARM
9360 ? target : NULL_RTX),
9361 VOIDmode,
9362 (modifier == EXPAND_INITIALIZER
9363 || modifier == EXPAND_CONST_ADDRESS
9364 || modifier == EXPAND_STACK_PARM)
9365 ? modifier : EXPAND_NORMAL);
9366
9367 if (MEM_P (orig_op0))
9368 {
9369 op0 = orig_op0;
9370
9371 /* Get a reference to just this component. */
9372 if (modifier == EXPAND_CONST_ADDRESS
9373 || modifier == EXPAND_SUM
9374 || modifier == EXPAND_INITIALIZER)
9375 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9376 else
9377 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9378
9379 if (op0 == orig_op0)
9380 op0 = copy_rtx (op0);
9381
9382 set_mem_attributes (op0, treeop0, 0);
9383 if (REG_P (XEXP (op0, 0)))
9384 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9385
9386 MEM_VOLATILE_P (op0) |= volatilep;
9387 }
9388 }
9389 }
9390
9391 if (!op0)
9392 op0 = expand_expr (treeop0,
9393 NULL_RTX, VOIDmode, modifier);
9394
9395 /* If the input and output modes are both the same, we are done. */
9396 if (mode == GET_MODE (op0))
9397 ;
9398 /* If neither mode is BLKmode, and both modes are the same size
9399 then we can use gen_lowpart. */
9400 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9401 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
9402 && !COMPLEX_MODE_P (GET_MODE (op0)))
9403 {
9404 if (GET_CODE (op0) == SUBREG)
9405 op0 = force_reg (GET_MODE (op0), op0);
9406 temp = gen_lowpart_common (mode, op0);
9407 if (temp)
9408 op0 = temp;
9409 else
9410 {
9411 if (!REG_P (op0) && !MEM_P (op0))
9412 op0 = force_reg (GET_MODE (op0), op0);
9413 op0 = gen_lowpart (mode, op0);
9414 }
9415 }
9416 /* If both types are integral, convert from one mode to the other. */
9417 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
9418 op0 = convert_modes (mode, GET_MODE (op0), op0,
9419 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9420 /* As a last resort, spill op0 to memory, and reload it in a
9421 different mode. */
9422 else if (!MEM_P (op0))
9423 {
9424 /* If the operand is not a MEM, force it into memory. Since we
9425 are going to be changing the mode of the MEM, don't call
9426 force_const_mem for constants because we don't allow pool
9427 constants to change mode. */
9428 tree inner_type = TREE_TYPE (treeop0);
9429
9430 gcc_assert (!TREE_ADDRESSABLE (exp));
9431
9432 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
9433 target
9434 = assign_stack_temp_for_type
9435 (TYPE_MODE (inner_type),
9436 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
9437
9438 emit_move_insn (target, op0);
9439 op0 = target;
9440 }
9441
9442 /* At this point, OP0 is in the correct mode. If the output type is
9443 such that the operand is known to be aligned, indicate that it is.
9444 Otherwise, we need only be concerned about alignment for non-BLKmode
9445 results. */
9446 if (MEM_P (op0))
9447 {
9448 op0 = copy_rtx (op0);
9449
9450 if (TYPE_ALIGN_OK (type))
9451 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
9452 else if (STRICT_ALIGNMENT
9453 && mode != BLKmode
9454 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
9455 {
9456 tree inner_type = TREE_TYPE (treeop0);
9457 HOST_WIDE_INT temp_size
9458 = MAX (int_size_in_bytes (inner_type),
9459 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
9460 rtx new_rtx
9461 = assign_stack_temp_for_type (mode, temp_size, 0, type);
9462 rtx new_with_op0_mode
9463 = adjust_address (new_rtx, GET_MODE (op0), 0);
9464
9465 gcc_assert (!TREE_ADDRESSABLE (exp));
9466
9467 if (GET_MODE (op0) == BLKmode)
9468 emit_block_move (new_with_op0_mode, op0,
9469 GEN_INT (GET_MODE_SIZE (mode)),
9470 (modifier == EXPAND_STACK_PARM
9471 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9472 else
9473 emit_move_insn (new_with_op0_mode, op0);
9474
9475 op0 = new_rtx;
9476 }
9477
9478 op0 = adjust_address (op0, mode, 0);
9479 }
9480
9481 return op0;
9482
9483 /* Use a compare and a jump for BLKmode comparisons, or for function
9484 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
9485
9486 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9487 are occasionally created by folding during expansion. */
9488 case TRUTH_ANDIF_EXPR:
9489 case TRUTH_ORIF_EXPR:
9490 if (! ignore
9491 && (target == 0
9492 || modifier == EXPAND_STACK_PARM
9493 || ! safe_from_p (target, treeop0, 1)
9494 || ! safe_from_p (target, treeop1, 1)
9495 /* Make sure we don't have a hard reg (such as function's return
9496 value) live across basic blocks, if not optimizing. */
9497 || (!optimize && REG_P (target)
9498 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9499 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9500
9501 if (target)
9502 emit_move_insn (target, const0_rtx);
9503
9504 op1 = gen_label_rtx ();
9505 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9506
9507 if (target)
9508 emit_move_insn (target, const1_rtx);
9509
9510 emit_label (op1);
9511 return ignore ? const0_rtx : target;
9512
9513 case STATEMENT_LIST:
9514 {
9515 tree_stmt_iterator iter;
9516
9517 gcc_assert (ignore);
9518
9519 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9520 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9521 }
9522 return const0_rtx;
9523
9524 case COND_EXPR:
9525 /* A COND_EXPR with its type being VOID_TYPE represents a
9526 conditional jump and is handled in
9527 expand_gimple_cond_expr. */
9528 gcc_assert (!VOID_TYPE_P (type));
9529
9530 /* Note that COND_EXPRs whose type is a structure or union
9531 are required to be constructed to contain assignments of
9532 a temporary variable, so that we can evaluate them here
9533 for side effect only. If type is void, we must do likewise. */
9534
9535 gcc_assert (!TREE_ADDRESSABLE (type)
9536 && !ignore
9537 && TREE_TYPE (treeop1) != void_type_node
9538 && TREE_TYPE (treeop2) != void_type_node);
9539
9540 /* If we are not to produce a result, we have no target. Otherwise,
9541 if a target was specified use it; it will not be used as an
9542 intermediate target unless it is safe. If no target, use a
9543 temporary. */
9544
9545 if (modifier != EXPAND_STACK_PARM
9546 && original_target
9547 && safe_from_p (original_target, treeop0, 1)
9548 && GET_MODE (original_target) == mode
9549 #ifdef HAVE_conditional_move
9550 && (! can_conditionally_move_p (mode)
9551 || REG_P (original_target))
9552 #endif
9553 && !MEM_P (original_target))
9554 temp = original_target;
9555 else
9556 temp = assign_temp (type, 0, 0, 1);
9557
9558 do_pending_stack_adjust ();
9559 NO_DEFER_POP;
9560 op0 = gen_label_rtx ();
9561 op1 = gen_label_rtx ();
9562 jumpifnot (treeop0, op0, -1);
9563 store_expr (treeop1, temp,
9564 modifier == EXPAND_STACK_PARM,
9565 false);
9566
9567 emit_jump_insn (gen_jump (op1));
9568 emit_barrier ();
9569 emit_label (op0);
9570 store_expr (treeop2, temp,
9571 modifier == EXPAND_STACK_PARM,
9572 false);
9573
9574 emit_label (op1);
9575 OK_DEFER_POP;
9576 return temp;
9577
9578 case VEC_COND_EXPR:
9579 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9580 return target;
9581
9582 case MODIFY_EXPR:
9583 {
9584 tree lhs = treeop0;
9585 tree rhs = treeop1;
9586 gcc_assert (ignore);
9587
9588 /* Check for |= or &= of a bitfield of size one into another bitfield
9589 of size 1. In this case, (unless we need the result of the
9590 assignment) we can do this more efficiently with a
9591 test followed by an assignment, if necessary.
9592
9593 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9594 things change so we do, this code should be enhanced to
9595 support it. */
9596 if (TREE_CODE (lhs) == COMPONENT_REF
9597 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9598 || TREE_CODE (rhs) == BIT_AND_EXPR)
9599 && TREE_OPERAND (rhs, 0) == lhs
9600 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9601 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9602 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9603 {
9604 rtx label = gen_label_rtx ();
9605 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9606 do_jump (TREE_OPERAND (rhs, 1),
9607 value ? label : 0,
9608 value ? 0 : label, -1);
9609 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9610 MOVE_NONTEMPORAL (exp));
9611 do_pending_stack_adjust ();
9612 emit_label (label);
9613 return const0_rtx;
9614 }
9615
9616 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9617 return const0_rtx;
9618 }
9619
9620 case ADDR_EXPR:
9621 return expand_expr_addr_expr (exp, target, tmode, modifier);
9622
9623 case REALPART_EXPR:
9624 op0 = expand_normal (treeop0);
9625 return read_complex_part (op0, false);
9626
9627 case IMAGPART_EXPR:
9628 op0 = expand_normal (treeop0);
9629 return read_complex_part (op0, true);
9630
9631 case RETURN_EXPR:
9632 case LABEL_EXPR:
9633 case GOTO_EXPR:
9634 case SWITCH_EXPR:
9635 case ASM_EXPR:
9636 /* Expanded in cfgexpand.c. */
9637 gcc_unreachable ();
9638
9639 case TRY_CATCH_EXPR:
9640 case CATCH_EXPR:
9641 case EH_FILTER_EXPR:
9642 case TRY_FINALLY_EXPR:
9643 /* Lowered by tree-eh.c. */
9644 gcc_unreachable ();
9645
9646 case WITH_CLEANUP_EXPR:
9647 case CLEANUP_POINT_EXPR:
9648 case TARGET_EXPR:
9649 case CASE_LABEL_EXPR:
9650 case VA_ARG_EXPR:
9651 case BIND_EXPR:
9652 case INIT_EXPR:
9653 case CONJ_EXPR:
9654 case COMPOUND_EXPR:
9655 case PREINCREMENT_EXPR:
9656 case PREDECREMENT_EXPR:
9657 case POSTINCREMENT_EXPR:
9658 case POSTDECREMENT_EXPR:
9659 case LOOP_EXPR:
9660 case EXIT_EXPR:
9661 /* Lowered by gimplify.c. */
9662 gcc_unreachable ();
9663
9664 case FDESC_EXPR:
9665 /* Function descriptors are not valid except for as
9666 initialization constants, and should not be expanded. */
9667 gcc_unreachable ();
9668
9669 case WITH_SIZE_EXPR:
9670 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9671 have pulled out the size to use in whatever context it needed. */
9672 return expand_expr_real (treeop0, original_target, tmode,
9673 modifier, alt_rtl);
9674
9675 case COMPOUND_LITERAL_EXPR:
9676 {
9677 /* Initialize the anonymous variable declared in the compound
9678 literal, then return the variable. */
9679 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9680
9681 /* Create RTL for this variable. */
9682 if (!DECL_RTL_SET_P (decl))
9683 {
9684 if (DECL_HARD_REGISTER (decl))
9685 /* The user specified an assembler name for this variable.
9686 Set that up now. */
9687 rest_of_decl_compilation (decl, 0, 0);
9688 else
9689 expand_decl (decl);
9690 }
9691
9692 return expand_expr_real (decl, original_target, tmode,
9693 modifier, alt_rtl);
9694 }
9695
9696 default:
9697 return expand_expr_real_2 (&ops, target, tmode, modifier);
9698 }
9699 }
9700 \f
9701 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9702 signedness of TYPE), possibly returning the result in TARGET. */
9703 static rtx
9704 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9705 {
9706 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9707 if (target && GET_MODE (target) != GET_MODE (exp))
9708 target = 0;
9709 /* For constant values, reduce using build_int_cst_type. */
9710 if (CONST_INT_P (exp))
9711 {
9712 HOST_WIDE_INT value = INTVAL (exp);
9713 tree t = build_int_cst_type (type, value);
9714 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9715 }
9716 else if (TYPE_UNSIGNED (type))
9717 {
9718 rtx mask = immed_double_int_const (double_int_mask (prec),
9719 GET_MODE (exp));
9720 return expand_and (GET_MODE (exp), exp, mask, target);
9721 }
9722 else
9723 {
9724 tree count = build_int_cst (NULL_TREE,
9725 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9726 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9727 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9728 }
9729 }
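/* A minimal illustrative sketch (an assumed helper, not used by the
   expander above): the same reduction that reduce_to_bit_field_precision
   performs on rtl, written out on a plain HOST_WIDE_INT for a precision
   PREC assumed to be smaller than HOST_BITS_PER_WIDE_INT.  Unsigned values
   are masked down to PREC bits; signed values are also sign-extended from
   bit PREC - 1.  */

static inline HOST_WIDE_INT
example_reduce_precision (HOST_WIDE_INT value, int prec, int unsignedp)
{
  HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << prec) - 1;

  /* Keep only the low PREC bits.  */
  value &= mask;
  /* If the value is signed and its PREC-bit sign bit is set, fill the
     discarded upper bits so the result reads as a negative number.  */
  if (!unsignedp && (value & ((HOST_WIDE_INT) 1 << (prec - 1))) != 0)
    value |= ~mask;
  return value;
}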
9730 \f
9731 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9732 when applied to the address of EXP produces an address known to be
9733 aligned more than BIGGEST_ALIGNMENT. */
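/* A typical offset of this shape (hypothetical example) is
   ((- (sizetype) &EXP) & (ALIGN - 1)) for some power-of-two ALIGN larger
   than BIGGEST_ALIGNMENT; adding that value to &EXP rounds the address up
   to a multiple of ALIGN, which is the pattern the checks below look
   for.  */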
9734
9735 static int
9736 is_aligning_offset (const_tree offset, const_tree exp)
9737 {
9738 /* Strip off any conversions. */
9739 while (CONVERT_EXPR_P (offset))
9740 offset = TREE_OPERAND (offset, 0);
9741
9742 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9743 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9744 if (TREE_CODE (offset) != BIT_AND_EXPR
9745 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9746 || compare_tree_int (TREE_OPERAND (offset, 1),
9747 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9748 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9749 return 0;
9750
9751 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9752 It must be NEGATE_EXPR. Then strip any more conversions. */
9753 offset = TREE_OPERAND (offset, 0);
9754 while (CONVERT_EXPR_P (offset))
9755 offset = TREE_OPERAND (offset, 0);
9756
9757 if (TREE_CODE (offset) != NEGATE_EXPR)
9758 return 0;
9759
9760 offset = TREE_OPERAND (offset, 0);
9761 while (CONVERT_EXPR_P (offset))
9762 offset = TREE_OPERAND (offset, 0);
9763
9764 /* This must now be the address of EXP. */
9765 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9766 }
9767 \f
9768 /* Return the tree node if an ARG corresponds to a string constant or zero
9769 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9770 in bytes within the string that ARG is accessing. The type of the
9771 offset will be `sizetype'. */
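/* For example (hypothetical caller), given ARG == &"hello"[1] or
   ARG == "hello" + 1, the STRING_CST "hello" is returned and *PTR_OFFSET
   is set to (sizetype) 1.  For a VAR_DECL initialized from a string
   literal, the DECL_INITIAL string is returned instead, provided the
   offset stays inside the literal.  */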
9772
9773 tree
9774 string_constant (tree arg, tree *ptr_offset)
9775 {
9776 tree array, offset, lower_bound;
9777 STRIP_NOPS (arg);
9778
9779 if (TREE_CODE (arg) == ADDR_EXPR)
9780 {
9781 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9782 {
9783 *ptr_offset = size_zero_node;
9784 return TREE_OPERAND (arg, 0);
9785 }
9786 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9787 {
9788 array = TREE_OPERAND (arg, 0);
9789 offset = size_zero_node;
9790 }
9791 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9792 {
9793 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9794 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9795 if (TREE_CODE (array) != STRING_CST
9796 && TREE_CODE (array) != VAR_DECL)
9797 return 0;
9798
9799 /* Check if the array has a nonzero lower bound. */
9800 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9801 if (!integer_zerop (lower_bound))
9802 {
9803 /* If the offset and base aren't both constants, return 0. */
9804 if (TREE_CODE (lower_bound) != INTEGER_CST)
9805 return 0;
9806 if (TREE_CODE (offset) != INTEGER_CST)
9807 return 0;
9808 /* Adjust offset by the lower bound. */
9809 offset = size_diffop (fold_convert (sizetype, offset),
9810 fold_convert (sizetype, lower_bound));
9811 }
9812 }
9813 else
9814 return 0;
9815 }
9816 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9817 {
9818 tree arg0 = TREE_OPERAND (arg, 0);
9819 tree arg1 = TREE_OPERAND (arg, 1);
9820
9821 STRIP_NOPS (arg0);
9822 STRIP_NOPS (arg1);
9823
9824 if (TREE_CODE (arg0) == ADDR_EXPR
9825 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9826 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9827 {
9828 array = TREE_OPERAND (arg0, 0);
9829 offset = arg1;
9830 }
9831 else if (TREE_CODE (arg1) == ADDR_EXPR
9832 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9833 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9834 {
9835 array = TREE_OPERAND (arg1, 0);
9836 offset = arg0;
9837 }
9838 else
9839 return 0;
9840 }
9841 else
9842 return 0;
9843
9844 if (TREE_CODE (array) == STRING_CST)
9845 {
9846 *ptr_offset = fold_convert (sizetype, offset);
9847 return array;
9848 }
9849 else if (TREE_CODE (array) == VAR_DECL
9850 || TREE_CODE (array) == CONST_DECL)
9851 {
9852 int length;
9853
9854 /* Variables initialized to string literals can be handled too. */
9855 if (!const_value_known_p (array)
9856 || !DECL_INITIAL (array)
9857 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9858 return 0;
9859
9860 /* Avoid const char foo[4] = "abcde"; */
9861 if (DECL_SIZE_UNIT (array) == NULL_TREE
9862 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9863 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9864 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9865 return 0;
9866
9867 /* If variable is bigger than the string literal, OFFSET must be constant
9868 and inside of the bounds of the string literal. */
9869 offset = fold_convert (sizetype, offset);
9870 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9871 && (! host_integerp (offset, 1)
9872 || compare_tree_int (offset, length) >= 0))
9873 return 0;
9874
9875 *ptr_offset = offset;
9876 return DECL_INITIAL (array);
9877 }
9878
9879 return 0;
9880 }
9881 \f
9882 /* Generate code to calculate OPS, an exploded expression,
9883 using a store-flag instruction, and return an rtx for the result.
9884 OPS reflects a comparison.
9885
9886 If TARGET is nonzero, store the result there if convenient.
9887
9888 Return zero if there is no suitable set-flag instruction
9889 available on this machine.
9890
9891 Once expand_expr has been called on the arguments of the comparison,
9892 we are committed to doing the store flag, since it is not safe to
9893 re-evaluate the expression. We emit the store-flag insn by calling
9894 emit_store_flag, but only expand the arguments if we have a reason
9895 to believe that emit_store_flag will be successful. If we think that
9896 it will, but it isn't, we have to simulate the store-flag with a
9897 set/jump/set sequence. */
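/* As an illustration (assuming a target with a usable cstore pattern), a
   comparison such as x < y in SImode would come back from
   emit_store_flag_force below as something of the rough shape
   (set (reg:SI target) (lt:SI (reg:SI x) (reg:SI y))), possibly preceded
   by a separate compare, rather than as a branch sequence.  */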
9898
9899 static rtx
9900 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
9901 {
9902 enum rtx_code code;
9903 tree arg0, arg1, type;
9904 tree tem;
9905 enum machine_mode operand_mode;
9906 int unsignedp;
9907 rtx op0, op1;
9908 rtx subtarget = target;
9909 location_t loc = ops->location;
9910
9911 arg0 = ops->op0;
9912 arg1 = ops->op1;
9913
9914 /* Don't crash if the comparison was erroneous. */
9915 if (arg0 == error_mark_node || arg1 == error_mark_node)
9916 return const0_rtx;
9917
9918 type = TREE_TYPE (arg0);
9919 operand_mode = TYPE_MODE (type);
9920 unsignedp = TYPE_UNSIGNED (type);
9921
9922 /* We won't bother with BLKmode store-flag operations because it would mean
9923 passing a lot of information to emit_store_flag. */
9924 if (operand_mode == BLKmode)
9925 return 0;
9926
9927 /* We won't bother with store-flag operations involving function pointers
9928 when function pointers must be canonicalized before comparisons. */
9929 #ifdef HAVE_canonicalize_funcptr_for_compare
9930 if (HAVE_canonicalize_funcptr_for_compare
9931 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
9932 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
9933 == FUNCTION_TYPE))
9934 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
9935 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
9936 == FUNCTION_TYPE))))
9937 return 0;
9938 #endif
9939
9940 STRIP_NOPS (arg0);
9941 STRIP_NOPS (arg1);
9942
9943 /* Get the rtx comparison code to use. We know that OPS describes a comparison
9944 operation of some type. Some comparisons against 1 and -1 can be
9945 converted to comparisons with zero. Do so here so that the tests
9946 below will be aware that we have a comparison with zero. These
9947 tests will not catch constants in the first operand, but constants
9948 are rarely passed as the first operand. */
9949
9950 switch (ops->code)
9951 {
9952 case EQ_EXPR:
9953 code = EQ;
9954 break;
9955 case NE_EXPR:
9956 code = NE;
9957 break;
9958 case LT_EXPR:
9959 if (integer_onep (arg1))
9960 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9961 else
9962 code = unsignedp ? LTU : LT;
9963 break;
9964 case LE_EXPR:
9965 if (! unsignedp && integer_all_onesp (arg1))
9966 arg1 = integer_zero_node, code = LT;
9967 else
9968 code = unsignedp ? LEU : LE;
9969 break;
9970 case GT_EXPR:
9971 if (! unsignedp && integer_all_onesp (arg1))
9972 arg1 = integer_zero_node, code = GE;
9973 else
9974 code = unsignedp ? GTU : GT;
9975 break;
9976 case GE_EXPR:
9977 if (integer_onep (arg1))
9978 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9979 else
9980 code = unsignedp ? GEU : GE;
9981 break;
9982
9983 case UNORDERED_EXPR:
9984 code = UNORDERED;
9985 break;
9986 case ORDERED_EXPR:
9987 code = ORDERED;
9988 break;
9989 case UNLT_EXPR:
9990 code = UNLT;
9991 break;
9992 case UNLE_EXPR:
9993 code = UNLE;
9994 break;
9995 case UNGT_EXPR:
9996 code = UNGT;
9997 break;
9998 case UNGE_EXPR:
9999 code = UNGE;
10000 break;
10001 case UNEQ_EXPR:
10002 code = UNEQ;
10003 break;
10004 case LTGT_EXPR:
10005 code = LTGT;
10006 break;
10007
10008 default:
10009 gcc_unreachable ();
10010 }
10011
10012 /* Put a constant second. */
10013 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10014 || TREE_CODE (arg0) == FIXED_CST)
10015 {
10016 tem = arg0; arg0 = arg1; arg1 = tem;
10017 code = swap_condition (code);
10018 }
10019
10020 /* If this is an equality or inequality test of a single bit, we can
10021 do this by shifting the bit being tested to the low-order bit and
10022 masking the result with the constant 1. If the condition was EQ,
10023 we xor it with 1. This does not require an scc insn and is faster
10024 than an scc insn even if we have it.
10025
10026 The code to make this transformation was moved into fold_single_bit_test,
10027 so we just call into the folder and expand its result. */
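/* For instance (hypothetical operands), (x & 4) == 0 is folded into
   ((x >> 2) & 1) ^ 1 and then expanded as shifts and masks; the final
   ^ 1 is present only for the EQ case.  */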
10028
10029 if ((code == NE || code == EQ)
10030 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10031 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10032 {
10033 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10034 return expand_expr (fold_single_bit_test (loc,
10035 code == NE ? NE_EXPR : EQ_EXPR,
10036 arg0, arg1, type),
10037 target, VOIDmode, EXPAND_NORMAL);
10038 }
10039
10040 if (! get_subtarget (target)
10041 || GET_MODE (subtarget) != operand_mode)
10042 subtarget = 0;
10043
10044 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10045
10046 if (target == 0)
10047 target = gen_reg_rtx (mode);
10048
10049 /* Try a cstore if possible. */
10050 return emit_store_flag_force (target, code, op0, op1,
10051 operand_mode, unsignedp, 1);
10052 }
10053 \f
10054
10055 /* Stubs in case we haven't got a casesi insn. */
10056 #ifndef HAVE_casesi
10057 # define HAVE_casesi 0
10058 # define gen_casesi(a, b, c, d, e) (0)
10059 # define CODE_FOR_casesi CODE_FOR_nothing
10060 #endif
10061
10062 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10063 0 otherwise (i.e. if there is no casesi instruction). */
10064 int
10065 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10066 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10067 rtx fallback_label ATTRIBUTE_UNUSED)
10068 {
10069 struct expand_operand ops[5];
10070 enum machine_mode index_mode = SImode;
10071 int index_bits = GET_MODE_BITSIZE (index_mode);
10072 rtx op1, op2, index;
10073
10074 if (! HAVE_casesi)
10075 return 0;
10076
10077 /* Convert the index to SImode. */
10078 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10079 {
10080 enum machine_mode omode = TYPE_MODE (index_type);
10081 rtx rangertx = expand_normal (range);
10082
10083 /* We must handle the endpoints in the original mode. */
10084 index_expr = build2 (MINUS_EXPR, index_type,
10085 index_expr, minval);
10086 minval = integer_zero_node;
10087 index = expand_normal (index_expr);
10088 if (default_label)
10089 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10090 omode, 1, default_label);
10091 /* Now we can safely truncate. */
10092 index = convert_to_mode (index_mode, index, 0);
10093 }
10094 else
10095 {
10096 if (TYPE_MODE (index_type) != index_mode)
10097 {
10098 index_type = lang_hooks.types.type_for_size (index_bits, 0);
10099 index_expr = fold_convert (index_type, index_expr);
10100 }
10101
10102 index = expand_normal (index_expr);
10103 }
10104
10105 do_pending_stack_adjust ();
10106
10107 op1 = expand_normal (minval);
10108 op2 = expand_normal (range);
10109
10110 create_input_operand (&ops[0], index, index_mode);
10111 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
10112 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
10113 create_fixed_operand (&ops[3], table_label);
10114 create_fixed_operand (&ops[4], (default_label
10115 ? default_label
10116 : fallback_label));
10117 expand_jump_insn (CODE_FOR_casesi, 5, ops);
10118 return 1;
10119 }
10120
10121 /* Attempt to generate a tablejump instruction; same concept. */
10122 #ifndef HAVE_tablejump
10123 #define HAVE_tablejump 0
10124 #define gen_tablejump(x, y) (0)
10125 #endif
10126
10127 /* Subroutine of the next function.
10128
10129 INDEX is the value being switched on, with the lowest value
10130 in the table already subtracted.
10131 MODE is its expected mode (needed if INDEX is constant).
10132 RANGE is the length of the jump table.
10133 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10134
10135 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10136 index value is out of range. */
10137
10138 static void
10139 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10140 rtx default_label)
10141 {
10142 rtx temp, vector;
10143
10144 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10145 cfun->cfg->max_jumptable_ents = INTVAL (range);
10146
10147 /* Do an unsigned comparison (in the proper mode) between the index
10148 expression and the value which represents the length of the range.
10149 Since we just finished subtracting the lower bound of the range
10150 from the index expression, this comparison allows us to simultaneously
10151 check that the original index expression value is both greater than
10152 or equal to the minimum value of the range and less than or equal to
10153 the maximum value of the range. */
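/* A worked example (hypothetical values): for case labels 5 through 9 the
   caller passes INDEX - 5 and RANGE = 4.  An original index of 3 becomes
   (unsigned) -2, which is far larger than 4, so the single GTU test below
   also rejects values under the original lower bound.  */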
10154
10155 if (default_label)
10156 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10157 default_label);
10158
10159 /* If index is in range, it must fit in Pmode.
10160 Convert to Pmode so we can index with it. */
10161 if (mode != Pmode)
10162 index = convert_to_mode (Pmode, index, 1);
10163
10164 /* Don't let a MEM slip through, because then INDEX that comes
10165 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10166 and break_out_memory_refs will go to work on it and mess it up. */
10167 #ifdef PIC_CASE_VECTOR_ADDRESS
10168 if (flag_pic && !REG_P (index))
10169 index = copy_to_mode_reg (Pmode, index);
10170 #endif
10171
10172 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10173 GET_MODE_SIZE, because this indicates how large insns are. The other
10174 uses should all be Pmode, because they are addresses. This code
10175 could fail if addresses and insns are not the same size. */
10176 index = gen_rtx_PLUS (Pmode,
10177 gen_rtx_MULT (Pmode, index,
10178 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10179 gen_rtx_LABEL_REF (Pmode, table_label));
10180 #ifdef PIC_CASE_VECTOR_ADDRESS
10181 if (flag_pic)
10182 index = PIC_CASE_VECTOR_ADDRESS (index);
10183 else
10184 #endif
10185 index = memory_address (CASE_VECTOR_MODE, index);
10186 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10187 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10188 convert_move (temp, vector, 0);
10189
10190 emit_jump_insn (gen_tablejump (temp, table_label));
10191
10192 /* If we are generating PIC code or if the table is PC-relative, the
10193 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10194 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10195 emit_barrier ();
10196 }
10197
10198 int
10199 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10200 rtx table_label, rtx default_label)
10201 {
10202 rtx index;
10203
10204 if (! HAVE_tablejump)
10205 return 0;
10206
10207 index_expr = fold_build2 (MINUS_EXPR, index_type,
10208 fold_convert (index_type, index_expr),
10209 fold_convert (index_type, minval));
10210 index = expand_normal (index_expr);
10211 do_pending_stack_adjust ();
10212
10213 do_tablejump (index, TYPE_MODE (index_type),
10214 convert_modes (TYPE_MODE (index_type),
10215 TYPE_MODE (TREE_TYPE (range)),
10216 expand_normal (range),
10217 TYPE_UNSIGNED (TREE_TYPE (range))),
10218 table_label, default_label);
10219 return 1;
10220 }
10221
10222 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
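/* For example (hypothetical constant), a V4SI VECTOR_CST listing the
   elements 1, 2 and 3 becomes (const_vector:V4SI [1 2 3 0]); elements
   missing from the constant's chain are filled with zero at the end
   below.  */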
10223 static rtx
10224 const_vector_from_tree (tree exp)
10225 {
10226 rtvec v;
10227 int units, i;
10228 tree link, elt;
10229 enum machine_mode inner, mode;
10230
10231 mode = TYPE_MODE (TREE_TYPE (exp));
10232
10233 if (initializer_zerop (exp))
10234 return CONST0_RTX (mode);
10235
10236 units = GET_MODE_NUNITS (mode);
10237 inner = GET_MODE_INNER (mode);
10238
10239 v = rtvec_alloc (units);
10240
10241 link = TREE_VECTOR_CST_ELTS (exp);
10242 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10243 {
10244 elt = TREE_VALUE (link);
10245
10246 if (TREE_CODE (elt) == REAL_CST)
10247 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10248 inner);
10249 else if (TREE_CODE (elt) == FIXED_CST)
10250 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10251 inner);
10252 else
10253 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
10254 inner);
10255 }
10256
10257 /* Initialize remaining elements to 0. */
10258 for (; i < units; ++i)
10259 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10260
10261 return gen_rtx_CONST_VECTOR (mode, v);
10262 }
10263
10264 /* Build a decl for a personality function given a language prefix. */
10265
10266 tree
10267 build_personality_function (const char *lang)
10268 {
10269 const char *unwind_and_version;
10270 tree decl, type;
10271 char *name;
10272
10273 switch (targetm.except_unwind_info (&global_options))
10274 {
10275 case UI_NONE:
10276 return NULL;
10277 case UI_SJLJ:
10278 unwind_and_version = "_sj0";
10279 break;
10280 case UI_DWARF2:
10281 case UI_TARGET:
10282 unwind_and_version = "_v0";
10283 break;
10284 default:
10285 gcc_unreachable ();
10286 }
10287
10288 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
10289
10290 type = build_function_type_list (integer_type_node, integer_type_node,
10291 long_long_unsigned_type_node,
10292 ptr_type_node, ptr_type_node, NULL_TREE);
10293 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10294 get_identifier (name), type);
10295 DECL_ARTIFICIAL (decl) = 1;
10296 DECL_EXTERNAL (decl) = 1;
10297 TREE_PUBLIC (decl) = 1;
10298
10299 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
10300 are the flags assigned by targetm.encode_section_info. */
10301 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
10302
10303 return decl;
10304 }
10305
10306 /* Extracts the personality function of DECL and returns the corresponding
10307 libfunc. */
10308
10309 rtx
10310 get_personality_function (tree decl)
10311 {
10312 tree personality = DECL_FUNCTION_PERSONALITY (decl);
10313 enum eh_personality_kind pk;
10314
10315 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10316 if (pk == eh_personality_none)
10317 return NULL;
10318
10319 if (!personality
10320 && pk == eh_personality_any)
10321 personality = lang_hooks.eh_personality ();
10322
10323 if (pk == eh_personality_lang)
10324 gcc_assert (personality != NULL_TREE);
10325
10326 return XEXP (DECL_RTL (personality), 0);
10327 }
10328
10329 #include "gt-expr.h"