Implement interleave via permutation.
[gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "reload.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
48 #include "tree-iterator.h"
49 #include "tree-pass.h"
50 #include "tree-flow.h"
51 #include "target.h"
52 #include "common/common-target.h"
53 #include "timevar.h"
54 #include "df.h"
55 #include "diagnostic.h"
56 #include "ssaexpand.h"
57 #include "target-globals.h"
58 #include "params.h"
59
60 /* Decide whether a function's arguments should be processed
61 from first to last or from last to first.
62
    63    They should be processed from last to first if the stack and args
    64    grow in opposite directions, but only if we have push insns. */
65
66 #ifdef PUSH_ROUNDING
67
68 #ifndef PUSH_ARGS_REVERSED
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
71 #endif
72 #endif
73
74 #endif
75
76 #ifndef STACK_PUSH_CODE
77 #ifdef STACK_GROWS_DOWNWARD
78 #define STACK_PUSH_CODE PRE_DEC
79 #else
80 #define STACK_PUSH_CODE PRE_INC
81 #endif
82 #endif
83
84
85 /* If this is nonzero, we do not bother generating VOLATILE
86 around volatile memory references, and we are willing to
87 output indirect addresses. If cse is to follow, we reject
88 indirect addresses so a useful potential cse is generated;
89 if it is used only once, instruction combination will produce
90 the same indirect address eventually. */
91 int cse_not_expected;
92
93 /* This structure is used by move_by_pieces to describe the move to
94 be performed. */
95 struct move_by_pieces_d
96 {
97 rtx to;
98 rtx to_addr;
99 int autinc_to;
100 int explicit_inc_to;
101 rtx from;
102 rtx from_addr;
103 int autinc_from;
104 int explicit_inc_from;
105 unsigned HOST_WIDE_INT len;
106 HOST_WIDE_INT offset;
107 int reverse;
108 };
109
110 /* This structure is used by store_by_pieces to describe the clear to
111 be performed. */
112
113 struct store_by_pieces_d
114 {
115 rtx to;
116 rtx to_addr;
117 int autinc_to;
118 int explicit_inc_to;
119 unsigned HOST_WIDE_INT len;
120 HOST_WIDE_INT offset;
121 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
122 void *constfundata;
123 int reverse;
124 };
125
126 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
127 unsigned int,
128 unsigned int);
129 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
130 struct move_by_pieces_d *);
131 static bool block_move_libcall_safe_for_call_parm (void);
132 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
133 static tree emit_block_move_libcall_fn (int);
134 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
135 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
136 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
137 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
138 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
139 struct store_by_pieces_d *);
140 static tree clear_storage_libcall_fn (int);
141 static rtx compress_float_constant (rtx, rtx);
142 static rtx get_subtarget (rtx);
143 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
144 HOST_WIDE_INT, enum machine_mode,
145 tree, tree, int, alias_set_type);
146 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
147 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
148 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
149 enum machine_mode,
150 tree, tree, alias_set_type, bool);
151
152 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
153
154 static int is_aligning_offset (const_tree, const_tree);
155 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
156 enum expand_modifier);
157 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
158 static rtx do_store_flag (sepops, rtx, enum machine_mode);
159 #ifdef PUSH_ROUNDING
160 static void emit_single_push_insn (enum machine_mode, rtx, tree);
161 #endif
162 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
163 static rtx const_vector_from_tree (tree);
164 static void write_complex_part (rtx, rtx, bool);
165
166 /* This macro is used to determine whether move_by_pieces should be called
167 to perform a structure copy. */
168 #ifndef MOVE_BY_PIECES_P
169 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
170 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
171 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
172 #endif
173
174 /* This macro is used to determine whether clear_by_pieces should be
175 called to clear storage. */
176 #ifndef CLEAR_BY_PIECES_P
177 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
179 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
180 #endif
181
182 /* This macro is used to determine whether store_by_pieces should be
183 called to "memset" storage with byte values other than zero. */
184 #ifndef SET_BY_PIECES_P
185 #define SET_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
188 #endif
189
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memcpy" storage when the source is a constant string. */
192 #ifndef STORE_BY_PIECES_P
193 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
195 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
196 #endif
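
/* Editor's note: the four *_BY_PIECES_P heuristics above share one shape:
   estimate how many piecewise move insns the operation would take and expand
   inline only if that estimate beats a target ratio.  A minimal standalone
   sketch of that rule follows; estimate_piecewise_insns is a hypothetical
   stand-in for move_by_pieces_ninsns (it ignores alignment and the target's
   actual mov patterns), and none of these names are part of GCC.  */

#include <stdbool.h>
#include <stddef.h>

/* Greedy estimate: count one insn per piece, using the widest power-of-two
   piece that fits and then successively narrower pieces.  */
static size_t
estimate_piecewise_insns (size_t len, size_t widest_piece)
{
  size_t n_insns = 0;

  for (; widest_piece > 0; widest_piece /= 2)
    {
      n_insns += len / widest_piece;
      len %= widest_piece;
    }
  return n_insns;
}

/* Expand inline only when the estimate is below the MOVE_RATIO-style
   threshold; otherwise a library call is assumed to be cheaper.  */
static bool
use_by_pieces (size_t len, size_t widest_piece, unsigned int ratio)
{
  return estimate_piecewise_insns (len, widest_piece) < ratio;
}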
197
198 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
199
200 #ifndef SLOW_UNALIGNED_ACCESS
201 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
202 #endif
203 \f
204 /* This is run to set up which modes can be used
205 directly in memory and to initialize the block move optab. It is run
206 at the beginning of compilation and when the target is reinitialized. */
207
208 void
209 init_expr_target (void)
210 {
211 rtx insn, pat;
212 enum machine_mode mode;
213 int num_clobbers;
214 rtx mem, mem1;
215 rtx reg;
216
217 /* Try indexing by frame ptr and try by stack ptr.
218 It is known that on the Convex the stack ptr isn't a valid index.
219 With luck, one or the other is valid on any machine. */
220 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
221 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
222
223 /* A scratch register we can modify in-place below to avoid
224 useless RTL allocations. */
225 reg = gen_rtx_REG (VOIDmode, -1);
226
227 insn = rtx_alloc (INSN);
228 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
229 PATTERN (insn) = pat;
230
231 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
232 mode = (enum machine_mode) ((int) mode + 1))
233 {
234 int regno;
235
236 direct_load[(int) mode] = direct_store[(int) mode] = 0;
237 PUT_MODE (mem, mode);
238 PUT_MODE (mem1, mode);
239 PUT_MODE (reg, mode);
240
241 /* See if there is some register that can be used in this mode and
242 directly loaded or stored from memory. */
243
244 if (mode != VOIDmode && mode != BLKmode)
245 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
246 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
247 regno++)
248 {
249 if (! HARD_REGNO_MODE_OK (regno, mode))
250 continue;
251
252 SET_REGNO (reg, regno);
253
254 SET_SRC (pat) = mem;
255 SET_DEST (pat) = reg;
256 if (recog (pat, insn, &num_clobbers) >= 0)
257 direct_load[(int) mode] = 1;
258
259 SET_SRC (pat) = mem1;
260 SET_DEST (pat) = reg;
261 if (recog (pat, insn, &num_clobbers) >= 0)
262 direct_load[(int) mode] = 1;
263
264 SET_SRC (pat) = reg;
265 SET_DEST (pat) = mem;
266 if (recog (pat, insn, &num_clobbers) >= 0)
267 direct_store[(int) mode] = 1;
268
269 SET_SRC (pat) = reg;
270 SET_DEST (pat) = mem1;
271 if (recog (pat, insn, &num_clobbers) >= 0)
272 direct_store[(int) mode] = 1;
273 }
274 }
275
276 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
277
278 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
279 mode = GET_MODE_WIDER_MODE (mode))
280 {
281 enum machine_mode srcmode;
282 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
283 srcmode = GET_MODE_WIDER_MODE (srcmode))
284 {
285 enum insn_code ic;
286
287 ic = can_extend_p (mode, srcmode, 0);
288 if (ic == CODE_FOR_nothing)
289 continue;
290
291 PUT_MODE (mem, srcmode);
292
293 if (insn_operand_matches (ic, 1, mem))
294 float_extend_from_mem[mode][srcmode] = true;
295 }
296 }
297 }
298
299 /* This is run at the start of compiling a function. */
300
301 void
302 init_expr (void)
303 {
304 memset (&crtl->expr, 0, sizeof (crtl->expr));
305 }
306 \f
307 /* Copy data from FROM to TO, where the machine modes are not the same.
308 Both modes may be integer, or both may be floating, or both may be
309 fixed-point.
310 UNSIGNEDP should be nonzero if FROM is an unsigned type.
311 This causes zero-extension instead of sign-extension. */
312
313 void
314 convert_move (rtx to, rtx from, int unsignedp)
315 {
316 enum machine_mode to_mode = GET_MODE (to);
317 enum machine_mode from_mode = GET_MODE (from);
318 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
319 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
320 enum insn_code code;
321 rtx libcall;
322
323 /* rtx code for making an equivalent value. */
324 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
325 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
326
327
328 gcc_assert (to_real == from_real);
329 gcc_assert (to_mode != BLKmode);
330 gcc_assert (from_mode != BLKmode);
331
332 /* If the source and destination are already the same, then there's
333 nothing to do. */
334 if (to == from)
335 return;
336
337 /* If FROM is a SUBREG that indicates that we have already done at least
338 the required extension, strip it. We don't handle such SUBREGs as
339 TO here. */
340
341 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
342 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
343 >= GET_MODE_PRECISION (to_mode))
344 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
345 from = gen_lowpart (to_mode, from), from_mode = to_mode;
346
347 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
348
349 if (to_mode == from_mode
350 || (from_mode == VOIDmode && CONSTANT_P (from)))
351 {
352 emit_move_insn (to, from);
353 return;
354 }
355
356 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
357 {
358 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
359
360 if (VECTOR_MODE_P (to_mode))
361 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
362 else
363 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
364
365 emit_move_insn (to, from);
366 return;
367 }
368
369 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
370 {
371 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
372 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
373 return;
374 }
375
376 if (to_real)
377 {
378 rtx value, insns;
379 convert_optab tab;
380
381 gcc_assert ((GET_MODE_PRECISION (from_mode)
382 != GET_MODE_PRECISION (to_mode))
383 || (DECIMAL_FLOAT_MODE_P (from_mode)
384 != DECIMAL_FLOAT_MODE_P (to_mode)));
385
386 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
387 /* Conversion between decimal float and binary float, same size. */
388 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
389 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
390 tab = sext_optab;
391 else
392 tab = trunc_optab;
393
394 /* Try converting directly if the insn is supported. */
395
396 code = convert_optab_handler (tab, to_mode, from_mode);
397 if (code != CODE_FOR_nothing)
398 {
399 emit_unop_insn (code, to, from,
400 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
401 return;
402 }
403
404 /* Otherwise use a libcall. */
405 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
406
407 /* Is this conversion implemented yet? */
408 gcc_assert (libcall);
409
410 start_sequence ();
411 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
412 1, from, from_mode);
413 insns = get_insns ();
414 end_sequence ();
415 emit_libcall_block (insns, to, value,
416 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
417 from)
418 : gen_rtx_FLOAT_EXTEND (to_mode, from));
419 return;
420 }
421
422 /* Handle pointer conversion. */ /* SPEE 900220. */
423 /* Targets are expected to provide conversion insns between PxImode and
424 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
425 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
426 {
427 enum machine_mode full_mode
428 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
429
430 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
431 != CODE_FOR_nothing);
432
433 if (full_mode != from_mode)
434 from = convert_to_mode (full_mode, from, unsignedp);
435 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
436 to, from, UNKNOWN);
437 return;
438 }
439 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
440 {
441 rtx new_from;
442 enum machine_mode full_mode
443 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
444
445 gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)
446 != CODE_FOR_nothing);
447
448 if (to_mode == full_mode)
449 {
450 emit_unop_insn (convert_optab_handler (sext_optab, full_mode,
451 from_mode),
452 to, from, UNKNOWN);
453 return;
454 }
455
456 new_from = gen_reg_rtx (full_mode);
457 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode),
458 new_from, from, UNKNOWN);
459
460 /* else proceed to integer conversions below. */
461 from_mode = full_mode;
462 from = new_from;
463 }
464
465 /* Make sure both are fixed-point modes or both are not. */
466 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
467 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
468 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
469 {
470 /* If we widen from_mode to to_mode and they are in the same class,
471 we won't saturate the result.
472 Otherwise, always saturate the result to play safe. */
473 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
474 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
475 expand_fixed_convert (to, from, 0, 0);
476 else
477 expand_fixed_convert (to, from, 0, 1);
478 return;
479 }
480
481 /* Now both modes are integers. */
482
483 /* Handle expanding beyond a word. */
484 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
485 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
486 {
487 rtx insns;
488 rtx lowpart;
489 rtx fill_value;
490 rtx lowfrom;
491 int i;
492 enum machine_mode lowpart_mode;
493 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
494
495 /* Try converting directly if the insn is supported. */
496 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
497 != CODE_FOR_nothing)
498 {
499 /* If FROM is a SUBREG, put it into a register. Do this
500 so that we always generate the same set of insns for
501 better cse'ing; if an intermediate assignment occurred,
502 we won't be doing the operation directly on the SUBREG. */
503 if (optimize > 0 && GET_CODE (from) == SUBREG)
504 from = force_reg (from_mode, from);
505 emit_unop_insn (code, to, from, equiv_code);
506 return;
507 }
508 /* Next, try converting via full word. */
509 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
510 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
511 != CODE_FOR_nothing))
512 {
513 rtx word_to = gen_reg_rtx (word_mode);
514 if (REG_P (to))
515 {
516 if (reg_overlap_mentioned_p (to, from))
517 from = force_reg (from_mode, from);
518 emit_clobber (to);
519 }
520 convert_move (word_to, from, unsignedp);
521 emit_unop_insn (code, to, word_to, equiv_code);
522 return;
523 }
524
525 /* No special multiword conversion insn; do it by hand. */
526 start_sequence ();
527
528 /* Since we will turn this into a no conflict block, we must ensure
529 that the source does not overlap the target. */
530
531 if (reg_overlap_mentioned_p (to, from))
532 from = force_reg (from_mode, from);
533
534 /* Get a copy of FROM widened to a word, if necessary. */
535 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
536 lowpart_mode = word_mode;
537 else
538 lowpart_mode = from_mode;
539
540 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
541
542 lowpart = gen_lowpart (lowpart_mode, to);
543 emit_move_insn (lowpart, lowfrom);
544
545 /* Compute the value to put in each remaining word. */
546 if (unsignedp)
547 fill_value = const0_rtx;
548 else
549 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
550 LT, lowfrom, const0_rtx,
551 VOIDmode, 0, -1);
552
553 /* Fill the remaining words. */
554 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
555 {
556 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
557 rtx subword = operand_subword (to, index, 1, to_mode);
558
559 gcc_assert (subword);
560
561 if (fill_value != subword)
562 emit_move_insn (subword, fill_value);
563 }
564
565 insns = get_insns ();
566 end_sequence ();
567
568 emit_insn (insns);
569 return;
570 }
571
572 /* Truncating multi-word to a word or less. */
573 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
574 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
575 {
576 if (!((MEM_P (from)
577 && ! MEM_VOLATILE_P (from)
578 && direct_load[(int) to_mode]
579 && ! mode_dependent_address_p (XEXP (from, 0)))
580 || REG_P (from)
581 || GET_CODE (from) == SUBREG))
582 from = force_reg (from_mode, from);
583 convert_move (to, gen_lowpart (word_mode, from), 0);
584 return;
585 }
586
587 /* Now follow all the conversions between integers
588 no more than a word long. */
589
590 /* For truncation, usually we can just refer to FROM in a narrower mode. */
591 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
592 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
593 {
594 if (!((MEM_P (from)
595 && ! MEM_VOLATILE_P (from)
596 && direct_load[(int) to_mode]
597 && ! mode_dependent_address_p (XEXP (from, 0)))
598 || REG_P (from)
599 || GET_CODE (from) == SUBREG))
600 from = force_reg (from_mode, from);
601 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
602 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
603 from = copy_to_reg (from);
604 emit_move_insn (to, gen_lowpart (to_mode, from));
605 return;
606 }
607
608 /* Handle extension. */
609 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
610 {
611 /* Convert directly if that works. */
612 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
613 != CODE_FOR_nothing)
614 {
615 emit_unop_insn (code, to, from, equiv_code);
616 return;
617 }
618 else
619 {
620 enum machine_mode intermediate;
621 rtx tmp;
622 int shift_amount;
623
624 /* Search for a mode to convert via. */
625 for (intermediate = from_mode; intermediate != VOIDmode;
626 intermediate = GET_MODE_WIDER_MODE (intermediate))
627 if (((can_extend_p (to_mode, intermediate, unsignedp)
628 != CODE_FOR_nothing)
629 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
630 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
631 && (can_extend_p (intermediate, from_mode, unsignedp)
632 != CODE_FOR_nothing))
633 {
634 convert_move (to, convert_to_mode (intermediate, from,
635 unsignedp), unsignedp);
636 return;
637 }
638
639 /* No suitable intermediate mode.
640 Generate what we need with shifts. */
641 shift_amount = (GET_MODE_PRECISION (to_mode)
642 - GET_MODE_PRECISION (from_mode));
643 from = gen_lowpart (to_mode, force_reg (from_mode, from));
644 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
645 to, unsignedp);
646 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
647 to, unsignedp);
648 if (tmp != to)
649 emit_move_insn (to, tmp);
650 return;
651 }
652 }
653
654 /* Support special truncate insns for certain modes. */
655 if (convert_optab_handler (trunc_optab, to_mode,
656 from_mode) != CODE_FOR_nothing)
657 {
658 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
659 to, from, UNKNOWN);
660 return;
661 }
662
663 /* Handle truncation of volatile memrefs, and so on;
664 the things that couldn't be truncated directly,
665 and for which there was no special instruction.
666
667 ??? Code above formerly short-circuited this, for most integer
668 mode pairs, with a force_reg in from_mode followed by a recursive
669 call to this routine. Appears always to have been wrong. */
670 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
671 {
672 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
673 emit_move_insn (to, temp);
674 return;
675 }
676
677 /* Mode combination is not recognized. */
678 gcc_unreachable ();
679 }
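
/* Editor's sketch (not GCC code) of the multiword widening case handled
   above: the low word is converted directly, and each remaining word is
   filled with zero for a zero extension, or with all-ones when the signed
   source is negative.  uint32_t/uint64_t stand in for word_mode and a
   two-word destination.  */

#include <stdint.h>

static uint64_t
widen_word_to_double_word (uint32_t lowfrom, int unsignedp)
{
  uint32_t fill_value;

  if (unsignedp)
    fill_value = 0;                                   /* const0_rtx         */
  else
    fill_value = (int32_t) lowfrom < 0 ? ~0u : 0u;    /* emit_store_flag LT */

  /* The lowpart receives LOWFROM; the remaining word receives FILL_VALUE.  */
  return ((uint64_t) fill_value << 32) | lowfrom;
}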
680
681 /* Return an rtx for a value that would result
682 from converting X to mode MODE.
683 Both X and MODE may be floating, or both integer.
684 UNSIGNEDP is nonzero if X is an unsigned value.
685 This can be done by referring to a part of X in place
686 or by copying to a new temporary with conversion. */
687
688 rtx
689 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
690 {
691 return convert_modes (mode, VOIDmode, x, unsignedp);
692 }
693
694 /* Return an rtx for a value that would result
695 from converting X from mode OLDMODE to mode MODE.
696 Both modes may be floating, or both integer.
697 UNSIGNEDP is nonzero if X is an unsigned value.
698
699 This can be done by referring to a part of X in place
700 or by copying to a new temporary with conversion.
701
702 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
703
704 rtx
705 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
706 {
707 rtx temp;
708
709 /* If FROM is a SUBREG that indicates that we have already done at least
710 the required extension, strip it. */
711
712 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
713 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
714 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
715 x = gen_lowpart (mode, x);
716
717 if (GET_MODE (x) != VOIDmode)
718 oldmode = GET_MODE (x);
719
720 if (mode == oldmode)
721 return x;
722
723 /* There is one case that we must handle specially: If we are converting
724 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
725 we are to interpret the constant as unsigned, gen_lowpart will do
   726    the wrong thing if the constant appears negative. What we want to do is
727 make the high-order word of the constant zero, not all ones. */
728
729 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
730 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
731 && CONST_INT_P (x) && INTVAL (x) < 0)
732 {
733 double_int val = uhwi_to_double_int (INTVAL (x));
734
735 /* We need to zero extend VAL. */
736 if (oldmode != VOIDmode)
737 val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));
738
739 return immed_double_int_const (val, mode);
740 }
741
742 /* We can do this with a gen_lowpart if both desired and current modes
743 are integer, and this is either a constant integer, a register, or a
744 non-volatile MEM. Except for the constant case where MODE is no
745 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
746
747 if ((CONST_INT_P (x)
748 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
749 || (GET_MODE_CLASS (mode) == MODE_INT
750 && GET_MODE_CLASS (oldmode) == MODE_INT
751 && (GET_CODE (x) == CONST_DOUBLE
752 || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
753 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
754 && direct_load[(int) mode])
755 || (REG_P (x)
756 && (! HARD_REGISTER_P (x)
757 || HARD_REGNO_MODE_OK (REGNO (x), mode))
758 && TRULY_NOOP_TRUNCATION_MODES_P (mode,
759 GET_MODE (x))))))))
760 {
761 /* ?? If we don't know OLDMODE, we have to assume here that
762 X does not need sign- or zero-extension. This may not be
763 the case, but it's the best we can do. */
764 if (CONST_INT_P (x) && oldmode != VOIDmode
765 && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
766 {
767 HOST_WIDE_INT val = INTVAL (x);
768
769 /* We must sign or zero-extend in this case. Start by
770 zero-extending, then sign extend if we need to. */
771 val &= GET_MODE_MASK (oldmode);
772 if (! unsignedp
773 && val_signbit_known_set_p (oldmode, val))
774 val |= ~GET_MODE_MASK (oldmode);
775
776 return gen_int_mode (val, mode);
777 }
778
779 return gen_lowpart (mode, x);
780 }
781
   782 /* Converting an integer constant into MODE is always equivalent to a
   783    subreg operation. */
784 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
785 {
786 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
787 return simplify_gen_subreg (mode, x, oldmode, 0);
788 }
789
790 temp = gen_reg_rtx (mode);
791 convert_move (temp, x, unsignedp);
792 return temp;
793 }
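
/* Editor's sketch of the CONST_INT handling in convert_modes above: mask the
   value to the old mode's width (zero-extend), then propagate the sign bit
   only when the conversion is signed.  OLDBITS stands in for
   GET_MODE_PRECISION (oldmode); purely illustrative, not a GCC interface.  */

#include <stdint.h>

static int64_t
convert_int_constant (int64_t x, unsigned int oldbits, int unsignedp)
{
  uint64_t mask = oldbits >= 64 ? ~(uint64_t) 0
                                : (((uint64_t) 1 << oldbits) - 1);
  uint64_t val = (uint64_t) x & mask;           /* zero-extend first  */

  if (!unsignedp && ((val >> (oldbits - 1)) & 1))
    val |= ~mask;                               /* then sign-extend   */
  return (int64_t) val;
}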
794 \f
795 /* Return the largest alignment we can use for doing a move (or store)
796 of MAX_PIECES. ALIGN is the largest alignment we could use. */
797
798 static unsigned int
799 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
800 {
801 enum machine_mode tmode;
802
803 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
804 if (align >= GET_MODE_ALIGNMENT (tmode))
805 align = GET_MODE_ALIGNMENT (tmode);
806 else
807 {
808 enum machine_mode tmode, xmode;
809
810 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
811 tmode != VOIDmode;
812 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
813 if (GET_MODE_SIZE (tmode) > max_pieces
814 || SLOW_UNALIGNED_ACCESS (tmode, align))
815 break;
816
817 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
818 }
819
820 return align;
821 }
822
823 /* Return the widest integer mode no wider than SIZE. If no such mode
824 can be found, return VOIDmode. */
825
826 static enum machine_mode
827 widest_int_mode_for_size (unsigned int size)
828 {
829 enum machine_mode tmode, mode = VOIDmode;
830
831 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
832 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
833 if (GET_MODE_SIZE (tmode) < size)
834 mode = tmode;
835
836 return mode;
837 }
838
839 /* STORE_MAX_PIECES is the number of bytes at a time that we can
840 store efficiently. Due to internal GCC limitations, this is
841 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
842 for an immediate constant. */
843
844 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
845
846 /* Determine whether the LEN bytes can be moved by using several move
847 instructions. Return nonzero if a call to move_by_pieces should
848 succeed. */
849
850 int
851 can_move_by_pieces (unsigned HOST_WIDE_INT len,
852 unsigned int align ATTRIBUTE_UNUSED)
853 {
854 return MOVE_BY_PIECES_P (len, align);
855 }
856
857 /* Generate several move instructions to copy LEN bytes from block FROM to
858 block TO. (These are MEM rtx's with BLKmode).
859
860 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
861 used to push FROM to the stack.
862
863 ALIGN is maximum stack alignment we can assume.
864
   865    If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
   866    mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
   867    stpcpy. */
868
869 rtx
870 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
871 unsigned int align, int endp)
872 {
873 struct move_by_pieces_d data;
874 enum machine_mode to_addr_mode, from_addr_mode
875 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
876 rtx to_addr, from_addr = XEXP (from, 0);
877 unsigned int max_size = MOVE_MAX_PIECES + 1;
878 enum insn_code icode;
879
880 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
881
882 data.offset = 0;
883 data.from_addr = from_addr;
884 if (to)
885 {
886 to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
887 to_addr = XEXP (to, 0);
888 data.to = to;
889 data.autinc_to
890 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
891 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
892 data.reverse
893 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
894 }
895 else
896 {
897 to_addr_mode = VOIDmode;
898 to_addr = NULL_RTX;
899 data.to = NULL_RTX;
900 data.autinc_to = 1;
901 #ifdef STACK_GROWS_DOWNWARD
902 data.reverse = 1;
903 #else
904 data.reverse = 0;
905 #endif
906 }
907 data.to_addr = to_addr;
908 data.from = from;
909 data.autinc_from
910 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
911 || GET_CODE (from_addr) == POST_INC
912 || GET_CODE (from_addr) == POST_DEC);
913
914 data.explicit_inc_from = 0;
915 data.explicit_inc_to = 0;
916 if (data.reverse) data.offset = len;
917 data.len = len;
918
919 /* If copying requires more than two move insns,
920 copy addresses to registers (to make displacements shorter)
921 and use post-increment if available. */
922 if (!(data.autinc_from && data.autinc_to)
923 && move_by_pieces_ninsns (len, align, max_size) > 2)
924 {
925 /* Find the mode of the largest move...
926 MODE might not be used depending on the definitions of the
927 USE_* macros below. */
928 enum machine_mode mode ATTRIBUTE_UNUSED
929 = widest_int_mode_for_size (max_size);
930
931 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
932 {
933 data.from_addr = copy_to_mode_reg (from_addr_mode,
934 plus_constant (from_addr, len));
935 data.autinc_from = 1;
936 data.explicit_inc_from = -1;
937 }
938 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
939 {
940 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
941 data.autinc_from = 1;
942 data.explicit_inc_from = 1;
943 }
944 if (!data.autinc_from && CONSTANT_P (from_addr))
945 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
946 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
947 {
948 data.to_addr = copy_to_mode_reg (to_addr_mode,
949 plus_constant (to_addr, len));
950 data.autinc_to = 1;
951 data.explicit_inc_to = -1;
952 }
953 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
954 {
955 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
956 data.autinc_to = 1;
957 data.explicit_inc_to = 1;
958 }
959 if (!data.autinc_to && CONSTANT_P (to_addr))
960 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
961 }
962
963 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
964
965 /* First move what we can in the largest integer mode, then go to
966 successively smaller modes. */
967
968 while (max_size > 1)
969 {
970 enum machine_mode mode = widest_int_mode_for_size (max_size);
971
972 if (mode == VOIDmode)
973 break;
974
975 icode = optab_handler (mov_optab, mode);
976 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
977 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
978
979 max_size = GET_MODE_SIZE (mode);
980 }
981
982 /* The code above should have handled everything. */
983 gcc_assert (!data.len);
984
985 if (endp)
986 {
987 rtx to1;
988
989 gcc_assert (!data.reverse);
990 if (data.autinc_to)
991 {
992 if (endp == 2)
993 {
994 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
995 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
996 else
997 data.to_addr = copy_to_mode_reg (to_addr_mode,
998 plus_constant (data.to_addr,
999 -1));
1000 }
1001 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1002 data.offset);
1003 }
1004 else
1005 {
1006 if (endp == 2)
1007 --data.offset;
1008 to1 = adjust_address (data.to, QImode, data.offset);
1009 }
1010 return to1;
1011 }
1012 else
1013 return data.to;
1014 }
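
/* Editor's sketch of the ENDP convention documented above move_by_pieces:
   0 returns the destination itself, 1 returns one past the last byte written
   (a la mempcpy), 2 returns the last byte written (a la stpcpy).  The helper
   below is hypothetical and only restates that convention.  */

#include <stddef.h>

static unsigned char *
by_pieces_endp_result (unsigned char *to, size_t len, int endp)
{
  if (endp == 0)
    return to;
  if (endp == 1)
    return to + len;          /* a la mempcpy */
  return to + len - 1;        /* a la stpcpy  */
}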
1015
1016 /* Return number of insns required to move L bytes by pieces.
1017 ALIGN (in bits) is maximum alignment we can assume. */
1018
1019 static unsigned HOST_WIDE_INT
1020 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1021 unsigned int max_size)
1022 {
1023 unsigned HOST_WIDE_INT n_insns = 0;
1024
1025 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1026
1027 while (max_size > 1)
1028 {
1029 enum machine_mode mode;
1030 enum insn_code icode;
1031
1032 mode = widest_int_mode_for_size (max_size);
1033
1034 if (mode == VOIDmode)
1035 break;
1036
1037 icode = optab_handler (mov_optab, mode);
1038 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1039 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1040
1041 max_size = GET_MODE_SIZE (mode);
1042 }
1043
1044 gcc_assert (!l);
1045 return n_insns;
1046 }
1047
1048 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1049 with move instructions for mode MODE. GENFUN is the gen_... function
1050 to make a move insn for that mode. DATA has all the other info. */
1051
1052 static void
1053 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1054 struct move_by_pieces_d *data)
1055 {
1056 unsigned int size = GET_MODE_SIZE (mode);
1057 rtx to1 = NULL_RTX, from1;
1058
1059 while (data->len >= size)
1060 {
1061 if (data->reverse)
1062 data->offset -= size;
1063
1064 if (data->to)
1065 {
1066 if (data->autinc_to)
1067 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1068 data->offset);
1069 else
1070 to1 = adjust_address (data->to, mode, data->offset);
1071 }
1072
1073 if (data->autinc_from)
1074 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1075 data->offset);
1076 else
1077 from1 = adjust_address (data->from, mode, data->offset);
1078
1079 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1080 emit_insn (gen_add2_insn (data->to_addr,
1081 GEN_INT (-(HOST_WIDE_INT)size)));
1082 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1083 emit_insn (gen_add2_insn (data->from_addr,
1084 GEN_INT (-(HOST_WIDE_INT)size)));
1085
1086 if (data->to)
1087 emit_insn ((*genfun) (to1, from1));
1088 else
1089 {
1090 #ifdef PUSH_ROUNDING
1091 emit_single_push_insn (mode, from1, NULL);
1092 #else
1093 gcc_unreachable ();
1094 #endif
1095 }
1096
1097 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1098 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1099 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1100 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1101
1102 if (! data->reverse)
1103 data->offset += size;
1104
1105 data->len -= size;
1106 }
1107 }
1108 \f
1109 /* Emit code to move a block Y to a block X. This may be done with
1110 string-move instructions, with multiple scalar move instructions,
1111 or with a library call.
1112
1113 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1114 SIZE is an rtx that says how long they are.
1115 ALIGN is the maximum alignment we can assume they have.
1116 METHOD describes what kind of copy this is, and what mechanisms may be used.
1117
1118 Return the address of the new block, if memcpy is called and returns it,
1119 0 otherwise. */
1120
1121 rtx
1122 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1123 unsigned int expected_align, HOST_WIDE_INT expected_size)
1124 {
1125 bool may_use_call;
1126 rtx retval = 0;
1127 unsigned int align;
1128
1129 gcc_assert (size);
1130 if (CONST_INT_P (size)
1131 && INTVAL (size) == 0)
1132 return 0;
1133
1134 switch (method)
1135 {
1136 case BLOCK_OP_NORMAL:
1137 case BLOCK_OP_TAILCALL:
1138 may_use_call = true;
1139 break;
1140
1141 case BLOCK_OP_CALL_PARM:
1142 may_use_call = block_move_libcall_safe_for_call_parm ();
1143
1144 /* Make inhibit_defer_pop nonzero around the library call
1145 to force it to pop the arguments right away. */
1146 NO_DEFER_POP;
1147 break;
1148
1149 case BLOCK_OP_NO_LIBCALL:
1150 may_use_call = false;
1151 break;
1152
1153 default:
1154 gcc_unreachable ();
1155 }
1156
1157 gcc_assert (MEM_P (x) && MEM_P (y));
1158 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1159 gcc_assert (align >= BITS_PER_UNIT);
1160
1161 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1162 block copy is more efficient for other large modes, e.g. DCmode. */
1163 x = adjust_address (x, BLKmode, 0);
1164 y = adjust_address (y, BLKmode, 0);
1165
1166 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1167 can be incorrect is coming from __builtin_memcpy. */
1168 if (CONST_INT_P (size))
1169 {
1170 x = shallow_copy_rtx (x);
1171 y = shallow_copy_rtx (y);
1172 set_mem_size (x, INTVAL (size));
1173 set_mem_size (y, INTVAL (size));
1174 }
1175
1176 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1177 move_by_pieces (x, y, INTVAL (size), align, 0);
1178 else if (emit_block_move_via_movmem (x, y, size, align,
1179 expected_align, expected_size))
1180 ;
1181 else if (may_use_call
1182 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1183 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1184 {
1185 /* Since x and y are passed to a libcall, mark the corresponding
1186 tree EXPR as addressable. */
1187 tree y_expr = MEM_EXPR (y);
1188 tree x_expr = MEM_EXPR (x);
1189 if (y_expr)
1190 mark_addressable (y_expr);
1191 if (x_expr)
1192 mark_addressable (x_expr);
1193 retval = emit_block_move_via_libcall (x, y, size,
1194 method == BLOCK_OP_TAILCALL);
1195 }
1196
1197 else
1198 emit_block_move_via_loop (x, y, size, align);
1199
1200 if (method == BLOCK_OP_CALL_PARM)
1201 OK_DEFER_POP;
1202
1203 return retval;
1204 }
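
/* Editor's sketch of the strategy order used by emit_block_move_hints above:
   inline piecewise moves, then a target movmem pattern, then the memcpy
   libcall when permitted, and finally an explicit byte loop.  The enum and
   the boolean parameters are hypothetical stand-ins for the MOVE_BY_PIECES_P
   test, the movmem expansion attempt and the may_use_call decision; they are
   not GCC interfaces.  */

#include <stdbool.h>

enum block_move_strategy
{
  BLOCK_MOVE_BY_PIECES,       /* inline piecewise moves   */
  BLOCK_MOVE_PATTERN,         /* target movmem pattern    */
  BLOCK_MOVE_LIBCALL,         /* call memcpy              */
  BLOCK_MOVE_LOOP             /* explicit byte-copy loop  */
};

static enum block_move_strategy
choose_block_move_strategy (bool size_is_constant_and_small, bool pattern_ok,
                            bool may_use_call)
{
  if (size_is_constant_and_small)
    return BLOCK_MOVE_BY_PIECES;
  if (pattern_ok)
    return BLOCK_MOVE_PATTERN;
  if (may_use_call)
    return BLOCK_MOVE_LIBCALL;
  return BLOCK_MOVE_LOOP;
}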
1205
1206 rtx
1207 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1208 {
1209 return emit_block_move_hints (x, y, size, method, 0, -1);
1210 }
1211
1212 /* A subroutine of emit_block_move. Returns true if calling the
1213 block move libcall will not clobber any parameters which may have
1214 already been placed on the stack. */
1215
1216 static bool
1217 block_move_libcall_safe_for_call_parm (void)
1218 {
1219 #if defined (REG_PARM_STACK_SPACE)
1220 tree fn;
1221 #endif
1222
1223 /* If arguments are pushed on the stack, then they're safe. */
1224 if (PUSH_ARGS)
1225 return true;
1226
1227 /* If registers go on the stack anyway, any argument is sure to clobber
1228 an outgoing argument. */
1229 #if defined (REG_PARM_STACK_SPACE)
1230 fn = emit_block_move_libcall_fn (false);
1231 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1232 depend on its argument. */
1233 (void) fn;
1234 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1235 && REG_PARM_STACK_SPACE (fn) != 0)
1236 return false;
1237 #endif
1238
1239 /* If any argument goes in memory, then it might clobber an outgoing
1240 argument. */
1241 {
1242 CUMULATIVE_ARGS args_so_far_v;
1243 cumulative_args_t args_so_far;
1244 tree fn, arg;
1245
1246 fn = emit_block_move_libcall_fn (false);
1247 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1248 args_so_far = pack_cumulative_args (&args_so_far_v);
1249
1250 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1251 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1252 {
1253 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1254 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1255 NULL_TREE, true);
1256 if (!tmp || !REG_P (tmp))
1257 return false;
1258 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1259 return false;
1260 targetm.calls.function_arg_advance (args_so_far, mode,
1261 NULL_TREE, true);
1262 }
1263 }
1264 return true;
1265 }
1266
1267 /* A subroutine of emit_block_move. Expand a movmem pattern;
1268 return true if successful. */
1269
1270 static bool
1271 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1272 unsigned int expected_align, HOST_WIDE_INT expected_size)
1273 {
1274 int save_volatile_ok = volatile_ok;
1275 enum machine_mode mode;
1276
1277 if (expected_align < align)
1278 expected_align = align;
1279
1280 /* Since this is a move insn, we don't care about volatility. */
1281 volatile_ok = 1;
1282
1283 /* Try the most limited insn first, because there's no point
1284 including more than one in the machine description unless
1285 the more limited one has some advantage. */
1286
1287 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1288 mode = GET_MODE_WIDER_MODE (mode))
1289 {
1290 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1291
1292 if (code != CODE_FOR_nothing
1293 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1294 here because if SIZE is less than the mode mask, as it is
1295 returned by the macro, it will definitely be less than the
1296 actual mode mask. */
1297 && ((CONST_INT_P (size)
1298 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1299 <= (GET_MODE_MASK (mode) >> 1)))
1300 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
1301 {
1302 struct expand_operand ops[6];
1303 unsigned int nops;
1304
1305 /* ??? When called via emit_block_move_for_call, it'd be
1306 nice if there were some way to inform the backend, so
1307 that it doesn't fail the expansion because it thinks
1308 emitting the libcall would be more efficient. */
1309 nops = insn_data[(int) code].n_generator_args;
1310 gcc_assert (nops == 4 || nops == 6);
1311
1312 create_fixed_operand (&ops[0], x);
1313 create_fixed_operand (&ops[1], y);
1314 /* The check above guarantees that this size conversion is valid. */
1315 create_convert_operand_to (&ops[2], size, mode, true);
1316 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1317 if (nops == 6)
1318 {
1319 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1320 create_integer_operand (&ops[5], expected_size);
1321 }
1322 if (maybe_expand_insn (code, nops, ops))
1323 {
1324 volatile_ok = save_volatile_ok;
1325 return true;
1326 }
1327 }
1328 }
1329
1330 volatile_ok = save_volatile_ok;
1331 return false;
1332 }
1333
1334 /* A subroutine of emit_block_move. Expand a call to memcpy.
1335 Return the return value from memcpy, 0 otherwise. */
1336
1337 rtx
1338 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1339 {
1340 rtx dst_addr, src_addr;
1341 tree call_expr, fn, src_tree, dst_tree, size_tree;
1342 enum machine_mode size_mode;
1343 rtx retval;
1344
1345 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1346 pseudos. We can then place those new pseudos into a VAR_DECL and
1347 use them later. */
1348
1349 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1350 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1351
1352 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1353 src_addr = convert_memory_address (ptr_mode, src_addr);
1354
1355 dst_tree = make_tree (ptr_type_node, dst_addr);
1356 src_tree = make_tree (ptr_type_node, src_addr);
1357
1358 size_mode = TYPE_MODE (sizetype);
1359
1360 size = convert_to_mode (size_mode, size, 1);
1361 size = copy_to_mode_reg (size_mode, size);
1362
1363 /* It is incorrect to use the libcall calling conventions to call
1364 memcpy in this context. This could be a user call to memcpy and
1365 the user may wish to examine the return value from memcpy. For
1366 targets where libcalls and normal calls have different conventions
1367 for returning pointers, we could end up generating incorrect code. */
1368
1369 size_tree = make_tree (sizetype, size);
1370
1371 fn = emit_block_move_libcall_fn (true);
1372 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1373 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1374
1375 retval = expand_normal (call_expr);
1376
1377 return retval;
1378 }
1379
1380 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1381 for the function we use for block copies. The first time FOR_CALL
1382 is true, we call assemble_external. */
1383
1384 static GTY(()) tree block_move_fn;
1385
1386 void
1387 init_block_move_fn (const char *asmspec)
1388 {
1389 if (!block_move_fn)
1390 {
1391 tree args, fn;
1392
1393 fn = get_identifier ("memcpy");
1394 args = build_function_type_list (ptr_type_node, ptr_type_node,
1395 const_ptr_type_node, sizetype,
1396 NULL_TREE);
1397
1398 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1399 DECL_EXTERNAL (fn) = 1;
1400 TREE_PUBLIC (fn) = 1;
1401 DECL_ARTIFICIAL (fn) = 1;
1402 TREE_NOTHROW (fn) = 1;
1403 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1404 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1405
1406 block_move_fn = fn;
1407 }
1408
1409 if (asmspec)
1410 set_user_assembler_name (block_move_fn, asmspec);
1411 }
1412
1413 static tree
1414 emit_block_move_libcall_fn (int for_call)
1415 {
1416 static bool emitted_extern;
1417
1418 if (!block_move_fn)
1419 init_block_move_fn (NULL);
1420
1421 if (for_call && !emitted_extern)
1422 {
1423 emitted_extern = true;
1424 make_decl_rtl (block_move_fn);
1425 assemble_external (block_move_fn);
1426 }
1427
1428 return block_move_fn;
1429 }
1430
1431 /* A subroutine of emit_block_move. Copy the data via an explicit
1432 loop. This is used only when libcalls are forbidden. */
1433 /* ??? It'd be nice to copy in hunks larger than QImode. */
1434
1435 static void
1436 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1437 unsigned int align ATTRIBUTE_UNUSED)
1438 {
1439 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1440 enum machine_mode x_addr_mode
1441 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
1442 enum machine_mode y_addr_mode
1443 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
1444 enum machine_mode iter_mode;
1445
1446 iter_mode = GET_MODE (size);
1447 if (iter_mode == VOIDmode)
1448 iter_mode = word_mode;
1449
1450 top_label = gen_label_rtx ();
1451 cmp_label = gen_label_rtx ();
1452 iter = gen_reg_rtx (iter_mode);
1453
1454 emit_move_insn (iter, const0_rtx);
1455
1456 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1457 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1458 do_pending_stack_adjust ();
1459
1460 emit_jump (cmp_label);
1461 emit_label (top_label);
1462
1463 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1464 x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);
1465
1466 if (x_addr_mode != y_addr_mode)
1467 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1468 y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);
1469
1470 x = change_address (x, QImode, x_addr);
1471 y = change_address (y, QImode, y_addr);
1472
1473 emit_move_insn (x, y);
1474
1475 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1476 true, OPTAB_LIB_WIDEN);
1477 if (tmp != iter)
1478 emit_move_insn (iter, tmp);
1479
1480 emit_label (cmp_label);
1481
1482 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1483 true, top_label);
1484 }
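
/* Editor's note: the RTL emitted by emit_block_move_via_loop corresponds to
   the byte-copy loop below (jump to the comparison first, then loop), shown
   in plain C for reference.  The names are illustrative, not GCC's.  */

#include <stddef.h>

static void
block_move_loop (unsigned char *x, const unsigned char *y, size_t size)
{
  size_t iter = 0;

  goto cmp_label;
 top_label:
  x[iter] = y[iter];            /* emit_move_insn (x, y) in QImode  */
  iter += 1;                    /* expand_simple_binop (PLUS, ...)  */
 cmp_label:
  if (iter < size)              /* emit_cmp_and_jump_insns (LT)     */
    goto top_label;
}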
1485 \f
1486 /* Copy all or part of a value X into registers starting at REGNO.
1487 The number of registers to be filled is NREGS. */
1488
1489 void
1490 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1491 {
1492 int i;
1493 #ifdef HAVE_load_multiple
1494 rtx pat;
1495 rtx last;
1496 #endif
1497
1498 if (nregs == 0)
1499 return;
1500
1501 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1502 x = validize_mem (force_const_mem (mode, x));
1503
1504 /* See if the machine can do this with a load multiple insn. */
1505 #ifdef HAVE_load_multiple
1506 if (HAVE_load_multiple)
1507 {
1508 last = get_last_insn ();
1509 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1510 GEN_INT (nregs));
1511 if (pat)
1512 {
1513 emit_insn (pat);
1514 return;
1515 }
1516 else
1517 delete_insns_since (last);
1518 }
1519 #endif
1520
1521 for (i = 0; i < nregs; i++)
1522 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1523 operand_subword_force (x, i, mode));
1524 }
1525
1526 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1527 The number of registers to be filled is NREGS. */
1528
1529 void
1530 move_block_from_reg (int regno, rtx x, int nregs)
1531 {
1532 int i;
1533
1534 if (nregs == 0)
1535 return;
1536
1537 /* See if the machine can do this with a store multiple insn. */
1538 #ifdef HAVE_store_multiple
1539 if (HAVE_store_multiple)
1540 {
1541 rtx last = get_last_insn ();
1542 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1543 GEN_INT (nregs));
1544 if (pat)
1545 {
1546 emit_insn (pat);
1547 return;
1548 }
1549 else
1550 delete_insns_since (last);
1551 }
1552 #endif
1553
1554 for (i = 0; i < nregs; i++)
1555 {
1556 rtx tem = operand_subword (x, i, 1, BLKmode);
1557
1558 gcc_assert (tem);
1559
1560 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1561 }
1562 }
1563
1564 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1565 ORIG, where ORIG is a non-consecutive group of registers represented by
1566 a PARALLEL. The clone is identical to the original except in that the
1567 original set of registers is replaced by a new set of pseudo registers.
1568 The new set has the same modes as the original set. */
1569
1570 rtx
1571 gen_group_rtx (rtx orig)
1572 {
1573 int i, length;
1574 rtx *tmps;
1575
1576 gcc_assert (GET_CODE (orig) == PARALLEL);
1577
1578 length = XVECLEN (orig, 0);
1579 tmps = XALLOCAVEC (rtx, length);
1580
1581 /* Skip a NULL entry in first slot. */
1582 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1583
1584 if (i)
1585 tmps[0] = 0;
1586
1587 for (; i < length; i++)
1588 {
1589 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1590 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1591
1592 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1593 }
1594
1595 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1596 }
1597
1598 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1599 except that values are placed in TMPS[i], and must later be moved
1600 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1601
1602 static void
1603 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1604 {
1605 rtx src;
1606 int start, i;
1607 enum machine_mode m = GET_MODE (orig_src);
1608
1609 gcc_assert (GET_CODE (dst) == PARALLEL);
1610
1611 if (m != VOIDmode
1612 && !SCALAR_INT_MODE_P (m)
1613 && !MEM_P (orig_src)
1614 && GET_CODE (orig_src) != CONCAT)
1615 {
1616 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1617 if (imode == BLKmode)
1618 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1619 else
1620 src = gen_reg_rtx (imode);
1621 if (imode != BLKmode)
1622 src = gen_lowpart (GET_MODE (orig_src), src);
1623 emit_move_insn (src, orig_src);
1624 /* ...and back again. */
1625 if (imode != BLKmode)
1626 src = gen_lowpart (imode, src);
1627 emit_group_load_1 (tmps, dst, src, type, ssize);
1628 return;
1629 }
1630
1631 /* Check for a NULL entry, used to indicate that the parameter goes
1632 both on the stack and in registers. */
1633 if (XEXP (XVECEXP (dst, 0, 0), 0))
1634 start = 0;
1635 else
1636 start = 1;
1637
1638 /* Process the pieces. */
1639 for (i = start; i < XVECLEN (dst, 0); i++)
1640 {
1641 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1642 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1643 unsigned int bytelen = GET_MODE_SIZE (mode);
1644 int shift = 0;
1645
1646 /* Handle trailing fragments that run over the size of the struct. */
1647 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1648 {
1649 /* Arrange to shift the fragment to where it belongs.
1650 extract_bit_field loads to the lsb of the reg. */
1651 if (
1652 #ifdef BLOCK_REG_PADDING
1653 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1654 == (BYTES_BIG_ENDIAN ? upward : downward)
1655 #else
1656 BYTES_BIG_ENDIAN
1657 #endif
1658 )
1659 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1660 bytelen = ssize - bytepos;
1661 gcc_assert (bytelen > 0);
1662 }
1663
1664 /* If we won't be loading directly from memory, protect the real source
1665 from strange tricks we might play; but make sure that the source can
1666 be loaded directly into the destination. */
1667 src = orig_src;
1668 if (!MEM_P (orig_src)
1669 && (!CONSTANT_P (orig_src)
1670 || (GET_MODE (orig_src) != mode
1671 && GET_MODE (orig_src) != VOIDmode)))
1672 {
1673 if (GET_MODE (orig_src) == VOIDmode)
1674 src = gen_reg_rtx (mode);
1675 else
1676 src = gen_reg_rtx (GET_MODE (orig_src));
1677
1678 emit_move_insn (src, orig_src);
1679 }
1680
1681 /* Optimize the access just a bit. */
1682 if (MEM_P (src)
1683 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1684 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1685 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1686 && bytelen == GET_MODE_SIZE (mode))
1687 {
1688 tmps[i] = gen_reg_rtx (mode);
1689 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1690 }
1691 else if (COMPLEX_MODE_P (mode)
1692 && GET_MODE (src) == mode
1693 && bytelen == GET_MODE_SIZE (mode))
1694 /* Let emit_move_complex do the bulk of the work. */
1695 tmps[i] = src;
1696 else if (GET_CODE (src) == CONCAT)
1697 {
1698 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1699 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1700
1701 if ((bytepos == 0 && bytelen == slen0)
1702 || (bytepos != 0 && bytepos + bytelen <= slen))
1703 {
1704 /* The following assumes that the concatenated objects all
1705 have the same size. In this case, a simple calculation
1706 can be used to determine the object and the bit field
1707 to be extracted. */
1708 tmps[i] = XEXP (src, bytepos / slen0);
1709 if (! CONSTANT_P (tmps[i])
1710 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1711 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1712 (bytepos % slen0) * BITS_PER_UNIT,
1713 1, false, NULL_RTX, mode, mode);
1714 }
1715 else
1716 {
1717 rtx mem;
1718
1719 gcc_assert (!bytepos);
1720 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1721 emit_move_insn (mem, src);
1722 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1723 0, 1, false, NULL_RTX, mode, mode);
1724 }
1725 }
1726 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1727 SIMD register, which is currently broken. While we get GCC
1728 to emit proper RTL for these cases, let's dump to memory. */
1729 else if (VECTOR_MODE_P (GET_MODE (dst))
1730 && REG_P (src))
1731 {
1732 int slen = GET_MODE_SIZE (GET_MODE (src));
1733 rtx mem;
1734
1735 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1736 emit_move_insn (mem, src);
1737 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1738 }
1739 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1740 && XVECLEN (dst, 0) > 1)
  1741 	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1742 else if (CONSTANT_P (src))
1743 {
1744 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1745
1746 if (len == ssize)
1747 tmps[i] = src;
1748 else
1749 {
1750 rtx first, second;
1751
1752 gcc_assert (2 * len == ssize);
1753 split_double (src, &first, &second);
1754 if (i)
1755 tmps[i] = second;
1756 else
1757 tmps[i] = first;
1758 }
1759 }
1760 else if (REG_P (src) && GET_MODE (src) == mode)
1761 tmps[i] = src;
1762 else
1763 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1764 bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
1765 mode, mode);
1766
1767 if (shift)
1768 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1769 shift, tmps[i], 0);
1770 }
1771 }
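
/* Editor's sketch of the trailing-fragment shift computed in
   emit_group_load_1 above: when a piece of the PARALLEL runs past the end of
   the source (SSIZE), only ssize - bytepos bytes are real, and with
   big-endian-style padding they must be shifted up into place.  Hypothetical
   helper; BITS_PER_UNIT is taken as 8.  */

#include <stddef.h>

static size_t
trailing_fragment_shift_bits (size_t bytelen, size_t ssize, size_t bytepos,
                              int big_endian_padding)
{
  if (!big_endian_padding || bytepos + bytelen <= ssize)
    return 0;
  /* E.g. bytelen == 4, ssize == 6, bytepos == 4: two real bytes remain,
     so shift left by (4 - 2) * 8 == 16 bits.  */
  return (bytelen - (ssize - bytepos)) * 8;
}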
1772
1773 /* Emit code to move a block SRC of type TYPE to a block DST,
1774 where DST is non-consecutive registers represented by a PARALLEL.
1775 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1776 if not known. */
1777
1778 void
1779 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1780 {
1781 rtx *tmps;
1782 int i;
1783
1784 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1785 emit_group_load_1 (tmps, dst, src, type, ssize);
1786
1787 /* Copy the extracted pieces into the proper (probable) hard regs. */
1788 for (i = 0; i < XVECLEN (dst, 0); i++)
1789 {
1790 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1791 if (d == NULL)
1792 continue;
1793 emit_move_insn (d, tmps[i]);
1794 }
1795 }
1796
1797 /* Similar, but load SRC into new pseudos in a format that looks like
1798 PARALLEL. This can later be fed to emit_group_move to get things
1799 in the right place. */
1800
1801 rtx
1802 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1803 {
1804 rtvec vec;
1805 int i;
1806
1807 vec = rtvec_alloc (XVECLEN (parallel, 0));
1808 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1809
1810 /* Convert the vector to look just like the original PARALLEL, except
1811 with the computed values. */
1812 for (i = 0; i < XVECLEN (parallel, 0); i++)
1813 {
1814 rtx e = XVECEXP (parallel, 0, i);
1815 rtx d = XEXP (e, 0);
1816
1817 if (d)
1818 {
1819 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1820 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1821 }
1822 RTVEC_ELT (vec, i) = e;
1823 }
1824
1825 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1826 }
1827
1828 /* Emit code to move a block SRC to block DST, where SRC and DST are
1829 non-consecutive groups of registers, each represented by a PARALLEL. */
1830
1831 void
1832 emit_group_move (rtx dst, rtx src)
1833 {
1834 int i;
1835
1836 gcc_assert (GET_CODE (src) == PARALLEL
1837 && GET_CODE (dst) == PARALLEL
1838 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1839
1840 /* Skip first entry if NULL. */
1841 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1842 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1843 XEXP (XVECEXP (src, 0, i), 0));
1844 }
1845
1846 /* Move a group of registers represented by a PARALLEL into pseudos. */
1847
1848 rtx
1849 emit_group_move_into_temps (rtx src)
1850 {
1851 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1852 int i;
1853
1854 for (i = 0; i < XVECLEN (src, 0); i++)
1855 {
1856 rtx e = XVECEXP (src, 0, i);
1857 rtx d = XEXP (e, 0);
1858
1859 if (d)
1860 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1861 RTVEC_ELT (vec, i) = e;
1862 }
1863
1864 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1865 }
1866
1867 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1868 where SRC is non-consecutive registers represented by a PARALLEL.
1869 SSIZE represents the total size of block ORIG_DST, or -1 if not
1870 known. */
1871
1872 void
1873 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1874 {
1875 rtx *tmps, dst;
1876 int start, finish, i;
1877 enum machine_mode m = GET_MODE (orig_dst);
1878
1879 gcc_assert (GET_CODE (src) == PARALLEL);
1880
1881 if (!SCALAR_INT_MODE_P (m)
1882 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1883 {
1884 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1885 if (imode == BLKmode)
1886 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1887 else
1888 dst = gen_reg_rtx (imode);
1889 emit_group_store (dst, src, type, ssize);
1890 if (imode != BLKmode)
1891 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1892 emit_move_insn (orig_dst, dst);
1893 return;
1894 }
1895
1896 /* Check for a NULL entry, used to indicate that the parameter goes
1897 both on the stack and in registers. */
1898 if (XEXP (XVECEXP (src, 0, 0), 0))
1899 start = 0;
1900 else
1901 start = 1;
1902 finish = XVECLEN (src, 0);
1903
1904 tmps = XALLOCAVEC (rtx, finish);
1905
1906 /* Copy the (probable) hard regs into pseudos. */
1907 for (i = start; i < finish; i++)
1908 {
1909 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1910 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1911 {
1912 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1913 emit_move_insn (tmps[i], reg);
1914 }
1915 else
1916 tmps[i] = reg;
1917 }
1918
1919 /* If we won't be storing directly into memory, protect the real destination
1920 from strange tricks we might play. */
1921 dst = orig_dst;
1922 if (GET_CODE (dst) == PARALLEL)
1923 {
1924 rtx temp;
1925
1926 /* We can get a PARALLEL dst if there is a conditional expression in
1927 a return statement. In that case, the dst and src are the same,
1928 so no action is necessary. */
1929 if (rtx_equal_p (dst, src))
1930 return;
1931
1932 /* It is unclear if we can ever reach here, but we may as well handle
1933 it. Allocate a temporary, and split this into a store/load to/from
1934 the temporary. */
1935
1936 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1937 emit_group_store (temp, src, type, ssize);
1938 emit_group_load (dst, temp, type, ssize);
1939 return;
1940 }
1941 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1942 {
1943 enum machine_mode outer = GET_MODE (dst);
1944 enum machine_mode inner;
1945 HOST_WIDE_INT bytepos;
1946 bool done = false;
1947 rtx temp;
1948
1949 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1950 dst = gen_reg_rtx (outer);
1951
1952 /* Make life a bit easier for combine. */
1953 /* If the first element of the vector is the low part
1954 of the destination mode, use a paradoxical subreg to
1955 initialize the destination. */
1956 if (start < finish)
1957 {
1958 inner = GET_MODE (tmps[start]);
1959 bytepos = subreg_lowpart_offset (inner, outer);
1960 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1961 {
1962 temp = simplify_gen_subreg (outer, tmps[start],
1963 inner, 0);
1964 if (temp)
1965 {
1966 emit_move_insn (dst, temp);
1967 done = true;
1968 start++;
1969 }
1970 }
1971 }
1972
1973 /* If the first element wasn't the low part, try the last. */
1974 if (!done
1975 && start < finish - 1)
1976 {
1977 inner = GET_MODE (tmps[finish - 1]);
1978 bytepos = subreg_lowpart_offset (inner, outer);
1979 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1980 {
1981 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1982 inner, 0);
1983 if (temp)
1984 {
1985 emit_move_insn (dst, temp);
1986 done = true;
1987 finish--;
1988 }
1989 }
1990 }
1991
1992 /* Otherwise, simply initialize the result to zero. */
1993 if (!done)
1994 emit_move_insn (dst, CONST0_RTX (outer));
1995 }
1996
1997 /* Process the pieces. */
1998 for (i = start; i < finish; i++)
1999 {
2000 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2001 enum machine_mode mode = GET_MODE (tmps[i]);
2002 unsigned int bytelen = GET_MODE_SIZE (mode);
2003 unsigned int adj_bytelen = bytelen;
2004 rtx dest = dst;
2005
2006 /* Handle trailing fragments that run over the size of the struct. */
2007 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2008 adj_bytelen = ssize - bytepos;
2009
2010 if (GET_CODE (dst) == CONCAT)
2011 {
2012 if (bytepos + adj_bytelen
2013 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2014 dest = XEXP (dst, 0);
2015 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2016 {
2017 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2018 dest = XEXP (dst, 1);
2019 }
2020 else
2021 {
2022 enum machine_mode dest_mode = GET_MODE (dest);
2023 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2024
2025 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2026
2027 if (GET_MODE_ALIGNMENT (dest_mode)
2028 >= GET_MODE_ALIGNMENT (tmp_mode))
2029 {
2030 dest = assign_stack_temp (dest_mode,
2031 GET_MODE_SIZE (dest_mode),
2032 0);
2033 emit_move_insn (adjust_address (dest,
2034 tmp_mode,
2035 bytepos),
2036 tmps[i]);
2037 dst = dest;
2038 }
2039 else
2040 {
2041 dest = assign_stack_temp (tmp_mode,
2042 GET_MODE_SIZE (tmp_mode),
2043 0);
2044 emit_move_insn (dest, tmps[i]);
2045 dst = adjust_address (dest, dest_mode, bytepos);
2046 }
2047 break;
2048 }
2049 }
2050
2051 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2052 {
2053 /* store_bit_field always takes its value from the lsb.
2054 Move the fragment to the lsb if it's not already there. */
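/* For instance, with ssize == 6, bytepos == 4 and a 4-byte mode, only
   adj_bytelen == 2 bytes are significant; when the fragment lives in
   the most significant end of the register (the big-endian case), it
   is first shifted right by (4 - 2) * BITS_PER_UNIT == 16 bits.  */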
2055 if (
2056 #ifdef BLOCK_REG_PADDING
2057 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2058 == (BYTES_BIG_ENDIAN ? upward : downward)
2059 #else
2060 BYTES_BIG_ENDIAN
2061 #endif
2062 )
2063 {
2064 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2065 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2066 shift, tmps[i], 0);
2067 }
2068 bytelen = adj_bytelen;
2069 }
2070
2071 /* Optimize the access just a bit. */
2072 if (MEM_P (dest)
2073 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2074 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2075 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2076 && bytelen == GET_MODE_SIZE (mode))
2077 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2078 else
2079 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2080 0, 0, mode, tmps[i]);
2081 }
2082
2083 /* Copy from the pseudo into the (probable) hard reg. */
2084 if (orig_dst != dst)
2085 emit_move_insn (orig_dst, dst);
2086 }
2087
2088 /* Generate code to copy a BLKmode object of TYPE out of a
2089 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2090 is null, a stack temporary is created. TGTBLK is returned.
2091
2092 The purpose of this routine is to handle functions that return
2093 BLKmode structures in registers. Some machines (the PA for example)
2094 want to return all small structures in registers regardless of the
2095 structure's alignment. */
2096
2097 rtx
2098 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2099 {
2100 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2101 rtx src = NULL, dst = NULL;
2102 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2103 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2104 enum machine_mode copy_mode;
2105
2106 if (tgtblk == 0)
2107 {
2108 tgtblk = assign_temp (build_qualified_type (type,
2109 (TYPE_QUALS (type)
2110 | TYPE_QUAL_CONST)),
2111 0, 1, 1);
2112 preserve_temp_slots (tgtblk);
2113 }
2114
2115 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2116 into a new pseudo which is a full word. */
2117
2118 if (GET_MODE (srcreg) != BLKmode
2119 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2120 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2121
2122 /* If the structure doesn't take up a whole number of words, see whether
2123 SRCREG is padded on the left or on the right. If it's on the left,
2124 set PADDING_CORRECTION to the number of bits to skip.
2125
2126 In most ABIs, the structure will be returned at the least significant end of
2127 the register, which translates to right padding on little-endian
2128 targets and left padding on big-endian targets. The opposite
2129 holds if the structure is returned at the most significant
2130 end of the register. */
2131 if (bytes % UNITS_PER_WORD != 0
2132 && (targetm.calls.return_in_msb (type)
2133 ? !BYTES_BIG_ENDIAN
2134 : BYTES_BIG_ENDIAN))
2135 padding_correction
2136 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
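  /* As a concrete example (assuming a 32-bit big-endian target with
     UNITS_PER_WORD == 4 that does not return in the MSB): a 6-byte
     structure gives padding_correction = 32 - (6 % 4) * 8 = 16, so the
     first 16 bits of the first word of SRCREG are skipped.  */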
2137
2138 /* Copy the structure BITSIZE bits at a time. If the target lives in
2139 memory, take care of not reading/writing past its end by selecting
2140 a copy mode suited to BITSIZE. This should always be possible given
2141 how it is computed.
2142
2143 We could probably emit more efficient code for machines which do not use
2144 strict alignment, but it doesn't seem worth the effort at the current
2145 time. */
2146
2147 copy_mode = word_mode;
2148 if (MEM_P (tgtblk))
2149 {
2150 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2151 if (mem_mode != BLKmode)
2152 copy_mode = mem_mode;
2153 }
2154
2155 for (bitpos = 0, xbitpos = padding_correction;
2156 bitpos < bytes * BITS_PER_UNIT;
2157 bitpos += bitsize, xbitpos += bitsize)
2158 {
2159 /* We need a new source operand each time xbitpos is on a
2160 word boundary and when xbitpos == padding_correction
2161 (the first time through). */
2162 if (xbitpos % BITS_PER_WORD == 0
2163 || xbitpos == padding_correction)
2164 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2165 GET_MODE (srcreg));
2166
2167 /* We need a new destination operand each time bitpos is on
2168 a word boundary. */
2169 if (bitpos % BITS_PER_WORD == 0)
2170 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2171
2172 /* Use xbitpos for the source extraction (right justified) and
2173 bitpos for the destination store (left justified). */
2174 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2175 extract_bit_field (src, bitsize,
2176 xbitpos % BITS_PER_WORD, 1, false,
2177 NULL_RTX, copy_mode, copy_mode));
2178 }
2179
2180 return tgtblk;
2181 }
2182
2183 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2184 register if it contains any data, otherwise return null.
2185
2186 This is used on targets that return BLKmode values in registers. */
2187
2188 rtx
2189 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2190 {
2191 int i, n_regs;
2192 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2193 unsigned int bitsize;
2194 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2195 enum machine_mode dst_mode;
2196
2197 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2198
2199 x = expand_normal (src);
2200
2201 bytes = int_size_in_bytes (TREE_TYPE (src));
2202 if (bytes == 0)
2203 return NULL_RTX;
2204
2205 /* If the structure doesn't take up a whole number of words, see
2206 whether the register value should be padded on the left or on
2207 the right. Set PADDING_CORRECTION to the number of padding
2208 bits needed on the left side.
2209
2210 In most ABIs, the structure will be returned at the least significant end of
2211 the register, which translates to right padding on little-endian
2212 targets and left padding on big-endian targets. The opposite
2213 holds if the structure is returned at the most significant
2214 end of the register. */
2215 if (bytes % UNITS_PER_WORD != 0
2216 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2217 ? !BYTES_BIG_ENDIAN
2218 : BYTES_BIG_ENDIAN))
2219 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2220 * BITS_PER_UNIT));
2221
2222 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2223 dst_words = XALLOCAVEC (rtx, n_regs);
2224 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2225
2226 /* Copy the structure BITSIZE bits at a time. */
2227 for (bitpos = 0, xbitpos = padding_correction;
2228 bitpos < bytes * BITS_PER_UNIT;
2229 bitpos += bitsize, xbitpos += bitsize)
2230 {
2231 /* We need a new destination pseudo each time xbitpos is
2232 on a word boundary and when xbitpos == padding_correction
2233 (the first time through). */
2234 if (xbitpos % BITS_PER_WORD == 0
2235 || xbitpos == padding_correction)
2236 {
2237 /* Generate an appropriate register. */
2238 dst_word = gen_reg_rtx (word_mode);
2239 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2240
2241 /* Clear the destination before we move anything into it. */
2242 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2243 }
2244
2245 /* We need a new source operand each time bitpos is on a word
2246 boundary. */
2247 if (bitpos % BITS_PER_WORD == 0)
2248 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2249
2250 /* Use bitpos for the source extraction (left justified) and
2251 xbitpos for the destination store (right justified). */
2252 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2253 0, 0, word_mode,
2254 extract_bit_field (src_word, bitsize,
2255 bitpos % BITS_PER_WORD, 1, false,
2256 NULL_RTX, word_mode, word_mode));
2257 }
2258
2259 if (mode == BLKmode)
2260 {
2261 /* Find the smallest integer mode large enough to hold the
2262 entire structure. */
2263 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2264 mode != VOIDmode;
2265 mode = GET_MODE_WIDER_MODE (mode))
2266 /* Have we found a large enough mode? */
2267 if (GET_MODE_SIZE (mode) >= bytes)
2268 break;
2269
2270 /* A suitable mode should have been found. */
2271 gcc_assert (mode != VOIDmode);
2272 }
2273
2274 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2275 dst_mode = word_mode;
2276 else
2277 dst_mode = mode;
2278 dst = gen_reg_rtx (dst_mode);
2279
2280 for (i = 0; i < n_regs; i++)
2281 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2282
2283 if (mode != dst_mode)
2284 dst = gen_lowpart (mode, dst);
2285
2286 return dst;
2287 }
2288
2289 /* Add a USE expression for REG to the (possibly empty) list pointed
2290 to by CALL_FUSAGE. REG must denote a hard register. */
2291
2292 void
2293 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2294 {
2295 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2296
2297 *call_fusage
2298 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2299 }
2300
2301 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2302 starting at REGNO. All of these registers must be hard registers. */
2303
2304 void
2305 use_regs (rtx *call_fusage, int regno, int nregs)
2306 {
2307 int i;
2308
2309 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2310
2311 for (i = 0; i < nregs; i++)
2312 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2313 }
2314
2315 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2316 PARALLEL REGS. This is for calls that pass values in multiple
2317 non-contiguous locations. The Irix 6 ABI has examples of this. */
2318
2319 void
2320 use_group_regs (rtx *call_fusage, rtx regs)
2321 {
2322 int i;
2323
2324 for (i = 0; i < XVECLEN (regs, 0); i++)
2325 {
2326 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2327
2328 /* A NULL entry means the parameter goes both on the stack and in
2329 registers. This can also be a MEM for targets that pass values
2330 partially on the stack and partially in registers. */
2331 if (reg != 0 && REG_P (reg))
2332 use_reg (call_fusage, reg);
2333 }
2334 }
2335
2336 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2337 assigment and the code of the expresion on the RHS is CODE. Return
2338 NULL otherwise. */
2339
2340 static gimple
2341 get_def_for_expr (tree name, enum tree_code code)
2342 {
2343 gimple def_stmt;
2344
2345 if (TREE_CODE (name) != SSA_NAME)
2346 return NULL;
2347
2348 def_stmt = get_gimple_for_ssa_name (name);
2349 if (!def_stmt
2350 || gimple_assign_rhs_code (def_stmt) != code)
2351 return NULL;
2352
2353 return def_stmt;
2354 }
2355 \f
2356
2357 /* Determine whether the LEN bytes generated by CONSTFUN can be
2358 stored to memory using several move instructions. CONSTFUNDATA is
2359 a pointer which will be passed as argument in every CONSTFUN call.
2360 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2361 a memset operation and false if it's a copy of a constant string.
2362 Return nonzero if a call to store_by_pieces should succeed. */
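/* A minimal illustration (the helper below is hypothetical, not part
   of this file): a memset-style CONSTFUN simply broadcasts one byte
   into whatever mode it is asked for, e.g.

     static rtx
     broadcast_byte (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                     enum machine_mode mode)
     {
       unsigned char c = *(unsigned char *) data;
       unsigned HOST_WIDE_INT val = 0;
       unsigned int i;

       /* Replicate the byte into every byte position of MODE.  */
       for (i = 0; i < GET_MODE_SIZE (mode); i++)
         val = (val << BITS_PER_UNIT) | c;
       return gen_int_mode (val, mode);
     }

   A caller would then check
   can_store_by_pieces (len, broadcast_byte, &c, align, true) before
   committing to store_by_pieces instead of a library call.  */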
2363
2364 int
2365 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2366 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2367 void *constfundata, unsigned int align, bool memsetp)
2368 {
2369 unsigned HOST_WIDE_INT l;
2370 unsigned int max_size;
2371 HOST_WIDE_INT offset = 0;
2372 enum machine_mode mode;
2373 enum insn_code icode;
2374 int reverse;
2375 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2376 rtx cst ATTRIBUTE_UNUSED;
2377
2378 if (len == 0)
2379 return 1;
2380
2381 if (! (memsetp
2382 ? SET_BY_PIECES_P (len, align)
2383 : STORE_BY_PIECES_P (len, align)))
2384 return 0;
2385
2386 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2387
2388 /* We would first store what we can in the largest integer mode, then go to
2389 successively smaller modes. */
2390
2391 for (reverse = 0;
2392 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2393 reverse++)
2394 {
2395 l = len;
2396 max_size = STORE_MAX_PIECES + 1;
2397 while (max_size > 1)
2398 {
2399 mode = widest_int_mode_for_size (max_size);
2400
2401 if (mode == VOIDmode)
2402 break;
2403
2404 icode = optab_handler (mov_optab, mode);
2405 if (icode != CODE_FOR_nothing
2406 && align >= GET_MODE_ALIGNMENT (mode))
2407 {
2408 unsigned int size = GET_MODE_SIZE (mode);
2409
2410 while (l >= size)
2411 {
2412 if (reverse)
2413 offset -= size;
2414
2415 cst = (*constfun) (constfundata, offset, mode);
2416 if (!targetm.legitimate_constant_p (mode, cst))
2417 return 0;
2418
2419 if (!reverse)
2420 offset += size;
2421
2422 l -= size;
2423 }
2424 }
2425
2426 max_size = GET_MODE_SIZE (mode);
2427 }
2428
2429 /* The code above should have handled everything. */
2430 gcc_assert (!l);
2431 }
2432
2433 return 1;
2434 }
2435
2436 /* Generate several move instructions to store LEN bytes generated by
2437 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2438 pointer which will be passed as argument in every CONSTFUN call.
2439 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2440 a memset operation and false if it's a copy of a constant string.
2441 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2442 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2443 stpcpy. */
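/* For instance, after storing 5 bytes starting at TO, ENDP == 1 yields
   a QImode reference to TO + 5 (one past the last byte written) and
   ENDP == 2 yields a reference to TO + 4 (the last byte written).  */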
2444
2445 rtx
2446 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2447 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2448 void *constfundata, unsigned int align, bool memsetp, int endp)
2449 {
2450 enum machine_mode to_addr_mode
2451 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2452 struct store_by_pieces_d data;
2453
2454 if (len == 0)
2455 {
2456 gcc_assert (endp != 2);
2457 return to;
2458 }
2459
2460 gcc_assert (memsetp
2461 ? SET_BY_PIECES_P (len, align)
2462 : STORE_BY_PIECES_P (len, align));
2463 data.constfun = constfun;
2464 data.constfundata = constfundata;
2465 data.len = len;
2466 data.to = to;
2467 store_by_pieces_1 (&data, align);
2468 if (endp)
2469 {
2470 rtx to1;
2471
2472 gcc_assert (!data.reverse);
2473 if (data.autinc_to)
2474 {
2475 if (endp == 2)
2476 {
2477 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2478 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2479 else
2480 data.to_addr = copy_to_mode_reg (to_addr_mode,
2481 plus_constant (data.to_addr,
2482 -1));
2483 }
2484 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2485 data.offset);
2486 }
2487 else
2488 {
2489 if (endp == 2)
2490 --data.offset;
2491 to1 = adjust_address (data.to, QImode, data.offset);
2492 }
2493 return to1;
2494 }
2495 else
2496 return data.to;
2497 }
2498
2499 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2500 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2501
2502 static void
2503 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2504 {
2505 struct store_by_pieces_d data;
2506
2507 if (len == 0)
2508 return;
2509
2510 data.constfun = clear_by_pieces_1;
2511 data.constfundata = NULL;
2512 data.len = len;
2513 data.to = to;
2514 store_by_pieces_1 (&data, align);
2515 }
2516
2517 /* Callback routine for clear_by_pieces.
2518 Return const0_rtx unconditionally. */
2519
2520 static rtx
2521 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2522 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2523 enum machine_mode mode ATTRIBUTE_UNUSED)
2524 {
2525 return const0_rtx;
2526 }
2527
2528 /* Subroutine of clear_by_pieces and store_by_pieces.
2529 Generate several move instructions to store LEN bytes of block TO. (A MEM
2530 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2531
2532 static void
2533 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2534 unsigned int align ATTRIBUTE_UNUSED)
2535 {
2536 enum machine_mode to_addr_mode
2537 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2538 rtx to_addr = XEXP (data->to, 0);
2539 unsigned int max_size = STORE_MAX_PIECES + 1;
2540 enum insn_code icode;
2541
2542 data->offset = 0;
2543 data->to_addr = to_addr;
2544 data->autinc_to
2545 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2546 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2547
2548 data->explicit_inc_to = 0;
2549 data->reverse
2550 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2551 if (data->reverse)
2552 data->offset = data->len;
2553
2554 /* If storing requires more than two move insns,
2555 copy addresses to registers (to make displacements shorter)
2556 and use post-increment if available. */
2557 if (!data->autinc_to
2558 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2559 {
2560 /* Determine the main mode we'll be using.
2561 MODE might not be used depending on the definitions of the
2562 USE_* macros below. */
2563 enum machine_mode mode ATTRIBUTE_UNUSED
2564 = widest_int_mode_for_size (max_size);
2565
2566 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2567 {
2568 data->to_addr = copy_to_mode_reg (to_addr_mode,
2569 plus_constant (to_addr, data->len));
2570 data->autinc_to = 1;
2571 data->explicit_inc_to = -1;
2572 }
2573
2574 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2575 && ! data->autinc_to)
2576 {
2577 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2578 data->autinc_to = 1;
2579 data->explicit_inc_to = 1;
2580 }
2581
2582 if ( !data->autinc_to && CONSTANT_P (to_addr))
2583 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2584 }
2585
2586 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2587
2588 /* First store what we can in the largest integer mode, then go to
2589 successively smaller modes. */
2590
2591 while (max_size > 1)
2592 {
2593 enum machine_mode mode = widest_int_mode_for_size (max_size);
2594
2595 if (mode == VOIDmode)
2596 break;
2597
2598 icode = optab_handler (mov_optab, mode);
2599 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2600 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2601
2602 max_size = GET_MODE_SIZE (mode);
2603 }
2604
2605 /* The code above should have handled everything. */
2606 gcc_assert (!data->len);
2607 }
2608
2609 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2610 with move instructions for mode MODE. GENFUN is the gen_... function
2611 to make a move insn for that mode. DATA has all the other info. */
2612
2613 static void
2614 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2615 struct store_by_pieces_d *data)
2616 {
2617 unsigned int size = GET_MODE_SIZE (mode);
2618 rtx to1, cst;
2619
2620 while (data->len >= size)
2621 {
2622 if (data->reverse)
2623 data->offset -= size;
2624
2625 if (data->autinc_to)
2626 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2627 data->offset);
2628 else
2629 to1 = adjust_address (data->to, mode, data->offset);
2630
2631 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2632 emit_insn (gen_add2_insn (data->to_addr,
2633 GEN_INT (-(HOST_WIDE_INT) size)));
2634
2635 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2636 emit_insn ((*genfun) (to1, cst));
2637
2638 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2639 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2640
2641 if (! data->reverse)
2642 data->offset += size;
2643
2644 data->len -= size;
2645 }
2646 }
2647 \f
2648 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2649 its length in bytes. */
2650
2651 rtx
2652 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2653 unsigned int expected_align, HOST_WIDE_INT expected_size)
2654 {
2655 enum machine_mode mode = GET_MODE (object);
2656 unsigned int align;
2657
2658 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2659
2660 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2661 just move a zero. Otherwise, do this a piece at a time. */
2662 if (mode != BLKmode
2663 && CONST_INT_P (size)
2664 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2665 {
2666 rtx zero = CONST0_RTX (mode);
2667 if (zero != NULL)
2668 {
2669 emit_move_insn (object, zero);
2670 return NULL;
2671 }
2672
2673 if (COMPLEX_MODE_P (mode))
2674 {
2675 zero = CONST0_RTX (GET_MODE_INNER (mode));
2676 if (zero != NULL)
2677 {
2678 write_complex_part (object, zero, 0);
2679 write_complex_part (object, zero, 1);
2680 return NULL;
2681 }
2682 }
2683 }
2684
2685 if (size == const0_rtx)
2686 return NULL;
2687
2688 align = MEM_ALIGN (object);
2689
2690 if (CONST_INT_P (size)
2691 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2692 clear_by_pieces (object, INTVAL (size), align);
2693 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2694 expected_align, expected_size))
2695 ;
2696 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2697 return set_storage_via_libcall (object, size, const0_rtx,
2698 method == BLOCK_OP_TAILCALL);
2699 else
2700 gcc_unreachable ();
2701
2702 return NULL;
2703 }
2704
2705 rtx
2706 clear_storage (rtx object, rtx size, enum block_op_methods method)
2707 {
2708 return clear_storage_hints (object, size, method, 0, -1);
2709 }
2710
2711
2712 /* A subroutine of clear_storage. Expand a call to memset.
2713 Return the return value of memset, 0 otherwise. */
2714
2715 rtx
2716 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2717 {
2718 tree call_expr, fn, object_tree, size_tree, val_tree;
2719 enum machine_mode size_mode;
2720 rtx retval;
2721
2722 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2723 place those pseudos into a VAR_DECL and use them later. */
2724
2725 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2726
2727 size_mode = TYPE_MODE (sizetype);
2728 size = convert_to_mode (size_mode, size, 1);
2729 size = copy_to_mode_reg (size_mode, size);
2730
2731 /* It is incorrect to use the libcall calling conventions to call
2732 memset in this context. This could be a user call to memset and
2733 the user may wish to examine the return value from memset. For
2734 targets where libcalls and normal calls have different conventions
2735 for returning pointers, we could end up generating incorrect code. */
2736
2737 object_tree = make_tree (ptr_type_node, object);
2738 if (!CONST_INT_P (val))
2739 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2740 size_tree = make_tree (sizetype, size);
2741 val_tree = make_tree (integer_type_node, val);
2742
2743 fn = clear_storage_libcall_fn (true);
2744 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2745 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2746
2747 retval = expand_normal (call_expr);
2748
2749 return retval;
2750 }
2751
2752 /* A subroutine of set_storage_via_libcall. Create the tree node
2753 for the function we use for block clears. The first time FOR_CALL
2754 is true, we call assemble_external. */
2755
2756 tree block_clear_fn;
2757
2758 void
2759 init_block_clear_fn (const char *asmspec)
2760 {
2761 if (!block_clear_fn)
2762 {
2763 tree fn, args;
2764
2765 fn = get_identifier ("memset");
2766 args = build_function_type_list (ptr_type_node, ptr_type_node,
2767 integer_type_node, sizetype,
2768 NULL_TREE);
2769
2770 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2771 DECL_EXTERNAL (fn) = 1;
2772 TREE_PUBLIC (fn) = 1;
2773 DECL_ARTIFICIAL (fn) = 1;
2774 TREE_NOTHROW (fn) = 1;
2775 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2776 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2777
2778 block_clear_fn = fn;
2779 }
2780
2781 if (asmspec)
2782 set_user_assembler_name (block_clear_fn, asmspec);
2783 }
2784
2785 static tree
2786 clear_storage_libcall_fn (int for_call)
2787 {
2788 static bool emitted_extern;
2789
2790 if (!block_clear_fn)
2791 init_block_clear_fn (NULL);
2792
2793 if (for_call && !emitted_extern)
2794 {
2795 emitted_extern = true;
2796 make_decl_rtl (block_clear_fn);
2797 assemble_external (block_clear_fn);
2798 }
2799
2800 return block_clear_fn;
2801 }
2802 \f
2803 /* Expand a setmem pattern; return true if successful. */
2804
2805 bool
2806 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2807 unsigned int expected_align, HOST_WIDE_INT expected_size)
2808 {
2809 /* Try the most limited insn first, because there's no point
2810 including more than one in the machine description unless
2811 the more limited one has some advantage. */
2812
2813 enum machine_mode mode;
2814
2815 if (expected_align < align)
2816 expected_align = align;
2817
2818 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2819 mode = GET_MODE_WIDER_MODE (mode))
2820 {
2821 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2822
2823 if (code != CODE_FOR_nothing
2824 /* We don't need MODE to be narrower than
2825 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2826 the mode mask, as it is returned by the macro, it will
2827 definitely be less than the actual mode mask. */
2828 && ((CONST_INT_P (size)
2829 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2830 <= (GET_MODE_MASK (mode) >> 1)))
2831 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
2832 {
2833 struct expand_operand ops[6];
2834 unsigned int nops;
2835
2836 nops = insn_data[(int) code].n_generator_args;
2837 gcc_assert (nops == 4 || nops == 6);
2838
2839 create_fixed_operand (&ops[0], object);
2840 /* The check above guarantees that this size conversion is valid. */
2841 create_convert_operand_to (&ops[1], size, mode, true);
2842 create_convert_operand_from (&ops[2], val, byte_mode, true);
2843 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2844 if (nops == 6)
2845 {
2846 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2847 create_integer_operand (&ops[5], expected_size);
2848 }
2849 if (maybe_expand_insn (code, nops, ops))
2850 return true;
2851 }
2852 }
2853
2854 return false;
2855 }
2856
2857 \f
2858 /* Write to one of the components of the complex value CPLX. Write VAL to
2859 the real part if IMAG_P is false, and the imaginary part if it's true. */
2860
2861 static void
2862 write_complex_part (rtx cplx, rtx val, bool imag_p)
2863 {
2864 enum machine_mode cmode;
2865 enum machine_mode imode;
2866 unsigned ibitsize;
2867
2868 if (GET_CODE (cplx) == CONCAT)
2869 {
2870 emit_move_insn (XEXP (cplx, imag_p), val);
2871 return;
2872 }
2873
2874 cmode = GET_MODE (cplx);
2875 imode = GET_MODE_INNER (cmode);
2876 ibitsize = GET_MODE_BITSIZE (imode);
2877
2878 /* For MEMs simplify_gen_subreg may generate an invalid new address
2879 because, e.g., the original address is considered mode-dependent
2880 by the target, which restricts simplify_subreg from invoking
2881 adjust_address_nv. Instead of preparing fallback support for an
2882 invalid address, we call adjust_address_nv directly. */
2883 if (MEM_P (cplx))
2884 {
2885 emit_move_insn (adjust_address_nv (cplx, imode,
2886 imag_p ? GET_MODE_SIZE (imode) : 0),
2887 val);
2888 return;
2889 }
2890
2891 /* If the sub-object is at least word sized, then we know that subregging
2892 will work. This special case is important, since store_bit_field
2893 wants to operate on integer modes, and there's rarely an OImode to
2894 correspond to TCmode. */
2895 if (ibitsize >= BITS_PER_WORD
2896 /* For hard regs we have exact predicates. Assume we can split
2897 the original object if it spans an even number of hard regs.
2898 This special case is important for SCmode on 64-bit platforms
2899 where the natural size of floating-point regs is 32-bit. */
2900 || (REG_P (cplx)
2901 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2902 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2903 {
2904 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2905 imag_p ? GET_MODE_SIZE (imode) : 0);
2906 if (part)
2907 {
2908 emit_move_insn (part, val);
2909 return;
2910 }
2911 else
2912 /* simplify_gen_subreg may fail for sub-word MEMs. */
2913 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2914 }
2915
2916 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
2917 }
2918
2919 /* Extract one of the components of the complex value CPLX. Extract the
2920 real part if IMAG_P is false, and the imaginary part if it's true. */
2921
2922 static rtx
2923 read_complex_part (rtx cplx, bool imag_p)
2924 {
2925 enum machine_mode cmode, imode;
2926 unsigned ibitsize;
2927
2928 if (GET_CODE (cplx) == CONCAT)
2929 return XEXP (cplx, imag_p);
2930
2931 cmode = GET_MODE (cplx);
2932 imode = GET_MODE_INNER (cmode);
2933 ibitsize = GET_MODE_BITSIZE (imode);
2934
2935 /* Special case reads from complex constants that got spilled to memory. */
2936 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2937 {
2938 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2939 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2940 {
2941 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2942 if (CONSTANT_CLASS_P (part))
2943 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2944 }
2945 }
2946
2947 /* For MEMs simplify_gen_subreg may generate an invalid new address
2948 because, e.g., the original address is considered mode-dependent
2949 by the target, which restricts simplify_subreg from invoking
2950 adjust_address_nv. Instead of preparing fallback support for an
2951 invalid address, we call adjust_address_nv directly. */
2952 if (MEM_P (cplx))
2953 return adjust_address_nv (cplx, imode,
2954 imag_p ? GET_MODE_SIZE (imode) : 0);
2955
2956 /* If the sub-object is at least word sized, then we know that subregging
2957 will work. This special case is important, since extract_bit_field
2958 wants to operate on integer modes, and there's rarely an OImode to
2959 correspond to TCmode. */
2960 if (ibitsize >= BITS_PER_WORD
2961 /* For hard regs we have exact predicates. Assume we can split
2962 the original object if it spans an even number of hard regs.
2963 This special case is important for SCmode on 64-bit platforms
2964 where the natural size of floating-point regs is 32-bit. */
2965 || (REG_P (cplx)
2966 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2967 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2968 {
2969 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2970 imag_p ? GET_MODE_SIZE (imode) : 0);
2971 if (ret)
2972 return ret;
2973 else
2974 /* simplify_gen_subreg may fail for sub-word MEMs. */
2975 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2976 }
2977
2978 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2979 true, false, NULL_RTX, imode, imode);
2980 }
2981 \f
2982 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2983 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2984 represented in NEW_MODE. If FORCE is true, this will never happen, as
2985 we'll force-create a SUBREG if needed. */
2986
2987 static rtx
2988 emit_move_change_mode (enum machine_mode new_mode,
2989 enum machine_mode old_mode, rtx x, bool force)
2990 {
2991 rtx ret;
2992
2993 if (push_operand (x, GET_MODE (x)))
2994 {
2995 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2996 MEM_COPY_ATTRIBUTES (ret, x);
2997 }
2998 else if (MEM_P (x))
2999 {
3000 /* We don't have to worry about changing the address since the
3001 size in bytes is supposed to be the same. */
3002 if (reload_in_progress)
3003 {
3004 /* Copy the MEM to change the mode and move any
3005 substitutions from the old MEM to the new one. */
3006 ret = adjust_address_nv (x, new_mode, 0);
3007 copy_replacements (x, ret);
3008 }
3009 else
3010 ret = adjust_address (x, new_mode, 0);
3011 }
3012 else
3013 {
3014 /* Note that we do want simplify_subreg's behavior of validating
3015 that the new mode is ok for a hard register. If we were to use
3016 simplify_gen_subreg, we would create the subreg, but would
3017 probably run into the target not being able to implement it. */
3018 /* Except, of course, when FORCE is true, when this is exactly what
3019 we want. Which is needed for CCmodes on some targets. */
3020 if (force)
3021 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3022 else
3023 ret = simplify_subreg (new_mode, x, old_mode, 0);
3024 }
3025
3026 return ret;
3027 }
3028
3029 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3030 an integer mode of the same size as MODE. Returns the instruction
3031 emitted, or NULL if such a move could not be generated. */
3032
3033 static rtx
3034 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3035 {
3036 enum machine_mode imode;
3037 enum insn_code code;
3038
3039 /* There must exist a mode of the exact size we require. */
3040 imode = int_mode_for_mode (mode);
3041 if (imode == BLKmode)
3042 return NULL_RTX;
3043
3044 /* The target must support moves in this mode. */
3045 code = optab_handler (mov_optab, imode);
3046 if (code == CODE_FOR_nothing)
3047 return NULL_RTX;
3048
3049 x = emit_move_change_mode (imode, mode, x, force);
3050 if (x == NULL_RTX)
3051 return NULL_RTX;
3052 y = emit_move_change_mode (imode, mode, y, force);
3053 if (y == NULL_RTX)
3054 return NULL_RTX;
3055 return emit_insn (GEN_FCN (code) (x, y));
3056 }
3057
3058 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3059 Return an equivalent MEM that does not use an auto-increment. */
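/* For example, on a target where the stack grows downward, an SImode
   push operand (mem:SI (pre_dec (reg sp))) is resolved by emitting an
   explicit "sp = sp - 4" (modulo PUSH_ROUNDING) and returning the
   plain (mem:SI (reg sp)) that addresses the newly allocated slot.  */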
3060
3061 static rtx
3062 emit_move_resolve_push (enum machine_mode mode, rtx x)
3063 {
3064 enum rtx_code code = GET_CODE (XEXP (x, 0));
3065 HOST_WIDE_INT adjust;
3066 rtx temp;
3067
3068 adjust = GET_MODE_SIZE (mode);
3069 #ifdef PUSH_ROUNDING
3070 adjust = PUSH_ROUNDING (adjust);
3071 #endif
3072 if (code == PRE_DEC || code == POST_DEC)
3073 adjust = -adjust;
3074 else if (code == PRE_MODIFY || code == POST_MODIFY)
3075 {
3076 rtx expr = XEXP (XEXP (x, 0), 1);
3077 HOST_WIDE_INT val;
3078
3079 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3080 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3081 val = INTVAL (XEXP (expr, 1));
3082 if (GET_CODE (expr) == MINUS)
3083 val = -val;
3084 gcc_assert (adjust == val || adjust == -val);
3085 adjust = val;
3086 }
3087
3088 /* Do not use anti_adjust_stack, since we don't want to update
3089 stack_pointer_delta. */
3090 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3091 GEN_INT (adjust), stack_pointer_rtx,
3092 0, OPTAB_LIB_WIDEN);
3093 if (temp != stack_pointer_rtx)
3094 emit_move_insn (stack_pointer_rtx, temp);
3095
3096 switch (code)
3097 {
3098 case PRE_INC:
3099 case PRE_DEC:
3100 case PRE_MODIFY:
3101 temp = stack_pointer_rtx;
3102 break;
3103 case POST_INC:
3104 case POST_DEC:
3105 case POST_MODIFY:
3106 temp = plus_constant (stack_pointer_rtx, -adjust);
3107 break;
3108 default:
3109 gcc_unreachable ();
3110 }
3111
3112 return replace_equiv_address (x, temp);
3113 }
3114
3115 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3116 X is known to satisfy push_operand, and MODE is known to be complex.
3117 Returns the last instruction emitted. */
3118
3119 rtx
3120 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3121 {
3122 enum machine_mode submode = GET_MODE_INNER (mode);
3123 bool imag_first;
3124
3125 #ifdef PUSH_ROUNDING
3126 unsigned int submodesize = GET_MODE_SIZE (submode);
3127
3128 /* If we are pushing to the stack, but the size is smaller than what the
3129 machine can push exactly, we need to use move instructions. */
3130 if (PUSH_ROUNDING (submodesize) != submodesize)
3131 {
3132 x = emit_move_resolve_push (mode, x);
3133 return emit_move_insn (x, y);
3134 }
3135 #endif
3136
3137 /* Note that the real part always precedes the imag part in memory
3138 regardless of the machine's endianness. */
3139 switch (GET_CODE (XEXP (x, 0)))
3140 {
3141 case PRE_DEC:
3142 case POST_DEC:
3143 imag_first = true;
3144 break;
3145 case PRE_INC:
3146 case POST_INC:
3147 imag_first = false;
3148 break;
3149 default:
3150 gcc_unreachable ();
3151 }
3152
3153 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3154 read_complex_part (y, imag_first));
3155 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3156 read_complex_part (y, !imag_first));
3157 }
3158
3159 /* A subroutine of emit_move_complex. Perform the move from Y to X
3160 via two moves of the parts. Returns the last instruction emitted. */
3161
3162 rtx
3163 emit_move_complex_parts (rtx x, rtx y)
3164 {
3165 /* Show the output dies here. This is necessary for SUBREGs
3166 of pseudos since we cannot track their lifetimes correctly;
3167 hard regs shouldn't appear here except as return values. */
3168 if (!reload_completed && !reload_in_progress
3169 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3170 emit_clobber (x);
3171
3172 write_complex_part (x, read_complex_part (y, false), false);
3173 write_complex_part (x, read_complex_part (y, true), true);
3174
3175 return get_last_insn ();
3176 }
3177
3178 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3179 MODE is known to be complex. Returns the last instruction emitted. */
3180
3181 static rtx
3182 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3183 {
3184 bool try_int;
3185
3186 /* Need to take special care for pushes, to maintain proper ordering
3187 of the data, and possibly extra padding. */
3188 if (push_operand (x, mode))
3189 return emit_move_complex_push (mode, x, y);
3190
3191 /* See if we can coerce the target into moving both values at once. */
3192
3193 /* Move floating point as parts. */
3194 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3195 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3196 try_int = false;
3197 /* Not possible if the values are inherently not adjacent. */
3198 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3199 try_int = false;
3200 /* Is possible if both are registers (or subregs of registers). */
3201 else if (register_operand (x, mode) && register_operand (y, mode))
3202 try_int = true;
3203 /* If one of the operands is a memory, and alignment constraints
3204 are friendly enough, we may be able to do combined memory operations.
3205 We do not attempt this if Y is a constant because that combination is
3206 usually better with the by-parts thing below. */
3207 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3208 && (!STRICT_ALIGNMENT
3209 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3210 try_int = true;
3211 else
3212 try_int = false;
3213
3214 if (try_int)
3215 {
3216 rtx ret;
3217
3218 /* For memory to memory moves, optimal behavior can be had with the
3219 existing block move logic. */
3220 if (MEM_P (x) && MEM_P (y))
3221 {
3222 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3223 BLOCK_OP_NO_LIBCALL);
3224 return get_last_insn ();
3225 }
3226
3227 ret = emit_move_via_integer (mode, x, y, true);
3228 if (ret)
3229 return ret;
3230 }
3231
3232 return emit_move_complex_parts (x, y);
3233 }
3234
3235 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3236 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3237
3238 static rtx
3239 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3240 {
3241 rtx ret;
3242
3243 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3244 if (mode != CCmode)
3245 {
3246 enum insn_code code = optab_handler (mov_optab, CCmode);
3247 if (code != CODE_FOR_nothing)
3248 {
3249 x = emit_move_change_mode (CCmode, mode, x, true);
3250 y = emit_move_change_mode (CCmode, mode, y, true);
3251 return emit_insn (GEN_FCN (code) (x, y));
3252 }
3253 }
3254
3255 /* Otherwise, find the MODE_INT mode of the same width. */
3256 ret = emit_move_via_integer (mode, x, y, false);
3257 gcc_assert (ret != NULL);
3258 return ret;
3259 }
3260
3261 /* Return true if word I of OP lies entirely in the
3262 undefined bits of a paradoxical subreg. */
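/* For instance, on a little-endian 64-bit target, word 1 of
   (subreg:TI (reg:DI) 0) lies entirely outside the DImode source, so
   its contents are undefined and no move needs to be emitted for it.  */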
3263
3264 static bool
3265 undefined_operand_subword_p (const_rtx op, int i)
3266 {
3267 enum machine_mode innermode, innermostmode;
3268 int offset;
3269 if (GET_CODE (op) != SUBREG)
3270 return false;
3271 innermode = GET_MODE (op);
3272 innermostmode = GET_MODE (SUBREG_REG (op));
3273 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3274 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3275 memory, except for a paradoxical subreg where we define
3276 SUBREG_BYTE to be 0; undo this exception as in
3277 simplify_subreg. */
3278 if (SUBREG_BYTE (op) == 0
3279 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3280 {
3281 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3282 if (WORDS_BIG_ENDIAN)
3283 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3284 if (BYTES_BIG_ENDIAN)
3285 offset += difference % UNITS_PER_WORD;
3286 }
3287 if (offset >= GET_MODE_SIZE (innermostmode)
3288 || offset <= -GET_MODE_SIZE (word_mode))
3289 return true;
3290 return false;
3291 }
3292
3293 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3294 MODE is any multi-word or full-word mode that lacks a move_insn
3295 pattern. Note that you will get better code if you define such
3296 patterns, even if they must turn into multiple assembler instructions. */
3297
3298 static rtx
3299 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3300 {
3301 rtx last_insn = 0;
3302 rtx seq, inner;
3303 bool need_clobber;
3304 int i;
3305
3306 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3307
3308 /* If X is a push on the stack, do the push now and replace
3309 X with a reference to the stack pointer. */
3310 if (push_operand (x, mode))
3311 x = emit_move_resolve_push (mode, x);
3312
3313 /* If we are in reload, see if either operand is a MEM whose address
3314 is scheduled for replacement. */
3315 if (reload_in_progress && MEM_P (x)
3316 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3317 x = replace_equiv_address_nv (x, inner);
3318 if (reload_in_progress && MEM_P (y)
3319 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3320 y = replace_equiv_address_nv (y, inner);
3321
3322 start_sequence ();
3323
3324 need_clobber = false;
3325 for (i = 0;
3326 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3327 i++)
3328 {
3329 rtx xpart = operand_subword (x, i, 1, mode);
3330 rtx ypart;
3331
3332 /* Do not generate code for a move if it would come entirely
3333 from the undefined bits of a paradoxical subreg. */
3334 if (undefined_operand_subword_p (y, i))
3335 continue;
3336
3337 ypart = operand_subword (y, i, 1, mode);
3338
3339 /* If we can't get a part of Y, put Y into memory if it is a
3340 constant. Otherwise, force it into a register. Then we must
3341 be able to get a part of Y. */
3342 if (ypart == 0 && CONSTANT_P (y))
3343 {
3344 y = use_anchored_address (force_const_mem (mode, y));
3345 ypart = operand_subword (y, i, 1, mode);
3346 }
3347 else if (ypart == 0)
3348 ypart = operand_subword_force (y, i, mode);
3349
3350 gcc_assert (xpart && ypart);
3351
3352 need_clobber |= (GET_CODE (xpart) == SUBREG);
3353
3354 last_insn = emit_move_insn (xpart, ypart);
3355 }
3356
3357 seq = get_insns ();
3358 end_sequence ();
3359
3360 /* Show the output dies here. This is necessary for SUBREGs
3361 of pseudos since we cannot track their lifetimes correctly;
3362 hard regs shouldn't appear here except as return values.
3363 We never want to emit such a clobber after reload. */
3364 if (x != y
3365 && ! (reload_in_progress || reload_completed)
3366 && need_clobber != 0)
3367 emit_clobber (x);
3368
3369 emit_insn (seq);
3370
3371 return last_insn;
3372 }
3373
3374 /* Low level part of emit_move_insn.
3375 Called just like emit_move_insn, but assumes X and Y
3376 are basically valid. */
3377
3378 rtx
3379 emit_move_insn_1 (rtx x, rtx y)
3380 {
3381 enum machine_mode mode = GET_MODE (x);
3382 enum insn_code code;
3383
3384 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3385
3386 code = optab_handler (mov_optab, mode);
3387 if (code != CODE_FOR_nothing)
3388 return emit_insn (GEN_FCN (code) (x, y));
3389
3390 /* Expand complex moves by moving real part and imag part. */
3391 if (COMPLEX_MODE_P (mode))
3392 return emit_move_complex (mode, x, y);
3393
3394 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3395 || ALL_FIXED_POINT_MODE_P (mode))
3396 {
3397 rtx result = emit_move_via_integer (mode, x, y, true);
3398
3399 /* If we can't find an integer mode, use multi words. */
3400 if (result)
3401 return result;
3402 else
3403 return emit_move_multi_word (mode, x, y);
3404 }
3405
3406 if (GET_MODE_CLASS (mode) == MODE_CC)
3407 return emit_move_ccmode (mode, x, y);
3408
3409 /* Try using a move pattern for the corresponding integer mode. This is
3410 only safe when simplify_subreg can convert MODE constants into integer
3411 constants. At present, it can only do this reliably if the value
3412 fits within a HOST_WIDE_INT. */
3413 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3414 {
3415 rtx ret = emit_move_via_integer (mode, x, y, false);
3416 if (ret)
3417 return ret;
3418 }
3419
3420 return emit_move_multi_word (mode, x, y);
3421 }
3422
3423 /* Generate code to copy Y into X.
3424 Both Y and X must have the same mode, except that
3425 Y can be a constant with VOIDmode.
3426 This mode cannot be BLKmode; use emit_block_move for that.
3427
3428 Return the last instruction emitted. */
3429
3430 rtx
3431 emit_move_insn (rtx x, rtx y)
3432 {
3433 enum machine_mode mode = GET_MODE (x);
3434 rtx y_cst = NULL_RTX;
3435 rtx last_insn, set;
3436
3437 gcc_assert (mode != BLKmode
3438 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3439
3440 if (CONSTANT_P (y))
3441 {
3442 if (optimize
3443 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3444 && (last_insn = compress_float_constant (x, y)))
3445 return last_insn;
3446
3447 y_cst = y;
3448
3449 if (!targetm.legitimate_constant_p (mode, y))
3450 {
3451 y = force_const_mem (mode, y);
3452
3453 /* If the target's cannot_force_const_mem prevented the spill,
3454 assume that the target's move expanders will also take care
3455 of the non-legitimate constant. */
3456 if (!y)
3457 y = y_cst;
3458 else
3459 y = use_anchored_address (y);
3460 }
3461 }
3462
3463 /* If X or Y are memory references, verify that their addresses are valid
3464 for the machine. */
3465 if (MEM_P (x)
3466 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3467 MEM_ADDR_SPACE (x))
3468 && ! push_operand (x, GET_MODE (x))))
3469 x = validize_mem (x);
3470
3471 if (MEM_P (y)
3472 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3473 MEM_ADDR_SPACE (y)))
3474 y = validize_mem (y);
3475
3476 gcc_assert (mode != BLKmode);
3477
3478 last_insn = emit_move_insn_1 (x, y);
3479
3480 if (y_cst && REG_P (x)
3481 && (set = single_set (last_insn)) != NULL_RTX
3482 && SET_DEST (set) == x
3483 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3484 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3485
3486 return last_insn;
3487 }
3488
3489 /* If Y is representable exactly in a narrower mode, and the target can
3490 perform the extension directly from constant or memory, then emit the
3491 move as an extension. */
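/* For example, if Y is the DFmode constant 1.0 and the target can
   extend SFmode to DFmode directly from a constant or from memory
   (extendsfdf2), the move may be emitted as an SFmode load followed by
   a float extension, which is often cheaper than materializing the
   full DFmode constant.  */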
3492
3493 static rtx
3494 compress_float_constant (rtx x, rtx y)
3495 {
3496 enum machine_mode dstmode = GET_MODE (x);
3497 enum machine_mode orig_srcmode = GET_MODE (y);
3498 enum machine_mode srcmode;
3499 REAL_VALUE_TYPE r;
3500 int oldcost, newcost;
3501 bool speed = optimize_insn_for_speed_p ();
3502
3503 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3504
3505 if (targetm.legitimate_constant_p (dstmode, y))
3506 oldcost = set_src_cost (y, speed);
3507 else
3508 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3509
3510 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3511 srcmode != orig_srcmode;
3512 srcmode = GET_MODE_WIDER_MODE (srcmode))
3513 {
3514 enum insn_code ic;
3515 rtx trunc_y, last_insn;
3516
3517 /* Skip if the target can't extend this way. */
3518 ic = can_extend_p (dstmode, srcmode, 0);
3519 if (ic == CODE_FOR_nothing)
3520 continue;
3521
3522 /* Skip if the narrowed value isn't exact. */
3523 if (! exact_real_truncate (srcmode, &r))
3524 continue;
3525
3526 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3527
3528 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3529 {
3530 /* Skip if the target needs extra instructions to perform
3531 the extension. */
3532 if (!insn_operand_matches (ic, 1, trunc_y))
3533 continue;
3534 /* This is valid, but may not be cheaper than the original. */
3535 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3536 speed);
3537 if (oldcost < newcost)
3538 continue;
3539 }
3540 else if (float_extend_from_mem[dstmode][srcmode])
3541 {
3542 trunc_y = force_const_mem (srcmode, trunc_y);
3543 /* This is valid, but may not be cheaper than the original. */
3544 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3545 speed);
3546 if (oldcost < newcost)
3547 continue;
3548 trunc_y = validize_mem (trunc_y);
3549 }
3550 else
3551 continue;
3552
3553 /* For CSE's benefit, force the compressed constant pool entry
3554 into a new pseudo. This constant may be used in different modes,
3555 and if not, combine will put things back together for us. */
3556 trunc_y = force_reg (srcmode, trunc_y);
3557 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3558 last_insn = get_last_insn ();
3559
3560 if (REG_P (x))
3561 set_unique_reg_note (last_insn, REG_EQUAL, y);
3562
3563 return last_insn;
3564 }
3565
3566 return NULL_RTX;
3567 }
3568 \f
3569 /* Pushing data onto the stack. */
3570
3571 /* Push a block of length SIZE (perhaps variable)
3572 and return an rtx to address the beginning of the block.
3573 The value may be virtual_outgoing_args_rtx.
3574
3575 EXTRA is the number of bytes of padding to push in addition to SIZE.
3576 BELOW nonzero means this padding comes at low addresses;
3577 otherwise, the padding comes at high addresses. */
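/* A hypothetical caller sketch (illustrative only): to make room for a
   32-byte block with no extra padding one could do

       rtx addr = push_block (GEN_INT (32), 0, 0);
       rtx blk  = gen_rtx_MEM (BLKmode, addr);

   and then copy the data into BLK.  emit_push_insn below uses
   push_block in just this way when no argument block has been
   preallocated.  */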
3578
3579 rtx
3580 push_block (rtx size, int extra, int below)
3581 {
3582 rtx temp;
3583
3584 size = convert_modes (Pmode, ptr_mode, size, 1);
3585 if (CONSTANT_P (size))
3586 anti_adjust_stack (plus_constant (size, extra));
3587 else if (REG_P (size) && extra == 0)
3588 anti_adjust_stack (size);
3589 else
3590 {
3591 temp = copy_to_mode_reg (Pmode, size);
3592 if (extra != 0)
3593 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3594 temp, 0, OPTAB_LIB_WIDEN);
3595 anti_adjust_stack (temp);
3596 }
3597
3598 #ifndef STACK_GROWS_DOWNWARD
3599 if (0)
3600 #else
3601 if (1)
3602 #endif
3603 {
3604 temp = virtual_outgoing_args_rtx;
3605 if (extra != 0 && below)
3606 temp = plus_constant (temp, extra);
3607 }
3608 else
3609 {
3610 if (CONST_INT_P (size))
3611 temp = plus_constant (virtual_outgoing_args_rtx,
3612 -INTVAL (size) - (below ? 0 : extra));
3613 else if (extra != 0 && !below)
3614 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3615 negate_rtx (Pmode, plus_constant (size, extra)));
3616 else
3617 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3618 negate_rtx (Pmode, size));
3619 }
3620
3621 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3622 }
3623
3624 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
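/* For example (illustrative): for (mem:SI (pre_dec (reg sp))) this
   returns (reg sp), while for a MEM whose address is not an auto-inc,
   or for a non-MEM, it returns NULL.  */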
3625
3626 static rtx
3627 mem_autoinc_base (rtx mem)
3628 {
3629 if (MEM_P (mem))
3630 {
3631 rtx addr = XEXP (mem, 0);
3632 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3633 return XEXP (addr, 0);
3634 }
3635 return NULL;
3636 }
3637
3638 /* A utility routine used here, in reload, and in try_split. The insns
3639 after PREV up to and including LAST are known to adjust the stack,
3640 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3641 placing notes as appropriate. PREV may be NULL, indicating the
3642 entire insn sequence prior to LAST should be scanned.
3643
3644 The set of allowed stack pointer modifications is small:
3645 (1) One or more auto-inc style memory references (aka pushes),
3646 (2) One or more addition/subtraction with the SP as destination,
3647 (3) A single move insn with the SP as destination,
3648 (4) A call_pop insn.
3649
3650 Insns in the sequence that do not modify the SP are ignored.
3651
3652 The return value is the amount of adjustment that can be trivially
3653 verified, via immediate operand or auto-inc. If the adjustment
3654 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
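/* A few illustrative cases (not exhaustive):

     (set (mem:SI (pre_dec (reg sp))) (reg 0))        -> -4 (SImode is 4 bytes)
     (set (reg sp) (plus (reg sp) (const_int -16)))   -> -16
     (set (reg sp) (reg fp))                          -> HOST_WIDE_INT_MIN

   The last case is a stack pointer change whose amount cannot be read
   off the insn itself.  */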
3655
3656 HOST_WIDE_INT
3657 find_args_size_adjust (rtx insn)
3658 {
3659 rtx dest, set, pat;
3660 int i;
3661
3662 pat = PATTERN (insn);
3663 set = NULL;
3664
3665 /* Look for a call_pop pattern. */
3666 if (CALL_P (insn))
3667 {
3668 /* We have to allow non-call_pop patterns for the case
3669 of emit_single_push_insn of a TLS address. */
3670 if (GET_CODE (pat) != PARALLEL)
3671 return 0;
3672
3673 /* All call_pop have a stack pointer adjust in the parallel.
3674 The call itself is always first, and the stack adjust is
3675 usually last, so search from the end. */
3676 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3677 {
3678 set = XVECEXP (pat, 0, i);
3679 if (GET_CODE (set) != SET)
3680 continue;
3681 dest = SET_DEST (set);
3682 if (dest == stack_pointer_rtx)
3683 break;
3684 }
3685 /* We'd better have found the stack pointer adjust. */
3686 if (i == 0)
3687 return 0;
3688 /* Fall through to process the extracted SET and DEST
3689 as if they came from a standalone insn. */
3690 }
3691 else if (GET_CODE (pat) == SET)
3692 set = pat;
3693 else if ((set = single_set (insn)) != NULL)
3694 ;
3695 else if (GET_CODE (pat) == PARALLEL)
3696 {
3697 /* ??? Some older ports use a parallel with a stack adjust
3698 and a store for a PUSH_ROUNDING pattern, rather than a
3699 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3700 /* ??? See h8300 and m68k, pushqi1. */
3701 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3702 {
3703 set = XVECEXP (pat, 0, i);
3704 if (GET_CODE (set) != SET)
3705 continue;
3706 dest = SET_DEST (set);
3707 if (dest == stack_pointer_rtx)
3708 break;
3709
3710 /* We do not expect an auto-inc of the sp in the parallel. */
3711 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3712 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3713 != stack_pointer_rtx);
3714 }
3715 if (i < 0)
3716 return 0;
3717 }
3718 else
3719 return 0;
3720
3721 dest = SET_DEST (set);
3722
3723 /* Look for direct modifications of the stack pointer. */
3724 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3725 {
3726 /* Look for a trivial adjustment, otherwise assume nothing. */
3727 /* Note that the SPU restore_stack_block pattern refers to
3728 the stack pointer in V4SImode. Consider that non-trivial. */
3729 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3730 && GET_CODE (SET_SRC (set)) == PLUS
3731 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3732 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3733 return INTVAL (XEXP (SET_SRC (set), 1));
3734 /* ??? Reload can generate no-op moves, which will be cleaned
3735 up later. Recognize it and continue searching. */
3736 else if (rtx_equal_p (dest, SET_SRC (set)))
3737 return 0;
3738 else
3739 return HOST_WIDE_INT_MIN;
3740 }
3741 else
3742 {
3743 rtx mem, addr;
3744
3745 /* Otherwise only think about autoinc patterns. */
3746 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3747 {
3748 mem = dest;
3749 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3750 != stack_pointer_rtx);
3751 }
3752 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3753 mem = SET_SRC (set);
3754 else
3755 return 0;
3756
3757 addr = XEXP (mem, 0);
3758 switch (GET_CODE (addr))
3759 {
3760 case PRE_INC:
3761 case POST_INC:
3762 return GET_MODE_SIZE (GET_MODE (mem));
3763 case PRE_DEC:
3764 case POST_DEC:
3765 return -GET_MODE_SIZE (GET_MODE (mem));
3766 case PRE_MODIFY:
3767 case POST_MODIFY:
3768 addr = XEXP (addr, 1);
3769 gcc_assert (GET_CODE (addr) == PLUS);
3770 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3771 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3772 return INTVAL (XEXP (addr, 1));
3773 default:
3774 gcc_unreachable ();
3775 }
3776 }
3777 }
3778
3779 int
3780 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3781 {
3782 int args_size = end_args_size;
3783 bool saw_unknown = false;
3784 rtx insn;
3785
3786 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3787 {
3788 HOST_WIDE_INT this_delta;
3789
3790 if (!NONDEBUG_INSN_P (insn))
3791 continue;
3792
3793 this_delta = find_args_size_adjust (insn);
3794 if (this_delta == 0)
3795 continue;
3796
3797 gcc_assert (!saw_unknown);
3798 if (this_delta == HOST_WIDE_INT_MIN)
3799 saw_unknown = true;
3800
3801 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3802 #ifdef STACK_GROWS_DOWNWARD
3803 this_delta = -this_delta;
3804 #endif
3805 args_size -= this_delta;
3806 }
3807
3808 return saw_unknown ? INT_MIN : args_size;
3809 }
3810
3811 #ifdef PUSH_ROUNDING
3812 /* Emit a single push insn. */
3813
3814 static void
3815 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3816 {
3817 rtx dest_addr;
3818 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3819 rtx dest;
3820 enum insn_code icode;
3821
3822 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3823 /* If there is a push pattern, use it. Otherwise fall back to the old way
3824 of handing a MEM that represents the push operation to the move expander. */
3825 icode = optab_handler (push_optab, mode);
3826 if (icode != CODE_FOR_nothing)
3827 {
3828 struct expand_operand ops[1];
3829
3830 create_input_operand (&ops[0], x, mode);
3831 if (maybe_expand_insn (icode, 1, ops))
3832 return;
3833 }
3834 if (GET_MODE_SIZE (mode) == rounded_size)
3835 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3836 /* If we are to pad downward, adjust the stack pointer first and
3837 then store X into the stack location using an offset. This is
3838 because emit_move_insn does not know how to pad; it does not have
3839 access to TYPE. */
3840 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3841 {
3842 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3843 HOST_WIDE_INT offset;
3844
3845 emit_move_insn (stack_pointer_rtx,
3846 expand_binop (Pmode,
3847 #ifdef STACK_GROWS_DOWNWARD
3848 sub_optab,
3849 #else
3850 add_optab,
3851 #endif
3852 stack_pointer_rtx,
3853 GEN_INT (rounded_size),
3854 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3855
3856 offset = (HOST_WIDE_INT) padding_size;
3857 #ifdef STACK_GROWS_DOWNWARD
3858 if (STACK_PUSH_CODE == POST_DEC)
3859 /* We have already decremented the stack pointer, so get the
3860 previous value. */
3861 offset += (HOST_WIDE_INT) rounded_size;
3862 #else
3863 if (STACK_PUSH_CODE == POST_INC)
3864 /* We have already incremented the stack pointer, so get the
3865 previous value. */
3866 offset -= (HOST_WIDE_INT) rounded_size;
3867 #endif
3868 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3869 }
3870 else
3871 {
3872 #ifdef STACK_GROWS_DOWNWARD
3873 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3874 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3875 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3876 #else
3877 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3878 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3879 GEN_INT (rounded_size));
3880 #endif
3881 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3882 }
3883
3884 dest = gen_rtx_MEM (mode, dest_addr);
3885
3886 if (type != 0)
3887 {
3888 set_mem_attributes (dest, type, 1);
3889
3890 if (flag_optimize_sibling_calls)
3891 /* Function incoming arguments may overlap with sibling call
3892 outgoing arguments and we cannot allow reordering of reads
3893 from function arguments with stores to outgoing arguments
3894 of sibling calls. */
3895 set_mem_alias_set (dest, 0);
3896 }
3897 emit_move_insn (dest, x);
3898 }
3899
3900 /* Emit and annotate a single push insn. */
3901
3902 static void
3903 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3904 {
3905 int delta, old_delta = stack_pointer_delta;
3906 rtx prev = get_last_insn ();
3907 rtx last;
3908
3909 emit_single_push_insn_1 (mode, x, type);
3910
3911 last = get_last_insn ();
3912
3913 /* Notice the common case where we emitted exactly one insn. */
3914 if (PREV_INSN (last) == prev)
3915 {
3916 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
3917 return;
3918 }
3919
3920 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
3921 gcc_assert (delta == INT_MIN || delta == old_delta);
3922 }
3923 #endif
3924
3925 /* Generate code to push X onto the stack, assuming it has mode MODE and
3926 type TYPE.
3927 MODE is redundant except when X is a CONST_INT (since they don't
3928 carry mode info).
3929 SIZE is an rtx for the size of data to be copied (in bytes),
3930 needed only if X is BLKmode.
3931
3932 ALIGN (in bits) is maximum alignment we can assume.
3933
3934 If PARTIAL and REG are both nonzero, then copy that many of the first
3935 bytes of X into registers starting with REG, and push the rest of X.
3936 The amount of space pushed is decreased by PARTIAL bytes.
3937 REG must be a hard register in this case.
3938 If REG is zero but PARTIAL is not, take all other actions for an
3939 argument partially in registers, but do not actually load any
3940 registers.
3941
3942 EXTRA is the amount in bytes of extra space to leave next to this arg.
3943 This is ignored if an argument block has already been allocated.
3944
3945 On a machine that lacks real push insns, ARGS_ADDR is the address of
3946 the bottom of the argument block for this call. We use indexing off there
3947 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3948 argument block has not been preallocated.
3949
3950 ARGS_SO_FAR is the size of args previously pushed for this call.
3951
3952 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3953 for arguments passed in registers. If nonzero, it will be the number
3954 of bytes required. */
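/* A hypothetical illustration (names and parameters invented): on a
   target with 4-byte words and 32-bit PARM_BOUNDARY, a 12-byte BLKmode
   argument with PARTIAL == 8 and REG a suitable hard register has only
   its last 4 bytes copied to the stack here; the first 8 bytes are
   loaded into two registers by the move_block_to_reg call at the end
   of this function.  */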
3955
3956 void
3957 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3958 unsigned int align, int partial, rtx reg, int extra,
3959 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3960 rtx alignment_pad)
3961 {
3962 rtx xinner;
3963 enum direction stack_direction
3964 #ifdef STACK_GROWS_DOWNWARD
3965 = downward;
3966 #else
3967 = upward;
3968 #endif
3969
3970 /* Decide where to pad the argument: `downward' for below,
3971 `upward' for above, or `none' for don't pad it.
3972 Default is below for small data on big-endian machines; else above. */
3973 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3974
3975 /* Invert direction if stack is post-decrement.
3976 FIXME: why? */
3977 if (STACK_PUSH_CODE == POST_DEC)
3978 if (where_pad != none)
3979 where_pad = (where_pad == downward ? upward : downward);
3980
3981 xinner = x;
3982
3983 if (mode == BLKmode
3984 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3985 {
3986 /* Copy a block into the stack, entirely or partially. */
3987
3988 rtx temp;
3989 int used;
3990 int offset;
3991 int skip;
3992
3993 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3994 used = partial - offset;
3995
3996 if (mode != BLKmode)
3997 {
3998 /* A value is to be stored in an insufficiently aligned
3999 stack slot; copy via a suitably aligned slot if
4000 necessary. */
4001 size = GEN_INT (GET_MODE_SIZE (mode));
4002 if (!MEM_P (xinner))
4003 {
4004 temp = assign_temp (type, 0, 1, 1);
4005 emit_move_insn (temp, xinner);
4006 xinner = temp;
4007 }
4008 }
4009
4010 gcc_assert (size);
4011
4012 /* USED is now the # of bytes we need not copy to the stack
4013 because registers will take care of them. */
4014
4015 if (partial != 0)
4016 xinner = adjust_address (xinner, BLKmode, used);
4017
4018 /* If the partial register-part of the arg counts in its stack size,
4019 skip the part of stack space corresponding to the registers.
4020 Otherwise, start copying to the beginning of the stack space,
4021 by setting SKIP to 0. */
4022 skip = (reg_parm_stack_space == 0) ? 0 : used;
4023
4024 #ifdef PUSH_ROUNDING
4025 /* Do it with several push insns if that doesn't take lots of insns
4026 and if there is no difficulty with push insns that skip bytes
4027 on the stack for alignment purposes. */
4028 if (args_addr == 0
4029 && PUSH_ARGS
4030 && CONST_INT_P (size)
4031 && skip == 0
4032 && MEM_ALIGN (xinner) >= align
4033 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4034 /* Here we avoid the case of a structure whose weak alignment
4035 forces many pushes of a small amount of data,
4036 and such small pushes do rounding that causes trouble. */
4037 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4038 || align >= BIGGEST_ALIGNMENT
4039 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4040 == (align / BITS_PER_UNIT)))
4041 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4042 {
4043 /* Push padding now if padding above and stack grows down,
4044 or if padding below and stack grows up.
4045 But if space already allocated, this has already been done. */
4046 if (extra && args_addr == 0
4047 && where_pad != none && where_pad != stack_direction)
4048 anti_adjust_stack (GEN_INT (extra));
4049
4050 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4051 }
4052 else
4053 #endif /* PUSH_ROUNDING */
4054 {
4055 rtx target;
4056
4057 /* Otherwise make space on the stack and copy the data
4058 to the address of that space. */
4059
4060 /* Deduct words put into registers from the size we must copy. */
4061 if (partial != 0)
4062 {
4063 if (CONST_INT_P (size))
4064 size = GEN_INT (INTVAL (size) - used);
4065 else
4066 size = expand_binop (GET_MODE (size), sub_optab, size,
4067 GEN_INT (used), NULL_RTX, 0,
4068 OPTAB_LIB_WIDEN);
4069 }
4070
4071 /* Get the address of the stack space.
4072 In this case, we do not deal with EXTRA separately.
4073 A single stack adjust will do. */
4074 if (! args_addr)
4075 {
4076 temp = push_block (size, extra, where_pad == downward);
4077 extra = 0;
4078 }
4079 else if (CONST_INT_P (args_so_far))
4080 temp = memory_address (BLKmode,
4081 plus_constant (args_addr,
4082 skip + INTVAL (args_so_far)));
4083 else
4084 temp = memory_address (BLKmode,
4085 plus_constant (gen_rtx_PLUS (Pmode,
4086 args_addr,
4087 args_so_far),
4088 skip));
4089
4090 if (!ACCUMULATE_OUTGOING_ARGS)
4091 {
4092 /* If the source is referenced relative to the stack pointer,
4093 copy it to another register to stabilize it. We do not need
4094 to do this if we know that we won't be changing sp. */
4095
4096 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4097 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4098 temp = copy_to_reg (temp);
4099 }
4100
4101 target = gen_rtx_MEM (BLKmode, temp);
4102
4103 /* We do *not* set_mem_attributes here, because incoming arguments
4104 may overlap with sibling call outgoing arguments and we cannot
4105 allow reordering of reads from function arguments with stores
4106 to outgoing arguments of sibling calls. We do, however, want
4107 to record the alignment of the stack slot. */
4108 /* ALIGN may well be better aligned than TYPE, e.g. due to
4109 PARM_BOUNDARY. Assume the caller isn't lying. */
4110 set_mem_align (target, align);
4111
4112 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4113 }
4114 }
4115 else if (partial > 0)
4116 {
4117 /* Scalar partly in registers. */
4118
4119 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4120 int i;
4121 int not_stack;
4122 /* # bytes of start of argument
4123 that we must make space for but need not store. */
4124 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4125 int args_offset = INTVAL (args_so_far);
4126 int skip;
4127
4128 /* Push padding now if padding above and stack grows down,
4129 or if padding below and stack grows up.
4130 But if space already allocated, this has already been done. */
4131 if (extra && args_addr == 0
4132 && where_pad != none && where_pad != stack_direction)
4133 anti_adjust_stack (GEN_INT (extra));
4134
4135 /* If we make space by pushing it, we might as well push
4136 the real data. Otherwise, we can leave OFFSET nonzero
4137 and leave the space uninitialized. */
4138 if (args_addr == 0)
4139 offset = 0;
4140
4141 /* Now NOT_STACK gets the number of words that we don't need to
4142 allocate on the stack. Convert OFFSET to words too. */
4143 not_stack = (partial - offset) / UNITS_PER_WORD;
4144 offset /= UNITS_PER_WORD;
4145
4146 /* If the partial register-part of the arg counts in its stack size,
4147 skip the part of stack space corresponding to the registers.
4148 Otherwise, start copying to the beginning of the stack space,
4149 by setting SKIP to 0. */
4150 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4151
4152 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4153 x = validize_mem (force_const_mem (mode, x));
4154
4155 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4156 SUBREGs of such registers are not allowed. */
4157 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4158 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4159 x = copy_to_reg (x);
4160
4161 /* Loop over all the words allocated on the stack for this arg. */
4162 /* We can do it by words, because any scalar bigger than a word
4163 has a size that is a multiple of a word. */
4164 #ifndef PUSH_ARGS_REVERSED
4165 for (i = not_stack; i < size; i++)
4166 #else
4167 for (i = size - 1; i >= not_stack; i--)
4168 #endif
4169 if (i >= not_stack + offset)
4170 emit_push_insn (operand_subword_force (x, i, mode),
4171 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4172 0, args_addr,
4173 GEN_INT (args_offset + ((i - not_stack + skip)
4174 * UNITS_PER_WORD)),
4175 reg_parm_stack_space, alignment_pad);
4176 }
4177 else
4178 {
4179 rtx addr;
4180 rtx dest;
4181
4182 /* Push padding now if padding above and stack grows down,
4183 or if padding below and stack grows up.
4184 But if space already allocated, this has already been done. */
4185 if (extra && args_addr == 0
4186 && where_pad != none && where_pad != stack_direction)
4187 anti_adjust_stack (GEN_INT (extra));
4188
4189 #ifdef PUSH_ROUNDING
4190 if (args_addr == 0 && PUSH_ARGS)
4191 emit_single_push_insn (mode, x, type);
4192 else
4193 #endif
4194 {
4195 if (CONST_INT_P (args_so_far))
4196 addr
4197 = memory_address (mode,
4198 plus_constant (args_addr,
4199 INTVAL (args_so_far)));
4200 else
4201 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4202 args_so_far));
4203 dest = gen_rtx_MEM (mode, addr);
4204
4205 /* We do *not* set_mem_attributes here, because incoming arguments
4206 may overlap with sibling call outgoing arguments and we cannot
4207 allow reordering of reads from function arguments with stores
4208 to outgoing arguments of sibling calls. We do, however, want
4209 to record the alignment of the stack slot. */
4210 /* ALIGN may well be better aligned than TYPE, e.g. due to
4211 PARM_BOUNDARY. Assume the caller isn't lying. */
4212 set_mem_align (dest, align);
4213
4214 emit_move_insn (dest, x);
4215 }
4216 }
4217
4218 /* If part should go in registers, copy that part
4219 into the appropriate registers. Do this now, at the end,
4220 since mem-to-mem copies above may do function calls. */
4221 if (partial > 0 && reg != 0)
4222 {
4223 /* Handle calls that pass values in multiple non-contiguous locations.
4224 The Irix 6 ABI has examples of this. */
4225 if (GET_CODE (reg) == PARALLEL)
4226 emit_group_load (reg, x, type, -1);
4227 else
4228 {
4229 gcc_assert (partial % UNITS_PER_WORD == 0);
4230 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4231 }
4232 }
4233
4234 if (extra && args_addr == 0 && where_pad == stack_direction)
4235 anti_adjust_stack (GEN_INT (extra));
4236
4237 if (alignment_pad && args_addr == 0)
4238 anti_adjust_stack (alignment_pad);
4239 }
4240 \f
4241 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4242 operations. */
4243
4244 static rtx
4245 get_subtarget (rtx x)
4246 {
4247 return (optimize
4248 || x == 0
4249 /* Only registers can be subtargets. */
4250 || !REG_P (x)
4251 /* Don't use hard regs to avoid extending their life. */
4252 || REGNO (x) < FIRST_PSEUDO_REGISTER
4253 ? 0 : x);
4254 }
4255
4256 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4257 FIELD is a bitfield. Returns true if the optimization was successful,
4258 and there's nothing else to do. */
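/* For instance (illustrative): given a source assignment like

       s.flag ^= 1;

   where FLAG is a 1-bit bitfield, and provided the preconditions
   checked at the top of the function hold, the BIT_XOR_EXPR case below
   emits a single XOR of the suitably shifted constant into the word
   containing the field, rather than extracting the field, modifying it
   and storing it back.  */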
4259
4260 static bool
4261 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4262 unsigned HOST_WIDE_INT bitpos,
4263 unsigned HOST_WIDE_INT bitregion_start,
4264 unsigned HOST_WIDE_INT bitregion_end,
4265 enum machine_mode mode1, rtx str_rtx,
4266 tree to, tree src)
4267 {
4268 enum machine_mode str_mode = GET_MODE (str_rtx);
4269 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4270 tree op0, op1;
4271 rtx value, result;
4272 optab binop;
4273 gimple srcstmt;
4274 enum tree_code code;
4275
4276 if (mode1 != VOIDmode
4277 || bitsize >= BITS_PER_WORD
4278 || str_bitsize > BITS_PER_WORD
4279 || TREE_SIDE_EFFECTS (to)
4280 || TREE_THIS_VOLATILE (to))
4281 return false;
4282
4283 STRIP_NOPS (src);
4284 if (TREE_CODE (src) != SSA_NAME)
4285 return false;
4286 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4287 return false;
4288
4289 srcstmt = get_gimple_for_ssa_name (src);
4290 if (!srcstmt
4291 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4292 return false;
4293
4294 code = gimple_assign_rhs_code (srcstmt);
4295
4296 op0 = gimple_assign_rhs1 (srcstmt);
4297
4298 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4299 to find its initialization. Hopefully the initialization will
4300 be from a bitfield load. */
4301 if (TREE_CODE (op0) == SSA_NAME)
4302 {
4303 gimple op0stmt = get_gimple_for_ssa_name (op0);
4304
4305 /* We want to eventually have OP0 be the same as TO, which
4306 should be a bitfield. */
4307 if (!op0stmt
4308 || !is_gimple_assign (op0stmt)
4309 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4310 return false;
4311 op0 = gimple_assign_rhs1 (op0stmt);
4312 }
4313
4314 op1 = gimple_assign_rhs2 (srcstmt);
4315
4316 if (!operand_equal_p (to, op0, 0))
4317 return false;
4318
4319 if (MEM_P (str_rtx))
4320 {
4321 unsigned HOST_WIDE_INT offset1;
4322
4323 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4324 str_mode = word_mode;
4325 str_mode = get_best_mode (bitsize, bitpos,
4326 bitregion_start, bitregion_end,
4327 MEM_ALIGN (str_rtx), str_mode, 0);
4328 if (str_mode == VOIDmode)
4329 return false;
4330 str_bitsize = GET_MODE_BITSIZE (str_mode);
4331
4332 offset1 = bitpos;
4333 bitpos %= str_bitsize;
4334 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4335 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4336 }
4337 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4338 return false;
4339
4340 /* If the bit field covers the whole REG/MEM, store_field
4341 will likely generate better code. */
4342 if (bitsize >= str_bitsize)
4343 return false;
4344
4345 /* We can't handle fields split across multiple entities. */
4346 if (bitpos + bitsize > str_bitsize)
4347 return false;
4348
4349 if (BYTES_BIG_ENDIAN)
4350 bitpos = str_bitsize - bitpos - bitsize;
4351
4352 switch (code)
4353 {
4354 case PLUS_EXPR:
4355 case MINUS_EXPR:
4356 /* For now, just optimize the case of the topmost bitfield
4357 where we don't need to do any masking and also
4358 1-bit bitfields where xor can be used.
4359 We might win by one instruction for the other bitfields
4360 too if insv/extv instructions aren't used, so that
4361 can be added later. */
4362 if (bitpos + bitsize != str_bitsize
4363 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4364 break;
4365
4366 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4367 value = convert_modes (str_mode,
4368 TYPE_MODE (TREE_TYPE (op1)), value,
4369 TYPE_UNSIGNED (TREE_TYPE (op1)));
4370
4371 /* We may be accessing data outside the field, which means
4372 we can alias adjacent data. */
4373 if (MEM_P (str_rtx))
4374 {
4375 str_rtx = shallow_copy_rtx (str_rtx);
4376 set_mem_alias_set (str_rtx, 0);
4377 set_mem_expr (str_rtx, 0);
4378 }
4379
4380 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4381 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4382 {
4383 value = expand_and (str_mode, value, const1_rtx, NULL);
4384 binop = xor_optab;
4385 }
4386 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4387 bitpos, NULL_RTX, 1);
4388 result = expand_binop (str_mode, binop, str_rtx,
4389 value, str_rtx, 1, OPTAB_WIDEN);
4390 if (result != str_rtx)
4391 emit_move_insn (str_rtx, result);
4392 return true;
4393
4394 case BIT_IOR_EXPR:
4395 case BIT_XOR_EXPR:
4396 if (TREE_CODE (op1) != INTEGER_CST)
4397 break;
4398 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4399 value = convert_modes (GET_MODE (str_rtx),
4400 TYPE_MODE (TREE_TYPE (op1)), value,
4401 TYPE_UNSIGNED (TREE_TYPE (op1)));
4402
4403 /* We may be accessing data outside the field, which means
4404 we can alias adjacent data. */
4405 if (MEM_P (str_rtx))
4406 {
4407 str_rtx = shallow_copy_rtx (str_rtx);
4408 set_mem_alias_set (str_rtx, 0);
4409 set_mem_expr (str_rtx, 0);
4410 }
4411
4412 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4413 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4414 {
4415 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4416 - 1);
4417 value = expand_and (GET_MODE (str_rtx), value, mask,
4418 NULL_RTX);
4419 }
4420 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4421 bitpos, NULL_RTX, 1);
4422 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4423 value, str_rtx, 1, OPTAB_WIDEN);
4424 if (result != str_rtx)
4425 emit_move_insn (str_rtx, result);
4426 return true;
4427
4428 default:
4429 break;
4430 }
4431
4432 return false;
4433 }
4434
4435 /* In the C++ memory model, consecutive bit fields in a structure are
4436 considered one memory location.
4437
4438 Given a COMPONENT_REF, this function returns the bit range of
4439 consecutive bits to which this COMPONENT_REF belongs. The
4440 values are returned in *BITSTART and *BITEND. If either the C++
4441 memory model is not activated, or this memory access is not thread
4442 visible, 0 is returned in *BITSTART and *BITEND.
4443
4444 EXP is the COMPONENT_REF.
4445 INNERDECL is the actual object being referenced.
4446 BITPOS is the position in bits where the bit starts within the structure.
4447 BITSIZE is size in bits of the field being referenced in EXP.
4448
4449 For example, while storing into FOO.A here...
4450
4451 struct {
4452 BIT 0:
4453 unsigned int a : 4;
4454 unsigned int b : 1;
4455 BIT 8:
4456 unsigned char c;
4457 unsigned int d : 6;
4458 } foo;
4459
4460 ...we are not allowed to store past <b>, so for the layout above we
4461 return a range of 0..7 (because no one cares if we store into the
4462 padding). */
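/* Similarly (illustrative), when storing into FOO.D, the last bitfield
   of the structure, the returned range runs from D's bit position to
   the end of the structure, including any trailing padding, because
   there is no later non-bitfield member to protect.  */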
4463
4464 static void
4465 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4466 unsigned HOST_WIDE_INT *bitend,
4467 tree exp, tree innerdecl,
4468 HOST_WIDE_INT bitpos, HOST_WIDE_INT bitsize)
4469 {
4470 tree field, record_type, fld;
4471 bool found_field = false;
4472 bool prev_field_is_bitfield;
4473
4474 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4475
4476 /* If other threads can't see this value, no need to restrict stores. */
4477 if (ALLOW_STORE_DATA_RACES
4478 || ((TREE_CODE (innerdecl) == MEM_REF
4479 || TREE_CODE (innerdecl) == TARGET_MEM_REF)
4480 && !ptr_deref_may_alias_global_p (TREE_OPERAND (innerdecl, 0)))
4481 || (DECL_P (innerdecl)
4482 && ((TREE_CODE (innerdecl) == VAR_DECL
4483 && DECL_THREAD_LOCAL_P (innerdecl))
4484 || !TREE_STATIC (innerdecl))))
4485 {
4486 *bitstart = *bitend = 0;
4487 return;
4488 }
4489
4490 /* Bit field we're storing into. */
4491 field = TREE_OPERAND (exp, 1);
4492 record_type = DECL_FIELD_CONTEXT (field);
4493
4494 /* Count the contiguous bitfields for the memory location that
4495 contains FIELD. */
4496 *bitstart = 0;
4497 prev_field_is_bitfield = true;
4498 for (fld = TYPE_FIELDS (record_type); fld; fld = DECL_CHAIN (fld))
4499 {
4500 tree t, offset;
4501 enum machine_mode mode;
4502 int unsignedp, volatilep;
4503
4504 if (TREE_CODE (fld) != FIELD_DECL)
4505 continue;
4506
4507 t = build3 (COMPONENT_REF, TREE_TYPE (exp),
4508 unshare_expr (TREE_OPERAND (exp, 0)),
4509 fld, NULL_TREE);
4510 get_inner_reference (t, &bitsize, &bitpos, &offset,
4511 &mode, &unsignedp, &volatilep, true);
4512
4513 if (field == fld)
4514 found_field = true;
4515
4516 if (DECL_BIT_FIELD_TYPE (fld) && bitsize > 0)
4517 {
4518 if (prev_field_is_bitfield == false)
4519 {
4520 *bitstart = bitpos;
4521 prev_field_is_bitfield = true;
4522 }
4523 }
4524 else
4525 {
4526 prev_field_is_bitfield = false;
4527 if (found_field)
4528 break;
4529 }
4530 }
4531 gcc_assert (found_field);
4532
4533 if (fld)
4534 {
4535 /* We found the end of the bit field sequence. Include the
4536 padding up to the next field and be done. */
4537 *bitend = bitpos - 1;
4538 }
4539 else
4540 {
4541 /* If this is the last element in the structure, include the padding
4542 at the end of the structure. */
4543 *bitend = TREE_INT_CST_LOW (TYPE_SIZE (record_type)) - 1;
4544 }
4545 }
4546
4547 /* Return the alignment of the object EXP, also considering its type
4548 when we do not know of explicit misalignment.
4549 ??? Note that, in the general case, the type of an expression is not kept
4550 consistent with misalignment information by the front-end, for
4551 example when taking the address of a member of a packed structure.
4552 However, in most of the cases, expressions have the alignment of
4553 their type, so we optimistically fall back to the alignment of the
4554 type when we cannot compute a misalignment. */
4555
4556 static unsigned int
4557 get_object_or_type_alignment (tree exp)
4558 {
4559 unsigned HOST_WIDE_INT misalign;
4560 unsigned int align = get_object_alignment_1 (exp, &misalign);
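  /* The lowest set bit of the misalignment is the largest power of two
     known to divide the object's offset, so it bounds the alignment we
     can assume.  */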
4561 if (misalign != 0)
4562 align = (misalign & -misalign);
4563 else
4564 align = MAX (TYPE_ALIGN (TREE_TYPE (exp)), align);
4565 return align;
4566 }
4567
4568 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4569 is true, try generating a nontemporal store. */
4570
4571 void
4572 expand_assignment (tree to, tree from, bool nontemporal)
4573 {
4574 rtx to_rtx = 0;
4575 rtx result;
4576 enum machine_mode mode;
4577 unsigned int align;
4578 enum insn_code icode;
4579
4580 /* Don't crash if the lhs of the assignment was erroneous. */
4581 if (TREE_CODE (to) == ERROR_MARK)
4582 {
4583 expand_normal (from);
4584 return;
4585 }
4586
4587 /* Optimize away no-op moves without side-effects. */
4588 if (operand_equal_p (to, from, 0))
4589 return;
4590
4591 mode = TYPE_MODE (TREE_TYPE (to));
4592 if ((TREE_CODE (to) == MEM_REF
4593 || TREE_CODE (to) == TARGET_MEM_REF)
4594 && mode != BLKmode
4595 && ((align = get_object_or_type_alignment (to))
4596 < GET_MODE_ALIGNMENT (mode))
4597 && ((icode = optab_handler (movmisalign_optab, mode))
4598 != CODE_FOR_nothing))
4599 {
4600 struct expand_operand ops[2];
4601 enum machine_mode address_mode;
4602 rtx reg, op0, mem;
4603
4604 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4605 reg = force_not_mem (reg);
4606
4607 if (TREE_CODE (to) == MEM_REF)
4608 {
4609 addr_space_t as
4610 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 1))));
4611 tree base = TREE_OPERAND (to, 0);
4612 address_mode = targetm.addr_space.address_mode (as);
4613 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4614 op0 = convert_memory_address_addr_space (address_mode, op0, as);
4615 if (!integer_zerop (TREE_OPERAND (to, 1)))
4616 {
4617 rtx off
4618 = immed_double_int_const (mem_ref_offset (to), address_mode);
4619 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
4620 }
4621 op0 = memory_address_addr_space (mode, op0, as);
4622 mem = gen_rtx_MEM (mode, op0);
4623 set_mem_attributes (mem, to, 0);
4624 set_mem_addr_space (mem, as);
4625 }
4626 else if (TREE_CODE (to) == TARGET_MEM_REF)
4627 {
4628 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (to));
4629 struct mem_address addr;
4630
4631 get_address_description (to, &addr);
4632 op0 = addr_for_mem_ref (&addr, as, true);
4633 op0 = memory_address_addr_space (mode, op0, as);
4634 mem = gen_rtx_MEM (mode, op0);
4635 set_mem_attributes (mem, to, 0);
4636 set_mem_addr_space (mem, as);
4637 }
4638 else
4639 gcc_unreachable ();
4640 if (TREE_THIS_VOLATILE (to))
4641 MEM_VOLATILE_P (mem) = 1;
4642
4643 create_fixed_operand (&ops[0], mem);
4644 create_input_operand (&ops[1], reg, mode);
4645 /* The movmisalign<mode> pattern cannot fail, else the assignment would
4646 silently be omitted. */
4647 expand_insn (icode, 2, ops);
4648 return;
4649 }
4650
4651 /* Assignment of a structure component needs special treatment
4652 if the structure component's rtx is not simply a MEM.
4653 Assignment of an array element at a constant index, and assignment of
4654 an array element in an unaligned packed structure field, has the same
4655 problem. */
4656 if (handled_component_p (to)
4657 /* ??? We only need to handle MEM_REF here if the access is not
4658 a full access of the base object. */
4659 || (TREE_CODE (to) == MEM_REF
4660 && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
4661 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4662 {
4663 enum machine_mode mode1;
4664 HOST_WIDE_INT bitsize, bitpos;
4665 unsigned HOST_WIDE_INT bitregion_start = 0;
4666 unsigned HOST_WIDE_INT bitregion_end = 0;
4667 tree offset;
4668 int unsignedp;
4669 int volatilep = 0;
4670 tree tem;
4671
4672 push_temp_slots ();
4673 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4674 &unsignedp, &volatilep, true);
4675
4676 if (TREE_CODE (to) == COMPONENT_REF
4677 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4678 get_bit_range (&bitregion_start, &bitregion_end,
4679 to, tem, bitpos, bitsize);
4680
4681 /* If we are going to use store_bit_field and extract_bit_field,
4682 make sure to_rtx will be safe for multiple use. */
4683
4684 to_rtx = expand_normal (tem);
4685
4686 /* If the bitfield is volatile, we want to access it in the
4687 field's mode, not the computed mode.
4688 If a MEM has VOIDmode (external with incomplete type),
4689 use BLKmode for it instead. */
4690 if (MEM_P (to_rtx))
4691 {
4692 if (volatilep && flag_strict_volatile_bitfields > 0)
4693 to_rtx = adjust_address (to_rtx, mode1, 0);
4694 else if (GET_MODE (to_rtx) == VOIDmode)
4695 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4696 }
4697
4698 if (offset != 0)
4699 {
4700 enum machine_mode address_mode;
4701 rtx offset_rtx;
4702
4703 if (!MEM_P (to_rtx))
4704 {
4705 /* We can get constant negative offsets into arrays with broken
4706 user code. Translate this to a trap instead of ICEing. */
4707 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4708 expand_builtin_trap ();
4709 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4710 }
4711
4712 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4713 address_mode
4714 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4715 if (GET_MODE (offset_rtx) != address_mode)
4716 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4717
4718 /* A constant address in TO_RTX can have VOIDmode; we must not try
4719 to call force_reg in that case. */
4720 if (MEM_P (to_rtx)
4721 && GET_MODE (to_rtx) == BLKmode
4722 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4723 && bitsize > 0
4724 && (bitpos % bitsize) == 0
4725 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4726 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4727 {
4728 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4729 bitpos = 0;
4730 }
4731
4732 to_rtx = offset_address (to_rtx, offset_rtx,
4733 highest_pow2_factor_for_target (to,
4734 offset));
4735 }
4736
4737 /* No action is needed if the target is not a memory and the field
4738 lies completely outside that target. This can occur if the source
4739 code contains an out-of-bounds access to a small array. */
4740 if (!MEM_P (to_rtx)
4741 && GET_MODE (to_rtx) != BLKmode
4742 && (unsigned HOST_WIDE_INT) bitpos
4743 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4744 {
4745 expand_normal (from);
4746 result = NULL;
4747 }
4748 /* Handle expand_expr of a complex value returning a CONCAT. */
4749 else if (GET_CODE (to_rtx) == CONCAT)
4750 {
4751 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4752 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4753 && bitpos == 0
4754 && bitsize == mode_bitsize)
4755 result = store_expr (from, to_rtx, false, nontemporal);
4756 else if (bitsize == mode_bitsize / 2
4757 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4758 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4759 nontemporal);
4760 else if (bitpos + bitsize <= mode_bitsize / 2)
4761 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4762 bitregion_start, bitregion_end,
4763 mode1, from, TREE_TYPE (tem),
4764 get_alias_set (to), nontemporal);
4765 else if (bitpos >= mode_bitsize / 2)
4766 result = store_field (XEXP (to_rtx, 1), bitsize,
4767 bitpos - mode_bitsize / 2,
4768 bitregion_start, bitregion_end,
4769 mode1, from,
4770 TREE_TYPE (tem), get_alias_set (to),
4771 nontemporal);
4772 else if (bitpos == 0 && bitsize == mode_bitsize)
4773 {
4774 rtx from_rtx;
4775 result = expand_normal (from);
4776 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4777 TYPE_MODE (TREE_TYPE (from)), 0);
4778 emit_move_insn (XEXP (to_rtx, 0),
4779 read_complex_part (from_rtx, false));
4780 emit_move_insn (XEXP (to_rtx, 1),
4781 read_complex_part (from_rtx, true));
4782 }
4783 else
4784 {
4785 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4786 GET_MODE_SIZE (GET_MODE (to_rtx)),
4787 0);
4788 write_complex_part (temp, XEXP (to_rtx, 0), false);
4789 write_complex_part (temp, XEXP (to_rtx, 1), true);
4790 result = store_field (temp, bitsize, bitpos,
4791 bitregion_start, bitregion_end,
4792 mode1, from,
4793 TREE_TYPE (tem), get_alias_set (to),
4794 nontemporal);
4795 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4796 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4797 }
4798 }
4799 else
4800 {
4801 if (MEM_P (to_rtx))
4802 {
4803 /* If the field is at offset zero, we could have been given the
4804 DECL_RTX of the parent struct. Don't munge it. */
4805 to_rtx = shallow_copy_rtx (to_rtx);
4806
4807 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4808
4809 /* Deal with volatile and readonly fields. The former is only
4810 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4811 if (volatilep)
4812 MEM_VOLATILE_P (to_rtx) = 1;
4813 if (component_uses_parent_alias_set (to))
4814 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4815 }
4816
4817 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4818 bitregion_start, bitregion_end,
4819 mode1,
4820 to_rtx, to, from))
4821 result = NULL;
4822 else
4823 result = store_field (to_rtx, bitsize, bitpos,
4824 bitregion_start, bitregion_end,
4825 mode1, from,
4826 TREE_TYPE (tem), get_alias_set (to),
4827 nontemporal);
4828 }
4829
4830 if (result)
4831 preserve_temp_slots (result);
4832 free_temp_slots ();
4833 pop_temp_slots ();
4834 return;
4835 }
4836
4837 /* If the rhs is a function call and its value is not an aggregate,
4838 call the function before we start to compute the lhs.
4839 This is needed for correct code for cases such as
4840 val = setjmp (buf) on machines where reference to val
4841 requires loading up part of an address in a separate insn.
4842
4843 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4844 since it might be a promoted variable where the zero- or sign-extension
4845 needs to be done. Handling this in the normal way is safe because no
4846 computation is done before the call. The same is true for SSA names. */
4847 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4848 && COMPLETE_TYPE_P (TREE_TYPE (from))
4849 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4850 && ! (((TREE_CODE (to) == VAR_DECL
4851 || TREE_CODE (to) == PARM_DECL
4852 || TREE_CODE (to) == RESULT_DECL)
4853 && REG_P (DECL_RTL (to)))
4854 || TREE_CODE (to) == SSA_NAME))
4855 {
4856 rtx value;
4857
4858 push_temp_slots ();
4859 value = expand_normal (from);
4860 if (to_rtx == 0)
4861 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4862
4863 /* Handle calls that return values in multiple non-contiguous locations.
4864 The Irix 6 ABI has examples of this. */
4865 if (GET_CODE (to_rtx) == PARALLEL)
4866 emit_group_load (to_rtx, value, TREE_TYPE (from),
4867 int_size_in_bytes (TREE_TYPE (from)));
4868 else if (GET_MODE (to_rtx) == BLKmode)
4869 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4870 else
4871 {
4872 if (POINTER_TYPE_P (TREE_TYPE (to)))
4873 value = convert_memory_address_addr_space
4874 (GET_MODE (to_rtx), value,
4875 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4876
4877 emit_move_insn (to_rtx, value);
4878 }
4879 preserve_temp_slots (to_rtx);
4880 free_temp_slots ();
4881 pop_temp_slots ();
4882 return;
4883 }
4884
4885 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4886 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4887
4888 if (to_rtx == 0)
4889 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4890
4891 /* Don't move directly into a return register. */
4892 if (TREE_CODE (to) == RESULT_DECL
4893 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4894 {
4895 rtx temp;
4896
4897 push_temp_slots ();
4898 if (REG_P (to_rtx) && TYPE_MODE (TREE_TYPE (from)) == BLKmode)
4899 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
4900 else
4901 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4902
4903 if (GET_CODE (to_rtx) == PARALLEL)
4904 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4905 int_size_in_bytes (TREE_TYPE (from)));
4906 else if (temp)
4907 emit_move_insn (to_rtx, temp);
4908
4909 preserve_temp_slots (to_rtx);
4910 free_temp_slots ();
4911 pop_temp_slots ();
4912 return;
4913 }
4914
4915 /* In case we are returning the contents of an object which overlaps
4916 the place the value is being stored, use a safe function when copying
4917 a value through a pointer into a structure value return block. */
4918 if (TREE_CODE (to) == RESULT_DECL
4919 && TREE_CODE (from) == INDIRECT_REF
4920 && ADDR_SPACE_GENERIC_P
4921 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4922 && refs_may_alias_p (to, from)
4923 && cfun->returns_struct
4924 && !cfun->returns_pcc_struct)
4925 {
4926 rtx from_rtx, size;
4927
4928 push_temp_slots ();
4929 size = expr_size (from);
4930 from_rtx = expand_normal (from);
4931
4932 emit_library_call (memmove_libfunc, LCT_NORMAL,
4933 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4934 XEXP (from_rtx, 0), Pmode,
4935 convert_to_mode (TYPE_MODE (sizetype),
4936 size, TYPE_UNSIGNED (sizetype)),
4937 TYPE_MODE (sizetype));
4938
4939 preserve_temp_slots (to_rtx);
4940 free_temp_slots ();
4941 pop_temp_slots ();
4942 return;
4943 }
4944
4945 /* Compute FROM and store the value in the rtx we got. */
4946
4947 push_temp_slots ();
4948 result = store_expr (from, to_rtx, 0, nontemporal);
4949 preserve_temp_slots (result);
4950 free_temp_slots ();
4951 pop_temp_slots ();
4952 return;
4953 }
4954
4955 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4956 succeeded, false otherwise. */
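/* Illustrative note: this relies on the target providing a
   "storent<mode>" pattern (x86, for instance, maps it to its
   non-temporal move instructions).  If the optab has no handler we
   return false and store_expr simply falls back to an ordinary move.  */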
4957
4958 bool
4959 emit_storent_insn (rtx to, rtx from)
4960 {
4961 struct expand_operand ops[2];
4962 enum machine_mode mode = GET_MODE (to);
4963 enum insn_code code = optab_handler (storent_optab, mode);
4964
4965 if (code == CODE_FOR_nothing)
4966 return false;
4967
4968 create_fixed_operand (&ops[0], to);
4969 create_input_operand (&ops[1], from, mode);
4970 return maybe_expand_insn (code, 2, ops);
4971 }
4972
4973 /* Generate code for computing expression EXP,
4974 and storing the value into TARGET.
4975
4976 If the mode is BLKmode then we may return TARGET itself.
4977 It turns out that in BLKmode it doesn't cause a problem,
4978 because C has no operators that could combine two different
4979 assignments into the same BLKmode object with different values
4980 with no sequence point. Will other languages need this to
4981 be more thorough?
4982
4983 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4984 stack, and block moves may need to be treated specially.
4985
4986 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4987
4988 rtx
4989 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4990 {
4991 rtx temp;
4992 rtx alt_rtl = NULL_RTX;
4993 location_t loc = EXPR_LOCATION (exp);
4994
4995 if (VOID_TYPE_P (TREE_TYPE (exp)))
4996 {
4997 /* C++ can generate ?: expressions with a throw expression in one
4998 branch and an rvalue in the other. Here, we resolve attempts to
4999 store the throw expression's nonexistent result. */
5000 gcc_assert (!call_param_p);
5001 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5002 return NULL_RTX;
5003 }
5004 if (TREE_CODE (exp) == COMPOUND_EXPR)
5005 {
5006 /* Perform first part of compound expression, then assign from second
5007 part. */
5008 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5009 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5010 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5011 nontemporal);
5012 }
5013 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5014 {
5015 /* For a conditional expression, get a safe form of the target. Then
5016 test the condition, doing the appropriate assignment on either
5017 side. This avoids the creation of unnecessary temporaries.
5018 For non-BLKmode, it is more efficient not to do this. */
5019
5020 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5021
5022 do_pending_stack_adjust ();
5023 NO_DEFER_POP;
5024 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5025 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5026 nontemporal);
5027 emit_jump_insn (gen_jump (lab2));
5028 emit_barrier ();
5029 emit_label (lab1);
5030 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5031 nontemporal);
5032 emit_label (lab2);
5033 OK_DEFER_POP;
5034
5035 return NULL_RTX;
5036 }
5037 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5038 /* If this is a scalar in a register that is stored in a wider mode
5039 than the declared mode, compute the result into its declared mode
5040 and then convert to the wider mode. Our value is the computed
5041 expression. */
5042 {
5043 rtx inner_target = 0;
5044
5045 /* We can do the conversion inside EXP, which will often result
5046 in some optimizations. Do the conversion in two steps: first
5047 change the signedness, if needed, then the extend. But don't
5048 do this if the type of EXP is a subtype of something else
5049 since then the conversion might involve more than just
5050 converting modes. */
5051 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5052 && TREE_TYPE (TREE_TYPE (exp)) == 0
5053 && GET_MODE_PRECISION (GET_MODE (target))
5054 == TYPE_PRECISION (TREE_TYPE (exp)))
5055 {
5056 if (TYPE_UNSIGNED (TREE_TYPE (exp))
5057 != SUBREG_PROMOTED_UNSIGNED_P (target))
5058 {
5059 /* Some types, e.g. Fortran's logical*4, won't have a signed
5060 version, so use the mode instead. */
5061 tree ntype
5062 = (signed_or_unsigned_type_for
5063 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5064 if (ntype == NULL)
5065 ntype = lang_hooks.types.type_for_mode
5066 (TYPE_MODE (TREE_TYPE (exp)),
5067 SUBREG_PROMOTED_UNSIGNED_P (target));
5068
5069 exp = fold_convert_loc (loc, ntype, exp);
5070 }
5071
5072 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5073 (GET_MODE (SUBREG_REG (target)),
5074 SUBREG_PROMOTED_UNSIGNED_P (target)),
5075 exp);
5076
5077 inner_target = SUBREG_REG (target);
5078 }
5079
5080 temp = expand_expr (exp, inner_target, VOIDmode,
5081 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5082
5083 /* If TEMP is a VOIDmode constant, use convert_modes to make
5084 sure that we properly convert it. */
5085 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5086 {
5087 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5088 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5089 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5090 GET_MODE (target), temp,
5091 SUBREG_PROMOTED_UNSIGNED_P (target));
5092 }
5093
5094 convert_move (SUBREG_REG (target), temp,
5095 SUBREG_PROMOTED_UNSIGNED_P (target));
5096
5097 return NULL_RTX;
5098 }
5099 else if ((TREE_CODE (exp) == STRING_CST
5100 || (TREE_CODE (exp) == MEM_REF
5101 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5102 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5103 == STRING_CST
5104 && integer_zerop (TREE_OPERAND (exp, 1))))
5105 && !nontemporal && !call_param_p
5106 && MEM_P (target))
5107 {
5108 /* Optimize initialization of an array with a STRING_CST. */
5109 HOST_WIDE_INT exp_len, str_copy_len;
5110 rtx dest_mem;
5111 tree str = TREE_CODE (exp) == STRING_CST
5112 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5113
5114 exp_len = int_expr_size (exp);
5115 if (exp_len <= 0)
5116 goto normal_expr;
5117
5118 if (TREE_STRING_LENGTH (str) <= 0)
5119 goto normal_expr;
5120
5121 str_copy_len = strlen (TREE_STRING_POINTER (str));
5122 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5123 goto normal_expr;
5124
5125 str_copy_len = TREE_STRING_LENGTH (str);
5126 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5127 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5128 {
5129 str_copy_len += STORE_MAX_PIECES - 1;
5130 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5131 }
5132 str_copy_len = MIN (str_copy_len, exp_len);
5133 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5134 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5135 MEM_ALIGN (target), false))
5136 goto normal_expr;
5137
5138 dest_mem = target;
5139
5140 dest_mem = store_by_pieces (dest_mem,
5141 str_copy_len, builtin_strncpy_read_str,
5142 CONST_CAST (char *,
5143 TREE_STRING_POINTER (str)),
5144 MEM_ALIGN (target), false,
5145 exp_len > str_copy_len ? 1 : 0);
5146 if (exp_len > str_copy_len)
5147 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5148 GEN_INT (exp_len - str_copy_len),
5149 BLOCK_OP_NORMAL);
5150 return NULL_RTX;
5151 }
5152 else
5153 {
5154 rtx tmp_target;
5155
5156 normal_expr:
5157 /* If we want to use a nontemporal store, force the value into a
5158 register first. */
5159 tmp_target = nontemporal ? NULL_RTX : target;
5160 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5161 (call_param_p
5162 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5163 &alt_rtl);
5164 }
5165
5166 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5167 the same as that of TARGET, adjust the constant. This is needed, for
5168 example, in case it is a CONST_DOUBLE and we want only a word-sized
5169 value. */
5170 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5171 && TREE_CODE (exp) != ERROR_MARK
5172 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5173 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5174 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5175
5176 /* If value was not generated in the target, store it there.
5177 Convert the value to TARGET's type first if necessary and emit the
5178 pending incrementations that have been queued when expanding EXP.
5179 Note that we cannot emit the whole queue blindly because this will
5180 effectively disable the POST_INC optimization later.
5181
5182 If TEMP and TARGET compare equal according to rtx_equal_p, but
5183 one or both of them are volatile memory refs, we have to distinguish
5184 two cases:
5185 - expand_expr has used TARGET. In this case, we must not generate
5186 another copy. This can be detected by TARGET being equal according
5187 to == .
5188 - expand_expr has not used TARGET - that means that the source just
5189 happens to have the same RTX form. Since temp will have been created
5190 by expand_expr, it will compare unequal according to == .
5191 We must generate a copy in this case, to reach the correct number
5192 of volatile memory references. */
5193
5194 if ((! rtx_equal_p (temp, target)
5195 || (temp != target && (side_effects_p (temp)
5196 || side_effects_p (target))))
5197 && TREE_CODE (exp) != ERROR_MARK
5198 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5199 but TARGET is not a valid memory reference, TEMP will differ
5200 from TARGET although it is really the same location. */
5201 && !(alt_rtl
5202 && rtx_equal_p (alt_rtl, target)
5203 && !side_effects_p (alt_rtl)
5204 && !side_effects_p (target))
5205 /* If there's nothing to copy, don't bother. Don't call
5206 expr_size unless necessary, because the expr_size hook of some
5207 front ends (C++) must not be given objects that are not
5208 supposed to be bit-copied or bit-initialized. */
5209 && expr_size (exp) != const0_rtx)
5210 {
5211 if (GET_MODE (temp) != GET_MODE (target)
5212 && GET_MODE (temp) != VOIDmode)
5213 {
5214 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5215 if (GET_MODE (target) == BLKmode
5216 && GET_MODE (temp) == BLKmode)
5217 emit_block_move (target, temp, expr_size (exp),
5218 (call_param_p
5219 ? BLOCK_OP_CALL_PARM
5220 : BLOCK_OP_NORMAL));
5221 else if (GET_MODE (target) == BLKmode)
5222 store_bit_field (target, INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5223 0, 0, 0, GET_MODE (temp), temp);
5224 else
5225 convert_move (target, temp, unsignedp);
5226 }
5227
5228 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5229 {
5230 /* Handle copying a string constant into an array. The string
5231 constant may be shorter than the array. So copy just the string's
5232 actual length, and clear the rest. First get the size of the data
5233 type of the string, which is actually the size of the target. */
5234 rtx size = expr_size (exp);
5235
5236 if (CONST_INT_P (size)
5237 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5238 emit_block_move (target, temp, size,
5239 (call_param_p
5240 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5241 else
5242 {
5243 enum machine_mode pointer_mode
5244 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5245 enum machine_mode address_mode
5246 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
5247
5248 /* Compute the size of the data to copy from the string. */
5249 tree copy_size
5250 = size_binop_loc (loc, MIN_EXPR,
5251 make_tree (sizetype, size),
5252 size_int (TREE_STRING_LENGTH (exp)));
5253 rtx copy_size_rtx
5254 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5255 (call_param_p
5256 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5257 rtx label = 0;
5258
5259 /* Copy that much. */
5260 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5261 TYPE_UNSIGNED (sizetype));
5262 emit_block_move (target, temp, copy_size_rtx,
5263 (call_param_p
5264 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5265
5266 /* Figure out how much is left in TARGET that we have to clear.
5267 Do all calculations in pointer_mode. */
5268 if (CONST_INT_P (copy_size_rtx))
5269 {
5270 size = plus_constant (size, -INTVAL (copy_size_rtx));
5271 target = adjust_address (target, BLKmode,
5272 INTVAL (copy_size_rtx));
5273 }
5274 else
5275 {
5276 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5277 copy_size_rtx, NULL_RTX, 0,
5278 OPTAB_LIB_WIDEN);
5279
5280 if (GET_MODE (copy_size_rtx) != address_mode)
5281 copy_size_rtx = convert_to_mode (address_mode,
5282 copy_size_rtx,
5283 TYPE_UNSIGNED (sizetype));
5284
5285 target = offset_address (target, copy_size_rtx,
5286 highest_pow2_factor (copy_size));
5287 label = gen_label_rtx ();
5288 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5289 GET_MODE (size), 0, label);
5290 }
5291
5292 if (size != const0_rtx)
5293 clear_storage (target, size, BLOCK_OP_NORMAL);
5294
5295 if (label)
5296 emit_label (label);
5297 }
5298 }
5299 /* Handle calls that return values in multiple non-contiguous locations.
5300 The Irix 6 ABI has examples of this. */
5301 else if (GET_CODE (target) == PARALLEL)
5302 emit_group_load (target, temp, TREE_TYPE (exp),
5303 int_size_in_bytes (TREE_TYPE (exp)));
5304 else if (GET_MODE (temp) == BLKmode)
5305 emit_block_move (target, temp, expr_size (exp),
5306 (call_param_p
5307 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5308 else if (nontemporal
5309 && emit_storent_insn (target, temp))
5310 /* If we managed to emit a nontemporal store, there is nothing else to
5311 do. */
5312 ;
5313 else
5314 {
5315 temp = force_operand (temp, target);
5316 if (temp != target)
5317 emit_move_insn (target, temp);
5318 }
5319 }
5320
5321 return NULL_RTX;
5322 }
5323 \f
5324 /* Return true if field F of structure TYPE is a flexible array. */
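/* For illustration, a typical C99 flexible array member that this
   predicate accepts (hypothetical type):

     struct packet
     {
       int len;
       char payload[];
     };

   PAYLOAD is the last field, its type is an ARRAY_TYPE whose domain has
   a zero lower bound and no upper bound, and the enclosing struct still
   has a known constant size.  */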
5325
5326 static bool
5327 flexible_array_member_p (const_tree f, const_tree type)
5328 {
5329 const_tree tf;
5330
5331 tf = TREE_TYPE (f);
5332 return (DECL_CHAIN (f) == NULL
5333 && TREE_CODE (tf) == ARRAY_TYPE
5334 && TYPE_DOMAIN (tf)
5335 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5336 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5337 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5338 && int_size_in_bytes (type) >= 0);
5339 }
5340
5341 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5342 must have in order for it to completely initialize a value of type TYPE.
5343 Return -1 if the number isn't known.
5344
5345 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
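/* For example (illustrative type), given

     struct s { int a; double b[3]; };

   a constructor needs 2 top-level elements (one for A, one for B) to be
   complete, so the FOR_CTOR_P answer is 2, whereas the !FOR_CTOR_P
   estimate is 4 scalars: 1 for A plus 3 for the elements of B.  */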
5346
5347 static HOST_WIDE_INT
5348 count_type_elements (const_tree type, bool for_ctor_p)
5349 {
5350 switch (TREE_CODE (type))
5351 {
5352 case ARRAY_TYPE:
5353 {
5354 tree nelts;
5355
5356 nelts = array_type_nelts (type);
5357 if (nelts && host_integerp (nelts, 1))
5358 {
5359 unsigned HOST_WIDE_INT n;
5360
5361 n = tree_low_cst (nelts, 1) + 1;
5362 if (n == 0 || for_ctor_p)
5363 return n;
5364 else
5365 return n * count_type_elements (TREE_TYPE (type), false);
5366 }
5367 return for_ctor_p ? -1 : 1;
5368 }
5369
5370 case RECORD_TYPE:
5371 {
5372 unsigned HOST_WIDE_INT n;
5373 tree f;
5374
5375 n = 0;
5376 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5377 if (TREE_CODE (f) == FIELD_DECL)
5378 {
5379 if (!for_ctor_p)
5380 n += count_type_elements (TREE_TYPE (f), false);
5381 else if (!flexible_array_member_p (f, type))
5382 /* Don't count flexible arrays, which are not supposed
5383 to be initialized. */
5384 n += 1;
5385 }
5386
5387 return n;
5388 }
5389
5390 case UNION_TYPE:
5391 case QUAL_UNION_TYPE:
5392 {
5393 tree f;
5394 HOST_WIDE_INT n, m;
5395
5396 gcc_assert (!for_ctor_p);
5397 /* Estimate the number of scalars in each field and pick the
5398 maximum. Other estimates would do instead; the idea is simply
5399 to make sure that the estimate is not sensitive to the ordering
5400 of the fields. */
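/* E.g. (illustrative, assuming an 8-byte double) for
   union { char c; double d; } the CHAR field counts 1 scalar plus 1
   for the uncovered tail while the DOUBLE field counts 1, so the
   estimate is 2 regardless of which field is listed first.  */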
5401 n = 1;
5402 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5403 if (TREE_CODE (f) == FIELD_DECL)
5404 {
5405 m = count_type_elements (TREE_TYPE (f), false);
5406 /* If the field doesn't span the whole union, add an extra
5407 scalar for the rest. */
5408 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5409 TYPE_SIZE (type)) != 1)
5410 m++;
5411 if (n < m)
5412 n = m;
5413 }
5414 return n;
5415 }
5416
5417 case COMPLEX_TYPE:
5418 return 2;
5419
5420 case VECTOR_TYPE:
5421 return TYPE_VECTOR_SUBPARTS (type);
5422
5423 case INTEGER_TYPE:
5424 case REAL_TYPE:
5425 case FIXED_POINT_TYPE:
5426 case ENUMERAL_TYPE:
5427 case BOOLEAN_TYPE:
5428 case POINTER_TYPE:
5429 case OFFSET_TYPE:
5430 case REFERENCE_TYPE:
5431 case NULLPTR_TYPE:
5432 return 1;
5433
5434 case ERROR_MARK:
5435 return 0;
5436
5437 case VOID_TYPE:
5438 case METHOD_TYPE:
5439 case FUNCTION_TYPE:
5440 case LANG_TYPE:
5441 default:
5442 gcc_unreachable ();
5443 }
5444 }
5445
5446 /* Helper for categorize_ctor_elements. Identical interface. */
5447
5448 static bool
5449 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5450 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5451 {
5452 unsigned HOST_WIDE_INT idx;
5453 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5454 tree value, purpose, elt_type;
5455
5456 /* Whether CTOR is a valid constant initializer, in accordance with what
5457 initializer_constant_valid_p does. If inferred from the constructor
5458 elements, true until proven otherwise. */
5459 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5460 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5461
5462 nz_elts = 0;
5463 init_elts = 0;
5464 num_fields = 0;
5465 elt_type = NULL_TREE;
5466
5467 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5468 {
5469 HOST_WIDE_INT mult = 1;
5470
5471 if (TREE_CODE (purpose) == RANGE_EXPR)
5472 {
5473 tree lo_index = TREE_OPERAND (purpose, 0);
5474 tree hi_index = TREE_OPERAND (purpose, 1);
5475
5476 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
5477 mult = (tree_low_cst (hi_index, 1)
5478 - tree_low_cst (lo_index, 1) + 1);
5479 }
5480 num_fields += mult;
5481 elt_type = TREE_TYPE (value);
5482
5483 switch (TREE_CODE (value))
5484 {
5485 case CONSTRUCTOR:
5486 {
5487 HOST_WIDE_INT nz = 0, ic = 0;
5488
5489 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5490 p_complete);
5491
5492 nz_elts += mult * nz;
5493 init_elts += mult * ic;
5494
5495 if (const_from_elts_p && const_p)
5496 const_p = const_elt_p;
5497 }
5498 break;
5499
5500 case INTEGER_CST:
5501 case REAL_CST:
5502 case FIXED_CST:
5503 if (!initializer_zerop (value))
5504 nz_elts += mult;
5505 init_elts += mult;
5506 break;
5507
5508 case STRING_CST:
5509 nz_elts += mult * TREE_STRING_LENGTH (value);
5510 init_elts += mult * TREE_STRING_LENGTH (value);
5511 break;
5512
5513 case COMPLEX_CST:
5514 if (!initializer_zerop (TREE_REALPART (value)))
5515 nz_elts += mult;
5516 if (!initializer_zerop (TREE_IMAGPART (value)))
5517 nz_elts += mult;
5518 init_elts += mult;
5519 break;
5520
5521 case VECTOR_CST:
5522 {
5523 tree v;
5524 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
5525 {
5526 if (!initializer_zerop (TREE_VALUE (v)))
5527 nz_elts += mult;
5528 init_elts += mult;
5529 }
5530 }
5531 break;
5532
5533 default:
5534 {
5535 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5536 nz_elts += mult * tc;
5537 init_elts += mult * tc;
5538
5539 if (const_from_elts_p && const_p)
5540 const_p = initializer_constant_valid_p (value, elt_type)
5541 != NULL_TREE;
5542 }
5543 break;
5544 }
5545 }
5546
5547 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5548 num_fields, elt_type))
5549 *p_complete = false;
5550
5551 *p_nz_elts += nz_elts;
5552 *p_init_elts += init_elts;
5553
5554 return const_p;
5555 }
5556
5557 /* Examine CTOR to discover:
5558 * how many scalar fields are set to nonzero values,
5559 and place it in *P_NZ_ELTS;
5560 * how many scalar fields in total are in CTOR,
5561 and place it in *P_INIT_ELTS;
5562 * whether the constructor is complete -- in the sense that every
5563 meaningful byte is explicitly given a value --
5564 and place it in *P_COMPLETE.
5565
5566 Return whether or not CTOR is a valid static constant initializer, the same
5567 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5568
5569 bool
5570 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5571 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5572 {
5573 *p_nz_elts = 0;
5574 *p_init_elts = 0;
5575 *p_complete = true;
5576
5577 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5578 }
5579
5580 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5581 of which had type LAST_TYPE. Each element was itself a complete
5582 initializer, in the sense that every meaningful byte was explicitly
5583 given a value. Return true if the same is true for the constructor
5584 as a whole. */
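/* For instance (illustrative, typical layout where the union's size
   equals that of INT):

     union u { char c; int i; };

   a one-element constructor whose LAST_TYPE is INT is complete, while
   one whose LAST_TYPE is CHAR is not, because CHAR does not cover the
   union's tail padding.  */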
5585
5586 bool
5587 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5588 const_tree last_type)
5589 {
5590 if (TREE_CODE (type) == UNION_TYPE
5591 || TREE_CODE (type) == QUAL_UNION_TYPE)
5592 {
5593 if (num_elts == 0)
5594 return false;
5595
5596 gcc_assert (num_elts == 1 && last_type);
5597
5598 /* ??? We could look at each element of the union and find the
5599 largest element, which would avoid comparing the size of the
5600 initialized element against any tail padding in the union.
5601 It doesn't seem worth the effort, though. */
5602 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5603 }
5604
5605 return count_type_elements (type, true) == num_elts;
5606 }
5607
5608 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
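/* For example (illustrative), the constructor for

     int v[8] = { 0, 0, 0, 0, 0, 0, 0, 9 };

   qualifies: it is complete and only 1 of its 8 scalars is nonzero,
   which is under the 1/4 threshold tested below.  */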
5609
5610 static int
5611 mostly_zeros_p (const_tree exp)
5612 {
5613 if (TREE_CODE (exp) == CONSTRUCTOR)
5614 {
5615 HOST_WIDE_INT nz_elts, init_elts;
5616 bool complete_p;
5617
5618 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5619 return !complete_p || nz_elts < init_elts / 4;
5620 }
5621
5622 return initializer_zerop (exp);
5623 }
5624
5625 /* Return 1 if EXP contains all zeros. */
5626
5627 static int
5628 all_zeros_p (const_tree exp)
5629 {
5630 if (TREE_CODE (exp) == CONSTRUCTOR)
5631 {
5632 HOST_WIDE_INT nz_elts, init_elts;
5633 bool complete_p;
5634
5635 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5636 return nz_elts == 0;
5637 }
5638
5639 return initializer_zerop (exp);
5640 }
5641 \f
5642 /* Helper function for store_constructor.
5643 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5644 TYPE is the type of the CONSTRUCTOR, not the element type.
5645 CLEARED is as for store_constructor.
5646 ALIAS_SET is the alias set to use for any stores.
5647
5648 This provides a recursive shortcut back to store_constructor when it isn't
5649 necessary to go through store_field. This is so that we can pass through
5650 the cleared field to let store_constructor know that we may not have to
5651 clear a substructure if the outer structure has already been cleared. */
5652
5653 static void
5654 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5655 HOST_WIDE_INT bitpos, enum machine_mode mode,
5656 tree exp, tree type, int cleared,
5657 alias_set_type alias_set)
5658 {
5659 if (TREE_CODE (exp) == CONSTRUCTOR
5660 /* We can only call store_constructor recursively if the size and
5661 bit position are on a byte boundary. */
5662 && bitpos % BITS_PER_UNIT == 0
5663 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5664 /* If we have a nonzero bitpos for a register target, then we just
5665 let store_field do the bitfield handling. This is unlikely to
5666 generate unnecessary clear instructions anyways. */
5667 && (bitpos == 0 || MEM_P (target)))
5668 {
5669 if (MEM_P (target))
5670 target
5671 = adjust_address (target,
5672 GET_MODE (target) == BLKmode
5673 || 0 != (bitpos
5674 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5675 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5676
5677
5678 /* Update the alias set, if required. */
5679 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5680 && MEM_ALIAS_SET (target) != 0)
5681 {
5682 target = copy_rtx (target);
5683 set_mem_alias_set (target, alias_set);
5684 }
5685
5686 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5687 }
5688 else
5689 store_field (target, bitsize, bitpos, 0, 0, mode, exp, type, alias_set,
5690 false);
5691 }
5692
5693 /* Store the value of constructor EXP into the rtx TARGET.
5694 TARGET is either a REG or a MEM; we know it cannot conflict, since
5695 safe_from_p has been called.
5696 CLEARED is true if TARGET is known to have been zeroed.
5697 SIZE is the number of bytes of TARGET we are allowed to modify: this
5698 may not be the same as the size of EXP if we are assigning to a field
5699 which has been packed to exclude padding bits. */
5700
5701 static void
5702 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5703 {
5704 tree type = TREE_TYPE (exp);
5705 #ifdef WORD_REGISTER_OPERATIONS
5706 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5707 #endif
5708
5709 switch (TREE_CODE (type))
5710 {
5711 case RECORD_TYPE:
5712 case UNION_TYPE:
5713 case QUAL_UNION_TYPE:
5714 {
5715 unsigned HOST_WIDE_INT idx;
5716 tree field, value;
5717
5718 /* If size is zero or the target is already cleared, do nothing. */
5719 if (size == 0 || cleared)
5720 cleared = 1;
5721 /* We either clear the aggregate or indicate the value is dead. */
5722 else if ((TREE_CODE (type) == UNION_TYPE
5723 || TREE_CODE (type) == QUAL_UNION_TYPE)
5724 && ! CONSTRUCTOR_ELTS (exp))
5725 /* If the constructor is empty, clear the union. */
5726 {
5727 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5728 cleared = 1;
5729 }
5730
5731 /* If we are building a static constructor into a register,
5732 set the initial value as zero so we can fold the value into
5733 a constant. But if more than one register is involved,
5734 this probably loses. */
5735 else if (REG_P (target) && TREE_STATIC (exp)
5736 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5737 {
5738 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5739 cleared = 1;
5740 }
5741
5742 /* If the constructor has fewer fields than the structure or
5743 if we are initializing the structure to mostly zeros, clear
5744 the whole structure first. Don't do this if TARGET is a
5745 register whose mode size isn't equal to SIZE since
5746 clear_storage can't handle this case. */
5747 else if (size > 0
5748 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5749 != fields_length (type))
5750 || mostly_zeros_p (exp))
5751 && (!REG_P (target)
5752 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5753 == size)))
5754 {
5755 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5756 cleared = 1;
5757 }
5758
5759 if (REG_P (target) && !cleared)
5760 emit_clobber (target);
5761
5762 /* Store each element of the constructor into the
5763 corresponding field of TARGET. */
5764 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5765 {
5766 enum machine_mode mode;
5767 HOST_WIDE_INT bitsize;
5768 HOST_WIDE_INT bitpos = 0;
5769 tree offset;
5770 rtx to_rtx = target;
5771
5772 /* Just ignore missing fields. We cleared the whole
5773 structure, above, if any fields are missing. */
5774 if (field == 0)
5775 continue;
5776
5777 if (cleared && initializer_zerop (value))
5778 continue;
5779
5780 if (host_integerp (DECL_SIZE (field), 1))
5781 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5782 else
5783 bitsize = -1;
5784
5785 mode = DECL_MODE (field);
5786 if (DECL_BIT_FIELD (field))
5787 mode = VOIDmode;
5788
5789 offset = DECL_FIELD_OFFSET (field);
5790 if (host_integerp (offset, 0)
5791 && host_integerp (bit_position (field), 0))
5792 {
5793 bitpos = int_bit_position (field);
5794 offset = 0;
5795 }
5796 else
5797 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5798
5799 if (offset)
5800 {
5801 enum machine_mode address_mode;
5802 rtx offset_rtx;
5803
5804 offset
5805 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5806 make_tree (TREE_TYPE (exp),
5807 target));
5808
5809 offset_rtx = expand_normal (offset);
5810 gcc_assert (MEM_P (to_rtx));
5811
5812 address_mode
5813 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5814 if (GET_MODE (offset_rtx) != address_mode)
5815 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5816
5817 to_rtx = offset_address (to_rtx, offset_rtx,
5818 highest_pow2_factor (offset));
5819 }
5820
5821 #ifdef WORD_REGISTER_OPERATIONS
5822 /* If this initializes a field that is smaller than a
5823 word, at the start of a word, try to widen it to a full
5824 word. This special case allows us to output C++ member
5825 function initializations in a form that the optimizers
5826 can understand. */
5827 if (REG_P (target)
5828 && bitsize < BITS_PER_WORD
5829 && bitpos % BITS_PER_WORD == 0
5830 && GET_MODE_CLASS (mode) == MODE_INT
5831 && TREE_CODE (value) == INTEGER_CST
5832 && exp_size >= 0
5833 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5834 {
5835 tree type = TREE_TYPE (value);
5836
5837 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5838 {
5839 type = lang_hooks.types.type_for_size
5840 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5841 value = fold_convert (type, value);
5842 }
5843
5844 if (BYTES_BIG_ENDIAN)
5845 value
5846 = fold_build2 (LSHIFT_EXPR, type, value,
5847 build_int_cst (type,
5848 BITS_PER_WORD - bitsize));
5849 bitsize = BITS_PER_WORD;
5850 mode = word_mode;
5851 }
5852 #endif
5853
5854 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5855 && DECL_NONADDRESSABLE_P (field))
5856 {
5857 to_rtx = copy_rtx (to_rtx);
5858 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5859 }
5860
5861 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5862 value, type, cleared,
5863 get_alias_set (TREE_TYPE (field)));
5864 }
5865 break;
5866 }
5867 case ARRAY_TYPE:
5868 {
5869 tree value, index;
5870 unsigned HOST_WIDE_INT i;
5871 int need_to_clear;
5872 tree domain;
5873 tree elttype = TREE_TYPE (type);
5874 int const_bounds_p;
5875 HOST_WIDE_INT minelt = 0;
5876 HOST_WIDE_INT maxelt = 0;
5877
5878 domain = TYPE_DOMAIN (type);
5879 const_bounds_p = (TYPE_MIN_VALUE (domain)
5880 && TYPE_MAX_VALUE (domain)
5881 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5882 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5883
5884 /* If we have constant bounds for the range of the type, get them. */
5885 if (const_bounds_p)
5886 {
5887 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5888 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5889 }
5890
5891 /* If the constructor has fewer elements than the array, clear
5892 the whole array first. Similarly if this is a static
5893 constructor of a non-BLKmode object. */
5894 if (cleared)
5895 need_to_clear = 0;
5896 else if (REG_P (target) && TREE_STATIC (exp))
5897 need_to_clear = 1;
5898 else
5899 {
5900 unsigned HOST_WIDE_INT idx;
5901 tree index, value;
5902 HOST_WIDE_INT count = 0, zero_count = 0;
5903 need_to_clear = ! const_bounds_p;
5904
5905 /* This loop is a more accurate version of the loop in
5906 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5907 is also needed to check for missing elements. */
5908 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5909 {
5910 HOST_WIDE_INT this_node_count;
5911
5912 if (need_to_clear)
5913 break;
5914
5915 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5916 {
5917 tree lo_index = TREE_OPERAND (index, 0);
5918 tree hi_index = TREE_OPERAND (index, 1);
5919
5920 if (! host_integerp (lo_index, 1)
5921 || ! host_integerp (hi_index, 1))
5922 {
5923 need_to_clear = 1;
5924 break;
5925 }
5926
5927 this_node_count = (tree_low_cst (hi_index, 1)
5928 - tree_low_cst (lo_index, 1) + 1);
5929 }
5930 else
5931 this_node_count = 1;
5932
5933 count += this_node_count;
5934 if (mostly_zeros_p (value))
5935 zero_count += this_node_count;
5936 }
5937
5938 /* Clear the entire array first if there are any missing
5939 elements, or if the incidence of zero elements is >=
5940 75%. */
5941 if (! need_to_clear
5942 && (count < maxelt - minelt + 1
5943 || 4 * zero_count >= 3 * count))
5944 need_to_clear = 1;
5945 }
5946
5947 if (need_to_clear && size > 0)
5948 {
5949 if (REG_P (target))
5950 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5951 else
5952 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5953 cleared = 1;
5954 }
5955
5956 if (!cleared && REG_P (target))
5957 /* Inform later passes that the old value is dead. */
5958 emit_clobber (target);
5959
5960 /* Store each element of the constructor into the
5961 corresponding element of TARGET, determined by counting the
5962 elements. */
5963 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5964 {
5965 enum machine_mode mode;
5966 HOST_WIDE_INT bitsize;
5967 HOST_WIDE_INT bitpos;
5968 rtx xtarget = target;
5969
5970 if (cleared && initializer_zerop (value))
5971 continue;
5972
5973 mode = TYPE_MODE (elttype);
5974 if (mode == BLKmode)
5975 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5976 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5977 : -1);
5978 else
5979 bitsize = GET_MODE_BITSIZE (mode);
5980
5981 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5982 {
5983 tree lo_index = TREE_OPERAND (index, 0);
5984 tree hi_index = TREE_OPERAND (index, 1);
5985 rtx index_r, pos_rtx;
5986 HOST_WIDE_INT lo, hi, count;
5987 tree position;
5988
5989 /* If the range is constant and "small", unroll the loop. */
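/* E.g. (illustrative) the GNU C range designator in

     int a[32] = { [2 ... 5] = 7 };

   gives a RANGE_EXPR index with constant bounds and a small total
   size, so the four element stores are emitted directly rather than
   through the runtime loop in the else branch.  */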
5990 if (const_bounds_p
5991 && host_integerp (lo_index, 0)
5992 && host_integerp (hi_index, 0)
5993 && (lo = tree_low_cst (lo_index, 0),
5994 hi = tree_low_cst (hi_index, 0),
5995 count = hi - lo + 1,
5996 (!MEM_P (target)
5997 || count <= 2
5998 || (host_integerp (TYPE_SIZE (elttype), 1)
5999 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
6000 <= 40 * 8)))))
6001 {
6002 lo -= minelt; hi -= minelt;
6003 for (; lo <= hi; lo++)
6004 {
6005 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
6006
6007 if (MEM_P (target)
6008 && !MEM_KEEP_ALIAS_SET_P (target)
6009 && TREE_CODE (type) == ARRAY_TYPE
6010 && TYPE_NONALIASED_COMPONENT (type))
6011 {
6012 target = copy_rtx (target);
6013 MEM_KEEP_ALIAS_SET_P (target) = 1;
6014 }
6015
6016 store_constructor_field
6017 (target, bitsize, bitpos, mode, value, type, cleared,
6018 get_alias_set (elttype));
6019 }
6020 }
6021 else
6022 {
6023 rtx loop_start = gen_label_rtx ();
6024 rtx loop_end = gen_label_rtx ();
6025 tree exit_cond;
6026
6027 expand_normal (hi_index);
6028
6029 index = build_decl (EXPR_LOCATION (exp),
6030 VAR_DECL, NULL_TREE, domain);
6031 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6032 SET_DECL_RTL (index, index_r);
6033 store_expr (lo_index, index_r, 0, false);
6034
6035 /* Build the head of the loop. */
6036 do_pending_stack_adjust ();
6037 emit_label (loop_start);
6038
6039 /* Assign value to element index. */
6040 position =
6041 fold_convert (ssizetype,
6042 fold_build2 (MINUS_EXPR,
6043 TREE_TYPE (index),
6044 index,
6045 TYPE_MIN_VALUE (domain)));
6046
6047 position =
6048 size_binop (MULT_EXPR, position,
6049 fold_convert (ssizetype,
6050 TYPE_SIZE_UNIT (elttype)));
6051
6052 pos_rtx = expand_normal (position);
6053 xtarget = offset_address (target, pos_rtx,
6054 highest_pow2_factor (position));
6055 xtarget = adjust_address (xtarget, mode, 0);
6056 if (TREE_CODE (value) == CONSTRUCTOR)
6057 store_constructor (value, xtarget, cleared,
6058 bitsize / BITS_PER_UNIT);
6059 else
6060 store_expr (value, xtarget, 0, false);
6061
6062 /* Generate a conditional jump to exit the loop. */
6063 exit_cond = build2 (LT_EXPR, integer_type_node,
6064 index, hi_index);
6065 jumpif (exit_cond, loop_end, -1);
6066
6067 /* Update the loop counter, and jump to the head of
6068 the loop. */
6069 expand_assignment (index,
6070 build2 (PLUS_EXPR, TREE_TYPE (index),
6071 index, integer_one_node),
6072 false);
6073
6074 emit_jump (loop_start);
6075
6076 /* Build the end of the loop. */
6077 emit_label (loop_end);
6078 }
6079 }
6080 else if ((index != 0 && ! host_integerp (index, 0))
6081 || ! host_integerp (TYPE_SIZE (elttype), 1))
6082 {
6083 tree position;
6084
6085 if (index == 0)
6086 index = ssize_int (1);
6087
6088 if (minelt)
6089 index = fold_convert (ssizetype,
6090 fold_build2 (MINUS_EXPR,
6091 TREE_TYPE (index),
6092 index,
6093 TYPE_MIN_VALUE (domain)));
6094
6095 position =
6096 size_binop (MULT_EXPR, index,
6097 fold_convert (ssizetype,
6098 TYPE_SIZE_UNIT (elttype)));
6099 xtarget = offset_address (target,
6100 expand_normal (position),
6101 highest_pow2_factor (position));
6102 xtarget = adjust_address (xtarget, mode, 0);
6103 store_expr (value, xtarget, 0, false);
6104 }
6105 else
6106 {
6107 if (index != 0)
6108 bitpos = ((tree_low_cst (index, 0) - minelt)
6109 * tree_low_cst (TYPE_SIZE (elttype), 1));
6110 else
6111 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
6112
6113 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6114 && TREE_CODE (type) == ARRAY_TYPE
6115 && TYPE_NONALIASED_COMPONENT (type))
6116 {
6117 target = copy_rtx (target);
6118 MEM_KEEP_ALIAS_SET_P (target) = 1;
6119 }
6120 store_constructor_field (target, bitsize, bitpos, mode, value,
6121 type, cleared, get_alias_set (elttype));
6122 }
6123 }
6124 break;
6125 }
6126
6127 case VECTOR_TYPE:
6128 {
6129 unsigned HOST_WIDE_INT idx;
6130 constructor_elt *ce;
6131 int i;
6132 int need_to_clear;
6133 int icode = 0;
6134 tree elttype = TREE_TYPE (type);
6135 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
6136 enum machine_mode eltmode = TYPE_MODE (elttype);
6137 HOST_WIDE_INT bitsize;
6138 HOST_WIDE_INT bitpos;
6139 rtvec vector = NULL;
6140 unsigned n_elts;
6141 alias_set_type alias;
6142
6143 gcc_assert (eltmode != BLKmode);
6144
6145 n_elts = TYPE_VECTOR_SUBPARTS (type);
6146 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6147 {
6148 enum machine_mode mode = GET_MODE (target);
6149
6150 icode = (int) optab_handler (vec_init_optab, mode);
6151 if (icode != CODE_FOR_nothing)
6152 {
6153 unsigned int i;
6154
6155 vector = rtvec_alloc (n_elts);
6156 for (i = 0; i < n_elts; i++)
6157 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6158 }
6159 }
6160
6161 /* If the constructor has fewer elements than the vector,
6162 clear the whole vector first. Similarly if this is a static
6163 constructor of a non-BLKmode object. */
6164 if (cleared)
6165 need_to_clear = 0;
6166 else if (REG_P (target) && TREE_STATIC (exp))
6167 need_to_clear = 1;
6168 else
6169 {
6170 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6171 tree value;
6172
6173 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6174 {
6175 int n_elts_here = tree_low_cst
6176 (int_const_binop (TRUNC_DIV_EXPR,
6177 TYPE_SIZE (TREE_TYPE (value)),
6178 TYPE_SIZE (elttype)), 1);
6179
6180 count += n_elts_here;
6181 if (mostly_zeros_p (value))
6182 zero_count += n_elts_here;
6183 }
6184
6185 /* Clear the entire vector first if there are any missing elements,
6186 or if the incidence of zero elements is >= 75%. */
6187 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6188 }
6189
6190 if (need_to_clear && size > 0 && !vector)
6191 {
6192 if (REG_P (target))
6193 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6194 else
6195 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6196 cleared = 1;
6197 }
6198
6199 /* Inform later passes that the old value is dead. */
6200 if (!cleared && !vector && REG_P (target))
6201 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6202
6203 if (MEM_P (target))
6204 alias = MEM_ALIAS_SET (target);
6205 else
6206 alias = get_alias_set (elttype);
6207
6208 /* Store each element of the constructor into the corresponding
6209 element of TARGET, determined by counting the elements. */
6210 for (idx = 0, i = 0;
6211 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6212 idx++, i += bitsize / elt_size)
6213 {
6214 HOST_WIDE_INT eltpos;
6215 tree value = ce->value;
6216
6217 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
6218 if (cleared && initializer_zerop (value))
6219 continue;
6220
6221 if (ce->index)
6222 eltpos = tree_low_cst (ce->index, 1);
6223 else
6224 eltpos = i;
6225
6226 if (vector)
6227 {
6228 /* Vector CONSTRUCTORs should only be built from smaller
6229 vectors in the case of BLKmode vectors. */
6230 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6231 RTVEC_ELT (vector, eltpos)
6232 = expand_normal (value);
6233 }
6234 else
6235 {
6236 enum machine_mode value_mode =
6237 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6238 ? TYPE_MODE (TREE_TYPE (value))
6239 : eltmode;
6240 bitpos = eltpos * elt_size;
6241 store_constructor_field (target, bitsize, bitpos,
6242 value_mode, value, type,
6243 cleared, alias);
6244 }
6245 }
6246
6247 if (vector)
6248 emit_insn (GEN_FCN (icode)
6249 (target,
6250 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6251 break;
6252 }
6253
6254 default:
6255 gcc_unreachable ();
6256 }
6257 }
6258
6259 /* Store the value of EXP (an expression tree)
6260 into a subfield of TARGET which has mode MODE and occupies
6261 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6262 If MODE is VOIDmode, it means that we are storing into a bit-field.
6263
6264 BITREGION_START is the bit position of the first bit-field in this region.
6265 BITREGION_END is the bit position of the ending bit-field in this region.
6266 Both are zero if the C++ memory model does not apply,
6267 or we are not interested in keeping track of bitfield regions.
6268
6269 Always return const0_rtx unless we have something particular to
6270 return.
6271
6272 TYPE is the type of the underlying object,
6273
6274 ALIAS_SET is the alias set for the destination. This value will
6275 (in general) be different from that for TARGET, since TARGET is a
6276 reference to the containing structure.
6277
6278 If NONTEMPORAL is true, try generating a nontemporal store. */
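/* For example (illustrative), a bit-field assignment such as

     struct { unsigned int a : 3; unsigned int b : 5; } s;
     s.b = 21;

   reaches this function with BITSIZE 5, a target-dependent BITPOS and
   MODE VOIDmode, and is carried out by store_bit_field below.  */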
6279
6280 static rtx
6281 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6282 unsigned HOST_WIDE_INT bitregion_start,
6283 unsigned HOST_WIDE_INT bitregion_end,
6284 enum machine_mode mode, tree exp, tree type,
6285 alias_set_type alias_set, bool nontemporal)
6286 {
6287 if (TREE_CODE (exp) == ERROR_MARK)
6288 return const0_rtx;
6289
6290 /* If we have nothing to store, do nothing unless the expression has
6291 side-effects. */
6292 if (bitsize == 0)
6293 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6294
6295 /* If we are storing into an unaligned field of an aligned union that is
6296 in a register, we may have the mode of TARGET being an integer mode but
6297 MODE == BLKmode. In that case, get an aligned object whose size and
6298 alignment are the same as TARGET and store TARGET into it (we can avoid
6299 the store if the field being stored is the entire width of TARGET). Then
6300 call ourselves recursively to store the field into a BLKmode version of
6301 that object. Finally, load from the object into TARGET. This is not
6302 very efficient in general, but should only be slightly more expensive
6303 than the otherwise-required unaligned accesses. Perhaps this can be
6304 cleaned up later. It's tempting to make OBJECT readonly, but it's set
6305 twice, once with emit_move_insn and once via store_field. */
6306
6307 if (mode == BLKmode
6308 && (REG_P (target) || GET_CODE (target) == SUBREG))
6309 {
6310 rtx object = assign_temp (type, 0, 1, 1);
6311 rtx blk_object = adjust_address (object, BLKmode, 0);
6312
6313 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
6314 emit_move_insn (object, target);
6315
6316 store_field (blk_object, bitsize, bitpos,
6317 bitregion_start, bitregion_end,
6318 mode, exp, type, alias_set, nontemporal);
6319
6320 emit_move_insn (target, object);
6321
6322 /* We want to return the BLKmode version of the data. */
6323 return blk_object;
6324 }
6325
6326 if (GET_CODE (target) == CONCAT)
6327 {
6328 /* We're storing into a struct containing a single __complex. */
6329
6330 gcc_assert (!bitpos);
6331 return store_expr (exp, target, 0, nontemporal);
6332 }
6333
6334 /* If the structure is in a register or if the component
6335 is a bit field, we cannot use addressing to access it.
6336 Use bit-field techniques or SUBREG to store in it. */
6337
6338 if (mode == VOIDmode
6339 || (mode != BLKmode && ! direct_store[(int) mode]
6340 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6341 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6342 || REG_P (target)
6343 || GET_CODE (target) == SUBREG
6344 /* If the field isn't aligned enough to store as an ordinary memref,
6345 store it as a bit field. */
6346 || (mode != BLKmode
6347 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6348 || bitpos % GET_MODE_ALIGNMENT (mode))
6349 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6350 || (bitpos % BITS_PER_UNIT != 0)))
6351 /* If the RHS and field are a constant size and the size of the
6352 RHS isn't the same size as the bitfield, we must use bitfield
6353 operations. */
6354 || (bitsize >= 0
6355 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6356 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6357 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6358 decl we must use bitfield operations. */
6359 || (bitsize >= 0
6360 && TREE_CODE (exp) == MEM_REF
6361 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6362 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6363 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6364 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6365 {
6366 rtx temp;
6367 gimple nop_def;
6368
6369 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6370 implies a mask operation. If the precision is the same size as
6371 the field we're storing into, that mask is redundant. This is
6372 particularly common with bit field assignments generated by the
6373 C front end. */
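/* E.g. (illustrative) when storing to a field declared
   "unsigned int f : 5", the front end converts the right-hand side to
   the 5-bit bit-field type, which on most targets has QImode, so the
   conversion implies masking to 5 bits; since only BITSIZE == 5 bits
   are stored anyway, the conversion's operand can be stored directly.  */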
6374 nop_def = get_def_for_expr (exp, NOP_EXPR);
6375 if (nop_def)
6376 {
6377 tree type = TREE_TYPE (exp);
6378 if (INTEGRAL_TYPE_P (type)
6379 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6380 && bitsize == TYPE_PRECISION (type))
6381 {
6382 tree op = gimple_assign_rhs1 (nop_def);
6383 type = TREE_TYPE (op);
6384 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6385 exp = op;
6386 }
6387 }
6388
6389 temp = expand_normal (exp);
6390
6391 /* If BITSIZE is narrower than the size of the type of EXP
6392 we will be narrowing TEMP. Normally, what's wanted are the
6393 low-order bits. However, if EXP's type is a record and this is
6394 a big-endian machine, we want the upper BITSIZE bits. */
6395 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6396 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6397 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6398 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6399 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6400 NULL_RTX, 1);
6401
6402 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
6403 MODE. */
6404 if (mode != VOIDmode && mode != BLKmode
6405 && mode != TYPE_MODE (TREE_TYPE (exp)))
6406 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6407
6408 /* If the modes of TEMP and TARGET are both BLKmode, both
6409 must be in memory and BITPOS must be aligned on a byte
6410 boundary. If so, we simply do a block copy. Likewise
6411 for a BLKmode-like TARGET. */
6412 if (GET_MODE (temp) == BLKmode
6413 && (GET_MODE (target) == BLKmode
6414 || (MEM_P (target)
6415 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6416 && (bitpos % BITS_PER_UNIT) == 0
6417 && (bitsize % BITS_PER_UNIT) == 0)))
6418 {
6419 gcc_assert (MEM_P (target) && MEM_P (temp)
6420 && (bitpos % BITS_PER_UNIT) == 0);
6421
6422 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6423 emit_block_move (target, temp,
6424 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6425 / BITS_PER_UNIT),
6426 BLOCK_OP_NORMAL);
6427
6428 return const0_rtx;
6429 }
6430
6431 /* Store the value in the bitfield. */
6432 store_bit_field (target, bitsize, bitpos,
6433 bitregion_start, bitregion_end,
6434 mode, temp);
6435
6436 return const0_rtx;
6437 }
6438 else
6439 {
6440 /* Now build a reference to just the desired component. */
6441 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6442
6443 if (to_rtx == target)
6444 to_rtx = copy_rtx (to_rtx);
6445
6446 if (!MEM_SCALAR_P (to_rtx))
6447 MEM_IN_STRUCT_P (to_rtx) = 1;
6448 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6449 set_mem_alias_set (to_rtx, alias_set);
6450
6451 return store_expr (exp, to_rtx, 0, nontemporal);
6452 }
6453 }
6454 \f
6455 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6456 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6457 codes and find the ultimate containing object, which we return.
6458
6459 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6460 bit position, and *PUNSIGNEDP to the signedness of the field.
6461 If the position of the field is variable, we store a tree
6462 giving the variable offset (in units) in *POFFSET.
6463 This offset is in addition to the bit position.
6464 If the position is not variable, we store 0 in *POFFSET.
6465
6466 If any of the extraction expressions is volatile,
6467 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6468
6469 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6470 Otherwise, it is a mode that can be used to access the field.
6471
6472 If the field describes a variable-sized object, *PMODE is set to
6473 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6474 this case, but the address of the object can be found.
6475
6476 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6477 look through nodes that serve as markers of a greater alignment than
6478 the one that can be deduced from the expression. These nodes make it
6479 possible for front-ends to prevent temporaries from being created by
6480 the middle-end on alignment considerations. For that purpose, the
6481 normal operating mode at high-level is to always pass FALSE so that
6482 the ultimate containing object is really returned; moreover, the
6483 associated predicate handled_component_p will always return TRUE
6484 on these nodes, thus indicating that they are essentially handled
6485 by get_inner_reference. TRUE should only be passed when the caller
6486 is scanning the expression in order to build another representation
6487 and specifically knows how to handle these nodes; as such, this is
6488 the normal operating mode in the RTL expanders. */
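/* For example (illustrative, assuming a 4-byte int), for a reference
   such as

     struct s { int pad; int f[10]; } *p;
     ... p->f[3] ...

   the loop below peels the ARRAY_REF and the COMPONENT_REF and hands
   back the dereference of P as the ultimate containing object, with
   *PBITSIZE set to 32, *PBITPOS to 128 (4 bytes for PAD plus 3 * 4 for
   the index, times BITS_PER_UNIT) and *POFFSET to NULL_TREE since the
   whole offset is constant.  */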
6489
6490 tree
6491 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6492 HOST_WIDE_INT *pbitpos, tree *poffset,
6493 enum machine_mode *pmode, int *punsignedp,
6494 int *pvolatilep, bool keep_aligning)
6495 {
6496 tree size_tree = 0;
6497 enum machine_mode mode = VOIDmode;
6498 bool blkmode_bitfield = false;
6499 tree offset = size_zero_node;
6500 double_int bit_offset = double_int_zero;
6501
6502 /* First get the mode, signedness, and size. We do this from just the
6503 outermost expression. */
6504 *pbitsize = -1;
6505 if (TREE_CODE (exp) == COMPONENT_REF)
6506 {
6507 tree field = TREE_OPERAND (exp, 1);
6508 size_tree = DECL_SIZE (field);
6509 if (!DECL_BIT_FIELD (field))
6510 mode = DECL_MODE (field);
6511 else if (DECL_MODE (field) == BLKmode)
6512 blkmode_bitfield = true;
6513 else if (TREE_THIS_VOLATILE (exp)
6514 && flag_strict_volatile_bitfields > 0)
6515 /* Volatile bitfields should be accessed in the mode of the
6516 field's type, not the mode computed based on the bit
6517 size. */
6518 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6519
6520 *punsignedp = DECL_UNSIGNED (field);
6521 }
6522 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6523 {
6524 size_tree = TREE_OPERAND (exp, 1);
6525 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6526 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6527
6528 /* For vector types, if the access has the correct size, use the mode
6529 of the inner type. */
6530 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6531 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6532 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6533 mode = TYPE_MODE (TREE_TYPE (exp));
6534 }
6535 else
6536 {
6537 mode = TYPE_MODE (TREE_TYPE (exp));
6538 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6539
6540 if (mode == BLKmode)
6541 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6542 else
6543 *pbitsize = GET_MODE_BITSIZE (mode);
6544 }
6545
6546 if (size_tree != 0)
6547 {
6548 if (! host_integerp (size_tree, 1))
6549 mode = BLKmode, *pbitsize = -1;
6550 else
6551 *pbitsize = tree_low_cst (size_tree, 1);
6552 }
6553
6554 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6555 and find the ultimate containing object. */
6556 while (1)
6557 {
6558 switch (TREE_CODE (exp))
6559 {
6560 case BIT_FIELD_REF:
6561 bit_offset
6562 = double_int_add (bit_offset,
6563 tree_to_double_int (TREE_OPERAND (exp, 2)));
6564 break;
6565
6566 case COMPONENT_REF:
6567 {
6568 tree field = TREE_OPERAND (exp, 1);
6569 tree this_offset = component_ref_field_offset (exp);
6570
6571 /* If this field hasn't been filled in yet, don't go past it.
6572 This should only happen when folding expressions made during
6573 type construction. */
6574 if (this_offset == 0)
6575 break;
6576
6577 offset = size_binop (PLUS_EXPR, offset, this_offset);
6578 bit_offset = double_int_add (bit_offset,
6579 tree_to_double_int
6580 (DECL_FIELD_BIT_OFFSET (field)));
6581
6582 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6583 }
6584 break;
6585
6586 case ARRAY_REF:
6587 case ARRAY_RANGE_REF:
6588 {
6589 tree index = TREE_OPERAND (exp, 1);
6590 tree low_bound = array_ref_low_bound (exp);
6591 tree unit_size = array_ref_element_size (exp);
6592
6593 /* We assume all arrays have sizes that are a multiple of a byte.
6594 First subtract the lower bound, if any, in the type of the
6595 index, then convert to sizetype and multiply by the size of
6596 the array element. */
6597 if (! integer_zerop (low_bound))
6598 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6599 index, low_bound);
6600
6601 offset = size_binop (PLUS_EXPR, offset,
6602 size_binop (MULT_EXPR,
6603 fold_convert (sizetype, index),
6604 unit_size));
6605 }
6606 break;
6607
6608 case REALPART_EXPR:
6609 break;
6610
6611 case IMAGPART_EXPR:
6612 bit_offset = double_int_add (bit_offset,
6613 uhwi_to_double_int (*pbitsize));
6614 break;
6615
6616 case VIEW_CONVERT_EXPR:
6617 if (keep_aligning && STRICT_ALIGNMENT
6618 && (TYPE_ALIGN (TREE_TYPE (exp))
6619 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6620 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6621 < BIGGEST_ALIGNMENT)
6622 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6623 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6624 goto done;
6625 break;
6626
6627 case MEM_REF:
6628 /* Hand back the decl for MEM[&decl, off]. */
6629 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6630 {
6631 tree off = TREE_OPERAND (exp, 1);
6632 if (!integer_zerop (off))
6633 {
6634 double_int boff, coff = mem_ref_offset (exp);
6635 boff = double_int_lshift (coff,
6636 BITS_PER_UNIT == 8
6637 ? 3 : exact_log2 (BITS_PER_UNIT),
6638 HOST_BITS_PER_DOUBLE_INT, true);
6639 bit_offset = double_int_add (bit_offset, boff);
6640 }
6641 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6642 }
6643 goto done;
6644
6645 default:
6646 goto done;
6647 }
6648
6649 /* If any reference in the chain is volatile, the effect is volatile. */
6650 if (TREE_THIS_VOLATILE (exp))
6651 *pvolatilep = 1;
6652
6653 exp = TREE_OPERAND (exp, 0);
6654 }
6655 done:
6656
6657 /* If OFFSET is constant, see if we can return the whole thing as a
6658 constant bit position. Make sure to handle overflow during
6659 this conversion. */
6660 if (TREE_CODE (offset) == INTEGER_CST)
6661 {
6662 double_int tem = tree_to_double_int (offset);
6663 tem = double_int_sext (tem, TYPE_PRECISION (sizetype));
6664 tem = double_int_lshift (tem,
6665 BITS_PER_UNIT == 8
6666 ? 3 : exact_log2 (BITS_PER_UNIT),
6667 HOST_BITS_PER_DOUBLE_INT, true);
6668 tem = double_int_add (tem, bit_offset);
6669 if (double_int_fits_in_shwi_p (tem))
6670 {
6671 *pbitpos = double_int_to_shwi (tem);
6672 *poffset = offset = NULL_TREE;
6673 }
6674 }
6675
6676 /* Otherwise, split it up. */
6677 if (offset)
6678 {
6679 *pbitpos = double_int_to_shwi (bit_offset);
6680 *poffset = offset;
6681 }
6682
6683 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6684 if (mode == VOIDmode
6685 && blkmode_bitfield
6686 && (*pbitpos % BITS_PER_UNIT) == 0
6687 && (*pbitsize % BITS_PER_UNIT) == 0)
6688 *pmode = BLKmode;
6689 else
6690 *pmode = mode;
6691
6692 return exp;
6693 }
6694
6695 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6696 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6697 EXP is marked as PACKED. */
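/* E.g. (illustrative) with

     struct __attribute__ ((packed)) s { char c; int i; } x;

   the COMPONENT_REF X.I contains a packed reference, since the field I
   is marked packed.  */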
6698
6699 bool
6700 contains_packed_reference (const_tree exp)
6701 {
6702 bool packed_p = false;
6703
6704 while (1)
6705 {
6706 switch (TREE_CODE (exp))
6707 {
6708 case COMPONENT_REF:
6709 {
6710 tree field = TREE_OPERAND (exp, 1);
6711 packed_p = DECL_PACKED (field)
6712 || TYPE_PACKED (TREE_TYPE (field))
6713 || TYPE_PACKED (TREE_TYPE (exp));
6714 if (packed_p)
6715 goto done;
6716 }
6717 break;
6718
6719 case BIT_FIELD_REF:
6720 case ARRAY_REF:
6721 case ARRAY_RANGE_REF:
6722 case REALPART_EXPR:
6723 case IMAGPART_EXPR:
6724 case VIEW_CONVERT_EXPR:
6725 break;
6726
6727 default:
6728 goto done;
6729 }
6730 exp = TREE_OPERAND (exp, 0);
6731 }
6732 done:
6733 return packed_p;
6734 }
6735
6736 /* Return a tree of sizetype representing the size, in bytes, of the element
6737 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6738
6739 tree
6740 array_ref_element_size (tree exp)
6741 {
6742 tree aligned_size = TREE_OPERAND (exp, 3);
6743 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6744 location_t loc = EXPR_LOCATION (exp);
6745
6746 /* If a size was specified in the ARRAY_REF, it's the size measured
6747 in alignment units of the element type. So multiply by that value. */
6748 if (aligned_size)
6749 {
6750 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6751 sizetype from another type of the same width and signedness. */
6752 if (TREE_TYPE (aligned_size) != sizetype)
6753 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6754 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6755 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6756 }
6757
6758 /* Otherwise, take the size from that of the element type. Substitute
6759 any PLACEHOLDER_EXPR that we have. */
6760 else
6761 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6762 }
6763
6764 /* Return a tree representing the lower bound of the array mentioned in
6765 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6766
6767 tree
6768 array_ref_low_bound (tree exp)
6769 {
6770 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6771
6772 /* If a lower bound is specified in EXP, use it. */
6773 if (TREE_OPERAND (exp, 2))
6774 return TREE_OPERAND (exp, 2);
6775
6776 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6777 substituting for a PLACEHOLDER_EXPR as needed. */
6778 if (domain_type && TYPE_MIN_VALUE (domain_type))
6779 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6780
6781 /* Otherwise, return a zero of the appropriate type. */
6782 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6783 }
6784
6785 /* Return a tree representing the upper bound of the array mentioned in
6786 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6787
6788 tree
6789 array_ref_up_bound (tree exp)
6790 {
6791 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6792
6793 /* If there is a domain type and it has an upper bound, use it, substituting
6794 for a PLACEHOLDER_EXPR as needed. */
6795 if (domain_type && TYPE_MAX_VALUE (domain_type))
6796 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6797
6798 /* Otherwise fail. */
6799 return NULL_TREE;
6800 }
6801
6802 /* Return a tree representing the offset, in bytes, of the field referenced
6803 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6804
6805 tree
6806 component_ref_field_offset (tree exp)
6807 {
6808 tree aligned_offset = TREE_OPERAND (exp, 2);
6809 tree field = TREE_OPERAND (exp, 1);
6810 location_t loc = EXPR_LOCATION (exp);
6811
6812 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6813 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6814 value. */
6815 if (aligned_offset)
6816 {
6817 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6818 sizetype from another type of the same width and signedness. */
6819 if (TREE_TYPE (aligned_offset) != sizetype)
6820 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6821 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6822 size_int (DECL_OFFSET_ALIGN (field)
6823 / BITS_PER_UNIT));
6824 }
6825
6826 /* Otherwise, take the offset from that of the field. Substitute
6827 any PLACEHOLDER_EXPR that we have. */
6828 else
6829 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6830 }
6831
6832 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6833
6834 static unsigned HOST_WIDE_INT
6835 target_align (const_tree target)
6836 {
6837 /* We might have a chain of nested references with intermediate misaligning
6838 bit-field components, so we need to recurse to find out. */
6839
6840 unsigned HOST_WIDE_INT this_align, outer_align;
6841
6842 switch (TREE_CODE (target))
6843 {
6844 case BIT_FIELD_REF:
6845 return 1;
6846
6847 case COMPONENT_REF:
6848 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6849 outer_align = target_align (TREE_OPERAND (target, 0));
6850 return MIN (this_align, outer_align);
6851
6852 case ARRAY_REF:
6853 case ARRAY_RANGE_REF:
6854 this_align = TYPE_ALIGN (TREE_TYPE (target));
6855 outer_align = target_align (TREE_OPERAND (target, 0));
6856 return MIN (this_align, outer_align);
6857
6858 CASE_CONVERT:
6859 case NON_LVALUE_EXPR:
6860 case VIEW_CONVERT_EXPR:
6861 this_align = TYPE_ALIGN (TREE_TYPE (target));
6862 outer_align = target_align (TREE_OPERAND (target, 0));
6863 return MAX (this_align, outer_align);
6864
6865 default:
6866 return TYPE_ALIGN (TREE_TYPE (target));
6867 }
6868 }
6869
6870 \f
6871 /* Given an rtx VALUE that may contain additions and multiplications, return
6872 an equivalent value that just refers to a register, memory, or constant.
6873 This is done by generating instructions to perform the arithmetic and
6874 returning a pseudo-register containing the value.
6875
6876 The returned value may be a REG, SUBREG, MEM or constant. */
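/* A minimal usage sketch (BASE_REG is a hypothetical pseudo):

     rtx sum = force_operand (gen_rtx_PLUS (Pmode, base_reg, GEN_INT (8)),
                              NULL_RTX);

   This emits the addition and returns a pseudo register holding
   BASE_REG + 8, suitable wherever a general operand is required.  */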
6877
6878 rtx
6879 force_operand (rtx value, rtx target)
6880 {
6881 rtx op1, op2;
6882 /* Use subtarget as the target for operand 0 of a binary operation. */
6883 rtx subtarget = get_subtarget (target);
6884 enum rtx_code code = GET_CODE (value);
6885
6886 /* Check for a SUBREG applied to an expression produced by the loop optimizer. */
6887 if (code == SUBREG
6888 && !REG_P (SUBREG_REG (value))
6889 && !MEM_P (SUBREG_REG (value)))
6890 {
6891 value
6892 = simplify_gen_subreg (GET_MODE (value),
6893 force_reg (GET_MODE (SUBREG_REG (value)),
6894 force_operand (SUBREG_REG (value),
6895 NULL_RTX)),
6896 GET_MODE (SUBREG_REG (value)),
6897 SUBREG_BYTE (value));
6898 code = GET_CODE (value);
6899 }
6900
6901 /* Check for a PIC address load. */
6902 if ((code == PLUS || code == MINUS)
6903 && XEXP (value, 0) == pic_offset_table_rtx
6904 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6905 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6906 || GET_CODE (XEXP (value, 1)) == CONST))
6907 {
6908 if (!subtarget)
6909 subtarget = gen_reg_rtx (GET_MODE (value));
6910 emit_move_insn (subtarget, value);
6911 return subtarget;
6912 }
6913
6914 if (ARITHMETIC_P (value))
6915 {
6916 op2 = XEXP (value, 1);
6917 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6918 subtarget = 0;
6919 if (code == MINUS && CONST_INT_P (op2))
6920 {
6921 code = PLUS;
6922 op2 = negate_rtx (GET_MODE (value), op2);
6923 }
6924
6925 /* Check for an addition with OP2 a constant integer and our first
6926 operand a PLUS of a virtual register and something else. In that
6927 case, we want to emit the sum of the virtual register and the
6928 constant first and then add the other value. This allows virtual
6929 register instantiation to simply modify the constant rather than
6930 creating another one around this addition. */
6931 if (code == PLUS && CONST_INT_P (op2)
6932 && GET_CODE (XEXP (value, 0)) == PLUS
6933 && REG_P (XEXP (XEXP (value, 0), 0))
6934 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6935 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6936 {
6937 rtx temp = expand_simple_binop (GET_MODE (value), code,
6938 XEXP (XEXP (value, 0), 0), op2,
6939 subtarget, 0, OPTAB_LIB_WIDEN);
6940 return expand_simple_binop (GET_MODE (value), code, temp,
6941 force_operand (XEXP (XEXP (value,
6942 0), 1), 0),
6943 target, 0, OPTAB_LIB_WIDEN);
6944 }
6945
6946 op1 = force_operand (XEXP (value, 0), subtarget);
6947 op2 = force_operand (op2, NULL_RTX);
6948 switch (code)
6949 {
6950 case MULT:
6951 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6952 case DIV:
6953 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6954 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6955 target, 1, OPTAB_LIB_WIDEN);
6956 else
6957 return expand_divmod (0,
6958 FLOAT_MODE_P (GET_MODE (value))
6959 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6960 GET_MODE (value), op1, op2, target, 0);
6961 case MOD:
6962 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6963 target, 0);
6964 case UDIV:
6965 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6966 target, 1);
6967 case UMOD:
6968 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6969 target, 1);
6970 case ASHIFTRT:
6971 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6972 target, 0, OPTAB_LIB_WIDEN);
6973 default:
6974 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6975 target, 1, OPTAB_LIB_WIDEN);
6976 }
6977 }
6978 if (UNARY_P (value))
6979 {
6980 if (!target)
6981 target = gen_reg_rtx (GET_MODE (value));
6982 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6983 switch (code)
6984 {
6985 case ZERO_EXTEND:
6986 case SIGN_EXTEND:
6987 case TRUNCATE:
6988 case FLOAT_EXTEND:
6989 case FLOAT_TRUNCATE:
6990 convert_move (target, op1, code == ZERO_EXTEND);
6991 return target;
6992
6993 case FIX:
6994 case UNSIGNED_FIX:
6995 expand_fix (target, op1, code == UNSIGNED_FIX);
6996 return target;
6997
6998 case FLOAT:
6999 case UNSIGNED_FLOAT:
7000 expand_float (target, op1, code == UNSIGNED_FLOAT);
7001 return target;
7002
7003 default:
7004 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7005 }
7006 }
7007
7008 #ifdef INSN_SCHEDULING
7009 /* On machines that have insn scheduling, we want all memory references to be
7010 explicit, so we need to deal with such paradoxical SUBREGs. */
7011 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7012 value
7013 = simplify_gen_subreg (GET_MODE (value),
7014 force_reg (GET_MODE (SUBREG_REG (value)),
7015 force_operand (SUBREG_REG (value),
7016 NULL_RTX)),
7017 GET_MODE (SUBREG_REG (value)),
7018 SUBREG_BYTE (value));
7019 #endif
7020
7021 return value;
7022 }
7023 \f
7024 /* Subroutine of expand_expr: return nonzero iff there is no way that
7025 EXP can reference X, which is being modified. TOP_P is nonzero if this
7026 call is going to be used to determine whether we need a temporary
7027 for EXP, as opposed to a recursive call to this function.
7028
7029 It is always safe for this routine to return zero since it merely
7030 searches for optimization opportunities. */
7031
7032 int
7033 safe_from_p (const_rtx x, tree exp, int top_p)
7034 {
7035 rtx exp_rtl = 0;
7036 int i, nops;
7037
7038 if (x == 0
7039 /* If EXP has varying size, we MUST use a target since we currently
7040 have no way of allocating temporaries of variable size
7041 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7042 So we assume here that something at a higher level has prevented a
7043 clash. This is somewhat bogus, but the best we can do. Only
7044 do this when X is BLKmode and when we are at the top level. */
7045 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7046 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7047 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7048 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7049 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7050 != INTEGER_CST)
7051 && GET_MODE (x) == BLKmode)
7052 /* If X is in the outgoing argument area, it is always safe. */
7053 || (MEM_P (x)
7054 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7055 || (GET_CODE (XEXP (x, 0)) == PLUS
7056 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7057 return 1;
7058
7059 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7060 find the underlying pseudo. */
7061 if (GET_CODE (x) == SUBREG)
7062 {
7063 x = SUBREG_REG (x);
7064 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7065 return 0;
7066 }
7067
7068 /* Now look at our tree code and possibly recurse. */
7069 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7070 {
7071 case tcc_declaration:
7072 exp_rtl = DECL_RTL_IF_SET (exp);
7073 break;
7074
7075 case tcc_constant:
7076 return 1;
7077
7078 case tcc_exceptional:
7079 if (TREE_CODE (exp) == TREE_LIST)
7080 {
7081 while (1)
7082 {
7083 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7084 return 0;
7085 exp = TREE_CHAIN (exp);
7086 if (!exp)
7087 return 1;
7088 if (TREE_CODE (exp) != TREE_LIST)
7089 return safe_from_p (x, exp, 0);
7090 }
7091 }
7092 else if (TREE_CODE (exp) == CONSTRUCTOR)
7093 {
7094 constructor_elt *ce;
7095 unsigned HOST_WIDE_INT idx;
7096
7097 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce)
7098 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7099 || !safe_from_p (x, ce->value, 0))
7100 return 0;
7101 return 1;
7102 }
7103 else if (TREE_CODE (exp) == ERROR_MARK)
7104 return 1; /* An already-visited SAVE_EXPR? */
7105 else
7106 return 0;
7107
7108 case tcc_statement:
7109 /* The only case we look at here is the DECL_INITIAL inside a
7110 DECL_EXPR. */
7111 return (TREE_CODE (exp) != DECL_EXPR
7112 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7113 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7114 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7115
7116 case tcc_binary:
7117 case tcc_comparison:
7118 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7119 return 0;
7120 /* Fall through. */
7121
7122 case tcc_unary:
7123 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7124
7125 case tcc_expression:
7126 case tcc_reference:
7127 case tcc_vl_exp:
7128 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7129 the expression. If it is set, we conflict iff we are that rtx or
7130 both are in memory. Otherwise, we check all operands of the
7131 expression recursively. */
7132
7133 switch (TREE_CODE (exp))
7134 {
7135 case ADDR_EXPR:
7136 /* If the operand is static or we are static, we can't conflict.
7137 Likewise if we don't conflict with the operand at all. */
7138 if (staticp (TREE_OPERAND (exp, 0))
7139 || TREE_STATIC (exp)
7140 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7141 return 1;
7142
7143 /* Otherwise, the only way this can conflict is if we are taking
7144 the address of a DECL and that address is part of X, which is
7145 very rare. */
7146 exp = TREE_OPERAND (exp, 0);
7147 if (DECL_P (exp))
7148 {
7149 if (!DECL_RTL_SET_P (exp)
7150 || !MEM_P (DECL_RTL (exp)))
7151 return 0;
7152 else
7153 exp_rtl = XEXP (DECL_RTL (exp), 0);
7154 }
7155 break;
7156
7157 case MEM_REF:
7158 if (MEM_P (x)
7159 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7160 get_alias_set (exp)))
7161 return 0;
7162 break;
7163
7164 case CALL_EXPR:
7165 /* Assume that the call will clobber all hard registers and
7166 all of memory. */
7167 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7168 || MEM_P (x))
7169 return 0;
7170 break;
7171
7172 case WITH_CLEANUP_EXPR:
7173 case CLEANUP_POINT_EXPR:
7174 /* Lowered by gimplify.c. */
7175 gcc_unreachable ();
7176
7177 case SAVE_EXPR:
7178 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7179
7180 default:
7181 break;
7182 }
7183
7184 /* If we have an rtx, we do not need to scan our operands. */
7185 if (exp_rtl)
7186 break;
7187
7188 nops = TREE_OPERAND_LENGTH (exp);
7189 for (i = 0; i < nops; i++)
7190 if (TREE_OPERAND (exp, i) != 0
7191 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7192 return 0;
7193
7194 break;
7195
7196 case tcc_type:
7197 /* Should never get a type here. */
7198 gcc_unreachable ();
7199 }
7200
7201 /* If we have an rtl, find any enclosed object. Then see if we conflict
7202 with it. */
7203 if (exp_rtl)
7204 {
7205 if (GET_CODE (exp_rtl) == SUBREG)
7206 {
7207 exp_rtl = SUBREG_REG (exp_rtl);
7208 if (REG_P (exp_rtl)
7209 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7210 return 0;
7211 }
7212
7213 /* If the rtl is X, then it is not safe. Otherwise, it is safe unless
7214 both are memory references and they conflict. */
7215 return ! (rtx_equal_p (x, exp_rtl)
7216 || (MEM_P (x) && MEM_P (exp_rtl)
7217 && true_dependence (exp_rtl, VOIDmode, x,
7218 rtx_addr_varies_p)));
7219 }
7220
7221 /* If we reach here, it is safe. */
7222 return 1;
7223 }
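/* An illustrative usage sketch (not a requirement of the interface):
   callers in this file typically use safe_from_p to decide whether a
   suggested TARGET may be reused while the other operand is expanded,
   falling back to a fresh temporary when it may not:

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   This mirrors the check at the start of expand_operands below.  */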
7224
7225 \f
7226 /* Return the highest power of two that EXP is known to be a multiple of.
7227 This is used in updating alignment of MEMs in array references. */
7228
7229 unsigned HOST_WIDE_INT
7230 highest_pow2_factor (const_tree exp)
7231 {
7232 unsigned HOST_WIDE_INT c0, c1;
7233
7234 switch (TREE_CODE (exp))
7235 {
7236 case INTEGER_CST:
7237 /* We can find the lowest bit that's a one. If the low
7238 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
7239 We need to handle this case since we can find it in a COND_EXPR,
7240 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
7241 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
7242 later ICE. */
7243 if (TREE_OVERFLOW (exp))
7244 return BIGGEST_ALIGNMENT;
7245 else
7246 {
7247 /* Note: tree_low_cst is intentionally not used here,
7248 we don't care about the upper bits. */
7249 c0 = TREE_INT_CST_LOW (exp);
7250 c0 &= -c0;
7251 return c0 ? c0 : BIGGEST_ALIGNMENT;
7252 }
7253 break;
7254
7255 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
7256 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7257 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7258 return MIN (c0, c1);
7259
7260 case MULT_EXPR:
7261 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7262 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7263 return c0 * c1;
7264
7265 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
7266 case CEIL_DIV_EXPR:
7267 if (integer_pow2p (TREE_OPERAND (exp, 1))
7268 && host_integerp (TREE_OPERAND (exp, 1), 1))
7269 {
7270 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7271 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
7272 return MAX (1, c0 / c1);
7273 }
7274 break;
7275
7276 case BIT_AND_EXPR:
7277 /* The highest power of two of a bit-and expression is the maximum of
7278 that of its operands. We typically get here for a complex LHS and
7279 a constant negative power of two on the RHS to force an explicit
7280 alignment, so don't bother looking at the LHS. */
7281 return highest_pow2_factor (TREE_OPERAND (exp, 1));
7282
7283 CASE_CONVERT:
7284 case SAVE_EXPR:
7285 return highest_pow2_factor (TREE_OPERAND (exp, 0));
7286
7287 case COMPOUND_EXPR:
7288 return highest_pow2_factor (TREE_OPERAND (exp, 1));
7289
7290 case COND_EXPR:
7291 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7292 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
7293 return MIN (c0, c1);
7294
7295 default:
7296 break;
7297 }
7298
7299 return 1;
7300 }
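/* An illustrative stand-alone sketch of the INTEGER_CST arithmetic
   above; the helper name is hypothetical and nothing in the compiler
   calls it.  C & -C isolates the lowest set bit of C, which is the
   largest power of two dividing C; e.g. 24 & -24 == 8, and for a
   product such as i * 12 the recursion above yields 1 * 4 == 4.  */

static inline unsigned HOST_WIDE_INT
highest_pow2_factor_of_cst_sketch (unsigned HOST_WIDE_INT c)
{
  /* Lowest set bit of C; zero means all low bits are clear, so fall
     back to BIGGEST_ALIGNMENT exactly as the INTEGER_CST case does.  */
  c &= -c;
  return c ? c : BIGGEST_ALIGNMENT;
}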
7301
7302 /* Similar, except that the alignment requirements of TARGET are
7303 taken into account. Assume it is at least as aligned as its
7304 type, unless it is a COMPONENT_REF in which case the layout of
7305 the structure gives the alignment. */
7306
7307 static unsigned HOST_WIDE_INT
7308 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7309 {
7310 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7311 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7312
7313 return MAX (factor, talign);
7314 }
7315 \f
7316 /* Subroutine of expand_expr. Expand the two operands of a binary
7317 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7318 The value may be stored in TARGET if TARGET is nonzero. The
7319 MODIFIER argument is as documented by expand_expr. */
7320
7321 static void
7322 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7323 enum expand_modifier modifier)
7324 {
7325 if (! safe_from_p (target, exp1, 1))
7326 target = 0;
7327 if (operand_equal_p (exp0, exp1, 0))
7328 {
7329 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7330 *op1 = copy_rtx (*op0);
7331 }
7332 else
7333 {
7334 /* If we need to preserve evaluation order, copy exp0 into its own
7335 temporary variable so that it can't be clobbered by exp1. */
7336 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7337 exp0 = save_expr (exp0);
7338 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7339 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7340 }
7341 }
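/* As an illustration of typical use: the generic binary-operator path
   in expand_expr_real_2 below expands both operands with this function
   and then hands them to expand_binop, roughly

     expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
                      EXPAND_NORMAL);
     temp = expand_binop (mode, this_optab, op0, op1, target,
                          unsignedp, OPTAB_LIB_WIDEN);

   See the "binop" label near the end of expand_expr_real_2.  */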
7342
7343 \f
7344 /* Return a MEM that contains constant EXP. DEFER is as for
7345 output_constant_def and MODIFIER is as for expand_expr. */
7346
7347 static rtx
7348 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7349 {
7350 rtx mem;
7351
7352 mem = output_constant_def (exp, defer);
7353 if (modifier != EXPAND_INITIALIZER)
7354 mem = use_anchored_address (mem);
7355 return mem;
7356 }
7357
7358 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7359 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7360
7361 static rtx
7362 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7363 enum expand_modifier modifier, addr_space_t as)
7364 {
7365 rtx result, subtarget;
7366 tree inner, offset;
7367 HOST_WIDE_INT bitsize, bitpos;
7368 int volatilep, unsignedp;
7369 enum machine_mode mode1;
7370
7371 /* If we are taking the address of a constant and are at the top level,
7372 we have to use output_constant_def since we can't call force_const_mem
7373 at top level. */
7374 /* ??? This should be considered a front-end bug. We should not be
7375 generating ADDR_EXPR of something that isn't an LVALUE. The only
7376 exception here is STRING_CST. */
7377 if (CONSTANT_CLASS_P (exp))
7378 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
7379
7380 /* Everything must be something allowed by is_gimple_addressable. */
7381 switch (TREE_CODE (exp))
7382 {
7383 case INDIRECT_REF:
7384 /* This case will happen via recursion for &a->b. */
7385 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7386
7387 case MEM_REF:
7388 {
7389 tree tem = TREE_OPERAND (exp, 0);
7390 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7391 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7392 return expand_expr (tem, target, tmode, modifier);
7393 }
7394
7395 case CONST_DECL:
7396 /* Expand the initializer like constants above. */
7397 return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
7398
7399 case REALPART_EXPR:
7400 /* The real part of the complex number is always first, therefore
7401 the address is the same as the address of the parent object. */
7402 offset = 0;
7403 bitpos = 0;
7404 inner = TREE_OPERAND (exp, 0);
7405 break;
7406
7407 case IMAGPART_EXPR:
7408 /* The imaginary part of the complex number is always second.
7409 The expression is therefore always offset by the size of the
7410 scalar type. */
7411 offset = 0;
7412 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7413 inner = TREE_OPERAND (exp, 0);
7414 break;
7415
7416 default:
7417 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7418 expand_expr, as that can have various side effects; LABEL_DECLs for
7419 example, may not have their DECL_RTL set yet. Expand the rtl of
7420 CONSTRUCTORs too, which should yield a memory reference for the
7421 constructor's contents. Assume language specific tree nodes can
7422 be expanded in some interesting way. */
7423 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7424 if (DECL_P (exp)
7425 || TREE_CODE (exp) == CONSTRUCTOR
7426 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7427 {
7428 result = expand_expr (exp, target, tmode,
7429 modifier == EXPAND_INITIALIZER
7430 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7431
7432 /* If the DECL isn't in memory, then the DECL wasn't properly
7433 marked TREE_ADDRESSABLE, which will be either a front-end
7434 or a tree optimizer bug. */
7435
7436 if (TREE_ADDRESSABLE (exp)
7437 && ! MEM_P (result)
7438 && ! targetm.calls.allocate_stack_slots_for_args())
7439 {
7440 error ("local frame unavailable (naked function?)");
7441 return result;
7442 }
7443 else
7444 gcc_assert (MEM_P (result));
7445 result = XEXP (result, 0);
7446
7447 /* ??? Is this needed anymore? */
7448 if (DECL_P (exp) && TREE_USED (exp))
7449 {
7450 assemble_external (exp);
7451 TREE_USED (exp) = 1;
7452 }
7453
7454 if (modifier != EXPAND_INITIALIZER
7455 && modifier != EXPAND_CONST_ADDRESS)
7456 result = force_operand (result, target);
7457 return result;
7458 }
7459
7460 /* Pass FALSE as the last argument to get_inner_reference although
7461 we are expanding to RTL. The rationale is that we know how to
7462 handle "aligning nodes" here: we can just bypass them because
7463 they won't change the final object whose address will be returned
7464 (they actually exist only for that purpose). */
7465 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7466 &mode1, &unsignedp, &volatilep, false);
7467 break;
7468 }
7469
7470 /* We must have made progress. */
7471 gcc_assert (inner != exp);
7472
7473 subtarget = offset || bitpos ? NULL_RTX : target;
7474 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7475 inner alignment, force the inner to be sufficiently aligned. */
7476 if (CONSTANT_CLASS_P (inner)
7477 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7478 {
7479 inner = copy_node (inner);
7480 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7481 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7482 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7483 }
7484 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7485
7486 if (offset)
7487 {
7488 rtx tmp;
7489
7490 if (modifier != EXPAND_NORMAL)
7491 result = force_operand (result, NULL);
7492 tmp = expand_expr (offset, NULL_RTX, tmode,
7493 modifier == EXPAND_INITIALIZER
7494 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7495
7496 result = convert_memory_address_addr_space (tmode, result, as);
7497 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7498
7499 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7500 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7501 else
7502 {
7503 subtarget = bitpos ? NULL_RTX : target;
7504 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7505 1, OPTAB_LIB_WIDEN);
7506 }
7507 }
7508
7509 if (bitpos)
7510 {
7511 /* Someone beforehand should have rejected taking the address
7512 of such an object. */
7513 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7514
7515 result = plus_constant (result, bitpos / BITS_PER_UNIT);
7516 if (modifier < EXPAND_SUM)
7517 result = force_operand (result, target);
7518 }
7519
7520 return result;
7521 }
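/* A worked example of the decomposition above: for an address such as
   &a.f, get_inner_reference splits the reference into an innermost
   object, a variable byte OFFSET and a constant bit position BITPOS,
   and the address can then be assembled roughly as

     result = <address of INNER>;              (recursive call)
     result = result + OFFSET;                 (if any)
     result = result + BITPOS / BITS_PER_UNIT; (if any)

   with BITPOS required to be a multiple of BITS_PER_UNIT, as the
   assertion above enforces.  */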
7522
7523 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7524 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7525
7526 static rtx
7527 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7528 enum expand_modifier modifier)
7529 {
7530 addr_space_t as = ADDR_SPACE_GENERIC;
7531 enum machine_mode address_mode = Pmode;
7532 enum machine_mode pointer_mode = ptr_mode;
7533 enum machine_mode rmode;
7534 rtx result;
7535
7536 /* Target mode of VOIDmode says "whatever's natural". */
7537 if (tmode == VOIDmode)
7538 tmode = TYPE_MODE (TREE_TYPE (exp));
7539
7540 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7541 {
7542 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7543 address_mode = targetm.addr_space.address_mode (as);
7544 pointer_mode = targetm.addr_space.pointer_mode (as);
7545 }
7546
7547 /* We can get called with some Weird Things if the user does silliness
7548 like "(short) &a". In that case, convert_memory_address won't do
7549 the right thing, so ignore the given target mode. */
7550 if (tmode != address_mode && tmode != pointer_mode)
7551 tmode = address_mode;
7552
7553 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7554 tmode, modifier, as);
7555
7556 /* Despite expand_expr's claims concerning ignoring TMODE when not
7557 strictly convenient, stuff breaks if we don't honor it. Note
7558 that combined with the above, we only do this for pointer modes. */
7559 rmode = GET_MODE (result);
7560 if (rmode == VOIDmode)
7561 rmode = tmode;
7562 if (rmode != tmode)
7563 result = convert_memory_address_addr_space (tmode, result, as);
7564
7565 return result;
7566 }
7567
7568 /* Generate code for computing CONSTRUCTOR EXP.
7569 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7570 is TRUE, instead of creating a temporary variable in memory,
7571 NULL is returned and the caller needs to handle it differently. */
7572
7573 static rtx
7574 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7575 bool avoid_temp_mem)
7576 {
7577 tree type = TREE_TYPE (exp);
7578 enum machine_mode mode = TYPE_MODE (type);
7579
7580 /* Try to avoid creating a temporary at all. This is possible
7581 if all of the initializer is zero.
7582 FIXME: try to handle all [0..255] initializers we can handle
7583 with memset. */
7584 if (TREE_STATIC (exp)
7585 && !TREE_ADDRESSABLE (exp)
7586 && target != 0 && mode == BLKmode
7587 && all_zeros_p (exp))
7588 {
7589 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7590 return target;
7591 }
7592
7593 /* All elts simple constants => refer to a constant in memory. But
7594 if this is a non-BLKmode mode, let it store a field at a time
7595 since that should make a CONST_INT or CONST_DOUBLE when we
7596 fold. Likewise, if we have a target we can use, it is best to
7597 store directly into the target unless the type is large enough
7598 that memcpy will be used. If we are making an initializer and
7599 all operands are constant, put it in memory as well.
7600
7601 FIXME: Avoid trying to fill vector constructors piece-meal.
7602 Output them with output_constant_def below unless we're sure
7603 they're zeros. This should go away when vector initializers
7604 are treated like VECTOR_CST instead of arrays. */
7605 if ((TREE_STATIC (exp)
7606 && ((mode == BLKmode
7607 && ! (target != 0 && safe_from_p (target, exp, 1)))
7608 || TREE_ADDRESSABLE (exp)
7609 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7610 && (! MOVE_BY_PIECES_P
7611 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7612 TYPE_ALIGN (type)))
7613 && ! mostly_zeros_p (exp))))
7614 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7615 && TREE_CONSTANT (exp)))
7616 {
7617 rtx constructor;
7618
7619 if (avoid_temp_mem)
7620 return NULL_RTX;
7621
7622 constructor = expand_expr_constant (exp, 1, modifier);
7623
7624 if (modifier != EXPAND_CONST_ADDRESS
7625 && modifier != EXPAND_INITIALIZER
7626 && modifier != EXPAND_SUM)
7627 constructor = validize_mem (constructor);
7628
7629 return constructor;
7630 }
7631
7632 /* Handle calls that pass values in multiple non-contiguous
7633 locations. The Irix 6 ABI has examples of this. */
7634 if (target == 0 || ! safe_from_p (target, exp, 1)
7635 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7636 {
7637 if (avoid_temp_mem)
7638 return NULL_RTX;
7639
7640 target
7641 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7642 | (TREE_READONLY (exp)
7643 * TYPE_QUAL_CONST))),
7644 0, TREE_ADDRESSABLE (exp), 1);
7645 }
7646
7647 store_constructor (exp, target, 0, int_expr_size (exp));
7648 return target;
7649 }
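/* As an example of the all_zeros_p fast path above: an aggregate
   initializer whose elements are all zero, such as

     struct S s = { 0, 0, 0 };

   can be expanded, when a BLKmode target is available and the
   CONSTRUCTOR is a valid static initializer, by a single clear_storage
   call instead of a field-by-field store_constructor.  */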
7650
7651
7652 /* expand_expr: generate code for computing expression EXP.
7653 An rtx for the computed value is returned. The value is never null.
7654 In the case of a void EXP, const0_rtx is returned.
7655
7656 The value may be stored in TARGET if TARGET is nonzero.
7657 TARGET is just a suggestion; callers must assume that
7658 the rtx returned may not be the same as TARGET.
7659
7660 If TARGET is CONST0_RTX, it means that the value will be ignored.
7661
7662 If TMODE is not VOIDmode, it suggests generating the
7663 result in mode TMODE. But this is done only when convenient.
7664 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7665 TMODE is just a suggestion; callers must assume that
7666 the rtx returned may not have mode TMODE.
7667
7668 Note that TARGET may have neither TMODE nor MODE. In that case, it
7669 probably will not be used.
7670
7671 If MODIFIER is EXPAND_SUM then when EXP is an addition
7672 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7673 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7674 products as above, or REG or MEM, or constant.
7675 Ordinarily in such cases we would output mul or add instructions
7676 and then return a pseudo reg containing the sum.
7677
7678 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7679 it also marks a label as absolutely required (it can't be dead).
7680 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7681 This is used for outputting expressions used in initializers.
7682
7683 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7684 with a constant address even if that address is not normally legitimate.
7685 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7686
7687 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7688 a call parameter. Such targets require special care as we haven't yet
7689 marked TARGET so that it's safe from being trashed by libcalls. We
7690 don't want to use TARGET for anything but the final result;
7691 intermediate values must go elsewhere. Additionally, calls to
7692 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7693
7694 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7695 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7696 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7697 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7698 recursively. */
7699
7700 rtx
7701 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7702 enum expand_modifier modifier, rtx *alt_rtl)
7703 {
7704 rtx ret;
7705
7706 /* Handle ERROR_MARK before anybody tries to access its type. */
7707 if (TREE_CODE (exp) == ERROR_MARK
7708 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7709 {
7710 ret = CONST0_RTX (tmode);
7711 return ret ? ret : const0_rtx;
7712 }
7713
7714 /* If this is an expression of some kind and it has an associated line
7715 number, then emit the line number before expanding the expression.
7716
7717 We need to save and restore the file and line information so that
7718 errors discovered during expansion are emitted with the right
7719 information. It would be better if the diagnostic routines
7720 used the file/line information embedded in the tree nodes rather
7721 than globals. */
7722 if (cfun && EXPR_HAS_LOCATION (exp))
7723 {
7724 location_t saved_location = input_location;
7725 location_t saved_curr_loc = get_curr_insn_source_location ();
7726 tree saved_block = get_curr_insn_block ();
7727 input_location = EXPR_LOCATION (exp);
7728 set_curr_insn_source_location (input_location);
7729
7730 /* Record where the insns produced belong. */
7731 set_curr_insn_block (TREE_BLOCK (exp));
7732
7733 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7734
7735 input_location = saved_location;
7736 set_curr_insn_block (saved_block);
7737 set_curr_insn_source_location (saved_curr_loc);
7738 }
7739 else
7740 {
7741 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7742 }
7743
7744 return ret;
7745 }
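/* To illustrate the EXPAND_SUM behaviour documented above: an address
   computation such as p + i * 4, when expanded with EXPAND_SUM, may be
   returned in a symbolic form roughly like

     (plus (mult (reg i) (const_int 4)) (reg p))

   instead of being forced into a single pseudo, so that the caller can
   fold it into an addressing mode.  */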
7746
7747 rtx
7748 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7749 enum expand_modifier modifier)
7750 {
7751 rtx op0, op1, op2, temp;
7752 tree type;
7753 int unsignedp;
7754 enum machine_mode mode;
7755 enum tree_code code = ops->code;
7756 optab this_optab;
7757 rtx subtarget, original_target;
7758 int ignore;
7759 bool reduce_bit_field;
7760 location_t loc = ops->location;
7761 tree treeop0, treeop1, treeop2;
7762 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7763 ? reduce_to_bit_field_precision ((expr), \
7764 target, \
7765 type) \
7766 : (expr))
7767
7768 type = ops->type;
7769 mode = TYPE_MODE (type);
7770 unsignedp = TYPE_UNSIGNED (type);
7771
7772 treeop0 = ops->op0;
7773 treeop1 = ops->op1;
7774 treeop2 = ops->op2;
7775
7776 /* We should be called only on simple (binary or unary) expressions,
7777 exactly those that are valid in gimple expressions that aren't
7778 GIMPLE_SINGLE_RHS (or invalid). */
7779 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7780 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7781 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7782
7783 ignore = (target == const0_rtx
7784 || ((CONVERT_EXPR_CODE_P (code)
7785 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7786 && TREE_CODE (type) == VOID_TYPE));
7787
7788 /* We should be called only if we need the result. */
7789 gcc_assert (!ignore);
7790
7791 /* An operation in what may be a bit-field type needs the
7792 result to be reduced to the precision of the bit-field type,
7793 which is narrower than that of the type's mode. */
7794 reduce_bit_field = (INTEGRAL_TYPE_P (type)
7795 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7796
7797 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7798 target = 0;
7799
7800 /* Use subtarget as the target for operand 0 of a binary operation. */
7801 subtarget = get_subtarget (target);
7802 original_target = target;
7803
7804 switch (code)
7805 {
7806 case NON_LVALUE_EXPR:
7807 case PAREN_EXPR:
7808 CASE_CONVERT:
7809 if (treeop0 == error_mark_node)
7810 return const0_rtx;
7811
7812 if (TREE_CODE (type) == UNION_TYPE)
7813 {
7814 tree valtype = TREE_TYPE (treeop0);
7815
7816 /* If both input and output are BLKmode, this conversion isn't doing
7817 anything except possibly changing memory attributes. */
7818 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7819 {
7820 rtx result = expand_expr (treeop0, target, tmode,
7821 modifier);
7822
7823 result = copy_rtx (result);
7824 set_mem_attributes (result, type, 0);
7825 return result;
7826 }
7827
7828 if (target == 0)
7829 {
7830 if (TYPE_MODE (type) != BLKmode)
7831 target = gen_reg_rtx (TYPE_MODE (type));
7832 else
7833 target = assign_temp (type, 0, 1, 1);
7834 }
7835
7836 if (MEM_P (target))
7837 /* Store data into beginning of memory target. */
7838 store_expr (treeop0,
7839 adjust_address (target, TYPE_MODE (valtype), 0),
7840 modifier == EXPAND_STACK_PARM,
7841 false);
7842
7843 else
7844 {
7845 gcc_assert (REG_P (target));
7846
7847 /* Store this field into a union of the proper type. */
7848 store_field (target,
7849 MIN ((int_size_in_bytes (TREE_TYPE
7850 (treeop0))
7851 * BITS_PER_UNIT),
7852 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7853 0, 0, 0, TYPE_MODE (valtype), treeop0,
7854 type, 0, false);
7855 }
7856
7857 /* Return the entire union. */
7858 return target;
7859 }
7860
7861 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7862 {
7863 op0 = expand_expr (treeop0, target, VOIDmode,
7864 modifier);
7865
7866 /* If the signedness of the conversion differs and OP0 is
7867 a promoted SUBREG, clear that indication since we now
7868 have to do the proper extension. */
7869 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7870 && GET_CODE (op0) == SUBREG)
7871 SUBREG_PROMOTED_VAR_P (op0) = 0;
7872
7873 return REDUCE_BIT_FIELD (op0);
7874 }
7875
7876 op0 = expand_expr (treeop0, NULL_RTX, mode,
7877 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7878 if (GET_MODE (op0) == mode)
7879 ;
7880
7881 /* If OP0 is a constant, just convert it into the proper mode. */
7882 else if (CONSTANT_P (op0))
7883 {
7884 tree inner_type = TREE_TYPE (treeop0);
7885 enum machine_mode inner_mode = GET_MODE (op0);
7886
7887 if (inner_mode == VOIDmode)
7888 inner_mode = TYPE_MODE (inner_type);
7889
7890 if (modifier == EXPAND_INITIALIZER)
7891 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7892 subreg_lowpart_offset (mode,
7893 inner_mode));
7894 else
7895 op0 = convert_modes (mode, inner_mode, op0,
7896 TYPE_UNSIGNED (inner_type));
7897 }
7898
7899 else if (modifier == EXPAND_INITIALIZER)
7900 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7901
7902 else if (target == 0)
7903 op0 = convert_to_mode (mode, op0,
7904 TYPE_UNSIGNED (TREE_TYPE
7905 (treeop0)));
7906 else
7907 {
7908 convert_move (target, op0,
7909 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7910 op0 = target;
7911 }
7912
7913 return REDUCE_BIT_FIELD (op0);
7914
7915 case ADDR_SPACE_CONVERT_EXPR:
7916 {
7917 tree treeop0_type = TREE_TYPE (treeop0);
7918 addr_space_t as_to;
7919 addr_space_t as_from;
7920
7921 gcc_assert (POINTER_TYPE_P (type));
7922 gcc_assert (POINTER_TYPE_P (treeop0_type));
7923
7924 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7925 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7926
7927 /* Conversions between pointers to the same address space should
7928 have been implemented via CONVERT_EXPR / NOP_EXPR. */
7929 gcc_assert (as_to != as_from);
7930
7931 /* Ask target code to handle conversion between pointers
7932 to overlapping address spaces. */
7933 if (targetm.addr_space.subset_p (as_to, as_from)
7934 || targetm.addr_space.subset_p (as_from, as_to))
7935 {
7936 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7937 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7938 gcc_assert (op0);
7939 return op0;
7940 }
7941
7942 /* For disjoint address spaces, converting anything but
7943 a null pointer invokes undefined behaviour. We simply
7944 always return a null pointer here. */
7945 return CONST0_RTX (mode);
7946 }
7947
7948 case POINTER_PLUS_EXPR:
7949 /* Even though the sizetype mode and the pointer's mode can be different,
7950 expand is able to handle this correctly and get the correct result out
7951 of the PLUS_EXPR code. */
7952 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7953 if sizetype precision is smaller than pointer precision. */
7954 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7955 treeop1 = fold_convert_loc (loc, type,
7956 fold_convert_loc (loc, ssizetype,
7957 treeop1));
7958 case PLUS_EXPR:
7959 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7960 something else, make sure we add the register to the constant and
7961 then to the other thing. This case can occur during strength
7962 reduction and doing it this way will produce better code if the
7963 frame pointer or argument pointer is eliminated.
7964
7965 fold-const.c will ensure that the constant is always in the inner
7966 PLUS_EXPR, so the only case we need to do anything about is if
7967 sp, ap, or fp is our second argument, in which case we must swap
7968 the innermost first argument and our second argument. */
7969
7970 if (TREE_CODE (treeop0) == PLUS_EXPR
7971 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7972 && TREE_CODE (treeop1) == VAR_DECL
7973 && (DECL_RTL (treeop1) == frame_pointer_rtx
7974 || DECL_RTL (treeop1) == stack_pointer_rtx
7975 || DECL_RTL (treeop1) == arg_pointer_rtx))
7976 {
7977 tree t = treeop1;
7978
7979 treeop1 = TREE_OPERAND (treeop0, 0);
7980 TREE_OPERAND (treeop0, 0) = t;
7981 }
7982
7983 /* If the result is to be ptr_mode and we are adding an integer to
7984 something, we might be forming a constant. So try to use
7985 plus_constant. If it produces a sum and we can't accept it,
7986 use force_operand. This allows P = &ARR[const] to generate
7987 efficient code on machines where a SYMBOL_REF is not a valid
7988 address.
7989
7990 If this is an EXPAND_SUM call, always return the sum. */
7991 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7992 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7993 {
7994 if (modifier == EXPAND_STACK_PARM)
7995 target = 0;
7996 if (TREE_CODE (treeop0) == INTEGER_CST
7997 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
7998 && TREE_CONSTANT (treeop1))
7999 {
8000 rtx constant_part;
8001
8002 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8003 EXPAND_SUM);
8004 /* Use immed_double_const to ensure that the constant is
8005 truncated according to the mode of OP1, then sign extended
8006 to a HOST_WIDE_INT. Using the constant directly can result
8007 in non-canonical RTL in a 64x32 cross compile. */
8008 constant_part
8009 = immed_double_const (TREE_INT_CST_LOW (treeop0),
8010 (HOST_WIDE_INT) 0,
8011 TYPE_MODE (TREE_TYPE (treeop1)));
8012 op1 = plus_constant (op1, INTVAL (constant_part));
8013 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8014 op1 = force_operand (op1, target);
8015 return REDUCE_BIT_FIELD (op1);
8016 }
8017
8018 else if (TREE_CODE (treeop1) == INTEGER_CST
8019 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8020 && TREE_CONSTANT (treeop0))
8021 {
8022 rtx constant_part;
8023
8024 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8025 (modifier == EXPAND_INITIALIZER
8026 ? EXPAND_INITIALIZER : EXPAND_SUM));
8027 if (! CONSTANT_P (op0))
8028 {
8029 op1 = expand_expr (treeop1, NULL_RTX,
8030 VOIDmode, modifier);
8031 /* Return a PLUS if modifier says it's OK. */
8032 if (modifier == EXPAND_SUM
8033 || modifier == EXPAND_INITIALIZER)
8034 return simplify_gen_binary (PLUS, mode, op0, op1);
8035 goto binop2;
8036 }
8037 /* Use immed_double_const to ensure that the constant is
8038 truncated according to the mode of OP0, then sign extended
8039 to a HOST_WIDE_INT. Using the constant directly can result
8040 in non-canonical RTL in a 64x32 cross compile. */
8041 constant_part
8042 = immed_double_const (TREE_INT_CST_LOW (treeop1),
8043 (HOST_WIDE_INT) 0,
8044 TYPE_MODE (TREE_TYPE (treeop0)));
8045 op0 = plus_constant (op0, INTVAL (constant_part));
8046 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8047 op0 = force_operand (op0, target);
8048 return REDUCE_BIT_FIELD (op0);
8049 }
8050 }
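/* For example, expanding P = &ARR[3] for a 4-byte element type under
   EXPAND_SUM can reduce to plus_constant of the SYMBOL_REF for ARR and
   12, giving an rtx along the lines of
   (const (plus (symbol_ref ARR) (const_int 12))); force_operand is
   only applied when the caller cannot accept such a sum.  */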
8051
8052 /* Use TER to expand pointer addition of a negated value
8053 as pointer subtraction. */
8054 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8055 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8056 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8057 && TREE_CODE (treeop1) == SSA_NAME
8058 && TYPE_MODE (TREE_TYPE (treeop0))
8059 == TYPE_MODE (TREE_TYPE (treeop1)))
8060 {
8061 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8062 if (def)
8063 {
8064 treeop1 = gimple_assign_rhs1 (def);
8065 code = MINUS_EXPR;
8066 goto do_minus;
8067 }
8068 }
8069
8070 /* No sense saving up arithmetic to be done
8071 if it's all in the wrong mode to form part of an address.
8072 And force_operand won't know whether to sign-extend or
8073 zero-extend. */
8074 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8075 || mode != ptr_mode)
8076 {
8077 expand_operands (treeop0, treeop1,
8078 subtarget, &op0, &op1, EXPAND_NORMAL);
8079 if (op0 == const0_rtx)
8080 return op1;
8081 if (op1 == const0_rtx)
8082 return op0;
8083 goto binop2;
8084 }
8085
8086 expand_operands (treeop0, treeop1,
8087 subtarget, &op0, &op1, modifier);
8088 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8089
8090 case MINUS_EXPR:
8091 do_minus:
8092 /* For initializers, we are allowed to return a MINUS of two
8093 symbolic constants. Here we handle all cases when both operands
8094 are constant. */
8095 /* Handle difference of two symbolic constants,
8096 for the sake of an initializer. */
8097 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8098 && really_constant_p (treeop0)
8099 && really_constant_p (treeop1))
8100 {
8101 expand_operands (treeop0, treeop1,
8102 NULL_RTX, &op0, &op1, modifier);
8103
8104 /* If the last operand is a CONST_INT, use plus_constant of
8105 the negated constant. Else make the MINUS. */
8106 if (CONST_INT_P (op1))
8107 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8108 else
8109 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8110 }
8111
8112 /* No sense saving up arithmetic to be done
8113 if it's all in the wrong mode to form part of an address.
8114 And force_operand won't know whether to sign-extend or
8115 zero-extend. */
8116 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8117 || mode != ptr_mode)
8118 goto binop;
8119
8120 expand_operands (treeop0, treeop1,
8121 subtarget, &op0, &op1, modifier);
8122
8123 /* Convert A - const to A + (-const). */
8124 if (CONST_INT_P (op1))
8125 {
8126 op1 = negate_rtx (mode, op1);
8127 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8128 }
8129
8130 goto binop2;
8131
8132 case WIDEN_MULT_PLUS_EXPR:
8133 case WIDEN_MULT_MINUS_EXPR:
8134 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8135 op2 = expand_normal (treeop2);
8136 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8137 target, unsignedp);
8138 return target;
8139
8140 case WIDEN_MULT_EXPR:
8141 /* If first operand is constant, swap them.
8142 Thus the following special case checks need only
8143 check the second operand. */
8144 if (TREE_CODE (treeop0) == INTEGER_CST)
8145 {
8146 tree t1 = treeop0;
8147 treeop0 = treeop1;
8148 treeop1 = t1;
8149 }
8150
8151 /* First, check if we have a multiplication of one signed and one
8152 unsigned operand. */
8153 if (TREE_CODE (treeop1) != INTEGER_CST
8154 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8155 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8156 {
8157 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8158 this_optab = usmul_widen_optab;
8159 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8160 != CODE_FOR_nothing)
8161 {
8162 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8163 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8164 EXPAND_NORMAL);
8165 else
8166 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8167 EXPAND_NORMAL);
8168 goto binop3;
8169 }
8170 }
8171 /* Check for a multiplication with matching signedness. */
8172 else if ((TREE_CODE (treeop1) == INTEGER_CST
8173 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8174 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8175 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8176 {
8177 tree op0type = TREE_TYPE (treeop0);
8178 enum machine_mode innermode = TYPE_MODE (op0type);
8179 bool zextend_p = TYPE_UNSIGNED (op0type);
8180 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8181 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8182
8183 if (TREE_CODE (treeop0) != INTEGER_CST)
8184 {
8185 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8186 != CODE_FOR_nothing)
8187 {
8188 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8189 EXPAND_NORMAL);
8190 temp = expand_widening_mult (mode, op0, op1, target,
8191 unsignedp, this_optab);
8192 return REDUCE_BIT_FIELD (temp);
8193 }
8194 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8195 != CODE_FOR_nothing
8196 && innermode == word_mode)
8197 {
8198 rtx htem, hipart;
8199 op0 = expand_normal (treeop0);
8200 if (TREE_CODE (treeop1) == INTEGER_CST)
8201 op1 = convert_modes (innermode, mode,
8202 expand_normal (treeop1), unsignedp);
8203 else
8204 op1 = expand_normal (treeop1);
8205 temp = expand_binop (mode, other_optab, op0, op1, target,
8206 unsignedp, OPTAB_LIB_WIDEN);
8207 hipart = gen_highpart (innermode, temp);
8208 htem = expand_mult_highpart_adjust (innermode, hipart,
8209 op0, op1, hipart,
8210 zextend_p);
8211 if (htem != hipart)
8212 emit_move_insn (hipart, htem);
8213 return REDUCE_BIT_FIELD (temp);
8214 }
8215 }
8216 }
8217 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8218 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8219 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8220 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8221
8222 case FMA_EXPR:
8223 {
8224 optab opt = fma_optab;
8225 gimple def0, def2;
8226
8227 /* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
8228 call. */
8229 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8230 {
8231 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8232 tree call_expr;
8233
8234 gcc_assert (fn != NULL_TREE);
8235 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8236 return expand_builtin (call_expr, target, subtarget, mode, false);
8237 }
8238
8239 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8240 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8241
8242 op0 = op2 = NULL;
8243
8244 if (def0 && def2
8245 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8246 {
8247 opt = fnms_optab;
8248 op0 = expand_normal (gimple_assign_rhs1 (def0));
8249 op2 = expand_normal (gimple_assign_rhs1 (def2));
8250 }
8251 else if (def0
8252 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8253 {
8254 opt = fnma_optab;
8255 op0 = expand_normal (gimple_assign_rhs1 (def0));
8256 }
8257 else if (def2
8258 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8259 {
8260 opt = fms_optab;
8261 op2 = expand_normal (gimple_assign_rhs1 (def2));
8262 }
8263
8264 if (op0 == NULL)
8265 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8266 if (op2 == NULL)
8267 op2 = expand_normal (treeop2);
8268 op1 = expand_normal (treeop1);
8269
8270 return expand_ternary_op (TYPE_MODE (type), opt,
8271 op0, op1, op2, target, 0);
8272 }
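/* In summary, with FMA_EXPR (a, b, c) computing a * b + c, a negated
   first operand selects fnma (-a * b + c), a negated addend selects
   fms (a * b - c), and both together select fnms (-a * b - c),
   provided the corresponding optab has a handler for MODE.  */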
8273
8274 case MULT_EXPR:
8275 /* If this is a fixed-point operation, then we cannot use the code
8276 below because "expand_mult" doesn't support sat/no-sat fixed-point
8277 multiplications. */
8278 if (ALL_FIXED_POINT_MODE_P (mode))
8279 goto binop;
8280
8281 /* If first operand is constant, swap them.
8282 Thus the following special case checks need only
8283 check the second operand. */
8284 if (TREE_CODE (treeop0) == INTEGER_CST)
8285 {
8286 tree t1 = treeop0;
8287 treeop0 = treeop1;
8288 treeop1 = t1;
8289 }
8290
8291 /* Attempt to return something suitable for generating an
8292 indexed address, for machines that support that. */
8293
8294 if (modifier == EXPAND_SUM && mode == ptr_mode
8295 && host_integerp (treeop1, 0))
8296 {
8297 tree exp1 = treeop1;
8298
8299 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8300 EXPAND_SUM);
8301
8302 if (!REG_P (op0))
8303 op0 = force_operand (op0, NULL_RTX);
8304 if (!REG_P (op0))
8305 op0 = copy_to_mode_reg (mode, op0);
8306
8307 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8308 gen_int_mode (tree_low_cst (exp1, 0),
8309 TYPE_MODE (TREE_TYPE (exp1)))));
8310 }
8311
8312 if (modifier == EXPAND_STACK_PARM)
8313 target = 0;
8314
8315 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8316 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8317
8318 case TRUNC_DIV_EXPR:
8319 case FLOOR_DIV_EXPR:
8320 case CEIL_DIV_EXPR:
8321 case ROUND_DIV_EXPR:
8322 case EXACT_DIV_EXPR:
8323 /* If this is a fixed-point operation, then we cannot use the code
8324 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8325 divisions. */
8326 if (ALL_FIXED_POINT_MODE_P (mode))
8327 goto binop;
8328
8329 if (modifier == EXPAND_STACK_PARM)
8330 target = 0;
8331 /* Possible optimization: compute the dividend with EXPAND_SUM
8332 then if the divisor is constant can optimize the case
8333 where some terms of the dividend have coeffs divisible by it. */
8334 expand_operands (treeop0, treeop1,
8335 subtarget, &op0, &op1, EXPAND_NORMAL);
8336 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8337
8338 case RDIV_EXPR:
8339 goto binop;
8340
8341 case TRUNC_MOD_EXPR:
8342 case FLOOR_MOD_EXPR:
8343 case CEIL_MOD_EXPR:
8344 case ROUND_MOD_EXPR:
8345 if (modifier == EXPAND_STACK_PARM)
8346 target = 0;
8347 expand_operands (treeop0, treeop1,
8348 subtarget, &op0, &op1, EXPAND_NORMAL);
8349 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8350
8351 case FIXED_CONVERT_EXPR:
8352 op0 = expand_normal (treeop0);
8353 if (target == 0 || modifier == EXPAND_STACK_PARM)
8354 target = gen_reg_rtx (mode);
8355
8356 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8357 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8358 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8359 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8360 else
8361 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8362 return target;
8363
8364 case FIX_TRUNC_EXPR:
8365 op0 = expand_normal (treeop0);
8366 if (target == 0 || modifier == EXPAND_STACK_PARM)
8367 target = gen_reg_rtx (mode);
8368 expand_fix (target, op0, unsignedp);
8369 return target;
8370
8371 case FLOAT_EXPR:
8372 op0 = expand_normal (treeop0);
8373 if (target == 0 || modifier == EXPAND_STACK_PARM)
8374 target = gen_reg_rtx (mode);
8375 /* expand_float can't figure out what to do if FROM has VOIDmode.
8376 So give it the correct mode. With -O, cse will optimize this. */
8377 if (GET_MODE (op0) == VOIDmode)
8378 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8379 op0);
8380 expand_float (target, op0,
8381 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8382 return target;
8383
8384 case NEGATE_EXPR:
8385 op0 = expand_expr (treeop0, subtarget,
8386 VOIDmode, EXPAND_NORMAL);
8387 if (modifier == EXPAND_STACK_PARM)
8388 target = 0;
8389 temp = expand_unop (mode,
8390 optab_for_tree_code (NEGATE_EXPR, type,
8391 optab_default),
8392 op0, target, 0);
8393 gcc_assert (temp);
8394 return REDUCE_BIT_FIELD (temp);
8395
8396 case ABS_EXPR:
8397 op0 = expand_expr (treeop0, subtarget,
8398 VOIDmode, EXPAND_NORMAL);
8399 if (modifier == EXPAND_STACK_PARM)
8400 target = 0;
8401
8402 /* ABS_EXPR is not valid for complex arguments. */
8403 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8404 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8405
8406 /* Unsigned abs is simply the operand. Testing here means we don't
8407 risk generating incorrect code below. */
8408 if (TYPE_UNSIGNED (type))
8409 return op0;
8410
8411 return expand_abs (mode, op0, target, unsignedp,
8412 safe_from_p (target, treeop0, 1));
8413
8414 case MAX_EXPR:
8415 case MIN_EXPR:
8416 target = original_target;
8417 if (target == 0
8418 || modifier == EXPAND_STACK_PARM
8419 || (MEM_P (target) && MEM_VOLATILE_P (target))
8420 || GET_MODE (target) != mode
8421 || (REG_P (target)
8422 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8423 target = gen_reg_rtx (mode);
8424 expand_operands (treeop0, treeop1,
8425 target, &op0, &op1, EXPAND_NORMAL);
8426
8427 /* First try to do it with a special MIN or MAX instruction.
8428 If that does not win, use a conditional jump to select the proper
8429 value. */
8430 this_optab = optab_for_tree_code (code, type, optab_default);
8431 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8432 OPTAB_WIDEN);
8433 if (temp != 0)
8434 return temp;
8435
8436 /* At this point, a MEM target is no longer useful; we will get better
8437 code without it. */
8438
8439 if (! REG_P (target))
8440 target = gen_reg_rtx (mode);
8441
8442 /* If op1 was placed in target, swap op0 and op1. */
8443 if (target != op0 && target == op1)
8444 {
8445 temp = op0;
8446 op0 = op1;
8447 op1 = temp;
8448 }
8449
8450 /* We generate better code and avoid problems with op1 mentioning
8451 target by forcing op1 into a pseudo if it isn't a constant. */
8452 if (! CONSTANT_P (op1))
8453 op1 = force_reg (mode, op1);
8454
8455 {
8456 enum rtx_code comparison_code;
8457 rtx cmpop1 = op1;
8458
8459 if (code == MAX_EXPR)
8460 comparison_code = unsignedp ? GEU : GE;
8461 else
8462 comparison_code = unsignedp ? LEU : LE;
8463
8464 /* Canonicalize to comparisons against 0. */
8465 if (op1 == const1_rtx)
8466 {
8467 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8468 or (a != 0 ? a : 1) for unsigned.
8469 For MIN we are safe converting (a <= 1 ? a : 1)
8470 into (a <= 0 ? a : 1) */
8471 cmpop1 = const0_rtx;
8472 if (code == MAX_EXPR)
8473 comparison_code = unsignedp ? NE : GT;
8474 }
8475 if (op1 == constm1_rtx && !unsignedp)
8476 {
8477 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8478 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8479 cmpop1 = const0_rtx;
8480 if (code == MIN_EXPR)
8481 comparison_code = LT;
8482 }
8483 #ifdef HAVE_conditional_move
8484 /* Use a conditional move if possible. */
8485 if (can_conditionally_move_p (mode))
8486 {
8487 rtx insn;
8488
8489 /* ??? Same problem as in expmed.c: emit_conditional_move
8490 forces a stack adjustment via compare_from_rtx, and we
8491 lose the stack adjustment if the sequence we are about
8492 to create is discarded. */
8493 do_pending_stack_adjust ();
8494
8495 start_sequence ();
8496
8497 /* Try to emit the conditional move. */
8498 insn = emit_conditional_move (target, comparison_code,
8499 op0, cmpop1, mode,
8500 op0, op1, mode,
8501 unsignedp);
8502
8503 /* If we could do the conditional move, emit the sequence,
8504 and return. */
8505 if (insn)
8506 {
8507 rtx seq = get_insns ();
8508 end_sequence ();
8509 emit_insn (seq);
8510 return target;
8511 }
8512
8513 /* Otherwise discard the sequence and fall back to code with
8514 branches. */
8515 end_sequence ();
8516 }
8517 #endif
8518 if (target != op0)
8519 emit_move_insn (target, op0);
8520
8521 temp = gen_label_rtx ();
8522 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8523 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8524 -1);
8525 }
8526 emit_move_insn (target, op1);
8527 emit_label (temp);
8528 return target;
8529
8530 case BIT_NOT_EXPR:
8531 op0 = expand_expr (treeop0, subtarget,
8532 VOIDmode, EXPAND_NORMAL);
8533 if (modifier == EXPAND_STACK_PARM)
8534 target = 0;
8535 /* In case we have to reduce the result to bitfield precision,
8536 expand this as an XOR with a proper constant instead. */
8537 if (reduce_bit_field)
8538 temp = expand_binop (mode, xor_optab, op0,
8539 immed_double_int_const
8540 (double_int_mask (TYPE_PRECISION (type)), mode),
8541 target, 1, OPTAB_LIB_WIDEN);
8542 else
8543 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8544 gcc_assert (temp);
8545 return temp;
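/* For example, for a bit-field type of precision 3 the mask is 7
   (binary 111), so ~X is expanded as X ^ 7, which keeps the result
   confined to the three low bits instead of relying on a later
   truncation.  */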
8546
8547 /* ??? Can optimize bitwise operations with one arg constant.
8548 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8549 and (a bitwise1 b) bitwise2 b (etc)
8550 but that is probably not worth while. */
8551
8552 case BIT_AND_EXPR:
8553 case BIT_IOR_EXPR:
8554 case BIT_XOR_EXPR:
8555 goto binop;
8556
8557 case LROTATE_EXPR:
8558 case RROTATE_EXPR:
8559 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8560 || (GET_MODE_PRECISION (TYPE_MODE (type))
8561 == TYPE_PRECISION (type)));
8562 /* fall through */
8563
8564 case LSHIFT_EXPR:
8565 case RSHIFT_EXPR:
8566 /* If this is a fixed-point operation, then we cannot use the code
8567 below because "expand_shift" doesn't support sat/no-sat fixed-point
8568 shifts. */
8569 if (ALL_FIXED_POINT_MODE_P (mode))
8570 goto binop;
8571
8572 if (! safe_from_p (subtarget, treeop1, 1))
8573 subtarget = 0;
8574 if (modifier == EXPAND_STACK_PARM)
8575 target = 0;
8576 op0 = expand_expr (treeop0, subtarget,
8577 VOIDmode, EXPAND_NORMAL);
8578 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8579 unsignedp);
8580 if (code == LSHIFT_EXPR)
8581 temp = REDUCE_BIT_FIELD (temp);
8582 return temp;
8583
8584 /* Could determine the answer when only additive constants differ. Also,
8585 the addition of one can be handled by changing the condition. */
8586 case LT_EXPR:
8587 case LE_EXPR:
8588 case GT_EXPR:
8589 case GE_EXPR:
8590 case EQ_EXPR:
8591 case NE_EXPR:
8592 case UNORDERED_EXPR:
8593 case ORDERED_EXPR:
8594 case UNLT_EXPR:
8595 case UNLE_EXPR:
8596 case UNGT_EXPR:
8597 case UNGE_EXPR:
8598 case UNEQ_EXPR:
8599 case LTGT_EXPR:
8600 temp = do_store_flag (ops,
8601 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8602 tmode != VOIDmode ? tmode : mode);
8603 if (temp)
8604 return temp;
8605
8606 /* Use a compare and a jump for BLKmode comparisons, or for function
8607 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8608
8609 if ((target == 0
8610 || modifier == EXPAND_STACK_PARM
8611 || ! safe_from_p (target, treeop0, 1)
8612 || ! safe_from_p (target, treeop1, 1)
8613 /* Make sure we don't have a hard reg (such as function's return
8614 value) live across basic blocks, if not optimizing. */
8615 || (!optimize && REG_P (target)
8616 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8617 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8618
8619 emit_move_insn (target, const0_rtx);
8620
8621 op1 = gen_label_rtx ();
8622 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8623
8624 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8625 emit_move_insn (target, constm1_rtx);
8626 else
8627 emit_move_insn (target, const1_rtx);
8628
8629 emit_label (op1);
8630 return target;
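/* Roughly, when do_store_flag cannot produce the flag value directly,
   the fallback above emits the classic branchy sequence

     target = 0;
     if (! (op0 <cond> op1)) goto L;   (via jumpifnot_1)
     target = 1;                       (or -1 for signed 1-bit types)
   L:

   and returns TARGET.  */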
8631
8632 case COMPLEX_EXPR:
8633 /* Get the rtx code of the operands. */
8634 op0 = expand_normal (treeop0);
8635 op1 = expand_normal (treeop1);
8636
8637 if (!target)
8638 target = gen_reg_rtx (TYPE_MODE (type));
8639
8640 /* Move the real (op0) and imaginary (op1) parts to their location. */
8641 write_complex_part (target, op0, false);
8642 write_complex_part (target, op1, true);
8643
8644 return target;
8645
8646 case WIDEN_SUM_EXPR:
8647 {
8648 tree oprnd0 = treeop0;
8649 tree oprnd1 = treeop1;
8650
8651 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8652 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8653 target, unsignedp);
8654 return target;
8655 }
8656
8657 case REDUC_MAX_EXPR:
8658 case REDUC_MIN_EXPR:
8659 case REDUC_PLUS_EXPR:
8660 {
8661 op0 = expand_normal (treeop0);
8662 this_optab = optab_for_tree_code (code, type, optab_default);
8663 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8664 gcc_assert (temp);
8665 return temp;
8666 }
8667
8668 case VEC_EXTRACT_EVEN_EXPR:
8669 case VEC_EXTRACT_ODD_EXPR:
8670 case VEC_INTERLEAVE_HIGH_EXPR:
8671 case VEC_INTERLEAVE_LOW_EXPR:
8672 goto binop;
8673
8674 case VEC_LSHIFT_EXPR:
8675 case VEC_RSHIFT_EXPR:
8676 {
8677 target = expand_vec_shift_expr (ops, target);
8678 return target;
8679 }
8680
8681 case VEC_UNPACK_HI_EXPR:
8682 case VEC_UNPACK_LO_EXPR:
8683 {
8684 op0 = expand_normal (treeop0);
8685 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8686 target, unsignedp);
8687 gcc_assert (temp);
8688 return temp;
8689 }
8690
8691 case VEC_UNPACK_FLOAT_HI_EXPR:
8692 case VEC_UNPACK_FLOAT_LO_EXPR:
8693 {
8694 op0 = expand_normal (treeop0);
8695 /* The signedness is determined from the input operand. */
8696 temp = expand_widen_pattern_expr
8697 (ops, op0, NULL_RTX, NULL_RTX,
8698 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8699
8700 gcc_assert (temp);
8701 return temp;
8702 }
8703
8704 case VEC_WIDEN_MULT_HI_EXPR:
8705 case VEC_WIDEN_MULT_LO_EXPR:
8706 {
8707 tree oprnd0 = treeop0;
8708 tree oprnd1 = treeop1;
8709
8710 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8711 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8712 target, unsignedp);
8713 gcc_assert (target);
8714 return target;
8715 }
8716
8717 case VEC_WIDEN_LSHIFT_HI_EXPR:
8718 case VEC_WIDEN_LSHIFT_LO_EXPR:
8719 {
8720 tree oprnd0 = treeop0;
8721 tree oprnd1 = treeop1;
8722
8723 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8724 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8725 target, unsignedp);
8726 gcc_assert (target);
8727 return target;
8728 }
8729
8730 case VEC_PACK_TRUNC_EXPR:
8731 case VEC_PACK_SAT_EXPR:
8732 case VEC_PACK_FIX_TRUNC_EXPR:
8733 mode = TYPE_MODE (TREE_TYPE (treeop0));
8734 goto binop;
8735
8736 case VEC_PERM_EXPR:
8737 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
8738 op2 = expand_normal (treeop2);
8739 temp = expand_vec_perm (mode, op0, op1, op2, target);
8740 gcc_assert (temp);
8741 return temp;
8742
8743 case DOT_PROD_EXPR:
8744 {
8745 tree oprnd0 = treeop0;
8746 tree oprnd1 = treeop1;
8747 tree oprnd2 = treeop2;
8748 rtx op2;
8749
8750 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8751 op2 = expand_normal (oprnd2);
8752 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8753 target, unsignedp);
8754 return target;
8755 }
8756
8757 case REALIGN_LOAD_EXPR:
8758 {
8759 tree oprnd0 = treeop0;
8760 tree oprnd1 = treeop1;
8761 tree oprnd2 = treeop2;
8762 rtx op2;
8763
8764 this_optab = optab_for_tree_code (code, type, optab_default);
8765 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8766 op2 = expand_normal (oprnd2);
8767 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8768 target, unsignedp);
8769 gcc_assert (temp);
8770 return temp;
8771 }
8772
8773 case COND_EXPR:
8774 /* A COND_EXPR with its type being VOID_TYPE represents a
8775 conditional jump and is handled in
8776 expand_gimple_cond_expr. */
8777 gcc_assert (!VOID_TYPE_P (type));
8778
8779 /* Note that COND_EXPRs whose type is a structure or union
8780 are required to be constructed to contain assignments of
8781 a temporary variable, so that we can evaluate them here
8782 for side effect only. If type is void, we must do likewise. */
8783
8784 gcc_assert (!TREE_ADDRESSABLE (type)
8785 && !ignore
8786 && TREE_TYPE (treeop1) != void_type_node
8787 && TREE_TYPE (treeop2) != void_type_node);
8788
8789 /* If we are not to produce a result, we have no target. Otherwise,
8790 if a target was specified use it; it will not be used as an
8791 intermediate target unless it is safe. If no target, use a
8792 temporary. */
8793
8794 if (modifier != EXPAND_STACK_PARM
8795 && original_target
8796 && safe_from_p (original_target, treeop0, 1)
8797 && GET_MODE (original_target) == mode
8798 #ifdef HAVE_conditional_move
8799 && (! can_conditionally_move_p (mode)
8800 || REG_P (original_target))
8801 #endif
8802 && !MEM_P (original_target))
8803 temp = original_target;
8804 else
8805 temp = assign_temp (type, 0, 0, 1);
8806
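/* The code emitted below amounts to:
     if (!TREEOP0) goto op0;
     temp = TREEOP1;  goto op1;
   op0: temp = TREEOP2;
   op1: ;  */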
8807 do_pending_stack_adjust ();
8808 NO_DEFER_POP;
8809 op0 = gen_label_rtx ();
8810 op1 = gen_label_rtx ();
8811 jumpifnot (treeop0, op0, -1);
8812 store_expr (treeop1, temp,
8813 modifier == EXPAND_STACK_PARM,
8814 false);
8815
8816 emit_jump_insn (gen_jump (op1));
8817 emit_barrier ();
8818 emit_label (op0);
8819 store_expr (treeop2, temp,
8820 modifier == EXPAND_STACK_PARM,
8821 false);
8822
8823 emit_label (op1);
8824 OK_DEFER_POP;
8825 return temp;
8826
8827 case VEC_COND_EXPR:
8828 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
8829 return target;
8830
8831 default:
8832 gcc_unreachable ();
8833 }
8834
8835 /* Here to do an ordinary binary operator. */
8836 binop:
8837 expand_operands (treeop0, treeop1,
8838 subtarget, &op0, &op1, EXPAND_NORMAL);
8839 binop2:
8840 this_optab = optab_for_tree_code (code, type, optab_default);
8841 binop3:
8842 if (modifier == EXPAND_STACK_PARM)
8843 target = 0;
8844 temp = expand_binop (mode, this_optab, op0, op1, target,
8845 unsignedp, OPTAB_LIB_WIDEN);
8846 gcc_assert (temp);
8847 /* Bitwise operations do not need bitfield reduction as we expect their
8848 operands to be properly truncated. */
8849 if (code == BIT_XOR_EXPR
8850 || code == BIT_AND_EXPR
8851 || code == BIT_IOR_EXPR)
8852 return temp;
8853 return REDUCE_BIT_FIELD (temp);
8854 }
8855 #undef REDUCE_BIT_FIELD
8856
8857 rtx
8858 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8859 enum expand_modifier modifier, rtx *alt_rtl)
8860 {
8861 rtx op0, op1, temp, decl_rtl;
8862 tree type;
8863 int unsignedp;
8864 enum machine_mode mode;
8865 enum tree_code code = TREE_CODE (exp);
8866 rtx subtarget, original_target;
8867 int ignore;
8868 tree context;
8869 bool reduce_bit_field;
8870 location_t loc = EXPR_LOCATION (exp);
8871 struct separate_ops ops;
8872 tree treeop0, treeop1, treeop2;
8873 tree ssa_name = NULL_TREE;
8874 gimple g;
8875
8876 type = TREE_TYPE (exp);
8877 mode = TYPE_MODE (type);
8878 unsignedp = TYPE_UNSIGNED (type);
8879
8880 treeop0 = treeop1 = treeop2 = NULL_TREE;
8881 if (!VL_EXP_CLASS_P (exp))
8882 switch (TREE_CODE_LENGTH (code))
8883 {
8884 default:
8885 case 3: treeop2 = TREE_OPERAND (exp, 2);
8886 case 2: treeop1 = TREE_OPERAND (exp, 1);
8887 case 1: treeop0 = TREE_OPERAND (exp, 0);
8888 case 0: break;
8889 }
8890 ops.code = code;
8891 ops.type = type;
8892 ops.op0 = treeop0;
8893 ops.op1 = treeop1;
8894 ops.op2 = treeop2;
8895 ops.location = loc;
8896
8897 ignore = (target == const0_rtx
8898 || ((CONVERT_EXPR_CODE_P (code)
8899 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8900 && TREE_CODE (type) == VOID_TYPE));
8901
8902 /* An operation in what may be a bit-field type needs the
8903 result to be reduced to the precision of the bit-field type,
8904 which is narrower than that of the type's mode. */
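/* For example, an addition in a 3-bit bit-field type is carried out in a
   full integer mode such as SImode; the bits above bit 2 of the result must
   then be masked off (or sign-extended away) so that the value fits the
   3-bit type again.  */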
8905 reduce_bit_field = (!ignore
8906 && INTEGRAL_TYPE_P (type)
8907 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8908
8909 /* If we are going to ignore this result, we need only do something
8910 if there is a side-effect somewhere in the expression. If there
8911 is, short-circuit the most common cases here. Note that we must
8912 not call expand_expr with anything but const0_rtx in case this
8913 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
8914
8915 if (ignore)
8916 {
8917 if (! TREE_SIDE_EFFECTS (exp))
8918 return const0_rtx;
8919
8920 /* Ensure we reference a volatile object even if value is ignored, but
8921 don't do this if all we are doing is taking its address. */
8922 if (TREE_THIS_VOLATILE (exp)
8923 && TREE_CODE (exp) != FUNCTION_DECL
8924 && mode != VOIDmode && mode != BLKmode
8925 && modifier != EXPAND_CONST_ADDRESS)
8926 {
8927 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8928 if (MEM_P (temp))
8929 copy_to_reg (temp);
8930 return const0_rtx;
8931 }
8932
8933 if (TREE_CODE_CLASS (code) == tcc_unary
8934 || code == COMPONENT_REF || code == INDIRECT_REF)
8935 return expand_expr (treeop0, const0_rtx, VOIDmode,
8936 modifier);
8937
8938 else if (TREE_CODE_CLASS (code) == tcc_binary
8939 || TREE_CODE_CLASS (code) == tcc_comparison
8940 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8941 {
8942 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8943 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8944 return const0_rtx;
8945 }
8946 else if (code == BIT_FIELD_REF)
8947 {
8948 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8949 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8950 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
8951 return const0_rtx;
8952 }
8953
8954 target = 0;
8955 }
8956
8957 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8958 target = 0;
8959
8960 /* Use subtarget as the target for operand 0 of a binary operation. */
8961 subtarget = get_subtarget (target);
8962 original_target = target;
8963
8964 switch (code)
8965 {
8966 case LABEL_DECL:
8967 {
8968 tree function = decl_function_context (exp);
8969
8970 temp = label_rtx (exp);
8971 temp = gen_rtx_LABEL_REF (Pmode, temp);
8972
8973 if (function != current_function_decl
8974 && function != 0)
8975 LABEL_REF_NONLOCAL_P (temp) = 1;
8976
8977 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8978 return temp;
8979 }
8980
8981 case SSA_NAME:
8982 /* ??? ivopts calls the expander without any preparation from
8983 out-of-ssa. So fake instructions as if this were an access to the
8984 base variable. This unnecessarily allocates a pseudo; see how we can
8985 reuse it if partition base vars have it set already. */
8986 if (!currently_expanding_to_rtl)
8987 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
8988 NULL);
8989
8990 g = get_gimple_for_ssa_name (exp);
8991 /* For EXPAND_INITIALIZER try harder to get something simpler. */
8992 if (g == NULL
8993 && modifier == EXPAND_INITIALIZER
8994 && !SSA_NAME_IS_DEFAULT_DEF (exp)
8995 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
8996 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
8997 g = SSA_NAME_DEF_STMT (exp);
8998 if (g)
8999 return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,
9000 modifier, NULL);
9001
9002 ssa_name = exp;
9003 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9004 exp = SSA_NAME_VAR (ssa_name);
9005 goto expand_decl_rtl;
9006
9007 case PARM_DECL:
9008 case VAR_DECL:
9009 /* If a static var's type was incomplete when the decl was written,
9010 but the type is complete now, lay out the decl now. */
9011 if (DECL_SIZE (exp) == 0
9012 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9013 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9014 layout_decl (exp, 0);
9015
9016 /* ... fall through ... */
9017
9018 case FUNCTION_DECL:
9019 case RESULT_DECL:
9020 decl_rtl = DECL_RTL (exp);
9021 expand_decl_rtl:
9022 gcc_assert (decl_rtl);
9023 decl_rtl = copy_rtx (decl_rtl);
9024 /* Record writes to register variables. */
9025 if (modifier == EXPAND_WRITE
9026 && REG_P (decl_rtl)
9027 && HARD_REGISTER_P (decl_rtl))
9028 add_to_hard_reg_set (&crtl->asm_clobbers,
9029 GET_MODE (decl_rtl), REGNO (decl_rtl));
9030
9031 /* Ensure the variable is marked as used even if it doesn't go through
9032 a parser. If it hasn't been used yet, write out an external
9033 definition. */
9034 if (! TREE_USED (exp))
9035 {
9036 assemble_external (exp);
9037 TREE_USED (exp) = 1;
9038 }
9039
9040 /* Show we haven't gotten RTL for this yet. */
9041 temp = 0;
9042
9043 /* Variables inherited from containing functions should have
9044 been lowered by this point. */
9045 context = decl_function_context (exp);
9046 gcc_assert (!context
9047 || context == current_function_decl
9048 || TREE_STATIC (exp)
9049 || DECL_EXTERNAL (exp)
9050 /* ??? C++ creates functions that are not TREE_STATIC. */
9051 || TREE_CODE (exp) == FUNCTION_DECL);
9052
9053 /* This is the case of an array whose size is to be determined
9054 from its initializer, while the initializer is still being parsed.
9055 See expand_decl. */
9056
9057 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9058 temp = validize_mem (decl_rtl);
9059
9060 /* If DECL_RTL is memory, we are in the normal case and the
9061 address is not valid, get the address into a register. */
9062
9063 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9064 {
9065 if (alt_rtl)
9066 *alt_rtl = decl_rtl;
9067 decl_rtl = use_anchored_address (decl_rtl);
9068 if (modifier != EXPAND_CONST_ADDRESS
9069 && modifier != EXPAND_SUM
9070 && !memory_address_addr_space_p (DECL_MODE (exp),
9071 XEXP (decl_rtl, 0),
9072 MEM_ADDR_SPACE (decl_rtl)))
9073 temp = replace_equiv_address (decl_rtl,
9074 copy_rtx (XEXP (decl_rtl, 0)));
9075 }
9076
9077 /* If we got something, return it. But first, set the alignment
9078 if the address is a register. */
9079 if (temp != 0)
9080 {
9081 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9082 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9083
9084 return temp;
9085 }
9086
9087 /* If the mode of DECL_RTL does not match that of the decl,
9088 there are two cases: we are dealing with a BLKmode value
9089 that is returned in a register, or we are dealing with
9090 a promoted value. In the latter case, return a SUBREG
9091 of the wanted mode, but mark it so that we know that it
9092 was already extended. */
9093 if (REG_P (decl_rtl)
9094 && DECL_MODE (exp) != BLKmode
9095 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9096 {
9097 enum machine_mode pmode;
9098
9099 /* Get the signedness to be used for this variable. Ensure we get
9100 the same mode we got when the variable was declared. */
9101 if (code == SSA_NAME
9102 && (g = SSA_NAME_DEF_STMT (ssa_name))
9103 && gimple_code (g) == GIMPLE_CALL)
9104 {
9105 gcc_assert (!gimple_call_internal_p (g));
9106 pmode = promote_function_mode (type, mode, &unsignedp,
9107 gimple_call_fntype (g),
9108 2);
9109 }
9110 else
9111 pmode = promote_decl_mode (exp, &unsignedp);
9112 gcc_assert (GET_MODE (decl_rtl) == pmode);
9113
9114 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9115 SUBREG_PROMOTED_VAR_P (temp) = 1;
9116 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9117 return temp;
9118 }
9119
9120 return decl_rtl;
9121
9122 case INTEGER_CST:
9123 temp = immed_double_const (TREE_INT_CST_LOW (exp),
9124 TREE_INT_CST_HIGH (exp), mode);
9125
9126 return temp;
9127
9128 case VECTOR_CST:
9129 {
9130 tree tmp = NULL_TREE;
9131 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9132 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9133 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9134 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9135 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9136 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9137 return const_vector_from_tree (exp);
9138 if (GET_MODE_CLASS (mode) == MODE_INT)
9139 {
9140 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9141 if (type_for_mode)
9142 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9143 }
9144 if (!tmp)
9145 tmp = build_constructor_from_list (type,
9146 TREE_VECTOR_CST_ELTS (exp));
9147 return expand_expr (tmp, ignore ? const0_rtx : target,
9148 tmode, modifier);
9149 }
9150
9151 case CONST_DECL:
9152 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9153
9154 case REAL_CST:
9155 /* If optimized, generate immediate CONST_DOUBLE
9156 which will be turned into memory by reload if necessary.
9157
9158 We used to force a register so that loop.c could see it. But
9159 this does not allow gen_* patterns to perform optimizations with
9160 the constants. It also produces two insns in cases like "x = 1.0;".
9161 On most machines, floating-point constants are not permitted in
9162 many insns, so we'd end up copying it to a register in any case.
9163
9164 Now, we do the copying in expand_binop, if appropriate. */
9165 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9166 TYPE_MODE (TREE_TYPE (exp)));
9167
9168 case FIXED_CST:
9169 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9170 TYPE_MODE (TREE_TYPE (exp)));
9171
9172 case COMPLEX_CST:
9173 /* Handle evaluating a complex constant in a CONCAT target. */
9174 if (original_target && GET_CODE (original_target) == CONCAT)
9175 {
9176 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9177 rtx rtarg, itarg;
9178
9179 rtarg = XEXP (original_target, 0);
9180 itarg = XEXP (original_target, 1);
9181
9182 /* Move the real and imaginary parts separately. */
9183 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9184 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9185
9186 if (op0 != rtarg)
9187 emit_move_insn (rtarg, op0);
9188 if (op1 != itarg)
9189 emit_move_insn (itarg, op1);
9190
9191 return original_target;
9192 }
9193
9194 /* ... fall through ... */
9195
9196 case STRING_CST:
9197 temp = expand_expr_constant (exp, 1, modifier);
9198
9199 /* temp contains a constant address.
9200 On RISC machines where a constant address isn't valid,
9201 make some insns to get that address into a register. */
9202 if (modifier != EXPAND_CONST_ADDRESS
9203 && modifier != EXPAND_INITIALIZER
9204 && modifier != EXPAND_SUM
9205 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9206 MEM_ADDR_SPACE (temp)))
9207 return replace_equiv_address (temp,
9208 copy_rtx (XEXP (temp, 0)));
9209 return temp;
9210
9211 case SAVE_EXPR:
9212 {
9213 tree val = treeop0;
9214 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
9215
9216 if (!SAVE_EXPR_RESOLVED_P (exp))
9217 {
9218 /* We can indeed still hit this case, typically via builtin
9219 expanders calling save_expr immediately before expanding
9220 something. Assume this means that we only have to deal
9221 with non-BLKmode values. */
9222 gcc_assert (GET_MODE (ret) != BLKmode);
9223
9224 val = build_decl (EXPR_LOCATION (exp),
9225 VAR_DECL, NULL, TREE_TYPE (exp));
9226 DECL_ARTIFICIAL (val) = 1;
9227 DECL_IGNORED_P (val) = 1;
9228 treeop0 = val;
9229 TREE_OPERAND (exp, 0) = treeop0;
9230 SAVE_EXPR_RESOLVED_P (exp) = 1;
9231
9232 if (!CONSTANT_P (ret))
9233 ret = copy_to_reg (ret);
9234 SET_DECL_RTL (val, ret);
9235 }
9236
9237 return ret;
9238 }
9239
9240
9241 case CONSTRUCTOR:
9242 /* If we don't need the result, just ensure we evaluate any
9243 subexpressions. */
9244 if (ignore)
9245 {
9246 unsigned HOST_WIDE_INT idx;
9247 tree value;
9248
9249 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9250 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9251
9252 return const0_rtx;
9253 }
9254
9255 return expand_constructor (exp, target, modifier, false);
9256
9257 case TARGET_MEM_REF:
9258 {
9259 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
9260 struct mem_address addr;
9261 enum insn_code icode;
9262 unsigned int align;
9263
9264 get_address_description (exp, &addr);
9265 op0 = addr_for_mem_ref (&addr, as, true);
9266 op0 = memory_address_addr_space (mode, op0, as);
9267 temp = gen_rtx_MEM (mode, op0);
9268 set_mem_attributes (temp, exp, 0);
9269 set_mem_addr_space (temp, as);
9270 align = get_object_or_type_alignment (exp);
9271 if (mode != BLKmode
9272 && align < GET_MODE_ALIGNMENT (mode)
9273 /* If the target does not have special handling for unaligned
9274 loads of this mode, then it can use regular moves for them. */
9275 && ((icode = optab_handler (movmisalign_optab, mode))
9276 != CODE_FOR_nothing))
9277 {
9278 struct expand_operand ops[2];
9279
9280 /* We've already validated the memory, and we're creating a
9281 new pseudo destination. The predicates really can't fail,
9282 nor can the generator. */
9283 create_output_operand (&ops[0], NULL_RTX, mode);
9284 create_fixed_operand (&ops[1], temp);
9285 expand_insn (icode, 2, ops);
9286 return ops[0].value;
9287 }
9288 return temp;
9289 }
9290
9291 case MEM_REF:
9292 {
9293 addr_space_t as
9294 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))));
9295 enum machine_mode address_mode;
9296 tree base = TREE_OPERAND (exp, 0);
9297 gimple def_stmt;
9298 enum insn_code icode;
9299 unsigned align;
9300 /* Handle expansion of non-aliased memory with non-BLKmode. That
9301 might end up in a register. */
9302 if (TREE_CODE (base) == ADDR_EXPR)
9303 {
9304 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9305 tree bit_offset;
9306 base = TREE_OPERAND (base, 0);
9307 if (!DECL_P (base))
9308 {
9309 HOST_WIDE_INT off;
9310 base = get_addr_base_and_unit_offset (base, &off);
9311 gcc_assert (base);
9312 offset += off;
9313 }
9314 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
9315 decl we must use bitfield operations. */
9316 if (DECL_P (base)
9317 && !TREE_ADDRESSABLE (base)
9318 && DECL_MODE (base) != BLKmode
9319 && DECL_RTL_SET_P (base)
9320 && !MEM_P (DECL_RTL (base)))
9321 {
9322 tree bftype;
9323 if (offset == 0
9324 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
9325 && (GET_MODE_BITSIZE (DECL_MODE (base))
9326 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
9327 return expand_expr (build1 (VIEW_CONVERT_EXPR,
9328 TREE_TYPE (exp), base),
9329 target, tmode, modifier);
9330 bit_offset = bitsize_int (offset * BITS_PER_UNIT);
9331 bftype = TREE_TYPE (base);
9332 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
9333 bftype = TREE_TYPE (exp);
9334 return expand_expr (build3 (BIT_FIELD_REF, bftype,
9335 base,
9336 TYPE_SIZE (TREE_TYPE (exp)),
9337 bit_offset),
9338 target, tmode, modifier);
9339 }
9340 }
9341 address_mode = targetm.addr_space.address_mode (as);
9342 base = TREE_OPERAND (exp, 0);
9343 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9344 {
9345 tree mask = gimple_assign_rhs2 (def_stmt);
9346 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9347 gimple_assign_rhs1 (def_stmt), mask);
9348 TREE_OPERAND (exp, 0) = base;
9349 }
9350 align = get_object_or_type_alignment (exp);
9351 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9352 op0 = memory_address_addr_space (address_mode, op0, as);
9353 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9354 {
9355 rtx off
9356 = immed_double_int_const (mem_ref_offset (exp), address_mode);
9357 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9358 }
9359 op0 = memory_address_addr_space (mode, op0, as);
9360 temp = gen_rtx_MEM (mode, op0);
9361 set_mem_attributes (temp, exp, 0);
9362 set_mem_addr_space (temp, as);
9363 if (TREE_THIS_VOLATILE (exp))
9364 MEM_VOLATILE_P (temp) = 1;
9365 if (mode != BLKmode
9366 && align < GET_MODE_ALIGNMENT (mode)
9367 /* If the target does not have special handling for unaligned
9368 loads of this mode, then it can use regular moves for them. */
9369 && ((icode = optab_handler (movmisalign_optab, mode))
9370 != CODE_FOR_nothing))
9371 {
9372 struct expand_operand ops[2];
9373
9374 /* We've already validated the memory, and we're creating a
9375 new pseudo destination. The predicates really can't fail,
9376 nor can the generator. */
9377 create_output_operand (&ops[0], NULL_RTX, mode);
9378 create_fixed_operand (&ops[1], temp);
9379 expand_insn (icode, 2, ops);
9380 return ops[0].value;
9381 }
9382 return temp;
9383 }
9384
9385 case ARRAY_REF:
9386
9387 {
9388 tree array = treeop0;
9389 tree index = treeop1;
9390
9391 /* Fold an expression like: "foo"[2].
9392 This is not done in fold so it won't happen inside &.
9393 Don't fold if this is for wide characters since it's too
9394 difficult to do correctly and this is a very rare case. */
9395
9396 if (modifier != EXPAND_CONST_ADDRESS
9397 && modifier != EXPAND_INITIALIZER
9398 && modifier != EXPAND_MEMORY)
9399 {
9400 tree t = fold_read_from_constant_string (exp);
9401
9402 if (t)
9403 return expand_expr (t, target, tmode, modifier);
9404 }
9405
9406 /* If this is a constant index into a constant array,
9407 just get the value from the array. Handle both the cases when
9408 we have an explicit constructor and when our operand is a variable
9409 that was declared const. */
9410
9411 if (modifier != EXPAND_CONST_ADDRESS
9412 && modifier != EXPAND_INITIALIZER
9413 && modifier != EXPAND_MEMORY
9414 && TREE_CODE (array) == CONSTRUCTOR
9415 && ! TREE_SIDE_EFFECTS (array)
9416 && TREE_CODE (index) == INTEGER_CST)
9417 {
9418 unsigned HOST_WIDE_INT ix;
9419 tree field, value;
9420
9421 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9422 field, value)
9423 if (tree_int_cst_equal (field, index))
9424 {
9425 if (!TREE_SIDE_EFFECTS (value))
9426 return expand_expr (fold (value), target, tmode, modifier);
9427 break;
9428 }
9429 }
9430
9431 else if (optimize >= 1
9432 && modifier != EXPAND_CONST_ADDRESS
9433 && modifier != EXPAND_INITIALIZER
9434 && modifier != EXPAND_MEMORY
9435 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9436 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
9437 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
9438 && const_value_known_p (array))
9439 {
9440 if (TREE_CODE (index) == INTEGER_CST)
9441 {
9442 tree init = DECL_INITIAL (array);
9443
9444 if (TREE_CODE (init) == CONSTRUCTOR)
9445 {
9446 unsigned HOST_WIDE_INT ix;
9447 tree field, value;
9448
9449 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9450 field, value)
9451 if (tree_int_cst_equal (field, index))
9452 {
9453 if (TREE_SIDE_EFFECTS (value))
9454 break;
9455
9456 if (TREE_CODE (value) == CONSTRUCTOR)
9457 {
9458 /* If VALUE is a CONSTRUCTOR, this
9459 optimization is only useful if
9460 this doesn't store the CONSTRUCTOR
9461 into memory. If it does, it is more
9462 efficient to just load the data from
9463 the array directly. */
9464 rtx ret = expand_constructor (value, target,
9465 modifier, true);
9466 if (ret == NULL_RTX)
9467 break;
9468 }
9469
9470 return expand_expr (fold (value), target, tmode,
9471 modifier);
9472 }
9473 }
9474 else if (TREE_CODE (init) == STRING_CST)
9475 {
9476 tree index1 = index;
9477 tree low_bound = array_ref_low_bound (exp);
9478 index1 = fold_convert_loc (loc, sizetype,
9479 treeop1);
9480
9481 /* Optimize the special-case of a zero lower bound.
9482
9483 We convert the low_bound to sizetype to avoid some problems
9484 with constant folding. (E.g. suppose the lower bound is 1,
9485 and its mode is QI. Without the conversion, (ARRAY
9486 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9487 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
9488
9489 if (! integer_zerop (low_bound))
9490 index1 = size_diffop_loc (loc, index1,
9491 fold_convert_loc (loc, sizetype,
9492 low_bound));
9493
9494 if (0 > compare_tree_int (index1,
9495 TREE_STRING_LENGTH (init)))
9496 {
9497 tree type = TREE_TYPE (TREE_TYPE (init));
9498 enum machine_mode mode = TYPE_MODE (type);
9499
9500 if (GET_MODE_CLASS (mode) == MODE_INT
9501 && GET_MODE_SIZE (mode) == 1)
9502 return gen_int_mode (TREE_STRING_POINTER (init)
9503 [TREE_INT_CST_LOW (index1)],
9504 mode);
9505 }
9506 }
9507 }
9508 }
9509 }
9510 goto normal_inner_ref;
9511
9512 case COMPONENT_REF:
9513 /* If the operand is a CONSTRUCTOR, we can just extract the
9514 appropriate field if it is present. */
9515 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9516 {
9517 unsigned HOST_WIDE_INT idx;
9518 tree field, value;
9519
9520 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9521 idx, field, value)
9522 if (field == treeop1
9523 /* We can normally use the value of the field in the
9524 CONSTRUCTOR. However, if this is a bitfield in
9525 an integral mode that we can fit in a HOST_WIDE_INT,
9526 we must mask only the number of bits in the bitfield,
9527 since this is done implicitly by the constructor. If
9528 the bitfield does not meet either of those conditions,
9529 we can't do this optimization. */
9530 && (! DECL_BIT_FIELD (field)
9531 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9532 && (GET_MODE_PRECISION (DECL_MODE (field))
9533 <= HOST_BITS_PER_WIDE_INT))))
9534 {
9535 if (DECL_BIT_FIELD (field)
9536 && modifier == EXPAND_STACK_PARM)
9537 target = 0;
9538 op0 = expand_expr (value, target, tmode, modifier);
9539 if (DECL_BIT_FIELD (field))
9540 {
9541 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9542 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9543
9544 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9545 {
9546 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
9547 op0 = expand_and (imode, op0, op1, target);
9548 }
9549 else
9550 {
9551 int count = GET_MODE_PRECISION (imode) - bitsize;
9552
9553 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9554 target, 0);
9555 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9556 target, 0);
9557 }
9558 }
9559
9560 return op0;
9561 }
9562 }
9563 goto normal_inner_ref;
9564
9565 case BIT_FIELD_REF:
9566 case ARRAY_RANGE_REF:
9567 normal_inner_ref:
9568 {
9569 enum machine_mode mode1, mode2;
9570 HOST_WIDE_INT bitsize, bitpos;
9571 tree offset;
9572 int volatilep = 0, must_force_mem;
9573 bool packedp = false;
9574 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9575 &mode1, &unsignedp, &volatilep, true);
9576 rtx orig_op0, memloc;
9577
9578 /* If we got back the original object, something is wrong. Perhaps
9579 we are evaluating an expression too early. In any event, don't
9580 infinitely recurse. */
9581 gcc_assert (tem != exp);
9582
9583 if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
9584 || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
9585 && DECL_PACKED (TREE_OPERAND (exp, 1))))
9586 packedp = true;
9587
9588 /* If TEM's type is a union of variable size, pass TARGET to the inner
9589 computation, since it will need a temporary and TARGET is known
9590 to have to do. This occurs in unchecked conversion in Ada. */
9591 orig_op0 = op0
9592 = expand_expr (tem,
9593 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9594 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9595 != INTEGER_CST)
9596 && modifier != EXPAND_STACK_PARM
9597 ? target : NULL_RTX),
9598 VOIDmode,
9599 (modifier == EXPAND_INITIALIZER
9600 || modifier == EXPAND_CONST_ADDRESS
9601 || modifier == EXPAND_STACK_PARM)
9602 ? modifier : EXPAND_NORMAL);
9603
9604
9605 /* If the bitfield is volatile, we want to access it in the
9606 field's mode, not the computed mode.
9607 If a MEM has VOIDmode (external with incomplete type),
9608 use BLKmode for it instead. */
9609 if (MEM_P (op0))
9610 {
9611 if (volatilep && flag_strict_volatile_bitfields > 0)
9612 op0 = adjust_address (op0, mode1, 0);
9613 else if (GET_MODE (op0) == VOIDmode)
9614 op0 = adjust_address (op0, BLKmode, 0);
9615 }
9616
9617 mode2
9618 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9619
9620 /* If we have either an offset, a BLKmode result, or a reference
9621 outside the underlying object, we must force it to memory.
9622 Such a case can occur in Ada if we have unchecked conversion
9623 of an expression from a scalar type to an aggregate type or
9624 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9625 passed a partially uninitialized object or a view-conversion
9626 to a larger size. */
9627 must_force_mem = (offset
9628 || mode1 == BLKmode
9629 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9630
9631 /* Handle CONCAT first. */
9632 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9633 {
9634 if (bitpos == 0
9635 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9636 return op0;
9637 if (bitpos == 0
9638 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9639 && bitsize)
9640 {
9641 op0 = XEXP (op0, 0);
9642 mode2 = GET_MODE (op0);
9643 }
9644 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9645 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9646 && bitpos
9647 && bitsize)
9648 {
9649 op0 = XEXP (op0, 1);
9650 bitpos = 0;
9651 mode2 = GET_MODE (op0);
9652 }
9653 else
9654 /* Otherwise force into memory. */
9655 must_force_mem = 1;
9656 }
9657
9658 /* If this is a constant, put it in a register if it is a legitimate
9659 constant and we don't need a memory reference. */
9660 if (CONSTANT_P (op0)
9661 && mode2 != BLKmode
9662 && targetm.legitimate_constant_p (mode2, op0)
9663 && !must_force_mem)
9664 op0 = force_reg (mode2, op0);
9665
9666 /* Otherwise, if this is a constant, try to force it to the constant
9667 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9668 is a legitimate constant. */
9669 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9670 op0 = validize_mem (memloc);
9671
9672 /* Otherwise, if this is a constant or the object is not in memory
9673 and need be, put it there. */
9674 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9675 {
9676 tree nt = build_qualified_type (TREE_TYPE (tem),
9677 (TYPE_QUALS (TREE_TYPE (tem))
9678 | TYPE_QUAL_CONST));
9679 memloc = assign_temp (nt, 1, 1, 1);
9680 emit_move_insn (memloc, op0);
9681 op0 = memloc;
9682 }
9683
9684 if (offset)
9685 {
9686 enum machine_mode address_mode;
9687 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9688 EXPAND_SUM);
9689
9690 gcc_assert (MEM_P (op0));
9691
9692 address_mode
9693 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9694 if (GET_MODE (offset_rtx) != address_mode)
9695 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9696
9697 if (GET_MODE (op0) == BLKmode
9698 /* A constant address in OP0 can have VOIDmode, we must
9699 not try to call force_reg in that case. */
9700 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9701 && bitsize != 0
9702 && (bitpos % bitsize) == 0
9703 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9704 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9705 {
9706 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9707 bitpos = 0;
9708 }
9709
9710 op0 = offset_address (op0, offset_rtx,
9711 highest_pow2_factor (offset));
9712 }
9713
9714 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9715 record its alignment as BIGGEST_ALIGNMENT. */
9716 if (MEM_P (op0) && bitpos == 0 && offset != 0
9717 && is_aligning_offset (offset, tem))
9718 set_mem_align (op0, BIGGEST_ALIGNMENT);
9719
9720 /* Don't forget about volatility even if this is a bitfield. */
9721 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9722 {
9723 if (op0 == orig_op0)
9724 op0 = copy_rtx (op0);
9725
9726 MEM_VOLATILE_P (op0) = 1;
9727 }
9728
9729 /* In cases where an aligned union has an unaligned object
9730 as a field, we might be extracting a BLKmode value from
9731 an integer-mode (e.g., SImode) object. Handle this case
9732 by doing the extract into an object as wide as the field
9733 (which we know to be the width of a basic mode), then
9734 storing into memory, and changing the mode to BLKmode. */
9735 if (mode1 == VOIDmode
9736 || REG_P (op0) || GET_CODE (op0) == SUBREG
9737 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9738 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9739 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9740 && modifier != EXPAND_CONST_ADDRESS
9741 && modifier != EXPAND_INITIALIZER)
9742 /* If the field is volatile, we always want an aligned
9743 access. Only do this if the access is not already naturally
9744 aligned, otherwise "normal" (non-bitfield) volatile fields
9745 become non-addressable. */
9746 || (volatilep && flag_strict_volatile_bitfields > 0
9747 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
9748 /* If the field isn't aligned enough to fetch as a memref,
9749 fetch it as a bit field. */
9750 || (mode1 != BLKmode
9751 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9752 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9753 || (MEM_P (op0)
9754 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9755 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9756 && ((modifier == EXPAND_CONST_ADDRESS
9757 || modifier == EXPAND_INITIALIZER)
9758 ? STRICT_ALIGNMENT
9759 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9760 || (bitpos % BITS_PER_UNIT != 0)))
9761 /* If the type and the field are a constant size and the
9762 size of the type isn't the same size as the bitfield,
9763 we must use bitfield operations. */
9764 || (bitsize >= 0
9765 && TYPE_SIZE (TREE_TYPE (exp))
9766 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9767 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9768 bitsize)))
9769 {
9770 enum machine_mode ext_mode = mode;
9771
9772 if (ext_mode == BLKmode
9773 && ! (target != 0 && MEM_P (op0)
9774 && MEM_P (target)
9775 && bitpos % BITS_PER_UNIT == 0))
9776 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9777
9778 if (ext_mode == BLKmode)
9779 {
9780 if (target == 0)
9781 target = assign_temp (type, 0, 1, 1);
9782
9783 if (bitsize == 0)
9784 return target;
9785
9786 /* In this case, BITPOS must start at a byte boundary and
9787 TARGET, if specified, must be a MEM. */
9788 gcc_assert (MEM_P (op0)
9789 && (!target || MEM_P (target))
9790 && !(bitpos % BITS_PER_UNIT));
9791
9792 emit_block_move (target,
9793 adjust_address (op0, VOIDmode,
9794 bitpos / BITS_PER_UNIT),
9795 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9796 / BITS_PER_UNIT),
9797 (modifier == EXPAND_STACK_PARM
9798 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9799
9800 return target;
9801 }
9802
9803 op0 = validize_mem (op0);
9804
9805 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9806 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9807
9808 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
9809 (modifier == EXPAND_STACK_PARM
9810 ? NULL_RTX : target),
9811 ext_mode, ext_mode);
9812
9813 /* If the result is a record type and BITSIZE is narrower than
9814 the mode of OP0, an integral mode, and this is a big endian
9815 machine, we must put the field into the high-order bits. */
9816 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9817 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9818 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9819 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9820 GET_MODE_BITSIZE (GET_MODE (op0))
9821 - bitsize, op0, 1);
9822
9823 /* If the result type is BLKmode, store the data into a temporary
9824 of the appropriate type, but with the mode corresponding to the
9825 mode for the data we have (op0's mode). It's tempting to make
9826 this a constant type, since we know it's only being stored once,
9827 but that can cause problems if we are taking the address of this
9828 COMPONENT_REF because the MEM of any reference via that address
9829 will have flags corresponding to the type, which will not
9830 necessarily be constant. */
9831 if (mode == BLKmode)
9832 {
9833 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9834 rtx new_rtx;
9835
9836 /* If the reference doesn't use the alias set of its type,
9837 we cannot create the temporary using that type. */
9838 if (component_uses_parent_alias_set (exp))
9839 {
9840 new_rtx = assign_stack_local (ext_mode, size, 0);
9841 set_mem_alias_set (new_rtx, get_alias_set (exp));
9842 }
9843 else
9844 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9845
9846 emit_move_insn (new_rtx, op0);
9847 op0 = copy_rtx (new_rtx);
9848 PUT_MODE (op0, BLKmode);
9849 set_mem_attributes (op0, exp, 1);
9850 }
9851
9852 return op0;
9853 }
9854
9855 /* If the result is BLKmode, use that to access the object
9856 now as well. */
9857 if (mode == BLKmode)
9858 mode1 = BLKmode;
9859
9860 /* Get a reference to just this component. */
9861 if (modifier == EXPAND_CONST_ADDRESS
9862 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9863 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9864 else
9865 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9866
9867 if (op0 == orig_op0)
9868 op0 = copy_rtx (op0);
9869
9870 set_mem_attributes (op0, exp, 0);
9871 if (REG_P (XEXP (op0, 0)))
9872 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9873
9874 MEM_VOLATILE_P (op0) |= volatilep;
9875 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9876 || modifier == EXPAND_CONST_ADDRESS
9877 || modifier == EXPAND_INITIALIZER)
9878 return op0;
9879 else if (target == 0)
9880 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9881
9882 convert_move (target, op0, unsignedp);
9883 return target;
9884 }
9885
9886 case OBJ_TYPE_REF:
9887 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9888
9889 case CALL_EXPR:
9890 /* All valid uses of __builtin_va_arg_pack () are removed during
9891 inlining. */
9892 if (CALL_EXPR_VA_ARG_PACK (exp))
9893 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9894 {
9895 tree fndecl = get_callee_fndecl (exp), attr;
9896
9897 if (fndecl
9898 && (attr = lookup_attribute ("error",
9899 DECL_ATTRIBUTES (fndecl))) != NULL)
9900 error ("%Kcall to %qs declared with attribute error: %s",
9901 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9902 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9903 if (fndecl
9904 && (attr = lookup_attribute ("warning",
9905 DECL_ATTRIBUTES (fndecl))) != NULL)
9906 warning_at (tree_nonartificial_location (exp),
9907 0, "%Kcall to %qs declared with attribute warning: %s",
9908 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9909 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9910
9911 /* Check for a built-in function. */
9912 if (fndecl && DECL_BUILT_IN (fndecl))
9913 {
9914 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9915 return expand_builtin (exp, target, subtarget, tmode, ignore);
9916 }
9917 }
9918 return expand_call (exp, target, ignore);
9919
9920 case VIEW_CONVERT_EXPR:
9921 op0 = NULL_RTX;
9922
9923 /* If we are converting to BLKmode, try to avoid an intermediate
9924 temporary by fetching an inner memory reference. */
9925 if (mode == BLKmode
9926 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9927 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9928 && handled_component_p (treeop0))
9929 {
9930 enum machine_mode mode1;
9931 HOST_WIDE_INT bitsize, bitpos;
9932 tree offset;
9933 int unsignedp;
9934 int volatilep = 0;
9935 tree tem
9936 = get_inner_reference (treeop0, &bitsize, &bitpos,
9937 &offset, &mode1, &unsignedp, &volatilep,
9938 true);
9939 rtx orig_op0;
9940
9941 /* ??? We should work harder and deal with non-zero offsets. */
9942 if (!offset
9943 && (bitpos % BITS_PER_UNIT) == 0
9944 && bitsize >= 0
9945 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9946 {
9947 /* See the normal_inner_ref case for the rationale. */
9948 orig_op0
9949 = expand_expr (tem,
9950 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9951 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9952 != INTEGER_CST)
9953 && modifier != EXPAND_STACK_PARM
9954 ? target : NULL_RTX),
9955 VOIDmode,
9956 (modifier == EXPAND_INITIALIZER
9957 || modifier == EXPAND_CONST_ADDRESS
9958 || modifier == EXPAND_STACK_PARM)
9959 ? modifier : EXPAND_NORMAL);
9960
9961 if (MEM_P (orig_op0))
9962 {
9963 op0 = orig_op0;
9964
9965 /* Get a reference to just this component. */
9966 if (modifier == EXPAND_CONST_ADDRESS
9967 || modifier == EXPAND_SUM
9968 || modifier == EXPAND_INITIALIZER)
9969 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9970 else
9971 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9972
9973 if (op0 == orig_op0)
9974 op0 = copy_rtx (op0);
9975
9976 set_mem_attributes (op0, treeop0, 0);
9977 if (REG_P (XEXP (op0, 0)))
9978 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9979
9980 MEM_VOLATILE_P (op0) |= volatilep;
9981 }
9982 }
9983 }
9984
9985 if (!op0)
9986 op0 = expand_expr (treeop0,
9987 NULL_RTX, VOIDmode, modifier);
9988
9989 /* If the input and output modes are both the same, we are done. */
9990 if (mode == GET_MODE (op0))
9991 ;
9992 /* If neither mode is BLKmode, and both modes are the same size
9993 then we can use gen_lowpart. */
9994 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9995 && (GET_MODE_PRECISION (mode)
9996 == GET_MODE_PRECISION (GET_MODE (op0)))
9997 && !COMPLEX_MODE_P (GET_MODE (op0)))
9998 {
9999 if (GET_CODE (op0) == SUBREG)
10000 op0 = force_reg (GET_MODE (op0), op0);
10001 temp = gen_lowpart_common (mode, op0);
10002 if (temp)
10003 op0 = temp;
10004 else
10005 {
10006 if (!REG_P (op0) && !MEM_P (op0))
10007 op0 = force_reg (GET_MODE (op0), op0);
10008 op0 = gen_lowpart (mode, op0);
10009 }
10010 }
10011 /* If both types are integral, convert from one mode to the other. */
10012 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10013 op0 = convert_modes (mode, GET_MODE (op0), op0,
10014 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10015 /* As a last resort, spill op0 to memory, and reload it in a
10016 different mode. */
10017 else if (!MEM_P (op0))
10018 {
10019 /* If the operand is not a MEM, force it into memory. Since we
10020 are going to be changing the mode of the MEM, don't call
10021 force_const_mem for constants because we don't allow pool
10022 constants to change mode. */
10023 tree inner_type = TREE_TYPE (treeop0);
10024
10025 gcc_assert (!TREE_ADDRESSABLE (exp));
10026
10027 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10028 target
10029 = assign_stack_temp_for_type
10030 (TYPE_MODE (inner_type),
10031 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
10032
10033 emit_move_insn (target, op0);
10034 op0 = target;
10035 }
10036
10037 /* At this point, OP0 is in the correct mode. If the output type is
10038 such that the operand is known to be aligned, indicate that it is.
10039 Otherwise, we need only be concerned about alignment for non-BLKmode
10040 results. */
10041 if (MEM_P (op0))
10042 {
10043 op0 = copy_rtx (op0);
10044
10045 if (TYPE_ALIGN_OK (type))
10046 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10047 else if (STRICT_ALIGNMENT
10048 && mode != BLKmode
10049 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10050 {
10051 tree inner_type = TREE_TYPE (treeop0);
10052 HOST_WIDE_INT temp_size
10053 = MAX (int_size_in_bytes (inner_type),
10054 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10055 rtx new_rtx
10056 = assign_stack_temp_for_type (mode, temp_size, 0, type);
10057 rtx new_with_op0_mode
10058 = adjust_address (new_rtx, GET_MODE (op0), 0);
10059
10060 gcc_assert (!TREE_ADDRESSABLE (exp));
10061
10062 if (GET_MODE (op0) == BLKmode)
10063 emit_block_move (new_with_op0_mode, op0,
10064 GEN_INT (GET_MODE_SIZE (mode)),
10065 (modifier == EXPAND_STACK_PARM
10066 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10067 else
10068 emit_move_insn (new_with_op0_mode, op0);
10069
10070 op0 = new_rtx;
10071 }
10072
10073 op0 = adjust_address (op0, mode, 0);
10074 }
10075
10076 return op0;
10077
10078 case MODIFY_EXPR:
10079 {
10080 tree lhs = treeop0;
10081 tree rhs = treeop1;
10082 gcc_assert (ignore);
10083
10084 /* Check for |= or &= of a bitfield of size one into another bitfield
10085 of size 1. In this case, (unless we need the result of the
10086 assignment) we can do this more efficiently with a
10087 test followed by an assignment, if necessary.
10088
10089 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10090 things change so we do, this code should be enhanced to
10091 support it. */
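/* For example, with one-bit fields, "a.x |= b.y" is expanded as
   "if (b.y) a.x = 1;", and "a.x &= b.y" as "if (!b.y) a.x = 0;".  */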
10092 if (TREE_CODE (lhs) == COMPONENT_REF
10093 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10094 || TREE_CODE (rhs) == BIT_AND_EXPR)
10095 && TREE_OPERAND (rhs, 0) == lhs
10096 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10097 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10098 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10099 {
10100 rtx label = gen_label_rtx ();
10101 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10102 do_jump (TREE_OPERAND (rhs, 1),
10103 value ? label : 0,
10104 value ? 0 : label, -1);
10105 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10106 MOVE_NONTEMPORAL (exp));
10107 do_pending_stack_adjust ();
10108 emit_label (label);
10109 return const0_rtx;
10110 }
10111
10112 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
10113 return const0_rtx;
10114 }
10115
10116 case ADDR_EXPR:
10117 return expand_expr_addr_expr (exp, target, tmode, modifier);
10118
10119 case REALPART_EXPR:
10120 op0 = expand_normal (treeop0);
10121 return read_complex_part (op0, false);
10122
10123 case IMAGPART_EXPR:
10124 op0 = expand_normal (treeop0);
10125 return read_complex_part (op0, true);
10126
10127 case RETURN_EXPR:
10128 case LABEL_EXPR:
10129 case GOTO_EXPR:
10130 case SWITCH_EXPR:
10131 case ASM_EXPR:
10132 /* Expanded in cfgexpand.c. */
10133 gcc_unreachable ();
10134
10135 case TRY_CATCH_EXPR:
10136 case CATCH_EXPR:
10137 case EH_FILTER_EXPR:
10138 case TRY_FINALLY_EXPR:
10139 /* Lowered by tree-eh.c. */
10140 gcc_unreachable ();
10141
10142 case WITH_CLEANUP_EXPR:
10143 case CLEANUP_POINT_EXPR:
10144 case TARGET_EXPR:
10145 case CASE_LABEL_EXPR:
10146 case VA_ARG_EXPR:
10147 case BIND_EXPR:
10148 case INIT_EXPR:
10149 case CONJ_EXPR:
10150 case COMPOUND_EXPR:
10151 case PREINCREMENT_EXPR:
10152 case PREDECREMENT_EXPR:
10153 case POSTINCREMENT_EXPR:
10154 case POSTDECREMENT_EXPR:
10155 case LOOP_EXPR:
10156 case EXIT_EXPR:
10157 /* Lowered by gimplify.c. */
10158 gcc_unreachable ();
10159
10160 case FDESC_EXPR:
10161 /* Function descriptors are not valid except as
10162 initialization constants, and should not be expanded. */
10163 gcc_unreachable ();
10164
10165 case WITH_SIZE_EXPR:
10166 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10167 have pulled out the size to use in whatever context it needed. */
10168 return expand_expr_real (treeop0, original_target, tmode,
10169 modifier, alt_rtl);
10170
10171 case COMPOUND_LITERAL_EXPR:
10172 {
10173 /* Initialize the anonymous variable declared in the compound
10174 literal, then return the variable. */
10175 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
10176
10177 /* Create RTL for this variable. */
10178 if (!DECL_RTL_SET_P (decl))
10179 {
10180 if (DECL_HARD_REGISTER (decl))
10181 /* The user specified an assembler name for this variable.
10182 Set that up now. */
10183 rest_of_decl_compilation (decl, 0, 0);
10184 else
10185 expand_decl (decl);
10186 }
10187
10188 return expand_expr_real (decl, original_target, tmode,
10189 modifier, alt_rtl);
10190 }
10191
10192 default:
10193 return expand_expr_real_2 (&ops, target, tmode, modifier);
10194 }
10195 }
10196 \f
10197 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10198 signedness of TYPE), possibly returning the result in TARGET. */
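/* For example, with a 5-bit unsigned TYPE the value is simply masked with
   0x1f, while with a 5-bit signed TYPE it is shifted left and then
   arithmetically shifted right by GET_MODE_PRECISION (GET_MODE (exp)) - 5
   bits so that the sign bit of the field is propagated.  */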
10199 static rtx
10200 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10201 {
10202 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10203 if (target && GET_MODE (target) != GET_MODE (exp))
10204 target = 0;
10205 /* For constant values, reduce using build_int_cst_type. */
10206 if (CONST_INT_P (exp))
10207 {
10208 HOST_WIDE_INT value = INTVAL (exp);
10209 tree t = build_int_cst_type (type, value);
10210 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10211 }
10212 else if (TYPE_UNSIGNED (type))
10213 {
10214 rtx mask = immed_double_int_const (double_int_mask (prec),
10215 GET_MODE (exp));
10216 return expand_and (GET_MODE (exp), exp, mask, target);
10217 }
10218 else
10219 {
10220 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10221 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10222 exp, count, target, 0);
10223 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10224 exp, count, target, 0);
10225 }
10226 }
10227 \f
10228 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10229 when applied to the address of EXP produces an address known to be
10230 aligned more than BIGGEST_ALIGNMENT. */
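/* Such an offset typically arises from pointer-alignment code of the form
   P + ((- (intptr_t) P) & (ALIGN - 1)), which rounds P up to a multiple
   of ALIGN.  */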
10231
10232 static int
10233 is_aligning_offset (const_tree offset, const_tree exp)
10234 {
10235 /* Strip off any conversions. */
10236 while (CONVERT_EXPR_P (offset))
10237 offset = TREE_OPERAND (offset, 0);
10238
10239 /* We must now have a BIT_AND_EXPR with a constant that is one less than a
10240 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10241 if (TREE_CODE (offset) != BIT_AND_EXPR
10242 || !host_integerp (TREE_OPERAND (offset, 1), 1)
10243 || compare_tree_int (TREE_OPERAND (offset, 1),
10244 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10245 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
10246 return 0;
10247
10248 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10249 It must be NEGATE_EXPR. Then strip any more conversions. */
10250 offset = TREE_OPERAND (offset, 0);
10251 while (CONVERT_EXPR_P (offset))
10252 offset = TREE_OPERAND (offset, 0);
10253
10254 if (TREE_CODE (offset) != NEGATE_EXPR)
10255 return 0;
10256
10257 offset = TREE_OPERAND (offset, 0);
10258 while (CONVERT_EXPR_P (offset))
10259 offset = TREE_OPERAND (offset, 0);
10260
10261 /* This must now be the address of EXP. */
10262 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10263 }
10264 \f
10265 /* Return the tree node if an ARG corresponds to a string constant or zero
10266 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10267 in bytes within the string that ARG is accessing. The type of the
10268 offset will be `sizetype'. */
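/* For example, given ARG == &"hello"[2], this returns the STRING_CST
   "hello" and sets *PTR_OFFSET to a sizetype constant 2.  */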
10269
10270 tree
10271 string_constant (tree arg, tree *ptr_offset)
10272 {
10273 tree array, offset, lower_bound;
10274 STRIP_NOPS (arg);
10275
10276 if (TREE_CODE (arg) == ADDR_EXPR)
10277 {
10278 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10279 {
10280 *ptr_offset = size_zero_node;
10281 return TREE_OPERAND (arg, 0);
10282 }
10283 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10284 {
10285 array = TREE_OPERAND (arg, 0);
10286 offset = size_zero_node;
10287 }
10288 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10289 {
10290 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10291 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10292 if (TREE_CODE (array) != STRING_CST
10293 && TREE_CODE (array) != VAR_DECL)
10294 return 0;
10295
10296 /* Check if the array has a nonzero lower bound. */
10297 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10298 if (!integer_zerop (lower_bound))
10299 {
10300 /* If the offset and base aren't both constants, return 0. */
10301 if (TREE_CODE (lower_bound) != INTEGER_CST)
10302 return 0;
10303 if (TREE_CODE (offset) != INTEGER_CST)
10304 return 0;
10305 /* Adjust offset by the lower bound. */
10306 offset = size_diffop (fold_convert (sizetype, offset),
10307 fold_convert (sizetype, lower_bound));
10308 }
10309 }
10310 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10311 {
10312 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10313 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10314 if (TREE_CODE (array) != ADDR_EXPR)
10315 return 0;
10316 array = TREE_OPERAND (array, 0);
10317 if (TREE_CODE (array) != STRING_CST
10318 && TREE_CODE (array) != VAR_DECL)
10319 return 0;
10320 }
10321 else
10322 return 0;
10323 }
10324 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10325 {
10326 tree arg0 = TREE_OPERAND (arg, 0);
10327 tree arg1 = TREE_OPERAND (arg, 1);
10328
10329 STRIP_NOPS (arg0);
10330 STRIP_NOPS (arg1);
10331
10332 if (TREE_CODE (arg0) == ADDR_EXPR
10333 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10334 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10335 {
10336 array = TREE_OPERAND (arg0, 0);
10337 offset = arg1;
10338 }
10339 else if (TREE_CODE (arg1) == ADDR_EXPR
10340 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10341 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10342 {
10343 array = TREE_OPERAND (arg1, 0);
10344 offset = arg0;
10345 }
10346 else
10347 return 0;
10348 }
10349 else
10350 return 0;
10351
10352 if (TREE_CODE (array) == STRING_CST)
10353 {
10354 *ptr_offset = fold_convert (sizetype, offset);
10355 return array;
10356 }
10357 else if (TREE_CODE (array) == VAR_DECL
10358 || TREE_CODE (array) == CONST_DECL)
10359 {
10360 int length;
10361
10362 /* Variables initialized to string literals can be handled too. */
10363 if (!const_value_known_p (array)
10364 || !DECL_INITIAL (array)
10365 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
10366 return 0;
10367
10368 /* Avoid const char foo[4] = "abcde"; */
10369 if (DECL_SIZE_UNIT (array) == NULL_TREE
10370 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10371 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
10372 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10373 return 0;
10374
10375 /* If the variable is bigger than the string literal, OFFSET must be constant
10376 and inside the bounds of the string literal. */
10377 offset = fold_convert (sizetype, offset);
10378 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10379 && (! host_integerp (offset, 1)
10380 || compare_tree_int (offset, length) >= 0))
10381 return 0;
10382
10383 *ptr_offset = offset;
10384 return DECL_INITIAL (array);
10385 }
10386
10387 return 0;
10388 }
10389 \f
10390 /* Generate code to calculate OPS, an exploded expression,
10391 using a store-flag instruction and return an rtx for the result.
10392 OPS reflects a comparison.
10393
10394 If TARGET is nonzero, store the result there if convenient.
10395
10396 Return zero if there is no suitable set-flag instruction
10397 available on this machine.
10398
10399 Once expand_expr has been called on the arguments of the comparison,
10400 we are committed to doing the store flag, since it is not safe to
10401 re-evaluate the expression. We emit the store-flag insn by calling
10402 emit_store_flag, but only expand the arguments if we have a reason
10403 to believe that emit_store_flag will be successful. If we think that
10404 it will, but it isn't, we have to simulate the store-flag with a
10405 set/jump/set sequence. */
10406
10407 static rtx
10408 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10409 {
10410 enum rtx_code code;
10411 tree arg0, arg1, type;
10412 tree tem;
10413 enum machine_mode operand_mode;
10414 int unsignedp;
10415 rtx op0, op1;
10416 rtx subtarget = target;
10417 location_t loc = ops->location;
10418
10419 arg0 = ops->op0;
10420 arg1 = ops->op1;
10421
10422 /* Don't crash if the comparison was erroneous. */
10423 if (arg0 == error_mark_node || arg1 == error_mark_node)
10424 return const0_rtx;
10425
10426 type = TREE_TYPE (arg0);
10427 operand_mode = TYPE_MODE (type);
10428 unsignedp = TYPE_UNSIGNED (type);
10429
10430 /* We won't bother with BLKmode store-flag operations because it would mean
10431 passing a lot of information to emit_store_flag. */
10432 if (operand_mode == BLKmode)
10433 return 0;
10434
10435 /* We won't bother with store-flag operations involving function pointers
10436 when function pointers must be canonicalized before comparisons. */
10437 #ifdef HAVE_canonicalize_funcptr_for_compare
10438 if (HAVE_canonicalize_funcptr_for_compare
10439 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10440 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10441 == FUNCTION_TYPE))
10442 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10443 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10444 == FUNCTION_TYPE))))
10445 return 0;
10446 #endif
10447
10448 STRIP_NOPS (arg0);
10449 STRIP_NOPS (arg1);
10450
10451 /* For vector typed comparisons emit code to generate the desired
10452 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10453 expander for this. */
10454 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10455 {
10456 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10457 tree if_true = constant_boolean_node (true, ops->type);
10458 tree if_false = constant_boolean_node (false, ops->type);
10459 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10460 }
10461
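/* E.g. a V4SImode comparison "a < b" is expanded as
   VEC_COND_EXPR <a < b, {-1,-1,-1,-1}, {0,0,0,0}>, producing an
   element-wise all-ones or all-zeros mask. */
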
10473 /* Get the rtx comparison code to use. We know that EXP is a comparison
10474 operation of some type. Some comparisons against 1 and -1 can be
10475 converted to comparisons with zero. Do so here so that the tests
10476 below will be aware that we have a comparison with zero. These
10477 tests will not catch constants in the first operand, but constants
10478 are rarely passed as the first operand. */
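/* For instance, a signed "x < 1" becomes "x <= 0" and a signed "x > -1"
   becomes "x >= 0" below, so the later tests only need to recognize
   comparisons against zero. */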
10479
10480 switch (ops->code)
10481 {
10482 case EQ_EXPR:
10483 code = EQ;
10484 break;
10485 case NE_EXPR:
10486 code = NE;
10487 break;
10488 case LT_EXPR:
10489 if (integer_onep (arg1))
10490 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10491 else
10492 code = unsignedp ? LTU : LT;
10493 break;
10494 case LE_EXPR:
10495 if (! unsignedp && integer_all_onesp (arg1))
10496 arg1 = integer_zero_node, code = LT;
10497 else
10498 code = unsignedp ? LEU : LE;
10499 break;
10500 case GT_EXPR:
10501 if (! unsignedp && integer_all_onesp (arg1))
10502 arg1 = integer_zero_node, code = GE;
10503 else
10504 code = unsignedp ? GTU : GT;
10505 break;
10506 case GE_EXPR:
10507 if (integer_onep (arg1))
10508 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10509 else
10510 code = unsignedp ? GEU : GE;
10511 break;
10512
10513 case UNORDERED_EXPR:
10514 code = UNORDERED;
10515 break;
10516 case ORDERED_EXPR:
10517 code = ORDERED;
10518 break;
10519 case UNLT_EXPR:
10520 code = UNLT;
10521 break;
10522 case UNLE_EXPR:
10523 code = UNLE;
10524 break;
10525 case UNGT_EXPR:
10526 code = UNGT;
10527 break;
10528 case UNGE_EXPR:
10529 code = UNGE;
10530 break;
10531 case UNEQ_EXPR:
10532 code = UNEQ;
10533 break;
10534 case LTGT_EXPR:
10535 code = LTGT;
10536 break;
10537
10538 default:
10539 gcc_unreachable ();
10540 }
10541
10542 /* Put a constant second. */
10543 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10544 || TREE_CODE (arg0) == FIXED_CST)
10545 {
10546 tem = arg0; arg0 = arg1; arg1 = tem;
10547 code = swap_condition (code);
10548 }
10549
10550 /* If this is an equality or inequality test of a single bit, we can
10551 do this by shifting the bit being tested to the low-order bit and
10552 masking the result with the constant 1. If the condition was EQ,
10553 we xor it with 1. This does not require an scc insn and is faster
10554 than an scc insn even if we have it.
10555
10556 The code to make this transformation was moved into fold_single_bit_test,
10557 so we just call into the folder and expand its result. */
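/* E.g. "(x & 8) != 0" is expanded as "(x >> 3) & 1", and "(x & 8) == 0"
   as "((x >> 3) & 1) ^ 1". */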
10558
10559 if ((code == NE || code == EQ)
10560 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10561 && integer_pow2p (TREE_OPERAND (arg0, 1))
10562 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10563 {
10564 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10565 return expand_expr (fold_single_bit_test (loc,
10566 code == NE ? NE_EXPR : EQ_EXPR,
10567 arg0, arg1, type),
10568 target, VOIDmode, EXPAND_NORMAL);
10569 }
10570
10571 if (! get_subtarget (target)
10572 || GET_MODE (subtarget) != operand_mode)
10573 subtarget = 0;
10574
10575 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10576
10577 if (target == 0)
10578 target = gen_reg_rtx (mode);
10579
10580 /* Try a cstore if possible. */
10581 return emit_store_flag_force (target, code, op0, op1,
10582 operand_mode, unsignedp,
10583 (TYPE_PRECISION (ops->type) == 1
10584 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10585 }
10586 \f
10587
10588 /* Stubs in case we haven't got a casesi insn. */
10589 #ifndef HAVE_casesi
10590 # define HAVE_casesi 0
10591 # define gen_casesi(a, b, c, d, e) (0)
10592 # define CODE_FOR_casesi CODE_FOR_nothing
10593 #endif
10594
10595 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10596 0 otherwise (i.e. if there is no casesi instruction). */
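/* The five operands set up below follow the standard casesi pattern:
   the SImode index, the minimum case value, the range (maximum minus
   minimum), the jump-table label, and the label to branch to when the
   index is out of range. */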
10597 int
10598 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10599 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10600 rtx fallback_label ATTRIBUTE_UNUSED)
10601 {
10602 struct expand_operand ops[5];
10603 enum machine_mode index_mode = SImode;
10604 int index_bits = GET_MODE_BITSIZE (index_mode);
10605 rtx op1, op2, index;
10606
10607 if (! HAVE_casesi)
10608 return 0;
10609
10610 /* Convert the index to SImode. */
10611 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10612 {
10613 enum machine_mode omode = TYPE_MODE (index_type);
10614 rtx rangertx = expand_normal (range);
10615
10616 /* We must handle the endpoints in the original mode. */
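/* Subtracting or range-checking in the narrower SImode first could let
   a wide out-of-range index wrap around into the table. */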
10617 index_expr = build2 (MINUS_EXPR, index_type,
10618 index_expr, minval);
10619 minval = integer_zero_node;
10620 index = expand_normal (index_expr);
10621 if (default_label)
10622 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10623 omode, 1, default_label);
10624 /* Now we can safely truncate. */
10625 index = convert_to_mode (index_mode, index, 0);
10626 }
10627 else
10628 {
10629 if (TYPE_MODE (index_type) != index_mode)
10630 {
10631 index_type = lang_hooks.types.type_for_size (index_bits, 0);
10632 index_expr = fold_convert (index_type, index_expr);
10633 }
10634
10635 index = expand_normal (index_expr);
10636 }
10637
10638 do_pending_stack_adjust ();
10639
10640 op1 = expand_normal (minval);
10641 op2 = expand_normal (range);
10642
10643 create_input_operand (&ops[0], index, index_mode);
10644 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
10645 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
10646 create_fixed_operand (&ops[3], table_label);
10647 create_fixed_operand (&ops[4], (default_label
10648 ? default_label
10649 : fallback_label));
10650 expand_jump_insn (CODE_FOR_casesi, 5, ops);
10651 return 1;
10652 }
10653
10654 /* Attempt to generate a tablejump instruction; same concept. */
10655 #ifndef HAVE_tablejump
10656 #define HAVE_tablejump 0
10657 #define gen_tablejump(x, y) (0)
10658 #endif
10659
10660 /* Subroutine of the next function.
10661
10662 INDEX is the value being switched on, with the lowest value
10663 in the table already subtracted.
10664 MODE is its expected mode (needed if INDEX is constant).
10665 RANGE is the highest in-range value of INDEX (one less than the
number of jump-table entries).
10666 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10667
10668 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10669 index value is out of range. */
10670
10671 static void
10672 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10673 rtx default_label)
10674 {
10675 rtx temp, vector;
10676
10677 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10678 cfun->cfg->max_jumptable_ents = INTVAL (range);
10679
10680 /* Do an unsigned comparison (in the proper mode) between the index
10681 expression and the value which represents the length of the range.
10682 Since we just finished subtracting the lower bound of the range
10683 from the index expression, this comparison allows us to simultaneously
10684 check that the original index expression value is both greater than
10685 or equal to the minimum value of the range and less than or equal to
10686 the maximum value of the range. */
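/* E.g. for case values 5..10 the caller passes INDEX = i - 5 and RANGE = 5;
   when i < 5 the subtraction wraps to a large unsigned value, so the single
   GTU comparison below rejects both i < 5 and i > 10. */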
10687
10688 if (default_label)
10689 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10690 default_label);
10691
10692 /* If index is in range, it must fit in Pmode.
10693 Convert to Pmode so we can index with it. */
10694 if (mode != Pmode)
10695 index = convert_to_mode (Pmode, index, 1);
10696
10697 /* Don't let a MEM slip through, because then INDEX that comes
10698 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10699 and break_out_memory_refs will go to work on it and mess it up. */
10700 #ifdef PIC_CASE_VECTOR_ADDRESS
10701 if (flag_pic && !REG_P (index))
10702 index = copy_to_mode_reg (Pmode, index);
10703 #endif
10704
10705 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10706 GET_MODE_SIZE, because this indicates how large insns are. The other
10707 uses should all be Pmode, because they are addresses. This code
10708 could fail if addresses and insns are not the same size. */
10709 index = gen_rtx_PLUS (Pmode,
10710 gen_rtx_MULT (Pmode, index,
10711 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10712 gen_rtx_LABEL_REF (Pmode, table_label));
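/* With a 4-byte CASE_VECTOR_MODE this builds the address
   (plus (mult INDEX (const_int 4)) (label_ref TABLE_LABEL)). */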
10713 #ifdef PIC_CASE_VECTOR_ADDRESS
10714 if (flag_pic)
10715 index = PIC_CASE_VECTOR_ADDRESS (index);
10716 else
10717 #endif
10718 index = memory_address (CASE_VECTOR_MODE, index);
10719 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10720 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10721 convert_move (temp, vector, 0);
10722
10723 emit_jump_insn (gen_tablejump (temp, table_label));
10724
10725 /* If we are generating PIC code or if the table is PC-relative, the
10726 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10727 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10728 emit_barrier ();
10729 }
10730
10731 int
10732 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10733 rtx table_label, rtx default_label)
10734 {
10735 rtx index;
10736
10737 if (! HAVE_tablejump)
10738 return 0;
10739
10740 index_expr = fold_build2 (MINUS_EXPR, index_type,
10741 fold_convert (index_type, index_expr),
10742 fold_convert (index_type, minval));
10743 index = expand_normal (index_expr);
10744 do_pending_stack_adjust ();
10745
10746 do_tablejump (index, TYPE_MODE (index_type),
10747 convert_modes (TYPE_MODE (index_type),
10748 TYPE_MODE (TREE_TYPE (range)),
10749 expand_normal (range),
10750 TYPE_UNSIGNED (TREE_TYPE (range))),
10751 table_label, default_label);
10752 return 1;
10753 }
10754
10755 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
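/* E.g. a V4SImode VECTOR_CST whose element chain lists only 1 and 2 becomes
   a CONST_VECTOR of {1, 2, 0, 0}; trailing elements missing from the chain
   are filled with zeros below. */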
10756 static rtx
10757 const_vector_from_tree (tree exp)
10758 {
10759 rtvec v;
10760 int units, i;
10761 tree link, elt;
10762 enum machine_mode inner, mode;
10763
10764 mode = TYPE_MODE (TREE_TYPE (exp));
10765
10766 if (initializer_zerop (exp))
10767 return CONST0_RTX (mode);
10768
10769 units = GET_MODE_NUNITS (mode);
10770 inner = GET_MODE_INNER (mode);
10771
10772 v = rtvec_alloc (units);
10773
10774 link = TREE_VECTOR_CST_ELTS (exp);
10775 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10776 {
10777 elt = TREE_VALUE (link);
10778
10779 if (TREE_CODE (elt) == REAL_CST)
10780 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10781 inner);
10782 else if (TREE_CODE (elt) == FIXED_CST)
10783 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10784 inner);
10785 else
10786 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
10787 inner);
10788 }
10789
10790 /* Initialize remaining elements to 0. */
10791 for (; i < units; ++i)
10792 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10793
10794 return gen_rtx_CONST_VECTOR (mode, v);
10795 }
10796
10797 /* Build a decl for a personality function given a language prefix. */
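/* E.g. build_personality_function ("gxx") returns a declaration of
   __gxx_personality_v0 under DWARF2/target unwinding, or of
   __gxx_personality_sj0 when SJLJ exceptions are in use. */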
10798
10799 tree
10800 build_personality_function (const char *lang)
10801 {
10802 const char *unwind_and_version;
10803 tree decl, type;
10804 char *name;
10805
10806 switch (targetm_common.except_unwind_info (&global_options))
10807 {
10808 case UI_NONE:
10809 return NULL;
10810 case UI_SJLJ:
10811 unwind_and_version = "_sj0";
10812 break;
10813 case UI_DWARF2:
10814 case UI_TARGET:
10815 unwind_and_version = "_v0";
10816 break;
10817 default:
10818 gcc_unreachable ();
10819 }
10820
10821 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
10822
10823 type = build_function_type_list (integer_type_node, integer_type_node,
10824 long_long_unsigned_type_node,
10825 ptr_type_node, ptr_type_node, NULL_TREE);
10826 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10827 get_identifier (name), type);
10828 DECL_ARTIFICIAL (decl) = 1;
10829 DECL_EXTERNAL (decl) = 1;
10830 TREE_PUBLIC (decl) = 1;
10831
10832 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
10833 are the flags assigned by targetm.encode_section_info. */
10834 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
10835
10836 return decl;
10837 }
10838
10839 /* Extracts the personality function of DECL and returns the corresponding
10840 libfunc. */
10841
10842 rtx
10843 get_personality_function (tree decl)
10844 {
10845 tree personality = DECL_FUNCTION_PERSONALITY (decl);
10846 enum eh_personality_kind pk;
10847
10848 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10849 if (pk == eh_personality_none)
10850 return NULL;
10851
10852 if (!personality
10853 && pk == eh_personality_any)
10854 personality = lang_hooks.eh_personality ();
10855
10856 if (pk == eh_personality_lang)
10857 gcc_assert (personality != NULL_TREE);
10858
10859 return XEXP (DECL_RTL (personality), 0);
10860 }
10861
10862 #include "gt-expr.h"