/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
   Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif


/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static rtx clear_storage_via_libcall (rtx, rtx, bool);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, tree, int);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
                  != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                          to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_cst (NULL_TREE,
                                        GET_MODE_BITSIZE (to_mode)
                                        - GET_MODE_BITSIZE (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
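
/* Illustrative sketch, not part of the original file: a typical use of
   convert_move from a caller's perspective.  The function name below is
   hypothetical, and the example assumes the target supports SImode and
   DImode.  Kept under #if 0 so it is not compiled.  */
#if 0
static rtx
example_widen_si_to_di (rtx src_si)
{
  rtx dest_di = gen_reg_rtx (DImode);
  /* UNSIGNEDP == 1 requests zero-extension; 0 would sign-extend.  */
  convert_move (dest_di, src_si, 1);
  return dest_di;
}
#endif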

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting an integer constant into a vector mode is always
     equivalent to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
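
/* Illustrative sketch, not part of the original file: convert_modes is
   the entry point to use when the operand may be a VOIDmode constant,
   since OLDMODE then says how many bits of X are significant.  The
   function name is hypothetical; kept under #if 0.  */
#if 0
static rtx
example_narrow_to_qi (rtx x, enum machine_mode oldmode)
{
  /* UNSIGNEDP == 1 asks for zero-extension semantics if X is a
     CONST_INT that must be reinterpreted.  */
  return convert_modes (QImode, oldmode, x, 1);
}
#endif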
\f
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
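
/* Illustrative sketch, not part of the original file: how a caller might
   use can_move_by_pieces to choose between an inline piecewise copy and
   the general emit_block_move path.  The function name is hypothetical;
   kept under #if 0.  */
#if 0
static void
example_copy_block (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                    unsigned int align)
{
  if (can_move_by_pieces (len, align))
    move_by_pieces (to, from, len, align, 0);	/* ENDP == 0: return TO.  */
  else
    emit_block_move (to, from, GEN_INT (len), BLOCK_OP_NORMAL);
}
#endif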

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is the maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
   mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
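
/* Illustrative sketch, not part of the original file: the ENDP argument
   selects what move_by_pieces returns, mirroring mempcpy (ENDP == 1)
   and stpcpy (ENDP == 2).  The function name is hypothetical; kept
   under #if 0.  */
#if 0
static rtx
example_mempcpy_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                           unsigned int align)
{
  /* Returns a QImode MEM addressing the byte just past the copy.  */
  return move_by_pieces (to, from, len, align, 1);
}
#endif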

/* Return the number of insns required to move L bytes by pieces.
   ALIGN (in bits) is the maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
                                          method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
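
/* Illustrative sketch, not part of the original file: copying a BLKmode
   aggregate with emit_block_move.  BLOCK_OP_CALL_PARM would be used
   instead when the copy feeds an outgoing argument, and
   BLOCK_OP_NO_LIBCALL when a memcpy call must be avoided.  The function
   name is hypothetical; kept under #if 0.  */
#if 0
static void
example_copy_aggregate (rtx dst_mem, rtx src_mem, HOST_WIDE_INT size)
{
  emit_block_move (dst_mem, src_mem, GEN_INT (size), BLOCK_OP_NORMAL);
}
#endif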

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
          return false;
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}
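
/* Illustrative sketch, not part of the original file: a target or
   front end that wants block copies to call something other than
   "memcpy" can redirect the assembler name; the name used here is made
   up.  Kept under #if 0.  */
#if 0
static void
example_rename_block_move (void)
{
  init_block_move_fn ("__example_memcpy");
}
#endif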

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
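
/* Illustrative sketch, not part of the original file: loading a BLKmode
   value X into NREGS consecutive hard registers, e.g. when setting up a
   multi-register return value.  EXAMPLE_FIRST_REGNO is a made-up hard
   register number.  Kept under #if 0.  */
#if 0
#define EXAMPLE_FIRST_REGNO 3	/* Hypothetical.  */
static void
example_load_hard_regs (rtx x, int nregs)
{
  move_block_to_reg (EXAMPLE_FIRST_REGNO, x, nregs, BLKmode);
}
#endif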

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in the first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
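
/* Illustrative sketch, not part of the original file: gen_group_rtx is
   typically paired with emit_group_move, e.g. to park an incoming
   PARALLEL of hard registers in fresh pseudos.  The function name is
   hypothetical; kept under #if 0.  */
#if 0
static rtx
example_group_to_pseudos (rtx incoming)
{
  rtx group = gen_group_rtx (incoming);	/* Same shape, new pseudos.  */
  emit_group_move (group, incoming);
  return group;
}
#endif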

/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into the corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
               || (REG_P (src) && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}

/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block SRC in bytes, or -1
   if not known.  */
1761
1762 void
1763 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1764 {
1765 rtx *tmps;
1766 int i;
1767
1768 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1769 emit_group_load_1 (tmps, dst, src, type, ssize);
1770
1771 /* Copy the extracted pieces into the proper (probable) hard regs. */
1772 for (i = 0; i < XVECLEN (dst, 0); i++)
1773 {
1774 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1775 if (d == NULL)
1776 continue;
1777 emit_move_insn (d, tmps[i]);
1778 }
1779 }
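
/* Illustrative sketch, not part of the original file: an 8-byte value
   split across two SImode hard registers can be described by a PARALLEL
   pairing each register with its byte offset; emit_group_load then pulls
   the pieces out of a BLKmode MEM.  Register numbers 3 and 4 below are
   hypothetical.  */
#if 0	/* Example fragment only; excluded from the build.  */
  rtvec v = rtvec_alloc (2);
  RTVEC_ELT (v, 0) = gen_rtx_EXPR_LIST (VOIDmode,
					gen_rtx_REG (SImode, 3), GEN_INT (0));
  RTVEC_ELT (v, 1) = gen_rtx_EXPR_LIST (VOIDmode,
					gen_rtx_REG (SImode, 4), GEN_INT (4));
  emit_group_load (gen_rtx_PARALLEL (VOIDmode, v), src_mem, type, 8);
#endif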
1780
1781 /* Similar, but load SRC into new pseudos in a format that looks like
1782 PARALLEL. This can later be fed to emit_group_move to get things
1783 in the right place. */
1784
1785 rtx
1786 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1787 {
1788 rtvec vec;
1789 int i;
1790
1791 vec = rtvec_alloc (XVECLEN (parallel, 0));
1792 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1793
1794 /* Convert the vector to look just like the original PARALLEL, except
1795 with the computed values. */
1796 for (i = 0; i < XVECLEN (parallel, 0); i++)
1797 {
1798 rtx e = XVECEXP (parallel, 0, i);
1799 rtx d = XEXP (e, 0);
1800
1801 if (d)
1802 {
1803 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1804 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1805 }
1806 RTVEC_ELT (vec, i) = e;
1807 }
1808
1809 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1810 }
1811
1812 /* Emit code to move a block SRC to block DST, where SRC and DST are
1813 non-consecutive groups of registers, each represented by a PARALLEL. */
1814
1815 void
1816 emit_group_move (rtx dst, rtx src)
1817 {
1818 int i;
1819
1820 gcc_assert (GET_CODE (src) == PARALLEL
1821 && GET_CODE (dst) == PARALLEL
1822 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1823
1824 /* Skip first entry if NULL. */
1825 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1826 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1827 XEXP (XVECEXP (src, 0, i), 0));
1828 }
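
/* A sketch of how these entry points compose (an assumed usage pattern,
   not mandated by this file): materialize SRC into pseudos shaped like
   PAR, emit intervening code that must not clobber the hard registers
   yet, then move the pseudos into place in one step.  */
#if 0	/* Example fragment only.  */
  rtx tmp_par = emit_group_load_into_temps (par, src_mem, type, ssize);
  /* ... other expansion here ...  */
  emit_group_move (par, tmp_par);
#endif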
1829
1830 /* Move a group of registers represented by a PARALLEL into pseudos. */
1831
1832 rtx
1833 emit_group_move_into_temps (rtx src)
1834 {
1835 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1836 int i;
1837
1838 for (i = 0; i < XVECLEN (src, 0); i++)
1839 {
1840 rtx e = XVECEXP (src, 0, i);
1841 rtx d = XEXP (e, 0);
1842
1843 if (d)
1844 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1845 RTVEC_ELT (vec, i) = e;
1846 }
1847
1848 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1849 }
1850
1851 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1852 where SRC is non-consecutive registers represented by a PARALLEL.
1853 SSIZE represents the total size of block ORIG_DST, or -1 if not
1854 known. */
1855
1856 void
1857 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1858 {
1859 rtx *tmps, dst;
1860 int start, finish, i;
1861 enum machine_mode m = GET_MODE (orig_dst);
1862
1863 gcc_assert (GET_CODE (src) == PARALLEL);
1864
1865 if (!SCALAR_INT_MODE_P (m)
1866 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1867 {
1868 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1869 if (imode == BLKmode)
1870 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1871 else
1872 dst = gen_reg_rtx (imode);
1873 emit_group_store (dst, src, type, ssize);
1874 if (imode != BLKmode)
1875 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1876 emit_move_insn (orig_dst, dst);
1877 return;
1878 }
1879
1880 /* Check for a NULL entry, used to indicate that the parameter goes
1881 both on the stack and in registers. */
1882 if (XEXP (XVECEXP (src, 0, 0), 0))
1883 start = 0;
1884 else
1885 start = 1;
1886 finish = XVECLEN (src, 0);
1887
1888 tmps = alloca (sizeof (rtx) * finish);
1889
1890 /* Copy the (probable) hard regs into pseudos. */
1891 for (i = start; i < finish; i++)
1892 {
1893 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1894 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1895 {
1896 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1897 emit_move_insn (tmps[i], reg);
1898 }
1899 else
1900 tmps[i] = reg;
1901 }
1902
1903 /* If we won't be storing directly into memory, protect the real destination
1904 from strange tricks we might play. */
1905 dst = orig_dst;
1906 if (GET_CODE (dst) == PARALLEL)
1907 {
1908 rtx temp;
1909
1910 /* We can get a PARALLEL dst if there is a conditional expression in
1911 a return statement. In that case, the dst and src are the same,
1912 so no action is necessary. */
1913 if (rtx_equal_p (dst, src))
1914 return;
1915
1916 /* It is unclear if we can ever reach here, but we may as well handle
1917 it. Allocate a temporary, and split this into a store/load to/from
1918 the temporary. */
1919
1920 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1921 emit_group_store (temp, src, type, ssize);
1922 emit_group_load (dst, temp, type, ssize);
1923 return;
1924 }
1925 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1926 {
1927 enum machine_mode outer = GET_MODE (dst);
1928 enum machine_mode inner;
1929 HOST_WIDE_INT bytepos;
1930 bool done = false;
1931 rtx temp;
1932
1933 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1934 dst = gen_reg_rtx (outer);
1935
1936 /* Make life a bit easier for combine. */
1937 /* If the first element of the vector is the low part
1938 of the destination mode, use a paradoxical subreg to
1939 initialize the destination. */
1940 if (start < finish)
1941 {
1942 inner = GET_MODE (tmps[start]);
1943 bytepos = subreg_lowpart_offset (inner, outer);
1944 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1945 {
1946 temp = simplify_gen_subreg (outer, tmps[start],
1947 inner, 0);
1948 if (temp)
1949 {
1950 emit_move_insn (dst, temp);
1951 done = true;
1952 start++;
1953 }
1954 }
1955 }
1956
1957 /* If the first element wasn't the low part, try the last. */
1958 if (!done
1959 && start < finish - 1)
1960 {
1961 inner = GET_MODE (tmps[finish - 1]);
1962 bytepos = subreg_lowpart_offset (inner, outer);
1963 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1964 {
1965 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1966 inner, 0);
1967 if (temp)
1968 {
1969 emit_move_insn (dst, temp);
1970 done = true;
1971 finish--;
1972 }
1973 }
1974 }
1975
1976 /* Otherwise, simply initialize the result to zero. */
1977 if (!done)
1978 emit_move_insn (dst, CONST0_RTX (outer));
1979 }
1980
1981 /* Process the pieces. */
1982 for (i = start; i < finish; i++)
1983 {
1984 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1985 enum machine_mode mode = GET_MODE (tmps[i]);
1986 unsigned int bytelen = GET_MODE_SIZE (mode);
1987 rtx dest = dst;
1988
1989 /* Handle trailing fragments that run over the size of the struct. */
1990 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1991 {
1992 /* store_bit_field always takes its value from the lsb.
1993 Move the fragment to the lsb if it's not already there. */
1994 if (
1995 #ifdef BLOCK_REG_PADDING
1996 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1997 == (BYTES_BIG_ENDIAN ? upward : downward)
1998 #else
1999 BYTES_BIG_ENDIAN
2000 #endif
2001 )
2002 {
2003 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2004 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2005 build_int_cst (NULL_TREE, shift),
2006 tmps[i], 0);
2007 }
2008 bytelen = ssize - bytepos;
2009 }
2010
2011 if (GET_CODE (dst) == CONCAT)
2012 {
2013 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2014 dest = XEXP (dst, 0);
2015 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2016 {
2017 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2018 dest = XEXP (dst, 1);
2019 }
2020 else
2021 {
2022 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2023 dest = assign_stack_temp (GET_MODE (dest),
2024 GET_MODE_SIZE (GET_MODE (dest)), 0);
2025 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2026 tmps[i]);
2027 dst = dest;
2028 break;
2029 }
2030 }
2031
2032 /* Optimize the access just a bit. */
2033 if (MEM_P (dest)
2034 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2035 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2036 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2037 && bytelen == GET_MODE_SIZE (mode))
2038 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2039 else
2040 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2041 mode, tmps[i]);
2042 }
2043
2044 /* Copy from the pseudo into the (probable) hard reg. */
2045 if (orig_dst != dst)
2046 emit_move_insn (orig_dst, dst);
2047 }
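
/* Sketch of the store direction with assumed operands: scatter a
   PARALLEL of register pieces into a BLKmode stack temporary.  */
#if 0	/* Example fragment only.  */
  rtx blk = assign_stack_temp (BLKmode, ssize, 0);
  emit_group_store (blk, src_parallel, type, ssize);
#endif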
2048
2049 /* Generate code to copy a BLKmode object of TYPE out of a
2050 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2051 is null, a stack temporary is created. TGTBLK is returned.
2052
2053 The purpose of this routine is to handle functions that return
2054 BLKmode structures in registers. Some machines (the PA for example)
2055 want to return all small structures in registers regardless of the
2056 structure's alignment. */
2057
2058 rtx
2059 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2060 {
2061 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2062 rtx src = NULL, dst = NULL;
2063 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2064 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2065
2066 if (tgtblk == 0)
2067 {
2068 tgtblk = assign_temp (build_qualified_type (type,
2069 (TYPE_QUALS (type)
2070 | TYPE_QUAL_CONST)),
2071 0, 1, 1);
2072 preserve_temp_slots (tgtblk);
2073 }
2074
2075 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2076 into a new pseudo which is a full word. */
2077
2078 if (GET_MODE (srcreg) != BLKmode
2079 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2080 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2081
2082 /* If the structure doesn't take up a whole number of words, see whether
2083 SRCREG is padded on the left or on the right. If it's on the left,
2084 set PADDING_CORRECTION to the number of bits to skip.
2085
2086 In most ABIs, the structure will be returned at the least significant end of
2087 the register, which translates to right padding on little-endian
2088 targets and left padding on big-endian targets. The opposite
2089 holds if the structure is returned at the most significant
2090 end of the register. */
2091 if (bytes % UNITS_PER_WORD != 0
2092 && (targetm.calls.return_in_msb (type)
2093 ? !BYTES_BIG_ENDIAN
2094 : BYTES_BIG_ENDIAN))
2095 padding_correction
2096 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2097
2098 /* Copy the structure BITSIZE bits at a time.
2099
2100 We could probably emit more efficient code for machines which do not use
2101 strict alignment, but it doesn't seem worth the effort at the current
2102 time. */
2103 for (bitpos = 0, xbitpos = padding_correction;
2104 bitpos < bytes * BITS_PER_UNIT;
2105 bitpos += bitsize, xbitpos += bitsize)
2106 {
2107 /* We need a new source operand each time xbitpos is on a
2108 word boundary and when xbitpos == padding_correction
2109 (the first time through). */
2110 if (xbitpos % BITS_PER_WORD == 0
2111 || xbitpos == padding_correction)
2112 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2113 GET_MODE (srcreg));
2114
2115 /* We need a new destination operand each time bitpos is on
2116 a word boundary. */
2117 if (bitpos % BITS_PER_WORD == 0)
2118 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2119
2120 /* Use xbitpos for the source extraction (right justified) and
2121 bitpos for the destination store (left justified). */
2122 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2123 extract_bit_field (src, bitsize,
2124 xbitpos % BITS_PER_WORD, 1,
2125 NULL_RTX, word_mode, word_mode));
2126 }
2127
2128 return tgtblk;
2129 }
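
/* Typical use, sketched with a hypothetical return register: after a
   call that returns a small BLKmode struct in a register, spill it
   bitfield by bitfield into addressable memory.  */
#if 0	/* Example fragment only.  */
  rtx ret = copy_blkmode_from_reg (NULL_RTX, gen_rtx_REG (DImode, 3), type);
#endif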
2130
2131 /* Add a USE expression for REG to the (possibly empty) list pointed
2132 to by CALL_FUSAGE. REG must denote a hard register. */
2133
2134 void
2135 use_reg (rtx *call_fusage, rtx reg)
2136 {
2137 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2138
2139 *call_fusage
2140 = gen_rtx_EXPR_LIST (VOIDmode,
2141 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2142 }
2143
2144 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2145 starting at REGNO. All of these registers must be hard registers. */
2146
2147 void
2148 use_regs (rtx *call_fusage, int regno, int nregs)
2149 {
2150 int i;
2151
2152 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2153
2154 for (i = 0; i < nregs; i++)
2155 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2156 }
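
/* Sketch with assumed register numbers: record that an expanded call
   reads two consecutive argument registers so that flow analysis keeps
   them live up to the CALL_INSN.  */
#if 0	/* Example fragment only.  */
  rtx call_fusage = NULL_RTX;
  use_regs (&call_fusage, 3, 2);	/* Hypothetically, regs 3 and 4.  */
  /* The caller attaches call_fusage to the CALL_INSN it emits.  */
#endif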
2157
2158 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2159 PARALLEL REGS. This is for calls that pass values in multiple
2160 non-contiguous locations. The Irix 6 ABI has examples of this. */
2161
2162 void
2163 use_group_regs (rtx *call_fusage, rtx regs)
2164 {
2165 int i;
2166
2167 for (i = 0; i < XVECLEN (regs, 0); i++)
2168 {
2169 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2170
2171 /* A NULL entry means the parameter goes both on the stack and in
2172 registers. This can also be a MEM for targets that pass values
2173 partially on the stack and partially in registers. */
2174 if (reg != 0 && REG_P (reg))
2175 use_reg (call_fusage, reg);
2176 }
2177 }
2178 \f
2179
2180 /* Determine whether the LEN bytes generated by CONSTFUN can be
2181 stored to memory using several move instructions. CONSTFUNDATA is
2182 a pointer which will be passed as argument in every CONSTFUN call.
2183 ALIGN is maximum alignment we can assume. Return nonzero if a
2184 call to store_by_pieces should succeed. */
2185
2186 int
2187 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2188 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2189 void *constfundata, unsigned int align)
2190 {
2191 unsigned HOST_WIDE_INT l;
2192 unsigned int max_size;
2193 HOST_WIDE_INT offset = 0;
2194 enum machine_mode mode, tmode;
2195 enum insn_code icode;
2196 int reverse;
2197 rtx cst;
2198
2199 if (len == 0)
2200 return 1;
2201
2202 if (! STORE_BY_PIECES_P (len, align))
2203 return 0;
2204
2205 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2206 if (align >= GET_MODE_ALIGNMENT (tmode))
2207 align = GET_MODE_ALIGNMENT (tmode);
2208 else
2209 {
2210 enum machine_mode xmode;
2211
2212 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2213 tmode != VOIDmode;
2214 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2215 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2216 || SLOW_UNALIGNED_ACCESS (tmode, align))
2217 break;
2218
2219 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2220 }
2221
2222 /* We would first store what we can in the largest integer mode, then go to
2223 successively smaller modes. */
2224
2225 for (reverse = 0;
2226 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2227 reverse++)
2228 {
2229 l = len;
2230 mode = VOIDmode;
2231 max_size = STORE_MAX_PIECES + 1;
2232 while (max_size > 1)
2233 {
2234 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2235 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2236 if (GET_MODE_SIZE (tmode) < max_size)
2237 mode = tmode;
2238
2239 if (mode == VOIDmode)
2240 break;
2241
2242 icode = mov_optab->handlers[(int) mode].insn_code;
2243 if (icode != CODE_FOR_nothing
2244 && align >= GET_MODE_ALIGNMENT (mode))
2245 {
2246 unsigned int size = GET_MODE_SIZE (mode);
2247
2248 while (l >= size)
2249 {
2250 if (reverse)
2251 offset -= size;
2252
2253 cst = (*constfun) (constfundata, offset, mode);
2254 if (!LEGITIMATE_CONSTANT_P (cst))
2255 return 0;
2256
2257 if (!reverse)
2258 offset += size;
2259
2260 l -= size;
2261 }
2262 }
2263
2264 max_size = GET_MODE_SIZE (mode);
2265 }
2266
2267 /* The code above should have handled everything. */
2268 gcc_assert (!l);
2269 }
2270
2271 return 1;
2272 }
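
/* A minimal CONSTFUN sketch (the name is invented for illustration):
   hand back an all-ones constant in whatever mode the by-pieces
   machinery requests.  */
#if 0	/* Example fragment only.  */
static rtx
all_ones_piece (void *data ATTRIBUTE_UNUSED,
		HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (-1, mode));
}
#endif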
2273
2274 /* Generate several move instructions to store LEN bytes generated by
2275 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2276 pointer which will be passed as argument in every CONSTFUN call.
2277 ALIGN is maximum alignment we can assume.
2278 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2279 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2280 stpcpy. */
2281
2282 rtx
2283 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2284 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2285 void *constfundata, unsigned int align, int endp)
2286 {
2287 struct store_by_pieces data;
2288
2289 if (len == 0)
2290 {
2291 gcc_assert (endp != 2);
2292 return to;
2293 }
2294
2295 gcc_assert (STORE_BY_PIECES_P (len, align));
2296 data.constfun = constfun;
2297 data.constfundata = constfundata;
2298 data.len = len;
2299 data.to = to;
2300 store_by_pieces_1 (&data, align);
2301 if (endp)
2302 {
2303 rtx to1;
2304
2305 gcc_assert (!data.reverse);
2306 if (data.autinc_to)
2307 {
2308 if (endp == 2)
2309 {
2310 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2311 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2312 else
2313 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2314 -1));
2315 }
2316 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2317 data.offset);
2318 }
2319 else
2320 {
2321 if (endp == 2)
2322 --data.offset;
2323 to1 = adjust_address (data.to, QImode, data.offset);
2324 }
2325 return to1;
2326 }
2327 else
2328 return data.to;
2329 }
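
/* Sketch of the ENDP convention, reusing the hypothetical callback
   above: write LEN constant bytes and get back the address one past
   the last byte written, as a mempcpy expansion would want.  */
#if 0	/* Example fragment only.  */
  rtx end = store_by_pieces (dest_mem, len, all_ones_piece, NULL,
			     align, /*endp=*/1);
#endif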
2330
2331 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2332 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2333
2334 static void
2335 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2336 {
2337 struct store_by_pieces data;
2338
2339 if (len == 0)
2340 return;
2341
2342 data.constfun = clear_by_pieces_1;
2343 data.constfundata = NULL;
2344 data.len = len;
2345 data.to = to;
2346 store_by_pieces_1 (&data, align);
2347 }
2348
2349 /* Callback routine for clear_by_pieces.
2350 Return const0_rtx unconditionally. */
2351
2352 static rtx
2353 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2354 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2355 enum machine_mode mode ATTRIBUTE_UNUSED)
2356 {
2357 return const0_rtx;
2358 }
2359
2360 /* Subroutine of clear_by_pieces and store_by_pieces.
2361 Generate several move instructions to store LEN bytes of block TO. (A MEM
2362 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2363
2364 static void
2365 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2366 unsigned int align ATTRIBUTE_UNUSED)
2367 {
2368 rtx to_addr = XEXP (data->to, 0);
2369 unsigned int max_size = STORE_MAX_PIECES + 1;
2370 enum machine_mode mode = VOIDmode, tmode;
2371 enum insn_code icode;
2372
2373 data->offset = 0;
2374 data->to_addr = to_addr;
2375 data->autinc_to
2376 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2377 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2378
2379 data->explicit_inc_to = 0;
2380 data->reverse
2381 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2382 if (data->reverse)
2383 data->offset = data->len;
2384
2385 /* If storing requires more than two move insns,
2386 copy addresses to registers (to make displacements shorter)
2387 and use post-increment if available. */
2388 if (!data->autinc_to
2389 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2390 {
2391 /* Determine the main mode we'll be using. */
2392 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2393 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2394 if (GET_MODE_SIZE (tmode) < max_size)
2395 mode = tmode;
2396
2397 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2398 {
2399 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2400 data->autinc_to = 1;
2401 data->explicit_inc_to = -1;
2402 }
2403
2404 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2405 && ! data->autinc_to)
2406 {
2407 data->to_addr = copy_addr_to_reg (to_addr);
2408 data->autinc_to = 1;
2409 data->explicit_inc_to = 1;
2410 }
2411
2412 if (!data->autinc_to && CONSTANT_P (to_addr))
2413 data->to_addr = copy_addr_to_reg (to_addr);
2414 }
2415
2416 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2417 if (align >= GET_MODE_ALIGNMENT (tmode))
2418 align = GET_MODE_ALIGNMENT (tmode);
2419 else
2420 {
2421 enum machine_mode xmode;
2422
2423 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2424 tmode != VOIDmode;
2425 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2426 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2427 || SLOW_UNALIGNED_ACCESS (tmode, align))
2428 break;
2429
2430 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2431 }
2432
2433 /* First store what we can in the largest integer mode, then go to
2434 successively smaller modes. */
2435
2436 while (max_size > 1)
2437 {
2438 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2439 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2440 if (GET_MODE_SIZE (tmode) < max_size)
2441 mode = tmode;
2442
2443 if (mode == VOIDmode)
2444 break;
2445
2446 icode = mov_optab->handlers[(int) mode].insn_code;
2447 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2448 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2449
2450 max_size = GET_MODE_SIZE (mode);
2451 }
2452
2453 /* The code above should have handled everything. */
2454 gcc_assert (!data->len);
2455 }
2456
2457 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2458 with move instructions for mode MODE. GENFUN is the gen_... function
2459 to make a move insn for that mode. DATA has all the other info. */
2460
2461 static void
2462 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2463 struct store_by_pieces *data)
2464 {
2465 unsigned int size = GET_MODE_SIZE (mode);
2466 rtx to1, cst;
2467
2468 while (data->len >= size)
2469 {
2470 if (data->reverse)
2471 data->offset -= size;
2472
2473 if (data->autinc_to)
2474 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2475 data->offset);
2476 else
2477 to1 = adjust_address (data->to, mode, data->offset);
2478
2479 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2480 emit_insn (gen_add2_insn (data->to_addr,
2481 GEN_INT (-(HOST_WIDE_INT) size)));
2482
2483 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2484 emit_insn ((*genfun) (to1, cst));
2485
2486 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2487 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2488
2489 if (! data->reverse)
2490 data->offset += size;
2491
2492 data->len -= size;
2493 }
2494 }
2495 \f
2496 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2497 its length in bytes. */
2498
2499 rtx
2500 clear_storage (rtx object, rtx size, enum block_op_methods method)
2501 {
2502 enum machine_mode mode = GET_MODE (object);
2503 unsigned int align;
2504
2505 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2506
2507 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2508 just move a zero. Otherwise, do this a piece at a time. */
2509 if (mode != BLKmode
2510 && GET_CODE (size) == CONST_INT
2511 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2512 {
2513 rtx zero = CONST0_RTX (mode);
2514 if (zero != NULL)
2515 {
2516 emit_move_insn (object, zero);
2517 return NULL;
2518 }
2519
2520 if (COMPLEX_MODE_P (mode))
2521 {
2522 zero = CONST0_RTX (GET_MODE_INNER (mode));
2523 if (zero != NULL)
2524 {
2525 write_complex_part (object, zero, 0);
2526 write_complex_part (object, zero, 1);
2527 return NULL;
2528 }
2529 }
2530 }
2531
2532 if (size == const0_rtx)
2533 return NULL;
2534
2535 align = MEM_ALIGN (object);
2536
2537 if (GET_CODE (size) == CONST_INT
2538 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2539 clear_by_pieces (object, INTVAL (size), align);
2540 else if (set_storage_via_setmem (object, size, const0_rtx, align))
2541 ;
2542 else
2543 return clear_storage_via_libcall (object, size,
2544 method == BLOCK_OP_TAILCALL);
2545
2546 return NULL;
2547 }
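
/* Sketch with assumed operands: zero a 32-byte BLKmode temporary,
   letting clear_storage choose between by-pieces moves, a setmem
   pattern, and a memset libcall.  */
#if 0	/* Example fragment only.  */
  rtx blk = assign_stack_temp (BLKmode, 32, 0);
  clear_storage (blk, GEN_INT (32), BLOCK_OP_NORMAL);
#endif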
2548
2549 /* A subroutine of clear_storage. Expand a call to memset.
2550 Return the return value of memset, 0 otherwise. */
2551
2552 static rtx
2553 clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2554 {
2555 tree call_expr, arg_list, fn, object_tree, size_tree;
2556 enum machine_mode size_mode;
2557 rtx retval;
2558
2559 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2560 place those pseudos into a VAR_DECL and use them later. */
2561
2562 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2563
2564 size_mode = TYPE_MODE (sizetype);
2565 size = convert_to_mode (size_mode, size, 1);
2566 size = copy_to_mode_reg (size_mode, size);
2567
2568 /* It is incorrect to use the libcall calling conventions to call
2569 memset in this context. This could be a user call to memset and
2570 the user may wish to examine the return value from memset. For
2571 targets where libcalls and normal calls have different conventions
2572 for returning pointers, we could end up generating incorrect code. */
2573
2574 object_tree = make_tree (ptr_type_node, object);
2575 size_tree = make_tree (sizetype, size);
2576
2577 fn = clear_storage_libcall_fn (true);
2578 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2579 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2580 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2581
2582 /* Now we have to build up the CALL_EXPR itself. */
2583 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2584 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2585 call_expr, arg_list, NULL_TREE);
2586 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2587
2588 retval = expand_normal (call_expr);
2589
2590 return retval;
2591 }
2592
2593 /* A subroutine of clear_storage_via_libcall. Create the tree node
2594 for the function we use for block clears. The first time FOR_CALL
2595 is true, we call assemble_external. */
2596
2597 static GTY(()) tree block_clear_fn;
2598
2599 void
2600 init_block_clear_fn (const char *asmspec)
2601 {
2602 if (!block_clear_fn)
2603 {
2604 tree fn, args;
2605
2606 fn = get_identifier ("memset");
2607 args = build_function_type_list (ptr_type_node, ptr_type_node,
2608 integer_type_node, sizetype,
2609 NULL_TREE);
2610
2611 fn = build_decl (FUNCTION_DECL, fn, args);
2612 DECL_EXTERNAL (fn) = 1;
2613 TREE_PUBLIC (fn) = 1;
2614 DECL_ARTIFICIAL (fn) = 1;
2615 TREE_NOTHROW (fn) = 1;
2616 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2617 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2618
2619 block_clear_fn = fn;
2620 }
2621
2622 if (asmspec)
2623 set_user_assembler_name (block_clear_fn, asmspec);
2624 }
2625
2626 static tree
2627 clear_storage_libcall_fn (int for_call)
2628 {
2629 static bool emitted_extern;
2630
2631 if (!block_clear_fn)
2632 init_block_clear_fn (NULL);
2633
2634 if (for_call && !emitted_extern)
2635 {
2636 emitted_extern = true;
2637 make_decl_rtl (block_clear_fn);
2638 assemble_external (block_clear_fn);
2639 }
2640
2641 return block_clear_fn;
2642 }
2643 \f
2644 /* Expand a setmem pattern; return true if successful. */
2645
2646 bool
2647 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2648 {
2649 /* Try the most limited insn first, because there's no point
2650 including more than one in the machine description unless
2651 the more limited one has some advantage. */
2652
2653 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2654 enum machine_mode mode;
2655
2656 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2657 mode = GET_MODE_WIDER_MODE (mode))
2658 {
2659 enum insn_code code = setmem_optab[(int) mode];
2660 insn_operand_predicate_fn pred;
2661
2662 if (code != CODE_FOR_nothing
2663 /* We don't need MODE to be narrower than
2664 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2665 the mode mask, as it is returned by the macro, it will
2666 definitely be less than the actual mode mask. */
2667 && ((GET_CODE (size) == CONST_INT
2668 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2669 <= (GET_MODE_MASK (mode) >> 1)))
2670 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2671 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2672 || (*pred) (object, BLKmode))
2673 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2674 || (*pred) (opalign, VOIDmode)))
2675 {
2676 rtx opsize, opchar;
2677 enum machine_mode char_mode;
2678 rtx last = get_last_insn ();
2679 rtx pat;
2680
2681 opsize = convert_to_mode (mode, size, 1);
2682 pred = insn_data[(int) code].operand[1].predicate;
2683 if (pred != 0 && ! (*pred) (opsize, mode))
2684 opsize = copy_to_mode_reg (mode, opsize);
2685
2686 opchar = val;
2687 char_mode = insn_data[(int) code].operand[2].mode;
2688 if (char_mode != VOIDmode)
2689 {
2690 opchar = convert_to_mode (char_mode, opchar, 1);
2691 pred = insn_data[(int) code].operand[2].predicate;
2692 if (pred != 0 && ! (*pred) (opchar, char_mode))
2693 opchar = copy_to_mode_reg (char_mode, opchar);
2694 }
2695
2696 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2697 if (pat)
2698 {
2699 emit_insn (pat);
2700 return true;
2701 }
2702 else
2703 delete_insns_since (last);
2704 }
2705 }
2706
2707 return false;
2708 }
2709
2710 \f
2711 /* Write to one of the components of the complex value CPLX. Write VAL to
2712 the real part if IMAG_P is false, and the imaginary part if it's true. */
2713
2714 static void
2715 write_complex_part (rtx cplx, rtx val, bool imag_p)
2716 {
2717 enum machine_mode cmode;
2718 enum machine_mode imode;
2719 unsigned ibitsize;
2720
2721 if (GET_CODE (cplx) == CONCAT)
2722 {
2723 emit_move_insn (XEXP (cplx, imag_p), val);
2724 return;
2725 }
2726
2727 cmode = GET_MODE (cplx);
2728 imode = GET_MODE_INNER (cmode);
2729 ibitsize = GET_MODE_BITSIZE (imode);
2730
2731 /* For MEMs simplify_gen_subreg may generate an invalid new address
2732 because, e.g., the original address is considered mode-dependent
2733 by the target, which restricts simplify_subreg from invoking
2734 adjust_address_nv. Instead of preparing fallback support for an
2735 invalid address, we call adjust_address_nv directly. */
2736 if (MEM_P (cplx))
2737 {
2738 emit_move_insn (adjust_address_nv (cplx, imode,
2739 imag_p ? GET_MODE_SIZE (imode) : 0),
2740 val);
2741 return;
2742 }
2743
2744 /* If the sub-object is at least word sized, then we know that subregging
2745 will work. This special case is important, since store_bit_field
2746 wants to operate on integer modes, and there's rarely an OImode to
2747 correspond to TCmode. */
2748 if (ibitsize >= BITS_PER_WORD
2749 /* For hard regs we have exact predicates. Assume we can split
2750 the original object if it spans an even number of hard regs.
2751 This special case is important for SCmode on 64-bit platforms
2752 where the natural size of floating-point regs is 32-bit. */
2753 || (REG_P (cplx)
2754 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2755 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2756 {
2757 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2758 imag_p ? GET_MODE_SIZE (imode) : 0);
2759 if (part)
2760 {
2761 emit_move_insn (part, val);
2762 return;
2763 }
2764 else
2765 /* simplify_gen_subreg may fail for sub-word MEMs. */
2766 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2767 }
2768
2769 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2770 }
2771
2772 /* Extract one of the components of the complex value CPLX. Extract the
2773 real part if IMAG_P is false, and the imaginary part if it's true. */
2774
2775 static rtx
2776 read_complex_part (rtx cplx, bool imag_p)
2777 {
2778 enum machine_mode cmode, imode;
2779 unsigned ibitsize;
2780
2781 if (GET_CODE (cplx) == CONCAT)
2782 return XEXP (cplx, imag_p);
2783
2784 cmode = GET_MODE (cplx);
2785 imode = GET_MODE_INNER (cmode);
2786 ibitsize = GET_MODE_BITSIZE (imode);
2787
2788 /* Special case reads from complex constants that got spilled to memory. */
2789 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2790 {
2791 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2792 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2793 {
2794 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2795 if (CONSTANT_CLASS_P (part))
2796 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2797 }
2798 }
2799
2800 /* For MEMs simplify_gen_subreg may generate an invalid new address
2801 because, e.g., the original address is considered mode-dependent
2802 by the target, which restricts simplify_subreg from invoking
2803 adjust_address_nv. Instead of preparing fallback support for an
2804 invalid address, we call adjust_address_nv directly. */
2805 if (MEM_P (cplx))
2806 return adjust_address_nv (cplx, imode,
2807 imag_p ? GET_MODE_SIZE (imode) : 0);
2808
2809 /* If the sub-object is at least word sized, then we know that subregging
2810 will work. This special case is important, since extract_bit_field
2811 wants to operate on integer modes, and there's rarely an OImode to
2812 correspond to TCmode. */
2813 if (ibitsize >= BITS_PER_WORD
2814 /* For hard regs we have exact predicates. Assume we can split
2815 the original object if it spans an even number of hard regs.
2816 This special case is important for SCmode on 64-bit platforms
2817 where the natural size of floating-point regs is 32-bit. */
2818 || (REG_P (cplx)
2819 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2820 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2821 {
2822 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2823 imag_p ? GET_MODE_SIZE (imode) : 0);
2824 if (ret)
2825 return ret;
2826 else
2827 /* simplify_gen_subreg may fail for sub-word MEMs. */
2828 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2829 }
2830
2831 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2832 true, NULL_RTX, imode, imode);
2833 }
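
/* Sketch of the pair above (X and Y are assumed complex-mode rtxen);
   this is exactly the by-parts fallback emit_move_complex uses below.  */
#if 0	/* Example fragment only.  */
  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);
#endif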
2834 \f
2835 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2836 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2837 represented in NEW_MODE. If FORCE is true, this will never happen, as
2838 we'll force-create a SUBREG if needed. */
2839
2840 static rtx
2841 emit_move_change_mode (enum machine_mode new_mode,
2842 enum machine_mode old_mode, rtx x, bool force)
2843 {
2844 rtx ret;
2845
2846 if (MEM_P (x))
2847 {
2848 /* We don't have to worry about changing the address since the
2849 size in bytes is supposed to be the same. */
2850 if (reload_in_progress)
2851 {
2852 /* Copy the MEM to change the mode and move any
2853 substitutions from the old MEM to the new one. */
2854 ret = adjust_address_nv (x, new_mode, 0);
2855 copy_replacements (x, ret);
2856 }
2857 else
2858 ret = adjust_address (x, new_mode, 0);
2859 }
2860 else
2861 {
2862 /* Note that we do want simplify_subreg's behavior of validating
2863 that the new mode is ok for a hard register. If we were to use
2864 simplify_gen_subreg, we would create the subreg, but would
2865 probably run into the target not being able to implement it. */
2866 /* Except, of course, when FORCE is true, when this is exactly what
2867 we want. Which is needed for CCmodes on some targets. */
2868 if (force)
2869 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2870 else
2871 ret = simplify_subreg (new_mode, x, old_mode, 0);
2872 }
2873
2874 return ret;
2875 }
2876
2877 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2878 an integer mode of the same size as MODE. Returns the instruction
2879 emitted, or NULL if such a move could not be generated. */
2880
2881 static rtx
2882 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2883 {
2884 enum machine_mode imode;
2885 enum insn_code code;
2886
2887 /* There must exist a mode of the exact size we require. */
2888 imode = int_mode_for_mode (mode);
2889 if (imode == BLKmode)
2890 return NULL_RTX;
2891
2892 /* The target must support moves in this mode. */
2893 code = mov_optab->handlers[imode].insn_code;
2894 if (code == CODE_FOR_nothing)
2895 return NULL_RTX;
2896
2897 x = emit_move_change_mode (imode, mode, x, force);
2898 if (x == NULL_RTX)
2899 return NULL_RTX;
2900 y = emit_move_change_mode (imode, mode, y, force);
2901 if (y == NULL_RTX)
2902 return NULL_RTX;
2903 return emit_insn (GEN_FCN (code) (x, y));
2904 }
2905
2906 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2907 Return an equivalent MEM that does not use an auto-increment. */
2908
2909 static rtx
2910 emit_move_resolve_push (enum machine_mode mode, rtx x)
2911 {
2912 enum rtx_code code = GET_CODE (XEXP (x, 0));
2913 HOST_WIDE_INT adjust;
2914 rtx temp;
2915
2916 adjust = GET_MODE_SIZE (mode);
2917 #ifdef PUSH_ROUNDING
2918 adjust = PUSH_ROUNDING (adjust);
2919 #endif
2920 if (code == PRE_DEC || code == POST_DEC)
2921 adjust = -adjust;
2922 else if (code == PRE_MODIFY || code == POST_MODIFY)
2923 {
2924 rtx expr = XEXP (XEXP (x, 0), 1);
2925 HOST_WIDE_INT val;
2926
2927 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2928 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2929 val = INTVAL (XEXP (expr, 1));
2930 if (GET_CODE (expr) == MINUS)
2931 val = -val;
2932 gcc_assert (adjust == val || adjust == -val);
2933 adjust = val;
2934 }
2935
2936 /* Do not use anti_adjust_stack, since we don't want to update
2937 stack_pointer_delta. */
2938 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2939 GEN_INT (adjust), stack_pointer_rtx,
2940 0, OPTAB_LIB_WIDEN);
2941 if (temp != stack_pointer_rtx)
2942 emit_move_insn (stack_pointer_rtx, temp);
2943
2944 switch (code)
2945 {
2946 case PRE_INC:
2947 case PRE_DEC:
2948 case PRE_MODIFY:
2949 temp = stack_pointer_rtx;
2950 break;
2951 case POST_INC:
2952 case POST_DEC:
2953 case POST_MODIFY:
2954 temp = plus_constant (stack_pointer_rtx, -adjust);
2955 break;
2956 default:
2957 gcc_unreachable ();
2958 }
2959
2960 return replace_equiv_address (x, temp);
2961 }
2962
2963 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2964 X is known to satisfy push_operand, and MODE is known to be complex.
2965 Returns the last instruction emitted. */
2966
2967 static rtx
2968 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2969 {
2970 enum machine_mode submode = GET_MODE_INNER (mode);
2971 bool imag_first;
2972
2973 #ifdef PUSH_ROUNDING
2974 unsigned int submodesize = GET_MODE_SIZE (submode);
2975
2976 /* In case we output to the stack, but the size is smaller than the
2977 machine can push exactly, we need to use move instructions. */
2978 if (PUSH_ROUNDING (submodesize) != submodesize)
2979 {
2980 x = emit_move_resolve_push (mode, x);
2981 return emit_move_insn (x, y);
2982 }
2983 #endif
2984
2985 /* Note that the real part always precedes the imag part in memory
2986 regardless of machine's endianness. */
2987 switch (GET_CODE (XEXP (x, 0)))
2988 {
2989 case PRE_DEC:
2990 case POST_DEC:
2991 imag_first = true;
2992 break;
2993 case PRE_INC:
2994 case POST_INC:
2995 imag_first = false;
2996 break;
2997 default:
2998 gcc_unreachable ();
2999 }
3000
3001 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3002 read_complex_part (y, imag_first));
3003 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3004 read_complex_part (y, !imag_first));
3005 }
3006
3007 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3008 MODE is known to be complex. Returns the last instruction emitted. */
3009
3010 static rtx
3011 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3012 {
3013 bool try_int;
3014
3015 /* Need to take special care for pushes, to maintain proper ordering
3016 of the data, and possibly extra padding. */
3017 if (push_operand (x, mode))
3018 return emit_move_complex_push (mode, x, y);
3019
3020 /* See if we can coerce the target into moving both values at once. */
3021
3022 /* Move floating point as parts. */
3023 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3024 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3025 try_int = false;
3026 /* Not possible if the values are inherently not adjacent. */
3027 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3028 try_int = false;
3029 /* Is possible if both are registers (or subregs of registers). */
3030 else if (register_operand (x, mode) && register_operand (y, mode))
3031 try_int = true;
3032 /* If one of the operands is a memory, and alignment constraints
3033 are friendly enough, we may be able to do combined memory operations.
3034 We do not attempt this if Y is a constant because that combination is
3035 usually better with the by-parts thing below. */
3036 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3037 && (!STRICT_ALIGNMENT
3038 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3039 try_int = true;
3040 else
3041 try_int = false;
3042
3043 if (try_int)
3044 {
3045 rtx ret;
3046
3047 /* For memory to memory moves, optimal behavior can be had with the
3048 existing block move logic. */
3049 if (MEM_P (x) && MEM_P (y))
3050 {
3051 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3052 BLOCK_OP_NO_LIBCALL);
3053 return get_last_insn ();
3054 }
3055
3056 ret = emit_move_via_integer (mode, x, y, true);
3057 if (ret)
3058 return ret;
3059 }
3060
3061 /* Show the output dies here. This is necessary for SUBREGs
3062 of pseudos since we cannot track their lifetimes correctly;
3063 hard regs shouldn't appear here except as return values. */
3064 if (!reload_completed && !reload_in_progress
3065 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3066 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3067
3068 write_complex_part (x, read_complex_part (y, false), false);
3069 write_complex_part (x, read_complex_part (y, true), true);
3070 return get_last_insn ();
3071 }
3072
3073 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3074 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3075
3076 static rtx
3077 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3078 {
3079 rtx ret;
3080
3081 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3082 if (mode != CCmode)
3083 {
3084 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3085 if (code != CODE_FOR_nothing)
3086 {
3087 x = emit_move_change_mode (CCmode, mode, x, true);
3088 y = emit_move_change_mode (CCmode, mode, y, true);
3089 return emit_insn (GEN_FCN (code) (x, y));
3090 }
3091 }
3092
3093 /* Otherwise, find the MODE_INT mode of the same width. */
3094 ret = emit_move_via_integer (mode, x, y, false);
3095 gcc_assert (ret != NULL);
3096 return ret;
3097 }
3098
3099 /* Return true if word I of OP lies entirely in the
3100 undefined bits of a paradoxical subreg. */
3101
3102 static bool
3103 undefined_operand_subword_p (rtx op, int i)
3104 {
3105 enum machine_mode innermode, innermostmode;
3106 int offset;
3107 if (GET_CODE (op) != SUBREG)
3108 return false;
3109 innermode = GET_MODE (op);
3110 innermostmode = GET_MODE (SUBREG_REG (op));
3111 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3112 /* The SUBREG_BYTE represents offset, as if the value were stored in
3113 memory, except for a paradoxical subreg where we define
3114 SUBREG_BYTE to be 0; undo this exception as in
3115 simplify_subreg. */
3116 if (SUBREG_BYTE (op) == 0
3117 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3118 {
3119 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3120 if (WORDS_BIG_ENDIAN)
3121 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3122 if (BYTES_BIG_ENDIAN)
3123 offset += difference % UNITS_PER_WORD;
3124 }
3125 if (offset >= GET_MODE_SIZE (innermostmode)
3126 || offset <= -GET_MODE_SIZE (word_mode))
3127 return true;
3128 return false;
3129 }
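
/* For example (an illustration, assuming a 32-bit little-endian word
   layout): in (subreg:DI (reg:SI 100) 0), word 1 of the DImode view lies
   entirely in undefined paradoxical bits, so emit_move_multi_word below
   can skip emitting a move for it.  */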
3130
3131 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3132 MODE is any multi-word or full-word mode that lacks a move_insn
3133 pattern. Note that you will get better code if you define such
3134 patterns, even if they must turn into multiple assembler instructions. */
3135
3136 static rtx
3137 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3138 {
3139 rtx last_insn = 0;
3140 rtx seq, inner;
3141 bool need_clobber;
3142 int i;
3143
3144 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3145
3146 /* If X is a push on the stack, do the push now and replace
3147 X with a reference to the stack pointer. */
3148 if (push_operand (x, mode))
3149 x = emit_move_resolve_push (mode, x);
3150
3151 /* If we are in reload, see if either operand is a MEM whose address
3152 is scheduled for replacement. */
3153 if (reload_in_progress && MEM_P (x)
3154 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3155 x = replace_equiv_address_nv (x, inner);
3156 if (reload_in_progress && MEM_P (y)
3157 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3158 y = replace_equiv_address_nv (y, inner);
3159
3160 start_sequence ();
3161
3162 need_clobber = false;
3163 for (i = 0;
3164 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3165 i++)
3166 {
3167 rtx xpart = operand_subword (x, i, 1, mode);
3168 rtx ypart;
3169
3170 /* Do not generate code for a move if it would come entirely
3171 from the undefined bits of a paradoxical subreg. */
3172 if (undefined_operand_subword_p (y, i))
3173 continue;
3174
3175 ypart = operand_subword (y, i, 1, mode);
3176
3177 /* If we can't get a part of Y, put Y into memory if it is a
3178 constant. Otherwise, force it into a register. Then we must
3179 be able to get a part of Y. */
3180 if (ypart == 0 && CONSTANT_P (y))
3181 {
3182 y = use_anchored_address (force_const_mem (mode, y));
3183 ypart = operand_subword (y, i, 1, mode);
3184 }
3185 else if (ypart == 0)
3186 ypart = operand_subword_force (y, i, mode);
3187
3188 gcc_assert (xpart && ypart);
3189
3190 need_clobber |= (GET_CODE (xpart) == SUBREG);
3191
3192 last_insn = emit_move_insn (xpart, ypart);
3193 }
3194
3195 seq = get_insns ();
3196 end_sequence ();
3197
3198 /* Show the output dies here. This is necessary for SUBREGs
3199 of pseudos since we cannot track their lifetimes correctly;
3200 hard regs shouldn't appear here except as return values.
3201 We never want to emit such a clobber after reload. */
3202 if (x != y
3203 && ! (reload_in_progress || reload_completed)
3204 && need_clobber != 0)
3205 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3206
3207 emit_insn (seq);
3208
3209 return last_insn;
3210 }
3211
3212 /* Low level part of emit_move_insn.
3213 Called just like emit_move_insn, but assumes X and Y
3214 are basically valid. */
3215
3216 rtx
3217 emit_move_insn_1 (rtx x, rtx y)
3218 {
3219 enum machine_mode mode = GET_MODE (x);
3220 enum insn_code code;
3221
3222 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3223
3224 code = mov_optab->handlers[mode].insn_code;
3225 if (code != CODE_FOR_nothing)
3226 return emit_insn (GEN_FCN (code) (x, y));
3227
3228 /* Expand complex moves by moving real part and imag part. */
3229 if (COMPLEX_MODE_P (mode))
3230 return emit_move_complex (mode, x, y);
3231
3232 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3233 {
3234 rtx result = emit_move_via_integer (mode, x, y, true);
3235
3236 /* If we can't find an integer mode, use multi words. */
3237 if (result)
3238 return result;
3239 else
3240 return emit_move_multi_word (mode, x, y);
3241 }
3242
3243 if (GET_MODE_CLASS (mode) == MODE_CC)
3244 return emit_move_ccmode (mode, x, y);
3245
3246 /* Try using a move pattern for the corresponding integer mode. This is
3247 only safe when simplify_subreg can convert MODE constants into integer
3248 constants. At present, it can only do this reliably if the value
3249 fits within a HOST_WIDE_INT. */
3250 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3251 {
3252 rtx ret = emit_move_via_integer (mode, x, y, false);
3253 if (ret)
3254 return ret;
3255 }
3256
3257 return emit_move_multi_word (mode, x, y);
3258 }
3259
3260 /* Generate code to copy Y into X.
3261 Both Y and X must have the same mode, except that
3262 Y can be a constant with VOIDmode.
3263 This mode cannot be BLKmode; use emit_block_move for that.
3264
3265 Return the last instruction emitted. */
3266
3267 rtx
3268 emit_move_insn (rtx x, rtx y)
3269 {
3270 enum machine_mode mode = GET_MODE (x);
3271 rtx y_cst = NULL_RTX;
3272 rtx last_insn, set;
3273
3274 gcc_assert (mode != BLKmode
3275 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3276
3277 if (CONSTANT_P (y))
3278 {
3279 if (optimize
3280 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3281 && (last_insn = compress_float_constant (x, y)))
3282 return last_insn;
3283
3284 y_cst = y;
3285
3286 if (!LEGITIMATE_CONSTANT_P (y))
3287 {
3288 y = force_const_mem (mode, y);
3289
3290 /* If the target's cannot_force_const_mem prevented the spill,
3291 assume that the target's move expanders will also take care
3292 of the non-legitimate constant. */
3293 if (!y)
3294 y = y_cst;
3295 else
3296 y = use_anchored_address (y);
3297 }
3298 }
3299
3300 /* If X or Y are memory references, verify that their addresses are valid
3301 for the machine. */
3302 if (MEM_P (x)
3303 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3304 && ! push_operand (x, GET_MODE (x)))
3305 || (flag_force_addr
3306 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3307 x = validize_mem (x);
3308
3309 if (MEM_P (y)
3310 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3311 || (flag_force_addr
3312 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3313 y = validize_mem (y);
3314
3315 gcc_assert (mode != BLKmode);
3316
3317 last_insn = emit_move_insn_1 (x, y);
3318
3319 if (y_cst && REG_P (x)
3320 && (set = single_set (last_insn)) != NULL_RTX
3321 && SET_DEST (set) == x
3322 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3323 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3324
3325 return last_insn;
3326 }
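
/* Usage sketch (the pseudo and constant are assumptions): this is the
   canonical entry point the rest of the compiler uses; constant
   legitimization and the REG_EQUAL note are handled here.  */
#if 0	/* Example fragment only.  */
  rtx tmp = gen_reg_rtx (SImode);
  emit_move_insn (tmp, GEN_INT (42));
#endif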
3327
3328 /* If Y is representable exactly in a narrower mode, and the target can
3329 perform the extension directly from constant or memory, then emit the
3330 move as an extension. */
3331
3332 static rtx
3333 compress_float_constant (rtx x, rtx y)
3334 {
3335 enum machine_mode dstmode = GET_MODE (x);
3336 enum machine_mode orig_srcmode = GET_MODE (y);
3337 enum machine_mode srcmode;
3338 REAL_VALUE_TYPE r;
3339 int oldcost, newcost;
3340
3341 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3342
3343 if (LEGITIMATE_CONSTANT_P (y))
3344 oldcost = rtx_cost (y, SET);
3345 else
3346 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3347
3348 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3349 srcmode != orig_srcmode;
3350 srcmode = GET_MODE_WIDER_MODE (srcmode))
3351 {
3352 enum insn_code ic;
3353 rtx trunc_y, last_insn;
3354
3355 /* Skip if the target can't extend this way. */
3356 ic = can_extend_p (dstmode, srcmode, 0);
3357 if (ic == CODE_FOR_nothing)
3358 continue;
3359
3360 /* Skip if the narrowed value isn't exact. */
3361 if (! exact_real_truncate (srcmode, &r))
3362 continue;
3363
3364 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3365
3366 if (LEGITIMATE_CONSTANT_P (trunc_y))
3367 {
3368 /* Skip if the target needs extra instructions to perform
3369 the extension. */
3370 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3371 continue;
3372 /* This is valid, but may not be cheaper than the original. */
3373 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3374 if (oldcost < newcost)
3375 continue;
3376 }
3377 else if (float_extend_from_mem[dstmode][srcmode])
3378 {
3379 trunc_y = force_const_mem (srcmode, trunc_y);
3380 /* This is valid, but may not be cheaper than the original. */
3381 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3382 if (oldcost < newcost)
3383 continue;
3384 trunc_y = validize_mem (trunc_y);
3385 }
3386 else
3387 continue;
3388
3389 /* For CSE's benefit, force the compressed constant pool entry
3390 into a new pseudo. This constant may be used in different modes,
3391 and if not, combine will put things back together for us. */
3392 trunc_y = force_reg (srcmode, trunc_y);
3393 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3394 last_insn = get_last_insn ();
3395
3396 if (REG_P (x))
3397 set_unique_reg_note (last_insn, REG_EQUAL, y);
3398
3399 return last_insn;
3400 }
3401
3402 return NULL_RTX;
3403 }
3404 \f
3405 /* Pushing data onto the stack. */
3406
3407 /* Push a block of length SIZE (perhaps variable)
3408 and return an rtx to address the beginning of the block.
3409 The value may be virtual_outgoing_args_rtx.
3410
3411 EXTRA is the number of bytes of padding to push in addition to SIZE.
3412 BELOW nonzero means this padding comes at low addresses;
3413 otherwise, the padding comes at high addresses. */
3414
3415 rtx
3416 push_block (rtx size, int extra, int below)
3417 {
3418 rtx temp;
3419
3420 size = convert_modes (Pmode, ptr_mode, size, 1);
3421 if (CONSTANT_P (size))
3422 anti_adjust_stack (plus_constant (size, extra));
3423 else if (REG_P (size) && extra == 0)
3424 anti_adjust_stack (size);
3425 else
3426 {
3427 temp = copy_to_mode_reg (Pmode, size);
3428 if (extra != 0)
3429 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3430 temp, 0, OPTAB_LIB_WIDEN);
3431 anti_adjust_stack (temp);
3432 }
3433
3434 #ifndef STACK_GROWS_DOWNWARD
3435 if (0)
3436 #else
3437 if (1)
3438 #endif
3439 {
3440 temp = virtual_outgoing_args_rtx;
3441 if (extra != 0 && below)
3442 temp = plus_constant (temp, extra);
3443 }
3444 else
3445 {
3446 if (GET_CODE (size) == CONST_INT)
3447 temp = plus_constant (virtual_outgoing_args_rtx,
3448 -INTVAL (size) - (below ? 0 : extra));
3449 else if (extra != 0 && !below)
3450 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3451 negate_rtx (Pmode, plus_constant (size, extra)));
3452 else
3453 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3454 negate_rtx (Pmode, size));
3455 }
3456
3457 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3458 }
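
/* Sketch with assumed sizes: reserve 16 bytes of outgoing-argument
   space plus 4 bytes of padding below it, getting back an address
   usable in a MEM.  */
#if 0	/* Example fragment only.  */
  rtx block_addr = push_block (GEN_INT (16), 4, /*below=*/1);
#endif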
3459
3460 #ifdef PUSH_ROUNDING
3461
3462 /* Emit single push insn. */
3463
3464 static void
3465 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3466 {
3467 rtx dest_addr;
3468 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3469 rtx dest;
3470 enum insn_code icode;
3471 insn_operand_predicate_fn pred;
3472
3473 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3474 /* If there is a push pattern, use it. Otherwise try the old way of
3475 throwing a MEM representing the push operation to the move expander. */
3476 icode = push_optab->handlers[(int) mode].insn_code;
3477 if (icode != CODE_FOR_nothing)
3478 {
3479 if (((pred = insn_data[(int) icode].operand[0].predicate)
3480 && !((*pred) (x, mode))))
3481 x = force_reg (mode, x);
3482 emit_insn (GEN_FCN (icode) (x));
3483 return;
3484 }
3485 if (GET_MODE_SIZE (mode) == rounded_size)
3486 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3487 /* If we are to pad downward, adjust the stack pointer first and
3488 then store X into the stack location using an offset. This is
3489 because emit_move_insn does not know how to pad; it does not have
3490 access to type. */
3491 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3492 {
3493 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3494 HOST_WIDE_INT offset;
3495
3496 emit_move_insn (stack_pointer_rtx,
3497 expand_binop (Pmode,
3498 #ifdef STACK_GROWS_DOWNWARD
3499 sub_optab,
3500 #else
3501 add_optab,
3502 #endif
3503 stack_pointer_rtx,
3504 GEN_INT (rounded_size),
3505 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3506
3507 offset = (HOST_WIDE_INT) padding_size;
3508 #ifdef STACK_GROWS_DOWNWARD
3509 if (STACK_PUSH_CODE == POST_DEC)
3510 /* We have already decremented the stack pointer, so get the
3511 previous value. */
3512 offset += (HOST_WIDE_INT) rounded_size;
3513 #else
3514 if (STACK_PUSH_CODE == POST_INC)
3515 /* We have already incremented the stack pointer, so get the
3516 previous value. */
3517 offset -= (HOST_WIDE_INT) rounded_size;
3518 #endif
3519 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3520 }
3521 else
3522 {
3523 #ifdef STACK_GROWS_DOWNWARD
3524 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3525 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3526 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3527 #else
3528 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3529 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3530 GEN_INT (rounded_size));
3531 #endif
3532 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3533 }
3534
3535 dest = gen_rtx_MEM (mode, dest_addr);
3536
3537 if (type != 0)
3538 {
3539 set_mem_attributes (dest, type, 1);
3540
3541 if (flag_optimize_sibling_calls)
3542 /* Function incoming arguments may overlap with sibling call
3543 outgoing arguments and we cannot allow reordering of reads
3544 from function arguments with stores to outgoing arguments
3545 of sibling calls. */
3546 set_mem_alias_set (dest, 0);
3547 }
3548 emit_move_insn (dest, x);
3549 }
3550 #endif
3551
3552 /* Generate code to push X onto the stack, assuming it has mode MODE and
3553 type TYPE.
3554 MODE is redundant except when X is a CONST_INT (since they don't
3555 carry mode info).
3556 SIZE is an rtx for the size of data to be copied (in bytes),
3557 needed only if X is BLKmode.
3558
3559 ALIGN (in bits) is maximum alignment we can assume.
3560
3561 If PARTIAL and REG are both nonzero, then copy that many of the first
3562 bytes of X into registers starting with REG, and push the rest of X.
3563 The amount of space pushed is decreased by PARTIAL bytes.
3564 REG must be a hard register in this case.
3565 If REG is zero but PARTIAL is not, take all other actions for an
3566 argument partially in registers, but do not actually load any
3567 registers.
3568
3569 EXTRA is the amount in bytes of extra space to leave next to this arg.
3570 This is ignored if an argument block has already been allocated.
3571
3572 On a machine that lacks real push insns, ARGS_ADDR is the address of
3573 the bottom of the argument block for this call. We use indexing off there
3574 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3575 argument block has not been preallocated.
3576
3577 ARGS_SO_FAR is the size of args previously pushed for this call.
3578
3579 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3580 for arguments passed in registers. If nonzero, it will be the number
3581 of bytes required. */
3582
3583 void
3584 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3585 unsigned int align, int partial, rtx reg, int extra,
3586 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3587 rtx alignment_pad)
3588 {
3589 rtx xinner;
3590 enum direction stack_direction
3591 #ifdef STACK_GROWS_DOWNWARD
3592 = downward;
3593 #else
3594 = upward;
3595 #endif
3596
3597 /* Decide where to pad the argument: `downward' for below,
3598 `upward' for above, or `none' for don't pad it.
3599 Default is below for small data on big-endian machines; else above. */
3600 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3601
3602 /* Invert direction if stack is post-decrement.
3603 FIXME: why? */
3604 if (STACK_PUSH_CODE == POST_DEC)
3605 if (where_pad != none)
3606 where_pad = (where_pad == downward ? upward : downward);
3607
3608 xinner = x;
3609
3610 if (mode == BLKmode)
3611 {
3612 /* Copy a block into the stack, entirely or partially. */
3613
3614 rtx temp;
3615 int used;
3616 int offset;
3617 int skip;
3618
3619 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3620 used = partial - offset;
3621
3622 gcc_assert (size);
3623
3624 /* USED is now the # of bytes we need not copy to the stack
3625 because registers will take care of them. */
3626
3627 if (partial != 0)
3628 xinner = adjust_address (xinner, BLKmode, used);
3629
3630 /* If the partial register-part of the arg counts in its stack size,
3631 skip the part of stack space corresponding to the registers.
3632 Otherwise, start copying to the beginning of the stack space,
3633 by setting SKIP to 0. */
3634 skip = (reg_parm_stack_space == 0) ? 0 : used;
3635
3636 #ifdef PUSH_ROUNDING
3637 /* Do it with several push insns if that doesn't take lots of insns
3638 and if there is no difficulty with push insns that skip bytes
3639 on the stack for alignment purposes. */
3640 if (args_addr == 0
3641 && PUSH_ARGS
3642 && GET_CODE (size) == CONST_INT
3643 && skip == 0
3644 && MEM_ALIGN (xinner) >= align
3645 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3646 /* Here we avoid the case of a structure whose weak alignment
3647 forces many pushes of a small amount of data,
3648 and such small pushes do rounding that causes trouble. */
3649 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3650 || align >= BIGGEST_ALIGNMENT
3651 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3652 == (align / BITS_PER_UNIT)))
3653 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3654 {
3655 /* Push padding now if padding above and stack grows down,
3656 or if padding below and stack grows up.
3657 But if space already allocated, this has already been done. */
3658 if (extra && args_addr == 0
3659 && where_pad != none && where_pad != stack_direction)
3660 anti_adjust_stack (GEN_INT (extra));
3661
3662 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3663 }
3664 else
3665 #endif /* PUSH_ROUNDING */
3666 {
3667 rtx target;
3668
3669 /* Otherwise make space on the stack and copy the data
3670 to the address of that space. */
3671
3672 /* Deduct words put into registers from the size we must copy. */
3673 if (partial != 0)
3674 {
3675 if (GET_CODE (size) == CONST_INT)
3676 size = GEN_INT (INTVAL (size) - used);
3677 else
3678 size = expand_binop (GET_MODE (size), sub_optab, size,
3679 GEN_INT (used), NULL_RTX, 0,
3680 OPTAB_LIB_WIDEN);
3681 }
3682
3683 /* Get the address of the stack space.
3684 In this case, we do not deal with EXTRA separately.
3685 A single stack adjust will do. */
3686 if (! args_addr)
3687 {
3688 temp = push_block (size, extra, where_pad == downward);
3689 extra = 0;
3690 }
3691 else if (GET_CODE (args_so_far) == CONST_INT)
3692 temp = memory_address (BLKmode,
3693 plus_constant (args_addr,
3694 skip + INTVAL (args_so_far)));
3695 else
3696 temp = memory_address (BLKmode,
3697 plus_constant (gen_rtx_PLUS (Pmode,
3698 args_addr,
3699 args_so_far),
3700 skip));
3701
3702 if (!ACCUMULATE_OUTGOING_ARGS)
3703 {
3704 /* If the source is referenced relative to the stack pointer,
3705 copy it to another register to stabilize it. We do not need
3706 to do this if we know that we won't be changing sp. */
3707
3708 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3709 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3710 temp = copy_to_reg (temp);
3711 }
3712
3713 target = gen_rtx_MEM (BLKmode, temp);
3714
3715 /* We do *not* set_mem_attributes here, because incoming arguments
3716 may overlap with sibling call outgoing arguments and we cannot
3717 allow reordering of reads from function arguments with stores
3718 to outgoing arguments of sibling calls. We do, however, want
3719 to record the alignment of the stack slot. */
3720 /* ALIGN may well be better aligned than TYPE, e.g. due to
3721 PARM_BOUNDARY. Assume the caller isn't lying. */
3722 set_mem_align (target, align);
3723
3724 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3725 }
3726 }
3727 else if (partial > 0)
3728 {
3729 /* Scalar partly in registers. */
3730
3731 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3732 int i;
3733 int not_stack;
3734 /* # bytes of start of argument
3735 that we must make space for but need not store. */
3736 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3737 int args_offset = INTVAL (args_so_far);
3738 int skip;
3739
3740 /* Push padding now if padding above and stack grows down,
3741 or if padding below and stack grows up.
3742 But if space already allocated, this has already been done. */
3743 if (extra && args_addr == 0
3744 && where_pad != none && where_pad != stack_direction)
3745 anti_adjust_stack (GEN_INT (extra));
3746
3747 /* If we make space by pushing it, we might as well push
3748 the real data. Otherwise, we can leave OFFSET nonzero
3749 and leave the space uninitialized. */
3750 if (args_addr == 0)
3751 offset = 0;
3752
3753 /* Now NOT_STACK gets the number of words that we don't need to
3754 allocate on the stack. Convert OFFSET to words too. */
3755 not_stack = (partial - offset) / UNITS_PER_WORD;
3756 offset /= UNITS_PER_WORD;
3757
3758 /* If the partial register-part of the arg counts in its stack size,
3759 skip the part of stack space corresponding to the registers.
3760 Otherwise, start copying to the beginning of the stack space,
3761 by setting SKIP to 0. */
3762 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3763
3764 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3765 x = validize_mem (force_const_mem (mode, x));
3766
3767 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3768 SUBREGs of such registers are not allowed. */
3769 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3770 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3771 x = copy_to_reg (x);
3772
3773 /* Loop over all the words allocated on the stack for this arg. */
3774 /* We can do it by words, because any scalar bigger than a word
3775 has a size a multiple of a word. */
3776 #ifndef PUSH_ARGS_REVERSED
3777 for (i = not_stack; i < size; i++)
3778 #else
3779 for (i = size - 1; i >= not_stack; i--)
3780 #endif
3781 if (i >= not_stack + offset)
3782 emit_push_insn (operand_subword_force (x, i, mode),
3783 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3784 0, args_addr,
3785 GEN_INT (args_offset + ((i - not_stack + skip)
3786 * UNITS_PER_WORD)),
3787 reg_parm_stack_space, alignment_pad);
3788 }
3789 else
3790 {
3791 rtx addr;
3792 rtx dest;
3793
3794 /* Push padding now if padding above and stack grows down,
3795 or if padding below and stack grows up.
3796 But if space already allocated, this has already been done. */
3797 if (extra && args_addr == 0
3798 && where_pad != none && where_pad != stack_direction)
3799 anti_adjust_stack (GEN_INT (extra));
3800
3801 #ifdef PUSH_ROUNDING
3802 if (args_addr == 0 && PUSH_ARGS)
3803 emit_single_push_insn (mode, x, type);
3804 else
3805 #endif
3806 {
3807 if (GET_CODE (args_so_far) == CONST_INT)
3808 addr
3809 = memory_address (mode,
3810 plus_constant (args_addr,
3811 INTVAL (args_so_far)));
3812 else
3813 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3814 args_so_far));
3815 dest = gen_rtx_MEM (mode, addr);
3816
3817 /* We do *not* set_mem_attributes here, because incoming arguments
3818 may overlap with sibling call outgoing arguments and we cannot
3819 allow reordering of reads from function arguments with stores
3820 to outgoing arguments of sibling calls. We do, however, want
3821 to record the alignment of the stack slot. */
3822 /* ALIGN may well be better aligned than TYPE, e.g. due to
3823 PARM_BOUNDARY. Assume the caller isn't lying. */
3824 set_mem_align (dest, align);
3825
3826 emit_move_insn (dest, x);
3827 }
3828 }
3829
3830 /* If part should go in registers, copy that part
3831 into the appropriate registers. Do this now, at the end,
3832 since mem-to-mem copies above may do function calls. */
3833 if (partial > 0 && reg != 0)
3834 {
3835 /* Handle calls that pass values in multiple non-contiguous locations.
3836 The Irix 6 ABI has examples of this. */
3837 if (GET_CODE (reg) == PARALLEL)
3838 emit_group_load (reg, x, type, -1);
3839 else
3840 {
3841 gcc_assert (partial % UNITS_PER_WORD == 0);
3842 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3843 }
3844 }
3845
3846 if (extra && args_addr == 0 && where_pad == stack_direction)
3847 anti_adjust_stack (GEN_INT (extra));
3848
3849 if (alignment_pad && args_addr == 0)
3850 anti_adjust_stack (alignment_pad);
3851 }
3852 \f
3853 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3854 operations. */
3855
3856 static rtx
3857 get_subtarget (rtx x)
3858 {
3859 return (optimize
3860 || x == 0
3861 /* Only registers can be subtargets. */
3862 || !REG_P (x)
3863 /* Don't use hard regs to avoid extending their life. */
3864 || REGNO (x) < FIRST_PSEUDO_REGISTER
3865 ? 0 : x);
3866 }
3867
3868 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3869 FIELD is a bitfield. Returns true if the optimization was successful,
3870 and there's nothing else to do. */
3871
3872 static bool
3873 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3874 unsigned HOST_WIDE_INT bitpos,
3875 enum machine_mode mode1, rtx str_rtx,
3876 tree to, tree src)
3877 {
3878 enum machine_mode str_mode = GET_MODE (str_rtx);
3879 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3880 tree op0, op1;
3881 rtx value, result;
3882 optab binop;
3883
3884 if (mode1 != VOIDmode
3885 || bitsize >= BITS_PER_WORD
3886 || str_bitsize > BITS_PER_WORD
3887 || TREE_SIDE_EFFECTS (to)
3888 || TREE_THIS_VOLATILE (to))
3889 return false;
3890
3891 STRIP_NOPS (src);
3892 if (!BINARY_CLASS_P (src)
3893 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3894 return false;
3895
3896 op0 = TREE_OPERAND (src, 0);
3897 op1 = TREE_OPERAND (src, 1);
3898 STRIP_NOPS (op0);
3899
3900 if (!operand_equal_p (to, op0, 0))
3901 return false;
3902
3903 if (MEM_P (str_rtx))
3904 {
3905 unsigned HOST_WIDE_INT offset1;
3906
3907 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3908 str_mode = word_mode;
3909 str_mode = get_best_mode (bitsize, bitpos,
3910 MEM_ALIGN (str_rtx), str_mode, 0);
3911 if (str_mode == VOIDmode)
3912 return false;
3913 str_bitsize = GET_MODE_BITSIZE (str_mode);
3914
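/* Narrow the access to the STR_MODE-sized unit that contains the
   field: OFFSET1 becomes the byte offset of that unit within STR_RTX
   and BITPOS the bit position inside it.  */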
3915 offset1 = bitpos;
3916 bitpos %= str_bitsize;
3917 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3918 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3919 }
3920 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3921 return false;
3922
3923 /* If the bit field covers the whole REG/MEM, store_field
3924 will likely generate better code. */
3925 if (bitsize >= str_bitsize)
3926 return false;
3927
3928 /* We can't handle fields split across multiple entities. */
3929 if (bitpos + bitsize > str_bitsize)
3930 return false;
3931
3932 if (BYTES_BIG_ENDIAN)
3933 bitpos = str_bitsize - bitpos - bitsize;
3934
3935 switch (TREE_CODE (src))
3936 {
3937 case PLUS_EXPR:
3938 case MINUS_EXPR:
3939 /* For now, just optimize the case of the topmost bitfield
3940 where we don't need to do any masking and also
3941 1 bit bitfields where xor can be used.
3942 We might win by one instruction for the other bitfields
3943 too if insv/extv instructions aren't used, so that
3944 can be added later. */
3945 if (bitpos + bitsize != str_bitsize
3946 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3947 break;
3948
3949 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3950 value = convert_modes (str_mode,
3951 TYPE_MODE (TREE_TYPE (op1)), value,
3952 TYPE_UNSIGNED (TREE_TYPE (op1)));
3953
3954 /* We may be accessing data outside the field, which means
3955 we can alias adjacent data. */
3956 if (MEM_P (str_rtx))
3957 {
3958 str_rtx = shallow_copy_rtx (str_rtx);
3959 set_mem_alias_set (str_rtx, 0);
3960 set_mem_expr (str_rtx, 0);
3961 }
3962
3963 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3964 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3965 {
3966 value = expand_and (str_mode, value, const1_rtx, NULL);
3967 binop = xor_optab;
3968 }
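/* For a 1-bit field, adding or subtracting C is, mod 2, the same as
   XOR with C's low bit (extracted by the expand_and above): an even
   C leaves the bit unchanged and an odd C toggles it.  */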
3969 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3970 build_int_cst (NULL_TREE, bitpos),
3971 NULL_RTX, 1);
3972 result = expand_binop (str_mode, binop, str_rtx,
3973 value, str_rtx, 1, OPTAB_WIDEN);
3974 if (result != str_rtx)
3975 emit_move_insn (str_rtx, result);
3976 return true;
3977
3978 case BIT_IOR_EXPR:
3979 case BIT_XOR_EXPR:
3980 if (TREE_CODE (op1) != INTEGER_CST)
3981 break;
3982 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3983 value = convert_modes (GET_MODE (str_rtx),
3984 TYPE_MODE (TREE_TYPE (op1)), value,
3985 TYPE_UNSIGNED (TREE_TYPE (op1)));
3986
3987 /* We may be accessing data outside the field, which means
3988 we can alias adjacent data. */
3989 if (MEM_P (str_rtx))
3990 {
3991 str_rtx = shallow_copy_rtx (str_rtx);
3992 set_mem_alias_set (str_rtx, 0);
3993 set_mem_expr (str_rtx, 0);
3994 }
3995
3996 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3997 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3998 {
3999 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4000 - 1);
4001 value = expand_and (GET_MODE (str_rtx), value, mask,
4002 NULL_RTX);
4003 }
4004 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4005 build_int_cst (NULL_TREE, bitpos),
4006 NULL_RTX, 1);
4007 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4008 value, str_rtx, 1, OPTAB_WIDEN);
4009 if (result != str_rtx)
4010 emit_move_insn (str_rtx, result);
4011 return true;
4012
4013 default:
4014 break;
4015 }
4016
4017 return false;
4018 }
4019
4020
4021 /* Expand an assignment that stores the value of FROM into TO. */
4022
4023 void
4024 expand_assignment (tree to, tree from)
4025 {
4026 rtx to_rtx = 0;
4027 rtx result;
4028
4029 /* Don't crash if the lhs of the assignment was erroneous. */
4030 if (TREE_CODE (to) == ERROR_MARK)
4031 {
4032 result = expand_normal (from);
4033 return;
4034 }
4035
4036 /* Optimize away no-op moves without side-effects. */
4037 if (operand_equal_p (to, from, 0))
4038 return;
4039
4040 /* Assignment of a structure component needs special treatment
4041 if the structure component's rtx is not simply a MEM.
4042 Assignment of an array element at a constant index, and assignment of
4043 an array element in an unaligned packed structure field, have the same
4044 problem. */
4045 if (handled_component_p (to)
4046 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4047 {
4048 enum machine_mode mode1;
4049 HOST_WIDE_INT bitsize, bitpos;
4050 tree offset;
4051 int unsignedp;
4052 int volatilep = 0;
4053 tree tem;
4054
4055 push_temp_slots ();
4056 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4057 &unsignedp, &volatilep, true);
4058
4059 /* If we are going to use store_bit_field and extract_bit_field,
4060 make sure to_rtx will be safe for multiple use. */
4061
4062 to_rtx = expand_normal (tem);
4063
4064 if (offset != 0)
4065 {
4066 rtx offset_rtx;
4067
4068 if (!MEM_P (to_rtx))
4069 {
4070 /* We can get constant negative offsets into arrays with broken
4071 user code. Translate this to a trap instead of ICEing. */
4072 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4073 expand_builtin_trap ();
4074 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4075 }
4076
4077 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4078 #ifdef POINTERS_EXTEND_UNSIGNED
4079 if (GET_MODE (offset_rtx) != Pmode)
4080 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4081 #else
4082 if (GET_MODE (offset_rtx) != ptr_mode)
4083 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4084 #endif
4085
4086 /* A constant address in TO_RTX can have VOIDmode; we must not try
4087 to call force_reg for that case, so avoid it here. */
4088 if (MEM_P (to_rtx)
4089 && GET_MODE (to_rtx) == BLKmode
4090 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4091 && bitsize > 0
4092 && (bitpos % bitsize) == 0
4093 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4094 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4095 {
4096 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4097 bitpos = 0;
4098 }
4099
4100 to_rtx = offset_address (to_rtx, offset_rtx,
4101 highest_pow2_factor_for_target (to,
4102 offset));
4103 }
4104
4105 /* Handle expand_expr of a complex value returning a CONCAT. */
4106 if (GET_CODE (to_rtx) == CONCAT)
4107 {
4108 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4109 {
4110 gcc_assert (bitpos == 0);
4111 result = store_expr (from, to_rtx, false);
4112 }
4113 else
4114 {
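/* A complex value in a CONCAT keeps the real part in XEXP 0 and the
   imaginary part in XEXP 1, so a nonzero BITPOS selects the
   imaginary part.  */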
4115 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4116 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4117 }
4118 }
4119 else
4120 {
4121 if (MEM_P (to_rtx))
4122 {
4123 /* If the field is at offset zero, we could have been given the
4124 DECL_RTX of the parent struct. Don't munge it. */
4125 to_rtx = shallow_copy_rtx (to_rtx);
4126
4127 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4128
4129 /* Deal with volatile and readonly fields. The former is only
4130 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4131 if (volatilep)
4132 MEM_VOLATILE_P (to_rtx) = 1;
4133 if (component_uses_parent_alias_set (to))
4134 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4135 }
4136
4137 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4138 to_rtx, to, from))
4139 result = NULL;
4140 else
4141 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4142 TREE_TYPE (tem), get_alias_set (to));
4143 }
4144
4145 if (result)
4146 preserve_temp_slots (result);
4147 free_temp_slots ();
4148 pop_temp_slots ();
4149 return;
4150 }
4151
4152 /* If the rhs is a function call and its value is not an aggregate,
4153 call the function before we start to compute the lhs.
4154 This is needed for correct code for cases such as
4155 val = setjmp (buf) on machines where reference to val
4156 requires loading up part of an address in a separate insn.
4157
4158 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4159 since it might be a promoted variable where the zero- or sign-extension
4160 needs to be done. Handling this in the normal way is safe because no
4161 computation is done before the call. */
4162 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4163 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4164 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4165 && REG_P (DECL_RTL (to))))
4166 {
4167 rtx value;
4168
4169 push_temp_slots ();
4170 value = expand_normal (from);
4171 if (to_rtx == 0)
4172 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4173
4174 /* Handle calls that return values in multiple non-contiguous locations.
4175 The Irix 6 ABI has examples of this. */
4176 if (GET_CODE (to_rtx) == PARALLEL)
4177 emit_group_load (to_rtx, value, TREE_TYPE (from),
4178 int_size_in_bytes (TREE_TYPE (from)));
4179 else if (GET_MODE (to_rtx) == BLKmode)
4180 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4181 else
4182 {
4183 if (POINTER_TYPE_P (TREE_TYPE (to)))
4184 value = convert_memory_address (GET_MODE (to_rtx), value);
4185 emit_move_insn (to_rtx, value);
4186 }
4187 preserve_temp_slots (to_rtx);
4188 free_temp_slots ();
4189 pop_temp_slots ();
4190 return;
4191 }
4192
4193 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4194 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4195
4196 if (to_rtx == 0)
4197 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4198
4199 /* Don't move directly into a return register. */
4200 if (TREE_CODE (to) == RESULT_DECL
4201 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4202 {
4203 rtx temp;
4204
4205 push_temp_slots ();
4206 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4207
4208 if (GET_CODE (to_rtx) == PARALLEL)
4209 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4210 int_size_in_bytes (TREE_TYPE (from)));
4211 else
4212 emit_move_insn (to_rtx, temp);
4213
4214 preserve_temp_slots (to_rtx);
4215 free_temp_slots ();
4216 pop_temp_slots ();
4217 return;
4218 }
4219
4220 /* In case we are returning the contents of an object which overlaps
4221 the place the value is being stored, use a safe function when copying
4222 a value through a pointer into a structure value return block. */
4223 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4224 && current_function_returns_struct
4225 && !current_function_returns_pcc_struct)
4226 {
4227 rtx from_rtx, size;
4228
4229 push_temp_slots ();
4230 size = expr_size (from);
4231 from_rtx = expand_normal (from);
4232
4233 emit_library_call (memmove_libfunc, LCT_NORMAL,
4234 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4235 XEXP (from_rtx, 0), Pmode,
4236 convert_to_mode (TYPE_MODE (sizetype),
4237 size, TYPE_UNSIGNED (sizetype)),
4238 TYPE_MODE (sizetype));
4239
4240 preserve_temp_slots (to_rtx);
4241 free_temp_slots ();
4242 pop_temp_slots ();
4243 return;
4244 }
4245
4246 /* Compute FROM and store the value in the rtx we got. */
4247
4248 push_temp_slots ();
4249 result = store_expr (from, to_rtx, 0);
4250 preserve_temp_slots (result);
4251 free_temp_slots ();
4252 pop_temp_slots ();
4253 return;
4254 }
4255
4256 /* Generate code for computing expression EXP,
4257 and storing the value into TARGET.
4258
4259 If the mode is BLKmode then we may return TARGET itself.
4260 It turns out that in BLKmode it doesn't cause a problem,
4261 because C has no operators that could combine two different
4262 assignments into the same BLKmode object with different values
4263 with no sequence point. Will other languages need this to
4264 be more thorough?
4265
4266 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4267 stack, and block moves may need to be treated specially. */
4268
4269 rtx
4270 store_expr (tree exp, rtx target, int call_param_p)
4271 {
4272 rtx temp;
4273 rtx alt_rtl = NULL_RTX;
4274 int dont_return_target = 0;
4275
4276 if (VOID_TYPE_P (TREE_TYPE (exp)))
4277 {
4278 /* C++ can generate ?: expressions with a throw expression in one
4279 branch and an rvalue in the other. Here, we resolve attempts to
4280 store the throw expression's nonexistent result. */
4281 gcc_assert (!call_param_p);
4282 expand_expr (exp, const0_rtx, VOIDmode, 0);
4283 return NULL_RTX;
4284 }
4285 if (TREE_CODE (exp) == COMPOUND_EXPR)
4286 {
4287 /* Perform first part of compound expression, then assign from second
4288 part. */
4289 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4290 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4291 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4292 }
4293 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4294 {
4295 /* For conditional expression, get safe form of the target. Then
4296 test the condition, doing the appropriate assignment on either
4297 side. This avoids the creation of unnecessary temporaries.
4298 For non-BLKmode, it is more efficient not to do this. */
4299
4300 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4301
4302 do_pending_stack_adjust ();
4303 NO_DEFER_POP;
4304 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4305 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4306 emit_jump_insn (gen_jump (lab2));
4307 emit_barrier ();
4308 emit_label (lab1);
4309 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4310 emit_label (lab2);
4311 OK_DEFER_POP;
4312
4313 return NULL_RTX;
4314 }
4315 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4316 /* If this is a scalar in a register that is stored in a wider mode
4317 than the declared mode, compute the result into its declared mode
4318 and then convert to the wider mode. Our value is the computed
4319 expression. */
4320 {
4321 rtx inner_target = 0;
4322
4323 /* We can do the conversion inside EXP, which will often result
4324 in some optimizations. Do the conversion in two steps: first
4325 change the signedness, if needed, then the extend. But don't
4326 do this if the type of EXP is a subtype of something else
4327 since then the conversion might involve more than just
4328 converting modes. */
4329 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4330 && TREE_TYPE (TREE_TYPE (exp)) == 0
4331 && (!lang_hooks.reduce_bit_field_operations
4332 || (GET_MODE_PRECISION (GET_MODE (target))
4333 == TYPE_PRECISION (TREE_TYPE (exp)))))
4334 {
4335 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4336 != SUBREG_PROMOTED_UNSIGNED_P (target))
4337 exp = fold_convert
4338 (lang_hooks.types.signed_or_unsigned_type
4339 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4340
4341 exp = fold_convert (lang_hooks.types.type_for_mode
4342 (GET_MODE (SUBREG_REG (target)),
4343 SUBREG_PROMOTED_UNSIGNED_P (target)),
4344 exp);
4345
4346 inner_target = SUBREG_REG (target);
4347 }
4348
4349 temp = expand_expr (exp, inner_target, VOIDmode,
4350 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4351
4352 /* If TEMP is a VOIDmode constant, use convert_modes to make
4353 sure that we properly convert it. */
4354 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4355 {
4356 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4357 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4358 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4359 GET_MODE (target), temp,
4360 SUBREG_PROMOTED_UNSIGNED_P (target));
4361 }
4362
4363 convert_move (SUBREG_REG (target), temp,
4364 SUBREG_PROMOTED_UNSIGNED_P (target));
4365
4366 return NULL_RTX;
4367 }
4368 else
4369 {
4370 temp = expand_expr_real (exp, target, GET_MODE (target),
4371 (call_param_p
4372 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4373 &alt_rtl);
4374 /* Return TARGET if it's a specified hardware register.
4375 If TARGET is a volatile mem ref, either return TARGET
4376 or return a reg copied *from* TARGET; ANSI requires this.
4377
4378 Otherwise, if TEMP is not TARGET, return TEMP
4379 if it is constant (for efficiency),
4380 or if we really want the correct value. */
4381 if (!(target && REG_P (target)
4382 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4383 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4384 && ! rtx_equal_p (temp, target)
4385 && CONSTANT_P (temp))
4386 dont_return_target = 1;
4387 }
4388
4389 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4390 the same as that of TARGET, adjust the constant. This is needed, for
4391 example, in case it is a CONST_DOUBLE and we want only a word-sized
4392 value. */
4393 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4394 && TREE_CODE (exp) != ERROR_MARK
4395 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4396 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4397 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4398
4399 /* If value was not generated in the target, store it there.
4400 Convert the value to TARGET's type first if necessary and emit the
4401 pending increments that were queued when expanding EXP.
4402 Note that we cannot emit the whole queue blindly because this will
4403 effectively disable the POST_INC optimization later.
4404
4405 If TEMP and TARGET compare equal according to rtx_equal_p, but
4406 one or both of them are volatile memory refs, we have to distinguish
4407 two cases:
4408 - expand_expr has used TARGET. In this case, we must not generate
4409 another copy. This can be detected by TARGET being equal according
4410 to == .
4411 - expand_expr has not used TARGET - that means that the source just
4412 happens to have the same RTX form. Since temp will have been created
4413 by expand_expr, it will compare unequal according to == .
4414 We must generate a copy in this case, to reach the correct number
4415 of volatile memory references. */
4416
4417 if ((! rtx_equal_p (temp, target)
4418 || (temp != target && (side_effects_p (temp)
4419 || side_effects_p (target))))
4420 && TREE_CODE (exp) != ERROR_MARK
4421 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4422 but TARGET is not a valid memory reference, TEMP will differ
4423 from TARGET although it is really the same location. */
4424 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4425 /* If there's nothing to copy, don't bother. Don't call
4426 expr_size unless necessary, because some front-ends' (C++)
4427 expr_size hook must not be given objects that are not
4428 supposed to be bit-copied or bit-initialized. */
4429 && expr_size (exp) != const0_rtx)
4430 {
4431 if (GET_MODE (temp) != GET_MODE (target)
4432 && GET_MODE (temp) != VOIDmode)
4433 {
4434 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4435 if (dont_return_target)
4436 {
4437 /* In this case, we will return TEMP,
4438 so make sure it has the proper mode.
4439 But don't forget to store the value into TARGET. */
4440 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4441 emit_move_insn (target, temp);
4442 }
4443 else
4444 convert_move (target, temp, unsignedp);
4445 }
4446
4447 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4448 {
4449 /* Handle copying a string constant into an array. The string
4450 constant may be shorter than the array. So copy just the string's
4451 actual length, and clear the rest. First get the size of the data
4452 type of the string, which is actually the size of the target. */
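/* For example, with char a[10] = "ab" the string's TREE_STRING_LENGTH
   is 3 (it includes the terminating NUL), so three bytes are copied
   and the remaining seven are cleared.  */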
4453 rtx size = expr_size (exp);
4454
4455 if (GET_CODE (size) == CONST_INT
4456 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4457 emit_block_move (target, temp, size,
4458 (call_param_p
4459 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4460 else
4461 {
4462 /* Compute the size of the data to copy from the string. */
4463 tree copy_size
4464 = size_binop (MIN_EXPR,
4465 make_tree (sizetype, size),
4466 size_int (TREE_STRING_LENGTH (exp)));
4467 rtx copy_size_rtx
4468 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4469 (call_param_p
4470 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4471 rtx label = 0;
4472
4473 /* Copy that much. */
4474 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4475 TYPE_UNSIGNED (sizetype));
4476 emit_block_move (target, temp, copy_size_rtx,
4477 (call_param_p
4478 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4479
4480 /* Figure out how much is left in TARGET that we have to clear.
4481 Do all calculations in ptr_mode. */
4482 if (GET_CODE (copy_size_rtx) == CONST_INT)
4483 {
4484 size = plus_constant (size, -INTVAL (copy_size_rtx));
4485 target = adjust_address (target, BLKmode,
4486 INTVAL (copy_size_rtx));
4487 }
4488 else
4489 {
4490 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4491 copy_size_rtx, NULL_RTX, 0,
4492 OPTAB_LIB_WIDEN);
4493
4494 #ifdef POINTERS_EXTEND_UNSIGNED
4495 if (GET_MODE (copy_size_rtx) != Pmode)
4496 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4497 TYPE_UNSIGNED (sizetype));
4498 #endif
4499
4500 target = offset_address (target, copy_size_rtx,
4501 highest_pow2_factor (copy_size));
4502 label = gen_label_rtx ();
4503 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4504 GET_MODE (size), 0, label);
4505 }
4506
4507 if (size != const0_rtx)
4508 clear_storage (target, size, BLOCK_OP_NORMAL);
4509
4510 if (label)
4511 emit_label (label);
4512 }
4513 }
4514 /* Handle calls that return values in multiple non-contiguous locations.
4515 The Irix 6 ABI has examples of this. */
4516 else if (GET_CODE (target) == PARALLEL)
4517 emit_group_load (target, temp, TREE_TYPE (exp),
4518 int_size_in_bytes (TREE_TYPE (exp)));
4519 else if (GET_MODE (temp) == BLKmode)
4520 emit_block_move (target, temp, expr_size (exp),
4521 (call_param_p
4522 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4523 else
4524 {
4525 temp = force_operand (temp, target);
4526 if (temp != target)
4527 emit_move_insn (target, temp);
4528 }
4529 }
4530
4531 return NULL_RTX;
4532 }
4533 \f
4534 /* Helper for categorize_ctor_elements. Identical interface. */
4535
4536 static bool
4537 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4538 HOST_WIDE_INT *p_elt_count,
4539 bool *p_must_clear)
4540 {
4541 unsigned HOST_WIDE_INT idx;
4542 HOST_WIDE_INT nz_elts, elt_count;
4543 tree value, purpose;
4544
4545 /* Whether CTOR is a valid constant initializer, in accordance with what
4546 initializer_constant_valid_p does. If inferred from the constructor
4547 elements, true until proven otherwise. */
4548 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4549 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4550
4551 nz_elts = 0;
4552 elt_count = 0;
4553
4554 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4555 {
4556 HOST_WIDE_INT mult;
4557
4558 mult = 1;
4559 if (TREE_CODE (purpose) == RANGE_EXPR)
4560 {
4561 tree lo_index = TREE_OPERAND (purpose, 0);
4562 tree hi_index = TREE_OPERAND (purpose, 1);
4563
4564 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4565 mult = (tree_low_cst (hi_index, 1)
4566 - tree_low_cst (lo_index, 1) + 1);
4567 }
4568
4569 switch (TREE_CODE (value))
4570 {
4571 case CONSTRUCTOR:
4572 {
4573 HOST_WIDE_INT nz = 0, ic = 0;
4574
4575 bool const_elt_p
4576 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4577
4578 nz_elts += mult * nz;
4579 elt_count += mult * ic;
4580
4581 if (const_from_elts_p && const_p)
4582 const_p = const_elt_p;
4583 }
4584 break;
4585
4586 case INTEGER_CST:
4587 case REAL_CST:
4588 if (!initializer_zerop (value))
4589 nz_elts += mult;
4590 elt_count += mult;
4591 break;
4592
4593 case STRING_CST:
4594 nz_elts += mult * TREE_STRING_LENGTH (value);
4595 elt_count += mult * TREE_STRING_LENGTH (value);
4596 break;
4597
4598 case COMPLEX_CST:
4599 if (!initializer_zerop (TREE_REALPART (value)))
4600 nz_elts += mult;
4601 if (!initializer_zerop (TREE_IMAGPART (value)))
4602 nz_elts += mult;
4603 elt_count += mult;
4604 break;
4605
4606 case VECTOR_CST:
4607 {
4608 tree v;
4609 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4610 {
4611 if (!initializer_zerop (TREE_VALUE (v)))
4612 nz_elts += mult;
4613 elt_count += mult;
4614 }
4615 }
4616 break;
4617
4618 default:
4619 nz_elts += mult;
4620 elt_count += mult;
4621
4622 if (const_from_elts_p && const_p)
4623 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4624 != NULL_TREE;
4625 break;
4626 }
4627 }
4628
4629 if (!*p_must_clear
4630 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4631 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4632 {
4633 tree init_sub_type;
4634 bool clear_this = true;
4635
4636 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4637 {
4638 /* We don't expect more than one element of the union to be
4639 initialized. Not sure what we should do otherwise... */
4640 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4641 == 1);
4642
4643 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4644 CONSTRUCTOR_ELTS (ctor),
4645 0)->value);
4646
4647 /* ??? We could look at each element of the union, and find the
4648 largest element. Which would avoid comparing the size of the
4649 initialized element against any tail padding in the union.
4650 Doesn't seem worth the effort... */
4651 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4652 TYPE_SIZE (init_sub_type)) == 1)
4653 {
4654 /* And now we have to find out if the element itself is fully
4655 constructed. E.g. for union { struct { int a, b; } s; } u
4656 = { .s = { .a = 1 } }. */
4657 if (elt_count == count_type_elements (init_sub_type, false))
4658 clear_this = false;
4659 }
4660 }
4661
4662 *p_must_clear = clear_this;
4663 }
4664
4665 *p_nz_elts += nz_elts;
4666 *p_elt_count += elt_count;
4667
4668 return const_p;
4669 }
4670
4671 /* Examine CTOR to discover:
4672 * how many scalar fields are set to nonzero values,
4673 and place it in *P_NZ_ELTS;
4674 * how many scalar fields in total are in CTOR,
4675 and place it in *P_ELT_COUNT;
4676 * if a type is a union, and the initializer from the constructor
4677 is not the largest element in the union, then set *P_MUST_CLEAR.
4678
4679 Return whether or not CTOR is a valid static constant initializer, the same
4680 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4681
4682 bool
4683 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4684 HOST_WIDE_INT *p_elt_count,
4685 bool *p_must_clear)
4686 {
4687 *p_nz_elts = 0;
4688 *p_elt_count = 0;
4689 *p_must_clear = false;
4690
4691 return
4692 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4693 }
4694
4695 /* Count the number of scalars in TYPE. Return -1 on overflow or
4696 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
4697 flexible array member at the end of the structure. */
4698
4699 HOST_WIDE_INT
4700 count_type_elements (tree type, bool allow_flexarr)
4701 {
4702 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4703 switch (TREE_CODE (type))
4704 {
4705 case ARRAY_TYPE:
4706 {
4707 tree telts = array_type_nelts (type);
4708 if (telts && host_integerp (telts, 1))
4709 {
4710 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4711 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
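/* N * M could overflow a HOST_WIDE_INT, so multiply only when
   MAX / N > M shows that the product fits; otherwise fall through
   and return -1.  */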
4712 if (n == 0)
4713 return 0;
4714 else if (max / n > m)
4715 return n * m;
4716 }
4717 return -1;
4718 }
4719
4720 case RECORD_TYPE:
4721 {
4722 HOST_WIDE_INT n = 0, t;
4723 tree f;
4724
4725 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4726 if (TREE_CODE (f) == FIELD_DECL)
4727 {
4728 t = count_type_elements (TREE_TYPE (f), false);
4729 if (t < 0)
4730 {
4731 /* Check for structures with flexible array member. */
4732 tree tf = TREE_TYPE (f);
4733 if (allow_flexarr
4734 && TREE_CHAIN (f) == NULL
4735 && TREE_CODE (tf) == ARRAY_TYPE
4736 && TYPE_DOMAIN (tf)
4737 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4738 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4739 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4740 && int_size_in_bytes (type) >= 0)
4741 break;
4742
4743 return -1;
4744 }
4745 n += t;
4746 }
4747
4748 return n;
4749 }
4750
4751 case UNION_TYPE:
4752 case QUAL_UNION_TYPE:
4753 {
4754 /* Ho hum. How in the world do we guess here? Clearly it isn't
4755 right to count the fields. Guess based on the number of words. */
4756 HOST_WIDE_INT n = int_size_in_bytes (type);
4757 if (n < 0)
4758 return -1;
4759 return n / UNITS_PER_WORD;
4760 }
4761
4762 case COMPLEX_TYPE:
4763 return 2;
4764
4765 case VECTOR_TYPE:
4766 return TYPE_VECTOR_SUBPARTS (type);
4767
4768 case INTEGER_TYPE:
4769 case REAL_TYPE:
4770 case ENUMERAL_TYPE:
4771 case BOOLEAN_TYPE:
4772 case POINTER_TYPE:
4773 case OFFSET_TYPE:
4774 case REFERENCE_TYPE:
4775 return 1;
4776
4777 case VOID_TYPE:
4778 case METHOD_TYPE:
4779 case FUNCTION_TYPE:
4780 case LANG_TYPE:
4781 default:
4782 gcc_unreachable ();
4783 }
4784 }
4785
4786 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4787
4788 static int
4789 mostly_zeros_p (tree exp)
4790 {
4791 if (TREE_CODE (exp) == CONSTRUCTOR)
4792
4793 {
4794 HOST_WIDE_INT nz_elts, count, elts;
4795 bool must_clear;
4796
4797 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4798 if (must_clear)
4799 return 1;
4800
4801 elts = count_type_elements (TREE_TYPE (exp), false);
4802
4803 return nz_elts < elts / 4;
4804 }
4805
4806 return initializer_zerop (exp);
4807 }
4808
4809 /* Return 1 if EXP contains all zeros. */
4810
4811 static int
4812 all_zeros_p (tree exp)
4813 {
4814 if (TREE_CODE (exp) == CONSTRUCTOR)
4815
4816 {
4817 HOST_WIDE_INT nz_elts, count;
4818 bool must_clear;
4819
4820 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4821 return nz_elts == 0;
4822 }
4823
4824 return initializer_zerop (exp);
4825 }
4826 \f
4827 /* Helper function for store_constructor.
4828 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4829 TYPE is the type of the CONSTRUCTOR, not the element type.
4830 CLEARED is as for store_constructor.
4831 ALIAS_SET is the alias set to use for any stores.
4832
4833 This provides a recursive shortcut back to store_constructor when it isn't
4834 necessary to go through store_field. This is so that we can pass through
4835 the cleared field to let store_constructor know that we may not have to
4836 clear a substructure if the outer structure has already been cleared. */
4837
4838 static void
4839 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4840 HOST_WIDE_INT bitpos, enum machine_mode mode,
4841 tree exp, tree type, int cleared, int alias_set)
4842 {
4843 if (TREE_CODE (exp) == CONSTRUCTOR
4844 /* We can only call store_constructor recursively if the size and
4845 bit position are on a byte boundary. */
4846 && bitpos % BITS_PER_UNIT == 0
4847 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4848 /* If we have a nonzero bitpos for a register target, then we just
4849 let store_field do the bitfield handling. This is unlikely to
4850 generate unnecessary clear instructions anyway. */
4851 && (bitpos == 0 || MEM_P (target)))
4852 {
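/* VOIDmode here tells adjust_address to leave TARGET's mode
   unchanged; BLKmode is used instead when TARGET is BLKmode or when
   BITPOS is not aligned to TARGET's mode.  */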
4853 if (MEM_P (target))
4854 target
4855 = adjust_address (target,
4856 GET_MODE (target) == BLKmode
4857 || 0 != (bitpos
4858 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4859 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4860
4861
4862 /* Update the alias set, if required. */
4863 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4864 && MEM_ALIAS_SET (target) != 0)
4865 {
4866 target = copy_rtx (target);
4867 set_mem_alias_set (target, alias_set);
4868 }
4869
4870 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4871 }
4872 else
4873 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4874 }
4875
4876 /* Store the value of constructor EXP into the rtx TARGET.
4877 TARGET is either a REG or a MEM; we know it cannot conflict, since
4878 safe_from_p has been called.
4879 CLEARED is true if TARGET is known to have been zero'd.
4880 SIZE is the number of bytes of TARGET we are allowed to modify: this
4881 may not be the same as the size of EXP if we are assigning to a field
4882 which has been packed to exclude padding bits. */
4883
4884 static void
4885 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4886 {
4887 tree type = TREE_TYPE (exp);
4888 #ifdef WORD_REGISTER_OPERATIONS
4889 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4890 #endif
4891
4892 switch (TREE_CODE (type))
4893 {
4894 case RECORD_TYPE:
4895 case UNION_TYPE:
4896 case QUAL_UNION_TYPE:
4897 {
4898 unsigned HOST_WIDE_INT idx;
4899 tree field, value;
4900
4901 /* If size is zero or the target is already cleared, do nothing. */
4902 if (size == 0 || cleared)
4903 cleared = 1;
4904 /* We either clear the aggregate or indicate the value is dead. */
4905 else if ((TREE_CODE (type) == UNION_TYPE
4906 || TREE_CODE (type) == QUAL_UNION_TYPE)
4907 && ! CONSTRUCTOR_ELTS (exp))
4908 /* If the constructor is empty, clear the union. */
4909 {
4910 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4911 cleared = 1;
4912 }
4913
4914 /* If we are building a static constructor into a register,
4915 set the initial value as zero so we can fold the value into
4916 a constant. But if more than one register is involved,
4917 this probably loses. */
4918 else if (REG_P (target) && TREE_STATIC (exp)
4919 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4920 {
4921 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4922 cleared = 1;
4923 }
4924
4925 /* If the constructor has fewer fields than the structure or
4926 if we are initializing the structure to mostly zeros, clear
4927 the whole structure first. Don't do this if TARGET is a
4928 register whose mode size isn't equal to SIZE since
4929 clear_storage can't handle this case. */
4930 else if (size > 0
4931 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4932 != fields_length (type))
4933 || mostly_zeros_p (exp))
4934 && (!REG_P (target)
4935 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4936 == size)))
4937 {
4938 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4939 cleared = 1;
4940 }
4941
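/* Inform later passes that the old value is dead.  */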
4942 if (! cleared)
4943 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4944
4945 /* Store each element of the constructor into the
4946 corresponding field of TARGET. */
4947 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4948 {
4949 enum machine_mode mode;
4950 HOST_WIDE_INT bitsize;
4951 HOST_WIDE_INT bitpos = 0;
4952 tree offset;
4953 rtx to_rtx = target;
4954
4955 /* Just ignore missing fields. We cleared the whole
4956 structure, above, if any fields are missing. */
4957 if (field == 0)
4958 continue;
4959
4960 if (cleared && initializer_zerop (value))
4961 continue;
4962
4963 if (host_integerp (DECL_SIZE (field), 1))
4964 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4965 else
4966 bitsize = -1;
4967
4968 mode = DECL_MODE (field);
4969 if (DECL_BIT_FIELD (field))
4970 mode = VOIDmode;
4971
4972 offset = DECL_FIELD_OFFSET (field);
4973 if (host_integerp (offset, 0)
4974 && host_integerp (bit_position (field), 0))
4975 {
4976 bitpos = int_bit_position (field);
4977 offset = 0;
4978 }
4979 else
4980 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4981
4982 if (offset)
4983 {
4984 rtx offset_rtx;
4985
4986 offset
4987 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4988 make_tree (TREE_TYPE (exp),
4989 target));
4990
4991 offset_rtx = expand_normal (offset);
4992 gcc_assert (MEM_P (to_rtx));
4993
4994 #ifdef POINTERS_EXTEND_UNSIGNED
4995 if (GET_MODE (offset_rtx) != Pmode)
4996 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4997 #else
4998 if (GET_MODE (offset_rtx) != ptr_mode)
4999 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5000 #endif
5001
5002 to_rtx = offset_address (to_rtx, offset_rtx,
5003 highest_pow2_factor (offset));
5004 }
5005
5006 #ifdef WORD_REGISTER_OPERATIONS
5007 /* If this initializes a field that is smaller than a
5008 word, at the start of a word, try to widen it to a full
5009 word. This special case allows us to output C++ member
5010 function initializations in a form that the optimizers
5011 can understand. */
5012 if (REG_P (target)
5013 && bitsize < BITS_PER_WORD
5014 && bitpos % BITS_PER_WORD == 0
5015 && GET_MODE_CLASS (mode) == MODE_INT
5016 && TREE_CODE (value) == INTEGER_CST
5017 && exp_size >= 0
5018 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5019 {
5020 tree type = TREE_TYPE (value);
5021
5022 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5023 {
5024 type = lang_hooks.types.type_for_size
5025 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5026 value = fold_convert (type, value);
5027 }
5028
5029 if (BYTES_BIG_ENDIAN)
5030 value
5031 = fold_build2 (LSHIFT_EXPR, type, value,
5032 build_int_cst (type,
5033 BITS_PER_WORD - bitsize));
5034 bitsize = BITS_PER_WORD;
5035 mode = word_mode;
5036 }
5037 #endif
5038
5039 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5040 && DECL_NONADDRESSABLE_P (field))
5041 {
5042 to_rtx = copy_rtx (to_rtx);
5043 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5044 }
5045
5046 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5047 value, type, cleared,
5048 get_alias_set (TREE_TYPE (field)));
5049 }
5050 break;
5051 }
5052 case ARRAY_TYPE:
5053 {
5054 tree value, index;
5055 unsigned HOST_WIDE_INT i;
5056 int need_to_clear;
5057 tree domain;
5058 tree elttype = TREE_TYPE (type);
5059 int const_bounds_p;
5060 HOST_WIDE_INT minelt = 0;
5061 HOST_WIDE_INT maxelt = 0;
5062
5063 domain = TYPE_DOMAIN (type);
5064 const_bounds_p = (TYPE_MIN_VALUE (domain)
5065 && TYPE_MAX_VALUE (domain)
5066 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5067 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5068
5069 /* If we have constant bounds for the range of the type, get them. */
5070 if (const_bounds_p)
5071 {
5072 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5073 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5074 }
5075
5076 /* If the constructor has fewer elements than the array, clear
5077 the whole array first. Similarly if this is a static
5078 constructor of a non-BLKmode object. */
5079 if (cleared)
5080 need_to_clear = 0;
5081 else if (REG_P (target) && TREE_STATIC (exp))
5082 need_to_clear = 1;
5083 else
5084 {
5085 unsigned HOST_WIDE_INT idx;
5086 tree index, value;
5087 HOST_WIDE_INT count = 0, zero_count = 0;
5088 need_to_clear = ! const_bounds_p;
5089
5090 /* This loop is a more accurate version of the loop in
5091 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5092 is also needed to check for missing elements. */
5093 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5094 {
5095 HOST_WIDE_INT this_node_count;
5096
5097 if (need_to_clear)
5098 break;
5099
5100 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5101 {
5102 tree lo_index = TREE_OPERAND (index, 0);
5103 tree hi_index = TREE_OPERAND (index, 1);
5104
5105 if (! host_integerp (lo_index, 1)
5106 || ! host_integerp (hi_index, 1))
5107 {
5108 need_to_clear = 1;
5109 break;
5110 }
5111
5112 this_node_count = (tree_low_cst (hi_index, 1)
5113 - tree_low_cst (lo_index, 1) + 1);
5114 }
5115 else
5116 this_node_count = 1;
5117
5118 count += this_node_count;
5119 if (mostly_zeros_p (value))
5120 zero_count += this_node_count;
5121 }
5122
5123 /* Clear the entire array first if there are any missing
5124 elements, or if the incidence of zero elements is >=
5125 75%. */
5126 if (! need_to_clear
5127 && (count < maxelt - minelt + 1
5128 || 4 * zero_count >= 3 * count))
5129 need_to_clear = 1;
5130 }
5131
5132 if (need_to_clear && size > 0)
5133 {
5134 if (REG_P (target))
5135 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5136 else
5137 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5138 cleared = 1;
5139 }
5140
5141 if (!cleared && REG_P (target))
5142 /* Inform later passes that the old value is dead. */
5143 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5144
5145 /* Store each element of the constructor into the
5146 corresponding element of TARGET, determined by counting the
5147 elements. */
5148 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5149 {
5150 enum machine_mode mode;
5151 HOST_WIDE_INT bitsize;
5152 HOST_WIDE_INT bitpos;
5153 int unsignedp;
5154 rtx xtarget = target;
5155
5156 if (cleared && initializer_zerop (value))
5157 continue;
5158
5159 unsignedp = TYPE_UNSIGNED (elttype);
5160 mode = TYPE_MODE (elttype);
5161 if (mode == BLKmode)
5162 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5163 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5164 : -1);
5165 else
5166 bitsize = GET_MODE_BITSIZE (mode);
5167
5168 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5169 {
5170 tree lo_index = TREE_OPERAND (index, 0);
5171 tree hi_index = TREE_OPERAND (index, 1);
5172 rtx index_r, pos_rtx;
5173 HOST_WIDE_INT lo, hi, count;
5174 tree position;
5175
5176 /* If the range is constant and "small", unroll the loop. */
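/* "Small" means the target is not a MEM, or there are at most two
   elements, or the total size is at most 40 bytes (40 * 8 bits).  */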
5177 if (const_bounds_p
5178 && host_integerp (lo_index, 0)
5179 && host_integerp (hi_index, 0)
5180 && (lo = tree_low_cst (lo_index, 0),
5181 hi = tree_low_cst (hi_index, 0),
5182 count = hi - lo + 1,
5183 (!MEM_P (target)
5184 || count <= 2
5185 || (host_integerp (TYPE_SIZE (elttype), 1)
5186 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5187 <= 40 * 8)))))
5188 {
5189 lo -= minelt; hi -= minelt;
5190 for (; lo <= hi; lo++)
5191 {
5192 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5193
5194 if (MEM_P (target)
5195 && !MEM_KEEP_ALIAS_SET_P (target)
5196 && TREE_CODE (type) == ARRAY_TYPE
5197 && TYPE_NONALIASED_COMPONENT (type))
5198 {
5199 target = copy_rtx (target);
5200 MEM_KEEP_ALIAS_SET_P (target) = 1;
5201 }
5202
5203 store_constructor_field
5204 (target, bitsize, bitpos, mode, value, type, cleared,
5205 get_alias_set (elttype));
5206 }
5207 }
5208 else
5209 {
5210 rtx loop_start = gen_label_rtx ();
5211 rtx loop_end = gen_label_rtx ();
5212 tree exit_cond;
5213
5214 expand_normal (hi_index);
5215 unsignedp = TYPE_UNSIGNED (domain);
5216
5217 index = build_decl (VAR_DECL, NULL_TREE, domain);
5218
5219 index_r
5220 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5221 &unsignedp, 0));
5222 SET_DECL_RTL (index, index_r);
5223 store_expr (lo_index, index_r, 0);
5224
5225 /* Build the head of the loop. */
5226 do_pending_stack_adjust ();
5227 emit_label (loop_start);
5228
5229 /* Assign value to element index. */
5230 position =
5231 fold_convert (ssizetype,
5232 fold_build2 (MINUS_EXPR,
5233 TREE_TYPE (index),
5234 index,
5235 TYPE_MIN_VALUE (domain)));
5236
5237 position =
5238 size_binop (MULT_EXPR, position,
5239 fold_convert (ssizetype,
5240 TYPE_SIZE_UNIT (elttype)));
5241
5242 pos_rtx = expand_normal (position);
5243 xtarget = offset_address (target, pos_rtx,
5244 highest_pow2_factor (position));
5245 xtarget = adjust_address (xtarget, mode, 0);
5246 if (TREE_CODE (value) == CONSTRUCTOR)
5247 store_constructor (value, xtarget, cleared,
5248 bitsize / BITS_PER_UNIT);
5249 else
5250 store_expr (value, xtarget, 0);
5251
5252 /* Generate a conditional jump to exit the loop. */
5253 exit_cond = build2 (LT_EXPR, integer_type_node,
5254 index, hi_index);
5255 jumpif (exit_cond, loop_end);
5256
5257 /* Update the loop counter, and jump to the head of
5258 the loop. */
5259 expand_assignment (index,
5260 build2 (PLUS_EXPR, TREE_TYPE (index),
5261 index, integer_one_node));
5262
5263 emit_jump (loop_start);
5264
5265 /* Build the end of the loop. */
5266 emit_label (loop_end);
5267 }
5268 }
5269 else if ((index != 0 && ! host_integerp (index, 0))
5270 || ! host_integerp (TYPE_SIZE (elttype), 1))
5271 {
5272 tree position;
5273
5274 if (index == 0)
5275 index = ssize_int (1);
5276
5277 if (minelt)
5278 index = fold_convert (ssizetype,
5279 fold_build2 (MINUS_EXPR,
5280 TREE_TYPE (index),
5281 index,
5282 TYPE_MIN_VALUE (domain)));
5283
5284 position =
5285 size_binop (MULT_EXPR, index,
5286 fold_convert (ssizetype,
5287 TYPE_SIZE_UNIT (elttype)));
5288 xtarget = offset_address (target,
5289 expand_normal (position),
5290 highest_pow2_factor (position));
5291 xtarget = adjust_address (xtarget, mode, 0);
5292 store_expr (value, xtarget, 0);
5293 }
5294 else
5295 {
5296 if (index != 0)
5297 bitpos = ((tree_low_cst (index, 0) - minelt)
5298 * tree_low_cst (TYPE_SIZE (elttype), 1));
5299 else
5300 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5301
5302 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5303 && TREE_CODE (type) == ARRAY_TYPE
5304 && TYPE_NONALIASED_COMPONENT (type))
5305 {
5306 target = copy_rtx (target);
5307 MEM_KEEP_ALIAS_SET_P (target) = 1;
5308 }
5309 store_constructor_field (target, bitsize, bitpos, mode, value,
5310 type, cleared, get_alias_set (elttype));
5311 }
5312 }
5313 break;
5314 }
5315
5316 case VECTOR_TYPE:
5317 {
5318 unsigned HOST_WIDE_INT idx;
5319 constructor_elt *ce;
5320 int i;
5321 int need_to_clear;
5322 int icode = 0;
5323 tree elttype = TREE_TYPE (type);
5324 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5325 enum machine_mode eltmode = TYPE_MODE (elttype);
5326 HOST_WIDE_INT bitsize;
5327 HOST_WIDE_INT bitpos;
5328 rtvec vector = NULL;
5329 unsigned n_elts;
5330
5331 gcc_assert (eltmode != BLKmode);
5332
5333 n_elts = TYPE_VECTOR_SUBPARTS (type);
5334 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5335 {
5336 enum machine_mode mode = GET_MODE (target);
5337
5338 icode = (int) vec_init_optab->handlers[mode].insn_code;
5339 if (icode != CODE_FOR_nothing)
5340 {
5341 unsigned int i;
5342
5343 vector = rtvec_alloc (n_elts);
5344 for (i = 0; i < n_elts; i++)
5345 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5346 }
5347 }
5348
5349 /* If the constructor has fewer elements than the vector,
5350 clear the whole array first. Similarly if this is a static
5351 constructor of a non-BLKmode object. */
5352 if (cleared)
5353 need_to_clear = 0;
5354 else if (REG_P (target) && TREE_STATIC (exp))
5355 need_to_clear = 1;
5356 else
5357 {
5358 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5359 tree value;
5360
5361 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5362 {
5363 int n_elts_here = tree_low_cst
5364 (int_const_binop (TRUNC_DIV_EXPR,
5365 TYPE_SIZE (TREE_TYPE (value)),
5366 TYPE_SIZE (elttype), 0), 1);
5367
5368 count += n_elts_here;
5369 if (mostly_zeros_p (value))
5370 zero_count += n_elts_here;
5371 }
5372
5373 /* Clear the entire vector first if there are any missing elements,
5374 or if the incidence of zero elements is >= 75%. */
5375 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5376 }
5377
5378 if (need_to_clear && size > 0 && !vector)
5379 {
5380 if (REG_P (target))
5381 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5382 else
5383 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5384 cleared = 1;
5385 }
5386
5387 /* Inform later passes that the old value is dead. */
5388 if (!cleared && !vector && REG_P (target))
5389 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5390
5391 /* Store each element of the constructor into the corresponding
5392 element of TARGET, determined by counting the elements. */
5393 for (idx = 0, i = 0;
5394 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5395 idx++, i += bitsize / elt_size)
5396 {
5397 HOST_WIDE_INT eltpos;
5398 tree value = ce->value;
5399
5400 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5401 if (cleared && initializer_zerop (value))
5402 continue;
5403
5404 if (ce->index)
5405 eltpos = tree_low_cst (ce->index, 1);
5406 else
5407 eltpos = i;
5408
5409 if (vector)
5410 {
5411 /* Vector CONSTRUCTORs should only be built from smaller
5412 vectors in the case of BLKmode vectors. */
5413 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5414 RTVEC_ELT (vector, eltpos)
5415 = expand_normal (value);
5416 }
5417 else
5418 {
5419 enum machine_mode value_mode =
5420 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5421 ? TYPE_MODE (TREE_TYPE (value))
5422 : eltmode;
5423 bitpos = eltpos * elt_size;
5424 store_constructor_field (target, bitsize, bitpos,
5425 value_mode, value, type,
5426 cleared, get_alias_set (elttype));
5427 }
5428 }
5429
5430 if (vector)
5431 emit_insn (GEN_FCN (icode)
5432 (target,
5433 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5434 break;
5435 }
5436
5437 default:
5438 gcc_unreachable ();
5439 }
5440 }
5441
5442 /* Store the value of EXP (an expression tree)
5443 into a subfield of TARGET which has mode MODE and occupies
5444 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5445 If MODE is VOIDmode, it means that we are storing into a bit-field.
5446
5447 Always return const0_rtx unless we have something particular to
5448 return.
5449
5450 TYPE is the type of the underlying object,
5451
5452 ALIAS_SET is the alias set for the destination. This value will
5453 (in general) be different from that for TARGET, since TARGET is a
5454 reference to the containing structure. */
5455
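/* For instance (an illustrative sketch, not a requirement of callers):
   a C bit-field assignment such as `struct s { int f : 3; } x; x.f = v;'
   reaches store_field with BITSIZE == 3, BITPOS giving the field's
   position within TARGET, and MODE == VOIDmode, which selects the
   store_bit_field path below.  */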
5456 static rtx
5457 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5458 enum machine_mode mode, tree exp, tree type, int alias_set)
5459 {
5460 HOST_WIDE_INT width_mask = 0;
5461
5462 if (TREE_CODE (exp) == ERROR_MARK)
5463 return const0_rtx;
5464
5465 /* If we have nothing to store, do nothing unless the expression has
5466 side-effects. */
5467 if (bitsize == 0)
5468 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5469 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5470 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5471
5472 /* If we are storing into an unaligned field of an aligned union that is
5473 in a register, we may have the mode of TARGET being an integer mode but
5474 MODE == BLKmode. In that case, get an aligned object whose size and
5475 alignment are the same as TARGET and store TARGET into it (we can avoid
5476 the store if the field being stored is the entire width of TARGET). Then
5477 call ourselves recursively to store the field into a BLKmode version of
5478 that object. Finally, load from the object into TARGET. This is not
5479 very efficient in general, but should only be slightly more expensive
5480 than the otherwise-required unaligned accesses. Perhaps this can be
5481 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5482 twice, once with emit_move_insn and once via store_field. */
5483
5484 if (mode == BLKmode
5485 && (REG_P (target) || GET_CODE (target) == SUBREG))
5486 {
5487 rtx object = assign_temp (type, 0, 1, 1);
5488 rtx blk_object = adjust_address (object, BLKmode, 0);
5489
5490 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5491 emit_move_insn (object, target);
5492
5493 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5494
5495 emit_move_insn (target, object);
5496
5497 /* We want to return the BLKmode version of the data. */
5498 return blk_object;
5499 }
5500
5501 if (GET_CODE (target) == CONCAT)
5502 {
5503 /* We're storing into a struct containing a single __complex. */
5504
5505 gcc_assert (!bitpos);
5506 return store_expr (exp, target, 0);
5507 }
5508
5509 /* If the structure is in a register or if the component
5510 is a bit field, we cannot use addressing to access it.
5511 Use bit-field techniques or SUBREG to store in it. */
5512
5513 if (mode == VOIDmode
5514 || (mode != BLKmode && ! direct_store[(int) mode]
5515 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5516 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5517 || REG_P (target)
5518 || GET_CODE (target) == SUBREG
5519 /* If the field isn't aligned enough to store as an ordinary memref,
5520 store it as a bit field. */
5521 || (mode != BLKmode
5522 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5523 || bitpos % GET_MODE_ALIGNMENT (mode))
5524 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5525 || (bitpos % BITS_PER_UNIT != 0)))
5526 /* If the RHS and field are a constant size and the size of the
5527 RHS isn't the same size as the bitfield, we must use bitfield
5528 operations. */
5529 || (bitsize >= 0
5530 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5531 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5532 {
5533 rtx temp;
5534
5535 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5536 implies a mask operation. If the precision is the same size as
5537 the field we're storing into, that mask is redundant. This is
5538 particularly common with bit field assignments generated by the
5539 C front end. */
5540 if (TREE_CODE (exp) == NOP_EXPR)
5541 {
5542 tree type = TREE_TYPE (exp);
5543 if (INTEGRAL_TYPE_P (type)
5544 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5545 && bitsize == TYPE_PRECISION (type))
5546 {
5547 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5548 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5549 exp = TREE_OPERAND (exp, 0);
5550 }
5551 }
5552
5553 temp = expand_normal (exp);
5554
5555 /* If BITSIZE is narrower than the size of the type of EXP
5556 we will be narrowing TEMP. Normally, what's wanted are the
5557 low-order bits. However, if EXP's type is a record and this is
5558 a big-endian machine, we want the upper BITSIZE bits. */
5559 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5560 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5561 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5562 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5563 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5564 - bitsize),
5565 NULL_RTX, 1);
5566
5567 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5568 MODE. */
5569 if (mode != VOIDmode && mode != BLKmode
5570 && mode != TYPE_MODE (TREE_TYPE (exp)))
5571 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5572
5573 /* If the modes of TARGET and TEMP are both BLKmode, both
5574 must be in memory and BITPOS must be aligned on a byte
5575 boundary. If so, we simply do a block copy. */
5576 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5577 {
5578 gcc_assert (MEM_P (target) && MEM_P (temp)
5579 && !(bitpos % BITS_PER_UNIT));
5580
5581 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5582 emit_block_move (target, temp,
5583 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5584 / BITS_PER_UNIT),
5585 BLOCK_OP_NORMAL);
5586
5587 return const0_rtx;
5588 }
5589
5590 /* Store the value in the bitfield. */
5591 store_bit_field (target, bitsize, bitpos, mode, temp);
5592
5593 return const0_rtx;
5594 }
5595 else
5596 {
5597 /* Now build a reference to just the desired component. */
5598 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5599
5600 if (to_rtx == target)
5601 to_rtx = copy_rtx (to_rtx);
5602
5603 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5604 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5605 set_mem_alias_set (to_rtx, alias_set);
5606
5607 return store_expr (exp, to_rtx, 0);
5608 }
5609 }
5610 \f
5611 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5612 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5613 codes and find the ultimate containing object, which we return.
5614
5615 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5616 bit position, and *PUNSIGNEDP to the signedness of the field.
5617 If the position of the field is variable, we store a tree
5618 giving the variable offset (in units) in *POFFSET.
5619 This offset is in addition to the bit position.
5620 If the position is not variable, we store 0 in *POFFSET.
5621
5622 If any of the extraction expressions is volatile,
5623 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5624
5625 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5626 is a mode that can be used to access the field. In that case, *PBITSIZE
5627 is redundant.
5628
5629 If the field describes a variable-sized object, *PMODE is set to
5630 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5631 this case, but the address of the object can be found.
5632
5633 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5634 look through nodes that serve as markers of a greater alignment than
5635 the one that can be deduced from the expression. These nodes make it
5636 possible for front-ends to prevent temporaries from being created by
5637 the middle-end on alignment considerations. For that purpose, the
5638 normal operating mode at high-level is to always pass FALSE so that
5639 the ultimate containing object is really returned; moreover, the
5640 associated predicate handled_component_p will always return TRUE
5641 on these nodes, thus indicating that they are essentially handled
5642 by get_inner_reference. TRUE should only be passed when the caller
5643 is scanning the expression in order to build another representation
5644 and specifically knows how to handle these nodes; as such, this is
5645 the normal operating mode in the RTL expanders. */
5646
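/* An illustrative example: for EXP representing `s.f[i]' with 4-byte
   array elements and a variable index `i', the walk below typically
   returns the tree for `s' with *POFFSET holding the byte offset of
   `f' within `s' plus (sizetype) i * 4, and *PBITPOS the remaining
   bit offset (0 here).  With a constant index, the whole position is
   instead folded into *PBITPOS and *POFFSET is 0.  */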
5647 tree
5648 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5649 HOST_WIDE_INT *pbitpos, tree *poffset,
5650 enum machine_mode *pmode, int *punsignedp,
5651 int *pvolatilep, bool keep_aligning)
5652 {
5653 tree size_tree = 0;
5654 enum machine_mode mode = VOIDmode;
5655 tree offset = size_zero_node;
5656 tree bit_offset = bitsize_zero_node;
5657 tree tem;
5658
5659 /* First get the mode, signedness, and size. We do this from just the
5660 outermost expression. */
5661 if (TREE_CODE (exp) == COMPONENT_REF)
5662 {
5663 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5664 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5665 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5666
5667 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5668 }
5669 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5670 {
5671 size_tree = TREE_OPERAND (exp, 1);
5672 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5673 }
5674 else
5675 {
5676 mode = TYPE_MODE (TREE_TYPE (exp));
5677 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5678
5679 if (mode == BLKmode)
5680 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5681 else
5682 *pbitsize = GET_MODE_BITSIZE (mode);
5683 }
5684
5685 if (size_tree != 0)
5686 {
5687 if (! host_integerp (size_tree, 1))
5688 mode = BLKmode, *pbitsize = -1;
5689 else
5690 *pbitsize = tree_low_cst (size_tree, 1);
5691 }
5692
5693 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5694 and find the ultimate containing object. */
5695 while (1)
5696 {
5697 switch (TREE_CODE (exp))
5698 {
5699 case BIT_FIELD_REF:
5700 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5701 TREE_OPERAND (exp, 2));
5702 break;
5703
5704 case COMPONENT_REF:
5705 {
5706 tree field = TREE_OPERAND (exp, 1);
5707 tree this_offset = component_ref_field_offset (exp);
5708
5709 /* If this field hasn't been filled in yet, don't go past it.
5710 This should only happen when folding expressions made during
5711 type construction. */
5712 if (this_offset == 0)
5713 break;
5714
5715 offset = size_binop (PLUS_EXPR, offset, this_offset);
5716 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5717 DECL_FIELD_BIT_OFFSET (field));
5718
5719 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5720 }
5721 break;
5722
5723 case ARRAY_REF:
5724 case ARRAY_RANGE_REF:
5725 {
5726 tree index = TREE_OPERAND (exp, 1);
5727 tree low_bound = array_ref_low_bound (exp);
5728 tree unit_size = array_ref_element_size (exp);
5729
5730 /* We assume all arrays have sizes that are a multiple of a byte.
5731 First subtract the lower bound, if any, in the type of the
5732 index, then convert to sizetype and multiply by the size of
5733 the array element. */
5734 if (! integer_zerop (low_bound))
5735 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5736 index, low_bound);
5737
5738 offset = size_binop (PLUS_EXPR, offset,
5739 size_binop (MULT_EXPR,
5740 fold_convert (sizetype, index),
5741 unit_size));
5742 }
5743 break;
5744
5745 case REALPART_EXPR:
5746 break;
5747
5748 case IMAGPART_EXPR:
5749 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5750 bitsize_int (*pbitsize));
5751 break;
5752
5753 case VIEW_CONVERT_EXPR:
5754 if (keep_aligning && STRICT_ALIGNMENT
5755 && (TYPE_ALIGN (TREE_TYPE (exp))
5756 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5757 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5758 < BIGGEST_ALIGNMENT)
5759 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5760 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5761 goto done;
5762 break;
5763
5764 default:
5765 goto done;
5766 }
5767
5768 /* If any reference in the chain is volatile, the effect is volatile. */
5769 if (TREE_THIS_VOLATILE (exp))
5770 *pvolatilep = 1;
5771
5772 exp = TREE_OPERAND (exp, 0);
5773 }
5774 done:
5775
5776 /* If OFFSET is constant, see if we can return the whole thing as a
5777 constant bit position. Otherwise, split it up. */
5778 if (host_integerp (offset, 0)
5779 && 0 != (tem = size_binop (MULT_EXPR,
5780 fold_convert (bitsizetype, offset),
5781 bitsize_unit_node))
5782 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5783 && host_integerp (tem, 0))
5784 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5785 else
5786 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5787
5788 *pmode = mode;
5789 return exp;
5790 }
5791
5792 /* Return a tree of sizetype representing the size, in bytes, of the element
5793 of EXP, an ARRAY_REF. */
5794
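/* For example (illustrative): if the ARRAY_REF's operand 3 holds 3 and
   the element type has TYPE_ALIGN_UNIT == 4, the result is the folded
   product 3 * 4 == 12 bytes; without operand 3, the element type's
   TYPE_SIZE_UNIT is used directly.  */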
5795 tree
5796 array_ref_element_size (tree exp)
5797 {
5798 tree aligned_size = TREE_OPERAND (exp, 3);
5799 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5800
5801 /* If a size was specified in the ARRAY_REF, it's the size measured
5802 in alignment units of the element type. So multiply by that value. */
5803 if (aligned_size)
5804 {
5805 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5806 sizetype from another type of the same width and signedness. */
5807 if (TREE_TYPE (aligned_size) != sizetype)
5808 aligned_size = fold_convert (sizetype, aligned_size);
5809 return size_binop (MULT_EXPR, aligned_size,
5810 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5811 }
5812
5813 /* Otherwise, take the size from that of the element type. Substitute
5814 any PLACEHOLDER_EXPR that we have. */
5815 else
5816 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5817 }
5818
5819 /* Return a tree representing the lower bound of the array mentioned in
5820 EXP, an ARRAY_REF. */
5821
5822 tree
5823 array_ref_low_bound (tree exp)
5824 {
5825 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5826
5827 /* If a lower bound is specified in EXP, use it. */
5828 if (TREE_OPERAND (exp, 2))
5829 return TREE_OPERAND (exp, 2);
5830
5831 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5832 substituting for a PLACEHOLDER_EXPR as needed. */
5833 if (domain_type && TYPE_MIN_VALUE (domain_type))
5834 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5835
5836 /* Otherwise, return a zero of the appropriate type. */
5837 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5838 }
5839
5840 /* Return a tree representing the upper bound of the array mentioned in
5841 EXP, an ARRAY_REF. */
5842
5843 tree
5844 array_ref_up_bound (tree exp)
5845 {
5846 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5847
5848 /* If there is a domain type and it has an upper bound, use it, substituting
5849 for a PLACEHOLDER_EXPR as needed. */
5850 if (domain_type && TYPE_MAX_VALUE (domain_type))
5851 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5852
5853 /* Otherwise fail. */
5854 return NULL_TREE;
5855 }
5856
5857 /* Return a tree representing the offset, in bytes, of the field referenced
5858 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5859
5860 tree
5861 component_ref_field_offset (tree exp)
5862 {
5863 tree aligned_offset = TREE_OPERAND (exp, 2);
5864 tree field = TREE_OPERAND (exp, 1);
5865
5866 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5867 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5868 value. */
5869 if (aligned_offset)
5870 {
5871 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5872 sizetype from another type of the same width and signedness. */
5873 if (TREE_TYPE (aligned_offset) != sizetype)
5874 aligned_offset = fold_convert (sizetype, aligned_offset);
5875 return size_binop (MULT_EXPR, aligned_offset,
5876 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5877 }
5878
5879 /* Otherwise, take the offset from that of the field. Substitute
5880 any PLACEHOLDER_EXPR that we have. */
5881 else
5882 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5883 }
5884
5885 /* Return 1 if T is an expression that get_inner_reference handles. */
5886
5887 int
5888 handled_component_p (tree t)
5889 {
5890 switch (TREE_CODE (t))
5891 {
5892 case BIT_FIELD_REF:
5893 case COMPONENT_REF:
5894 case ARRAY_REF:
5895 case ARRAY_RANGE_REF:
5896 case VIEW_CONVERT_EXPR:
5897 case REALPART_EXPR:
5898 case IMAGPART_EXPR:
5899 return 1;
5900
5901 default:
5902 return 0;
5903 }
5904 }
5905 \f
5906 /* Given an rtx VALUE that may contain additions and multiplications, return
5907 an equivalent value that just refers to a register, memory, or constant.
5908 This is done by generating instructions to perform the arithmetic and
5909 returning a pseudo-register containing the value.
5910
5911 The returned value may be a REG, SUBREG, MEM or constant. */
5912
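/* For instance (illustrative): given (plus (reg A) (const_int 4)),
   force_operand emits an add insn and returns a pseudo holding A + 4;
   given a bare REG, MEM or constant, it returns VALUE unchanged.  */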
5913 rtx
5914 force_operand (rtx value, rtx target)
5915 {
5916 rtx op1, op2;
5917 /* Use subtarget as the target for operand 0 of a binary operation. */
5918 rtx subtarget = get_subtarget (target);
5919 enum rtx_code code = GET_CODE (value);
5920
5921 /* Check for subreg applied to an expression produced by the loop optimizer. */
5922 if (code == SUBREG
5923 && !REG_P (SUBREG_REG (value))
5924 && !MEM_P (SUBREG_REG (value)))
5925 {
5926 value = simplify_gen_subreg (GET_MODE (value),
5927 force_reg (GET_MODE (SUBREG_REG (value)),
5928 force_operand (SUBREG_REG (value),
5929 NULL_RTX)),
5930 GET_MODE (SUBREG_REG (value)),
5931 SUBREG_BYTE (value));
5932 code = GET_CODE (value);
5933 }
5934
5935 /* Check for a PIC address load. */
5936 if ((code == PLUS || code == MINUS)
5937 && XEXP (value, 0) == pic_offset_table_rtx
5938 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5939 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5940 || GET_CODE (XEXP (value, 1)) == CONST))
5941 {
5942 if (!subtarget)
5943 subtarget = gen_reg_rtx (GET_MODE (value));
5944 emit_move_insn (subtarget, value);
5945 return subtarget;
5946 }
5947
5948 if (ARITHMETIC_P (value))
5949 {
5950 op2 = XEXP (value, 1);
5951 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5952 subtarget = 0;
5953 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5954 {
5955 code = PLUS;
5956 op2 = negate_rtx (GET_MODE (value), op2);
5957 }
5958
5959 /* Check for an addition with OP2 a constant integer and our first
5960 operand a PLUS of a virtual register and something else. In that
5961 case, we want to emit the sum of the virtual register and the
5962 constant first and then add the other value. This allows virtual
5963 register instantiation to simply modify the constant rather than
5964 creating another one around this addition. */
5965 if (code == PLUS && GET_CODE (op2) == CONST_INT
5966 && GET_CODE (XEXP (value, 0)) == PLUS
5967 && REG_P (XEXP (XEXP (value, 0), 0))
5968 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5969 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5970 {
5971 rtx temp = expand_simple_binop (GET_MODE (value), code,
5972 XEXP (XEXP (value, 0), 0), op2,
5973 subtarget, 0, OPTAB_LIB_WIDEN);
5974 return expand_simple_binop (GET_MODE (value), code, temp,
5975 force_operand (XEXP (XEXP (value,
5976 0), 1), 0),
5977 target, 0, OPTAB_LIB_WIDEN);
5978 }
5979
5980 op1 = force_operand (XEXP (value, 0), subtarget);
5981 op2 = force_operand (op2, NULL_RTX);
5982 switch (code)
5983 {
5984 case MULT:
5985 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5986 case DIV:
5987 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5988 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5989 target, 1, OPTAB_LIB_WIDEN);
5990 else
5991 return expand_divmod (0,
5992 FLOAT_MODE_P (GET_MODE (value))
5993 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5994 GET_MODE (value), op1, op2, target, 0);
5995 break;
5996 case MOD:
5997 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5998 target, 0);
5999 break;
6000 case UDIV:
6001 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6002 target, 1);
6003 break;
6004 case UMOD:
6005 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6006 target, 1);
6007 break;
6008 case ASHIFTRT:
6009 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6010 target, 0, OPTAB_LIB_WIDEN);
6011 break;
6012 default:
6013 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6014 target, 1, OPTAB_LIB_WIDEN);
6015 }
6016 }
6017 if (UNARY_P (value))
6018 {
6019 if (!target)
6020 target = gen_reg_rtx (GET_MODE (value));
6021 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6022 switch (code)
6023 {
6024 case ZERO_EXTEND:
6025 case SIGN_EXTEND:
6026 case TRUNCATE:
6027 case FLOAT_EXTEND:
6028 case FLOAT_TRUNCATE:
6029 convert_move (target, op1, code == ZERO_EXTEND);
6030 return target;
6031
6032 case FIX:
6033 case UNSIGNED_FIX:
6034 expand_fix (target, op1, code == UNSIGNED_FIX);
6035 return target;
6036
6037 case FLOAT:
6038 case UNSIGNED_FLOAT:
6039 expand_float (target, op1, code == UNSIGNED_FLOAT);
6040 return target;
6041
6042 default:
6043 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6044 }
6045 }
6046
6047 #ifdef INSN_SCHEDULING
6048 /* On machines that have insn scheduling, we want all memory references to be
6049 explicit, so we need to deal with such paradoxical SUBREGs. */
6050 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6051 && (GET_MODE_SIZE (GET_MODE (value))
6052 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6053 value
6054 = simplify_gen_subreg (GET_MODE (value),
6055 force_reg (GET_MODE (SUBREG_REG (value)),
6056 force_operand (SUBREG_REG (value),
6057 NULL_RTX)),
6058 GET_MODE (SUBREG_REG (value)),
6059 SUBREG_BYTE (value));
6060 #endif
6061
6062 return value;
6063 }
6064 \f
6065 /* Subroutine of expand_expr: return nonzero iff there is no way that
6066 EXP can reference X, which is being modified. TOP_P is nonzero if this
6067 call is going to be used to determine whether we need a temporary
6068 for EXP, as opposed to a recursive call to this function.
6069
6070 It is always safe for this routine to return zero since it merely
6071 searches for optimization opportunities. */
6072
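/* An illustrative case: if X is the DECL_RTL of a variable `v' and EXP
   contains a reference to `v', the walk below picks up `v''s rtl in
   EXP_RTL and the rtx_equal_p test at the end returns 0 (not safe).  */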
6073 int
6074 safe_from_p (rtx x, tree exp, int top_p)
6075 {
6076 rtx exp_rtl = 0;
6077 int i, nops;
6078
6079 if (x == 0
6080 /* If EXP has varying size, we MUST use a target since we currently
6081 have no way of allocating temporaries of variable size
6082 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6083 So we assume here that something at a higher level has prevented a
6084 clash. This is somewhat bogus, but it's the best we can do. Only
6085 do this when X is BLKmode and when we are at the top level. */
6086 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6087 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6088 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6089 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6090 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6091 != INTEGER_CST)
6092 && GET_MODE (x) == BLKmode)
6093 /* If X is in the outgoing argument area, it is always safe. */
6094 || (MEM_P (x)
6095 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6096 || (GET_CODE (XEXP (x, 0)) == PLUS
6097 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6098 return 1;
6099
6100 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
6101 find the underlying pseudo. */
6102 if (GET_CODE (x) == SUBREG)
6103 {
6104 x = SUBREG_REG (x);
6105 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6106 return 0;
6107 }
6108
6109 /* Now look at our tree code and possibly recurse. */
6110 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6111 {
6112 case tcc_declaration:
6113 exp_rtl = DECL_RTL_IF_SET (exp);
6114 break;
6115
6116 case tcc_constant:
6117 return 1;
6118
6119 case tcc_exceptional:
6120 if (TREE_CODE (exp) == TREE_LIST)
6121 {
6122 while (1)
6123 {
6124 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6125 return 0;
6126 exp = TREE_CHAIN (exp);
6127 if (!exp)
6128 return 1;
6129 if (TREE_CODE (exp) != TREE_LIST)
6130 return safe_from_p (x, exp, 0);
6131 }
6132 }
6133 else if (TREE_CODE (exp) == CONSTRUCTOR)
6134 {
6135 constructor_elt *ce;
6136 unsigned HOST_WIDE_INT idx;
6137
6138 for (idx = 0;
6139 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6140 idx++)
6141 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6142 || !safe_from_p (x, ce->value, 0))
6143 return 0;
6144 return 1;
6145 }
6146 else if (TREE_CODE (exp) == ERROR_MARK)
6147 return 1; /* An already-visited SAVE_EXPR? */
6148 else
6149 return 0;
6150
6151 case tcc_statement:
6152 /* The only case we look at here is the DECL_INITIAL inside a
6153 DECL_EXPR. */
6154 return (TREE_CODE (exp) != DECL_EXPR
6155 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6156 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6157 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6158
6159 case tcc_binary:
6160 case tcc_comparison:
6161 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6162 return 0;
6163 /* Fall through. */
6164
6165 case tcc_unary:
6166 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6167
6168 case tcc_expression:
6169 case tcc_reference:
6170 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6171 the expression. If it is set, we conflict iff we are that rtx or
6172 both are in memory. Otherwise, we check all operands of the
6173 expression recursively. */
6174
6175 switch (TREE_CODE (exp))
6176 {
6177 case ADDR_EXPR:
6178 /* If the operand is static or we are static, we can't conflict.
6179 Likewise if we don't conflict with the operand at all. */
6180 if (staticp (TREE_OPERAND (exp, 0))
6181 || TREE_STATIC (exp)
6182 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6183 return 1;
6184
6185 /* Otherwise, the only way this can conflict is if we are taking
6186 the address of a DECL whose address is part of X, which is
6187 very rare. */
6188 exp = TREE_OPERAND (exp, 0);
6189 if (DECL_P (exp))
6190 {
6191 if (!DECL_RTL_SET_P (exp)
6192 || !MEM_P (DECL_RTL (exp)))
6193 return 0;
6194 else
6195 exp_rtl = XEXP (DECL_RTL (exp), 0);
6196 }
6197 break;
6198
6199 case MISALIGNED_INDIRECT_REF:
6200 case ALIGN_INDIRECT_REF:
6201 case INDIRECT_REF:
6202 if (MEM_P (x)
6203 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6204 get_alias_set (exp)))
6205 return 0;
6206 break;
6207
6208 case CALL_EXPR:
6209 /* Assume that the call will clobber all hard registers and
6210 all of memory. */
6211 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6212 || MEM_P (x))
6213 return 0;
6214 break;
6215
6216 case WITH_CLEANUP_EXPR:
6217 case CLEANUP_POINT_EXPR:
6218 /* Lowered by gimplify.c. */
6219 gcc_unreachable ();
6220
6221 case SAVE_EXPR:
6222 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6223
6224 default:
6225 break;
6226 }
6227
6228 /* If we have an rtx, we do not need to scan our operands. */
6229 if (exp_rtl)
6230 break;
6231
6232 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6233 for (i = 0; i < nops; i++)
6234 if (TREE_OPERAND (exp, i) != 0
6235 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6236 return 0;
6237
6238 /* If this is a language-specific tree code, it may require
6239 special handling. */
6240 if ((unsigned int) TREE_CODE (exp)
6241 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6242 && !lang_hooks.safe_from_p (x, exp))
6243 return 0;
6244 break;
6245
6246 case tcc_type:
6247 /* Should never get a type here. */
6248 gcc_unreachable ();
6249 }
6250
6251 /* If we have an rtl, find any enclosed object. Then see if we conflict
6252 with it. */
6253 if (exp_rtl)
6254 {
6255 if (GET_CODE (exp_rtl) == SUBREG)
6256 {
6257 exp_rtl = SUBREG_REG (exp_rtl);
6258 if (REG_P (exp_rtl)
6259 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6260 return 0;
6261 }
6262
6263 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6264 are memory and they conflict. */
6265 return ! (rtx_equal_p (x, exp_rtl)
6266 || (MEM_P (x) && MEM_P (exp_rtl)
6267 && true_dependence (exp_rtl, VOIDmode, x,
6268 rtx_addr_varies_p)));
6269 }
6270
6271 /* If we reach here, it is safe. */
6272 return 1;
6273 }
6274
6275 \f
6276 /* Return the highest power of two that EXP is known to be a multiple of.
6277 This is used in updating alignment of MEMs in array references. */
6278
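/* Worked example (illustrative): for EXP = (i * 12) + 8, the MULT_EXPR
   case returns 1 * 4 (4 being the largest power of two dividing 12),
   the INTEGER_CST 8 contributes 8, and the PLUS_EXPR case returns
   MIN (4, 8) == 4: the sum is known to be a multiple of 4.  */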
6279 unsigned HOST_WIDE_INT
6280 highest_pow2_factor (tree exp)
6281 {
6282 unsigned HOST_WIDE_INT c0, c1;
6283
6284 switch (TREE_CODE (exp))
6285 {
6286 case INTEGER_CST:
6287 /* We can find the lowest bit that's a one. If the low
6288 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6289 We need to handle this case since we can find it in a COND_EXPR,
6290 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6291 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6292 later ICE. */
6293 if (TREE_CONSTANT_OVERFLOW (exp))
6294 return BIGGEST_ALIGNMENT;
6295 else
6296 {
6297 /* Note: tree_low_cst is intentionally not used here;
6298 we don't care about the upper bits. */
6299 c0 = TREE_INT_CST_LOW (exp);
6300 c0 &= -c0;
6301 return c0 ? c0 : BIGGEST_ALIGNMENT;
6302 }
6303 break;
6304
6305 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6306 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6307 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6308 return MIN (c0, c1);
6309
6310 case MULT_EXPR:
6311 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6312 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6313 return c0 * c1;
6314
6315 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6316 case CEIL_DIV_EXPR:
6317 if (integer_pow2p (TREE_OPERAND (exp, 1))
6318 && host_integerp (TREE_OPERAND (exp, 1), 1))
6319 {
6320 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6321 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6322 return MAX (1, c0 / c1);
6323 }
6324 break;
6325
6326 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6327 case SAVE_EXPR:
6328 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6329
6330 case COMPOUND_EXPR:
6331 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6332
6333 case COND_EXPR:
6334 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6335 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6336 return MIN (c0, c1);
6337
6338 default:
6339 break;
6340 }
6341
6342 return 1;
6343 }
6344
6345 /* Similar, except that the alignment requirements of TARGET are
6346 taken into account. Assume it is at least as aligned as its
6347 type, unless it is a COMPONENT_REF in which case the layout of
6348 the structure gives the alignment. */
6349
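/* E.g. (illustrative): if EXP alone only guarantees a factor of 2 but
   TARGET is a COMPONENT_REF of a field with 8-byte alignment, the
   result is MAX (2, 8) == 8.  */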
6350 static unsigned HOST_WIDE_INT
6351 highest_pow2_factor_for_target (tree target, tree exp)
6352 {
6353 unsigned HOST_WIDE_INT target_align, factor;
6354
6355 factor = highest_pow2_factor (exp);
6356 if (TREE_CODE (target) == COMPONENT_REF)
6357 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6358 else
6359 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6360 return MAX (factor, target_align);
6361 }
6362 \f
6363 /* Expands variable VAR. */
6364
6365 void
6366 expand_var (tree var)
6367 {
6368 if (DECL_EXTERNAL (var))
6369 return;
6370
6371 if (TREE_STATIC (var))
6372 /* If this is an inlined copy of a static local variable,
6373 look up the original decl. */
6374 var = DECL_ORIGIN (var);
6375
6376 if (TREE_STATIC (var)
6377 ? !TREE_ASM_WRITTEN (var)
6378 : !DECL_RTL_SET_P (var))
6379 {
6380 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6381 /* Should be ignored. */;
6382 else if (lang_hooks.expand_decl (var))
6383 /* OK. */;
6384 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6385 expand_decl (var);
6386 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6387 rest_of_decl_compilation (var, 0, 0);
6388 else
6389 /* No expansion needed. */
6390 gcc_assert (TREE_CODE (var) == TYPE_DECL
6391 || TREE_CODE (var) == CONST_DECL
6392 || TREE_CODE (var) == FUNCTION_DECL
6393 || TREE_CODE (var) == LABEL_DECL);
6394 }
6395 }
6396
6397 /* Subroutine of expand_expr. Expand the two operands of a binary
6398 expression, EXP0 and EXP1, placing the results in OP0 and OP1.
6399 The value may be stored in TARGET if TARGET is nonzero. The
6400 MODIFIER argument is as documented by expand_expr. */
6401
6402 static void
6403 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6404 enum expand_modifier modifier)
6405 {
6406 if (! safe_from_p (target, exp1, 1))
6407 target = 0;
6408 if (operand_equal_p (exp0, exp1, 0))
6409 {
6410 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6411 *op1 = copy_rtx (*op0);
6412 }
6413 else
6414 {
6415 /* If we need to preserve evaluation order, copy exp0 into its own
6416 temporary variable so that it can't be clobbered by exp1. */
6417 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6418 exp0 = save_expr (exp0);
6419 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6420 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6421 }
6422 }
6423
6424 \f
6425 /* Return a MEM that contains constant EXP. DEFER is as for
6426 output_constant_def and MODIFIER is as for expand_expr. */
6427
6428 static rtx
6429 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6430 {
6431 rtx mem;
6432
6433 mem = output_constant_def (exp, defer);
6434 if (modifier != EXPAND_INITIALIZER)
6435 mem = use_anchored_address (mem);
6436 return mem;
6437 }
6438
6439 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6440 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6441
6442 static rtx
6443 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6444 enum expand_modifier modifier)
6445 {
6446 rtx result, subtarget;
6447 tree inner, offset;
6448 HOST_WIDE_INT bitsize, bitpos;
6449 int volatilep, unsignedp;
6450 enum machine_mode mode1;
6451
6452 /* If we are taking the address of a constant and are at the top level,
6453 we have to use output_constant_def since we can't call force_const_mem
6454 at top level. */
6455 /* ??? This should be considered a front-end bug. We should not be
6456 generating ADDR_EXPR of something that isn't an LVALUE. The only
6457 exception here is STRING_CST. */
6458 if (TREE_CODE (exp) == CONSTRUCTOR
6459 || CONSTANT_CLASS_P (exp))
6460 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6461
6462 /* Everything must be something allowed by is_gimple_addressable. */
6463 switch (TREE_CODE (exp))
6464 {
6465 case INDIRECT_REF:
6466 /* This case will happen via recursion for &a->b. */
6467 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6468
6469 case CONST_DECL:
6470 /* Recurse and make the output_constant_def clause above handle this. */
6471 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6472 tmode, modifier);
6473
6474 case REALPART_EXPR:
6475 /* The real part of the complex number is always first; therefore
6476 the address is the same as the address of the parent object. */
6477 offset = 0;
6478 bitpos = 0;
6479 inner = TREE_OPERAND (exp, 0);
6480 break;
6481
6482 case IMAGPART_EXPR:
6483 /* The imaginary part of the complex number is always second.
6484 The expression is therefore always offset by the size of the
6485 scalar type. */
6486 offset = 0;
6487 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6488 inner = TREE_OPERAND (exp, 0);
6489 break;
6490
6491 default:
6492 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6493 expand_expr, as that can have various side effects; LABEL_DECLs for
6494 example, may not have their DECL_RTL set yet. Assume language
6495 specific tree nodes can be expanded in some interesting way. */
6496 if (DECL_P (exp)
6497 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6498 {
6499 result = expand_expr (exp, target, tmode,
6500 modifier == EXPAND_INITIALIZER
6501 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6502
6503 /* If the DECL isn't in memory, then the DECL wasn't properly
6504 marked TREE_ADDRESSABLE, which will be either a front-end
6505 or a tree optimizer bug. */
6506 gcc_assert (MEM_P (result));
6507 result = XEXP (result, 0);
6508
6509 /* ??? Is this needed anymore? */
6510 if (DECL_P (exp) && ! TREE_USED (exp))
6511 {
6512 assemble_external (exp);
6513 TREE_USED (exp) = 1;
6514 }
6515
6516 if (modifier != EXPAND_INITIALIZER
6517 && modifier != EXPAND_CONST_ADDRESS)
6518 result = force_operand (result, target);
6519 return result;
6520 }
6521
6522 /* Pass FALSE as the last argument to get_inner_reference although
6523 we are expanding to RTL. The rationale is that we know how to
6524 handle "aligning nodes" here: we can just bypass them because
6525 they won't change the final object whose address will be returned
6526 (they actually exist only for that purpose). */
6527 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6528 &mode1, &unsignedp, &volatilep, false);
6529 break;
6530 }
6531
6532 /* We must have made progress. */
6533 gcc_assert (inner != exp);
6534
6535 subtarget = offset || bitpos ? NULL_RTX : target;
6536 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6537
6538 if (offset)
6539 {
6540 rtx tmp;
6541
6542 if (modifier != EXPAND_NORMAL)
6543 result = force_operand (result, NULL);
6544 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6545
6546 result = convert_memory_address (tmode, result);
6547 tmp = convert_memory_address (tmode, tmp);
6548
6549 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6550 result = gen_rtx_PLUS (tmode, result, tmp);
6551 else
6552 {
6553 subtarget = bitpos ? NULL_RTX : target;
6554 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6555 1, OPTAB_LIB_WIDEN);
6556 }
6557 }
6558
6559 if (bitpos)
6560 {
6561 /* Someone beforehand should have rejected taking the address
6562 of such an object. */
6563 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6564
6565 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6566 if (modifier < EXPAND_SUM)
6567 result = force_operand (result, target);
6568 }
6569
6570 return result;
6571 }
6572
6573 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6574 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6575
6576 static rtx
6577 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6578 enum expand_modifier modifier)
6579 {
6580 enum machine_mode rmode;
6581 rtx result;
6582
6583 /* Target mode of VOIDmode says "whatever's natural". */
6584 if (tmode == VOIDmode)
6585 tmode = TYPE_MODE (TREE_TYPE (exp));
6586
6587 /* We can get called with some Weird Things if the user does silliness
6588 like "(short) &a". In that case, convert_memory_address won't do
6589 the right thing, so ignore the given target mode. */
6590 if (tmode != Pmode && tmode != ptr_mode)
6591 tmode = Pmode;
6592
6593 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6594 tmode, modifier);
6595
6596 /* Despite expand_expr's claims about ignoring TMODE when not
6597 strictly convenient, stuff breaks if we don't honor it. Note
6598 that combined with the above, we only do this for pointer modes. */
6599 rmode = GET_MODE (result);
6600 if (rmode == VOIDmode)
6601 rmode = tmode;
6602 if (rmode != tmode)
6603 result = convert_memory_address (tmode, result);
6604
6605 return result;
6606 }
6607
6608
6609 /* expand_expr: generate code for computing expression EXP.
6610 An rtx for the computed value is returned. The value is never null.
6611 In the case of a void EXP, const0_rtx is returned.
6612
6613 The value may be stored in TARGET if TARGET is nonzero.
6614 TARGET is just a suggestion; callers must assume that
6615 the rtx returned may not be the same as TARGET.
6616
6617 If TARGET is CONST0_RTX, it means that the value will be ignored.
6618
6619 If TMODE is not VOIDmode, it suggests generating the
6620 result in mode TMODE. But this is done only when convenient.
6621 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6622 TMODE is just a suggestion; callers must assume that
6623 the rtx returned may not have mode TMODE.
6624
6625 Note that TARGET may have neither TMODE nor MODE. In that case, it
6626 probably will not be used.
6627
6628 If MODIFIER is EXPAND_SUM then when EXP is an addition
6629 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6630 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6631 products as above, or REG or MEM, or constant.
6632 Ordinarily in such cases we would output mul or add instructions
6633 and then return a pseudo reg containing the sum.
6634
6635 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6636 it also marks a label as absolutely required (it can't be dead).
6637 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6638 This is used for outputting expressions used in initializers.
6639
6640 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6641 with a constant address even if that address is not normally legitimate.
6642 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6643
6644 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6645 a call parameter. Such targets require special care as we haven't yet
6646 marked TARGET so that it's safe from being trashed by libcalls. We
6647 don't want to use TARGET for anything but the final result;
6648 intermediate values must go elsewhere. Additionally, calls to
6649 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6650
6651 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6652 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6653 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6654 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6655 recursively. */
6656
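/* For instance (an illustrative sketch): with MODIFIER == EXPAND_SUM,
   expanding `a + 3' may return (plus (reg for `a') (const_int 3))
   without emitting an add insn, leaving the caller (typically an
   address computation) to fold the sum into an addressing mode.  */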
6657 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6658 enum expand_modifier, rtx *);
6659
6660 rtx
6661 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6662 enum expand_modifier modifier, rtx *alt_rtl)
6663 {
6664 int rn = -1;
6665 rtx ret, last = NULL;
6666
6667 /* Handle ERROR_MARK before anybody tries to access its type. */
6668 if (TREE_CODE (exp) == ERROR_MARK
6669 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6670 {
6671 ret = CONST0_RTX (tmode);
6672 return ret ? ret : const0_rtx;
6673 }
6674
6675 if (flag_non_call_exceptions)
6676 {
6677 rn = lookup_stmt_eh_region (exp);
6678 /* If rn < 0, then either (1) tree-ssa is not used or (2) the expression doesn't throw. */
6679 if (rn >= 0)
6680 last = get_last_insn ();
6681 }
6682
6683 /* If this is an expression of some kind and it has an associated line
6684 number, then emit the line number before expanding the expression.
6685
6686 We need to save and restore the file and line information so that
6687 errors discovered during expansion are emitted with the right
6688 information. It would be better if the diagnostic routines
6689 used the file/line information embedded in the tree nodes rather
6690 than globals. */
6691 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6692 {
6693 location_t saved_location = input_location;
6694 input_location = EXPR_LOCATION (exp);
6695 emit_line_note (input_location);
6696
6697 /* Record where the insns produced belong. */
6698 record_block_change (TREE_BLOCK (exp));
6699
6700 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6701
6702 input_location = saved_location;
6703 }
6704 else
6705 {
6706 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6707 }
6708
6709 /* If using non-call exceptions, mark all insns that may trap.
6710 expand_call() will mark CALL_INSNs before we get to this code,
6711 but it doesn't handle libcalls, and these may trap. */
6712 if (rn >= 0)
6713 {
6714 rtx insn;
6715 for (insn = next_real_insn (last); insn;
6716 insn = next_real_insn (insn))
6717 {
6718 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6719 /* If we want exceptions for non-call insns, any
6720 may_trap_p instruction may throw. */
6721 && GET_CODE (PATTERN (insn)) != CLOBBER
6722 && GET_CODE (PATTERN (insn)) != USE
6723 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6724 {
6725 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6726 REG_NOTES (insn));
6727 }
6728 }
6729 }
6730
6731 return ret;
6732 }
6733
6734 static rtx
6735 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6736 enum expand_modifier modifier, rtx *alt_rtl)
6737 {
6738 rtx op0, op1, temp, decl_rtl;
6739 tree type = TREE_TYPE (exp);
6740 int unsignedp;
6741 enum machine_mode mode;
6742 enum tree_code code = TREE_CODE (exp);
6743 optab this_optab;
6744 rtx subtarget, original_target;
6745 int ignore;
6746 tree context, subexp0, subexp1;
6747 bool reduce_bit_field = false;
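  /* Wrapper used below to truncate a subexpression's result back to the
     precision of a bit-field type whenever that precision is narrower
     than its mode; see the reduce_bit_field test following.  */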
6748 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6749 ? reduce_to_bit_field_precision ((expr), \
6750 target, \
6751 type) \
6752 : (expr))
6753
6754 mode = TYPE_MODE (type);
6755 unsignedp = TYPE_UNSIGNED (type);
6756 if (lang_hooks.reduce_bit_field_operations
6757 && TREE_CODE (type) == INTEGER_TYPE
6758 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6759 {
6760 /* An operation in what may be a bit-field type needs the
6761 result to be reduced to the precision of the bit-field type,
6762 which is narrower than that of the type's mode. */
6763 reduce_bit_field = true;
6764 if (modifier == EXPAND_STACK_PARM)
6765 target = 0;
6766 }
6767
6768 /* Use subtarget as the target for operand 0 of a binary operation. */
6769 subtarget = get_subtarget (target);
6770 original_target = target;
6771 ignore = (target == const0_rtx
6772 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6773 || code == CONVERT_EXPR || code == COND_EXPR
6774 || code == VIEW_CONVERT_EXPR)
6775 && TREE_CODE (type) == VOID_TYPE));
6776
6777 /* If we are going to ignore this result, we need only do something
6778 if there is a side-effect somewhere in the expression. If there
6779 is, short-circuit the most common cases here. Note that we must
6780 not call expand_expr with anything but const0_rtx in case this
6781 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6782
6783 if (ignore)
6784 {
6785 if (! TREE_SIDE_EFFECTS (exp))
6786 return const0_rtx;
6787
6788 /* Ensure we reference a volatile object even if the value is ignored, but
6789 don't do this if all we are doing is taking its address. */
6790 if (TREE_THIS_VOLATILE (exp)
6791 && TREE_CODE (exp) != FUNCTION_DECL
6792 && mode != VOIDmode && mode != BLKmode
6793 && modifier != EXPAND_CONST_ADDRESS)
6794 {
6795 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6796 if (MEM_P (temp))
6797 temp = copy_to_reg (temp);
6798 return const0_rtx;
6799 }
6800
6801 if (TREE_CODE_CLASS (code) == tcc_unary
6802 || code == COMPONENT_REF || code == INDIRECT_REF)
6803 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6804 modifier);
6805
6806 else if (TREE_CODE_CLASS (code) == tcc_binary
6807 || TREE_CODE_CLASS (code) == tcc_comparison
6808 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6809 {
6810 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6811 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6812 return const0_rtx;
6813 }
6814 else if (code == BIT_FIELD_REF)
6815 {
6816 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6817 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6818 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6819 return const0_rtx;
6820 }
6821
6822 target = 0;
6823 }
6824
6825
6826 switch (code)
6827 {
6828 case LABEL_DECL:
6829 {
6830 tree function = decl_function_context (exp);
6831
6832 temp = label_rtx (exp);
6833 temp = gen_rtx_LABEL_REF (Pmode, temp);
6834
6835 if (function != current_function_decl
6836 && function != 0)
6837 LABEL_REF_NONLOCAL_P (temp) = 1;
6838
6839 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6840 return temp;
6841 }
6842
6843 case SSA_NAME:
6844 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6845 NULL);
6846
6847 case PARM_DECL:
6848 case VAR_DECL:
6849 /* If a static var's type was incomplete when the decl was written,
6850 but the type is complete now, lay out the decl now. */
6851 if (DECL_SIZE (exp) == 0
6852 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6853 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6854 layout_decl (exp, 0);
6855
6856 /* ... fall through ... */
6857
6858 case FUNCTION_DECL:
6859 case RESULT_DECL:
6860 decl_rtl = DECL_RTL (exp);
6861 gcc_assert (decl_rtl);
6862
6863 /* Ensure the variable is marked as used even if it doesn't go through
6864 a parser. If it hasn't been used yet, write out an external
6865 definition. */
6866 if (! TREE_USED (exp))
6867 {
6868 assemble_external (exp);
6869 TREE_USED (exp) = 1;
6870 }
6871
6872 /* Show we haven't gotten RTL for this yet. */
6873 temp = 0;
6874
6875 /* Variables inherited from containing functions should have
6876 been lowered by this point. */
6877 context = decl_function_context (exp);
6878 gcc_assert (!context
6879 || context == current_function_decl
6880 || TREE_STATIC (exp)
6881 /* ??? C++ creates functions that are not TREE_STATIC. */
6882 || TREE_CODE (exp) == FUNCTION_DECL);
6883
6884 /* This is the case of an array whose size is to be determined
6885 from its initializer, while the initializer is still being parsed.
6886 See expand_decl. */
6887
6888 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6889 temp = validize_mem (decl_rtl);
6890
6891 /* If DECL_RTL is memory, we are in the normal case: if the
6892 address is not valid, or it is not a register and -fforce-addr
6893 is specified, get the address into a register. */
6894
6895 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
6896 {
6897 if (alt_rtl)
6898 *alt_rtl = decl_rtl;
6899 decl_rtl = use_anchored_address (decl_rtl);
6900 if (modifier != EXPAND_CONST_ADDRESS
6901 && modifier != EXPAND_SUM
6902 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
6903 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
6904 temp = replace_equiv_address (decl_rtl,
6905 copy_rtx (XEXP (decl_rtl, 0)));
6906 }
6907
6908 /* If we got something, return it. But first, set the alignment
6909 if the address is a register. */
6910 if (temp != 0)
6911 {
6912 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6913 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6914
6915 return temp;
6916 }
6917
6918 /* If the mode of DECL_RTL does not match that of the decl, it
6919 must be a promoted value. We return a SUBREG of the wanted mode,
6920 but mark it so that we know that it was already extended. */
6921
6922 if (REG_P (decl_rtl)
6923 && GET_MODE (decl_rtl) != DECL_MODE (exp))
6924 {
6925 enum machine_mode pmode;
6926
6927 /* Get the signedness used for this variable. Ensure we get the
6928 same mode we got when the variable was declared. */
6929 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6930 (TREE_CODE (exp) == RESULT_DECL
6931 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
6932 gcc_assert (GET_MODE (decl_rtl) == pmode);
6933
6934 temp = gen_lowpart_SUBREG (mode, decl_rtl);
6935 SUBREG_PROMOTED_VAR_P (temp) = 1;
6936 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6937 return temp;
6938 }
6939
6940 return decl_rtl;
6941
6942 case INTEGER_CST:
6943 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6944 TREE_INT_CST_HIGH (exp), mode);
6945
6946 /* ??? If overflow is set, fold will have done an incomplete job,
6947 which can result in (plus xx (const_int 0)), which can get
6948 simplified by validate_replace_rtx during virtual register
6949 instantiation, which can result in unrecognizable insns.
6950 Avoid this by forcing all overflows into registers. */
6951 if (TREE_CONSTANT_OVERFLOW (exp)
6952 && modifier != EXPAND_INITIALIZER)
6953 temp = force_reg (mode, temp);
6954
6955 return temp;
6956
6957 case VECTOR_CST:
6958 {
6959 tree tmp = NULL_TREE;
6960 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
6961 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
6962 return const_vector_from_tree (exp);
6963 if (GET_MODE_CLASS (mode) == MODE_INT)
6964 {
6965 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
6966 if (type_for_mode)
6967 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
6968 }
6969 if (!tmp)
6970 tmp = build_constructor_from_list (type,
6971 TREE_VECTOR_CST_ELTS (exp));
6972 return expand_expr (tmp, ignore ? const0_rtx : target,
6973 tmode, modifier);
6974 }
6975
6976 case CONST_DECL:
6977 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6978
6979 case REAL_CST:
6980 /* If optimized, generate immediate CONST_DOUBLE
6981 which will be turned into memory by reload if necessary.
6982
6983 We used to force a register so that loop.c could see it. But
6984 this does not allow gen_* patterns to perform optimizations with
6985 the constants. It also produces two insns in cases like "x = 1.0;".
6986 On most machines, floating-point constants are not permitted in
6987 many insns, so we'd end up copying it to a register in any case.
6988
6989 Now, we do the copying in expand_binop, if appropriate. */
6990 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6991 TYPE_MODE (TREE_TYPE (exp)));
6992
6993 case COMPLEX_CST:
6994 /* Handle evaluating a complex constant in a CONCAT target. */
6995 if (original_target && GET_CODE (original_target) == CONCAT)
6996 {
6997 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6998 rtx rtarg, itarg;
6999
7000 rtarg = XEXP (original_target, 0);
7001 itarg = XEXP (original_target, 1);
7002
7003 /* Move the real and imaginary parts separately. */
7004 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
7005 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
7006
7007 if (op0 != rtarg)
7008 emit_move_insn (rtarg, op0);
7009 if (op1 != itarg)
7010 emit_move_insn (itarg, op1);
7011
7012 return original_target;
7013 }
7014
7015 /* ... fall through ... */
7016
7017 case STRING_CST:
7018 temp = expand_expr_constant (exp, 1, modifier);
7019
7020 /* temp contains a constant address.
7021 On RISC machines where a constant address isn't valid,
7022 make some insns to get that address into a register. */
7023 if (modifier != EXPAND_CONST_ADDRESS
7024 && modifier != EXPAND_INITIALIZER
7025 && modifier != EXPAND_SUM
7026 && (! memory_address_p (mode, XEXP (temp, 0))
7027 || flag_force_addr))
7028 return replace_equiv_address (temp,
7029 copy_rtx (XEXP (temp, 0)));
7030 return temp;
7031
7032 case SAVE_EXPR:
7033 {
7034 tree val = TREE_OPERAND (exp, 0);
7035 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7036
7037 if (!SAVE_EXPR_RESOLVED_P (exp))
7038 {
7039 /* We can indeed still hit this case, typically via builtin
7040 expanders calling save_expr immediately before expanding
7041 something. Assume this means that we only have to deal
7042 with non-BLKmode values. */
7043 gcc_assert (GET_MODE (ret) != BLKmode);
7044
7045 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7046 DECL_ARTIFICIAL (val) = 1;
7047 DECL_IGNORED_P (val) = 1;
7048 TREE_OPERAND (exp, 0) = val;
7049 SAVE_EXPR_RESOLVED_P (exp) = 1;
7050
7051 if (!CONSTANT_P (ret))
7052 ret = copy_to_reg (ret);
7053 SET_DECL_RTL (val, ret);
7054 }
7055
7056 return ret;
7057 }
7058
7059 case GOTO_EXPR:
7060 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7061 expand_goto (TREE_OPERAND (exp, 0));
7062 else
7063 expand_computed_goto (TREE_OPERAND (exp, 0));
7064 return const0_rtx;
7065
7066 case CONSTRUCTOR:
7067 /* If we don't need the result, just ensure we evaluate any
7068 subexpressions. */
7069 if (ignore)
7070 {
7071 unsigned HOST_WIDE_INT idx;
7072 tree value;
7073
7074 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7075 expand_expr (value, const0_rtx, VOIDmode, 0);
7076
7077 return const0_rtx;
7078 }
7079
7080 /* Try to avoid creating a temporary at all. This is possible
7081 if all of the initializer is zero.
7082 FIXME: try to handle all initializers that reduce to a
7083 repeated byte in [0..255], which memset can handle. */
7084 else if (TREE_STATIC (exp)
7085 && !TREE_ADDRESSABLE (exp)
7086 && target != 0 && mode == BLKmode
7087 && all_zeros_p (exp))
7088 {
7089 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7090 return target;
7091 }
7092
7093 /* All elts simple constants => refer to a constant in memory. But
7094 if this is a non-BLKmode mode, let it store a field at a time
7095 since that should make a CONST_INT or CONST_DOUBLE when we
7096 fold. Likewise, if we have a target we can use, it is best to
7097 store directly into the target unless the type is large enough
7098 that memcpy will be used. If we are making an initializer and
7099 all operands are constant, put it in memory as well.
7100
7101 FIXME: Avoid trying to fill vector constructors piecemeal.
7102 Output them with output_constant_def below unless we're sure
7103 they're zeros. This should go away when vector initializers
7104 are treated like VECTOR_CST instead of arrays.
7105 */
7106 else if ((TREE_STATIC (exp)
7107 && ((mode == BLKmode
7108 && ! (target != 0 && safe_from_p (target, exp, 1)))
7109 || TREE_ADDRESSABLE (exp)
7110 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7111 && (! MOVE_BY_PIECES_P
7112 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7113 TYPE_ALIGN (type)))
7114 && ! mostly_zeros_p (exp))))
7115 || ((modifier == EXPAND_INITIALIZER
7116 || modifier == EXPAND_CONST_ADDRESS)
7117 && TREE_CONSTANT (exp)))
7118 {
7119 rtx constructor = expand_expr_constant (exp, 1, modifier);
7120
7121 if (modifier != EXPAND_CONST_ADDRESS
7122 && modifier != EXPAND_INITIALIZER
7123 && modifier != EXPAND_SUM)
7124 constructor = validize_mem (constructor);
7125
7126 return constructor;
7127 }
7128 else
7129 {
7130 /* Handle calls that pass values in multiple non-contiguous
7131 locations. The Irix 6 ABI has examples of this. */
7132 if (target == 0 || ! safe_from_p (target, exp, 1)
7133 || GET_CODE (target) == PARALLEL
7134 || modifier == EXPAND_STACK_PARM)
7135 target
7136 = assign_temp (build_qualified_type (type,
7137 (TYPE_QUALS (type)
7138 | (TREE_READONLY (exp)
7139 * TYPE_QUAL_CONST))),
7140 0, TREE_ADDRESSABLE (exp), 1);
7141
7142 store_constructor (exp, target, 0, int_expr_size (exp));
7143 return target;
7144 }
7145
7146 case MISALIGNED_INDIRECT_REF:
7147 case ALIGN_INDIRECT_REF:
7148 case INDIRECT_REF:
7149 {
7150 tree exp1 = TREE_OPERAND (exp, 0);
7151
7152 if (modifier != EXPAND_WRITE)
7153 {
7154 tree t;
7155
7156 t = fold_read_from_constant_string (exp);
7157 if (t)
7158 return expand_expr (t, target, tmode, modifier);
7159 }
7160
7161 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7162 op0 = memory_address (mode, op0);
7163
7164 if (code == ALIGN_INDIRECT_REF)
7165 {
7166 int align = TYPE_ALIGN_UNIT (type);
7167 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7168 op0 = memory_address (mode, op0);
7169 }
7170
7171 temp = gen_rtx_MEM (mode, op0);
7172
7173 set_mem_attributes (temp, exp, 0);
7174
7175 /* Resolve the misalignment now, so that we don't have to remember
7176 to resolve it later. Of course, this only works for reads. */
7177 /* ??? When we get around to supporting writes, we'll have to handle
7178 this in store_expr directly. The vectorizer isn't generating
7179 those yet, however. */
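 /* (Illustration, not from the original source: a misaligned
 vector load expands through the target's movmisalign<mode>
 pattern into a fresh pseudo, e.g. an unaligned V4SImode load
 on a target that provides movmisalignv4si.) */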
7180 if (code == MISALIGNED_INDIRECT_REF)
7181 {
7182 int icode;
7183 rtx reg, insn;
7184
7185 gcc_assert (modifier == EXPAND_NORMAL
7186 || modifier == EXPAND_STACK_PARM);
7187
7188 /* The vectorizer should have already checked the mode. */
7189 icode = movmisalign_optab->handlers[mode].insn_code;
7190 gcc_assert (icode != CODE_FOR_nothing);
7191
7192 /* We've already validated the memory, and we're creating a
7193 new pseudo destination. The predicates really can't fail. */
7194 reg = gen_reg_rtx (mode);
7195
7196 /* Nor can the insn generator. */
7197 insn = GEN_FCN (icode) (reg, temp);
7198 emit_insn (insn);
7199
7200 return reg;
7201 }
7202
7203 return temp;
7204 }
7205
7206 case TARGET_MEM_REF:
7207 {
7208 struct mem_address addr;
7209
7210 get_address_description (exp, &addr);
7211 op0 = addr_for_mem_ref (&addr, true);
7212 op0 = memory_address (mode, op0);
7213 temp = gen_rtx_MEM (mode, op0);
7214 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7215 }
7216 return temp;
7217
7218 case ARRAY_REF:
7219
7220 {
7221 tree array = TREE_OPERAND (exp, 0);
7222 tree index = TREE_OPERAND (exp, 1);
7223
7224 /* Fold an expression like: "foo"[2].
7225 This is not done in fold so it won't happen inside &.
7226 Don't fold if this is for wide characters since it's too
7227 difficult to do correctly and this is a very rare case. */
7228
7229 if (modifier != EXPAND_CONST_ADDRESS
7230 && modifier != EXPAND_INITIALIZER
7231 && modifier != EXPAND_MEMORY)
7232 {
7233 tree t = fold_read_from_constant_string (exp);
7234
7235 if (t)
7236 return expand_expr (t, target, tmode, modifier);
7237 }
7238
7239 /* If this is a constant index into a constant array,
7240 just get the value from the array. Handle both the cases when
7241 we have an explicit constructor and when our operand is a variable
7242 that was declared const. */
7243
7244 if (modifier != EXPAND_CONST_ADDRESS
7245 && modifier != EXPAND_INITIALIZER
7246 && modifier != EXPAND_MEMORY
7247 && TREE_CODE (array) == CONSTRUCTOR
7248 && ! TREE_SIDE_EFFECTS (array)
7249 && TREE_CODE (index) == INTEGER_CST)
7250 {
7251 unsigned HOST_WIDE_INT ix;
7252 tree field, value;
7253
7254 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7255 field, value)
7256 if (tree_int_cst_equal (field, index))
7257 {
7258 if (!TREE_SIDE_EFFECTS (value))
7259 return expand_expr (fold (value), target, tmode, modifier);
7260 break;
7261 }
7262 }
7263
7264 else if (optimize >= 1
7265 && modifier != EXPAND_CONST_ADDRESS
7266 && modifier != EXPAND_INITIALIZER
7267 && modifier != EXPAND_MEMORY
7268 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7269 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7270 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7271 && targetm.binds_local_p (array))
7272 {
7273 if (TREE_CODE (index) == INTEGER_CST)
7274 {
7275 tree init = DECL_INITIAL (array);
7276
7277 if (TREE_CODE (init) == CONSTRUCTOR)
7278 {
7279 unsigned HOST_WIDE_INT ix;
7280 tree field, value;
7281
7282 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7283 field, value)
7284 if (tree_int_cst_equal (field, index))
7285 {
7286 if (!TREE_SIDE_EFFECTS (value))
7287 return expand_expr (fold (value), target, tmode,
7288 modifier);
7289 break;
7290 }
7291 }
7292 else if (TREE_CODE (init) == STRING_CST)
7293 {
7294 tree index1 = index;
7295 tree low_bound = array_ref_low_bound (exp);
7296 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7297
7298 /* Optimize the special-case of a zero lower bound.
7299
7300 We convert the low_bound to sizetype to avoid some problems
7301 with constant folding. (E.g. suppose the lower bound is 1,
7302 and its mode is QI. Without the conversion, (ARRAY
7303 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7304 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7305
7306 if (! integer_zerop (low_bound))
7307 index1 = size_diffop (index1, fold_convert (sizetype,
7308 low_bound));
7309
7310 if (0 > compare_tree_int (index1,
7311 TREE_STRING_LENGTH (init)))
7312 {
7313 tree type = TREE_TYPE (TREE_TYPE (init));
7314 enum machine_mode mode = TYPE_MODE (type);
7315
7316 if (GET_MODE_CLASS (mode) == MODE_INT
7317 && GET_MODE_SIZE (mode) == 1)
7318 return gen_int_mode (TREE_STRING_POINTER (init)
7319 [TREE_INT_CST_LOW (index1)],
7320 mode);
7321 }
7322 }
7323 }
7324 }
7325 }
7326 goto normal_inner_ref;
7327
7328 case COMPONENT_REF:
7329 /* If the operand is a CONSTRUCTOR, we can just extract the
7330 appropriate field if it is present. */
7331 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7332 {
7333 unsigned HOST_WIDE_INT idx;
7334 tree field, value;
7335
7336 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7337 idx, field, value)
7338 if (field == TREE_OPERAND (exp, 1)
7339 /* We can normally use the value of the field in the
7340 CONSTRUCTOR. However, if this is a bitfield in
7341 an integral mode that we can fit in a HOST_WIDE_INT,
7342 we must mask only the number of bits in the bitfield,
7343 since this is done implicitly by the constructor. If
7344 the bitfield does not meet either of those conditions,
7345 we can't do this optimization. */
7346 && (! DECL_BIT_FIELD (field)
7347 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7348 && (GET_MODE_BITSIZE (DECL_MODE (field))
7349 <= HOST_BITS_PER_WIDE_INT))))
7350 {
7351 if (DECL_BIT_FIELD (field)
7352 && modifier == EXPAND_STACK_PARM)
7353 target = 0;
7354 op0 = expand_expr (value, target, tmode, modifier);
7355 if (DECL_BIT_FIELD (field))
7356 {
7357 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7358 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7359
7360 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7361 {
7362 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7363 op0 = expand_and (imode, op0, op1, target);
7364 }
7365 else
7366 {
7367 tree count
7368 = build_int_cst (NULL_TREE,
7369 GET_MODE_BITSIZE (imode) - bitsize);
7370
7371 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7372 target, 0);
7373 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7374 target, 0);
7375 }
7376 }
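 /* (Worked example: a 3-bit field held in QImode. If unsigned,
 the mask above is (1 << 3) - 1 == 7; if signed, the value is
 shifted left by 8 - 3 == 5 bits and then arithmetic-shifted
 right by 5, which replicates the sign bit through the
 high-order bits.) */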
7377
7378 return op0;
7379 }
7380 }
7381 goto normal_inner_ref;
7382
7383 case BIT_FIELD_REF:
7384 case ARRAY_RANGE_REF:
7385 normal_inner_ref:
7386 {
7387 enum machine_mode mode1;
7388 HOST_WIDE_INT bitsize, bitpos;
7389 tree offset;
7390 int volatilep = 0;
7391 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7392 &mode1, &unsignedp, &volatilep, true);
7393 rtx orig_op0;
7394
7395 /* If we got back the original object, something is wrong. Perhaps
7396 we are evaluating an expression too early. In any event, don't
7397 infinitely recurse. */
7398 gcc_assert (tem != exp);
7399
7400 /* If TEM's type is a union of variable size, pass TARGET to the inner
7401 computation, since it will need a temporary and TARGET is known
7402 to be suitable for that. This occurs in unchecked conversion in Ada. */
7403
7404 orig_op0 = op0
7405 = expand_expr (tem,
7406 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7407 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7408 != INTEGER_CST)
7409 && modifier != EXPAND_STACK_PARM
7410 ? target : NULL_RTX),
7411 VOIDmode,
7412 (modifier == EXPAND_INITIALIZER
7413 || modifier == EXPAND_CONST_ADDRESS
7414 || modifier == EXPAND_STACK_PARM)
7415 ? modifier : EXPAND_NORMAL);
7416
7417 /* If this is a constant, put it into a register if it is a legitimate
7418 constant, OFFSET is 0, and we won't try to extract outside the
7419 register (in case we were passed a partially uninitialized object
7420 or a view_conversion to a larger size). Force the constant to
7421 memory otherwise. */
7422 if (CONSTANT_P (op0))
7423 {
7424 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7425 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7426 && offset == 0
7427 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7428 op0 = force_reg (mode, op0);
7429 else
7430 op0 = validize_mem (force_const_mem (mode, op0));
7431 }
7432
7433 /* Otherwise, if this object is not in memory and we either have an
7434 offset, a BLKmode result, or a reference outside the object, put it
7435 there. Such cases can occur in Ada if we have unchecked conversion
7436 of an expression from a scalar type to an array or record type or
7437 for an ARRAY_RANGE_REF whose type is BLKmode. */
7438 else if (!MEM_P (op0)
7439 && (offset != 0
7440 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7441 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7442 {
7443 tree nt = build_qualified_type (TREE_TYPE (tem),
7444 (TYPE_QUALS (TREE_TYPE (tem))
7445 | TYPE_QUAL_CONST));
7446 rtx memloc = assign_temp (nt, 1, 1, 1);
7447
7448 emit_move_insn (memloc, op0);
7449 op0 = memloc;
7450 }
7451
7452 if (offset != 0)
7453 {
7454 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7455 EXPAND_SUM);
7456
7457 gcc_assert (MEM_P (op0));
7458
7459 #ifdef POINTERS_EXTEND_UNSIGNED
7460 if (GET_MODE (offset_rtx) != Pmode)
7461 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7462 #else
7463 if (GET_MODE (offset_rtx) != ptr_mode)
7464 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7465 #endif
7466
7467 if (GET_MODE (op0) == BLKmode
7468 /* A constant address in OP0 can have VOIDmode; we must
7469 not try to call force_reg in that case. */
7470 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7471 && bitsize != 0
7472 && (bitpos % bitsize) == 0
7473 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7474 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7475 {
7476 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7477 bitpos = 0;
7478 }
7479
7480 op0 = offset_address (op0, offset_rtx,
7481 highest_pow2_factor (offset));
7482 }
7483
7484 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7485 record its alignment as BIGGEST_ALIGNMENT. */
7486 if (MEM_P (op0) && bitpos == 0 && offset != 0
7487 && is_aligning_offset (offset, tem))
7488 set_mem_align (op0, BIGGEST_ALIGNMENT);
7489
7490 /* Don't forget about volatility even if this is a bitfield. */
7491 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7492 {
7493 if (op0 == orig_op0)
7494 op0 = copy_rtx (op0);
7495
7496 MEM_VOLATILE_P (op0) = 1;
7497 }
7498
7499 /* The following code doesn't handle CONCAT.
7500 Assume only bitpos == 0 can be used for CONCAT, due to
7501 one-element arrays having the same mode as their element. */
7502 if (GET_CODE (op0) == CONCAT)
7503 {
7504 gcc_assert (bitpos == 0
7505 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7506 return op0;
7507 }
7508
7509 /* In cases where an aligned union has an unaligned object
7510 as a field, we might be extracting a BLKmode value from
7511 an integer-mode (e.g., SImode) object. Handle this case
7512 by doing the extract into an object as wide as the field
7513 (which we know to be the width of a basic mode), then
7514 storing into memory, and changing the mode to BLKmode. */
7515 if (mode1 == VOIDmode
7516 || REG_P (op0) || GET_CODE (op0) == SUBREG
7517 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7518 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7519 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7520 && modifier != EXPAND_CONST_ADDRESS
7521 && modifier != EXPAND_INITIALIZER)
7522 /* If the field isn't aligned enough to fetch as a memref,
7523 fetch it as a bit field. */
7524 || (mode1 != BLKmode
7525 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7526 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7527 || (MEM_P (op0)
7528 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7529 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7530 && ((modifier == EXPAND_CONST_ADDRESS
7531 || modifier == EXPAND_INITIALIZER)
7532 ? STRICT_ALIGNMENT
7533 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7534 || (bitpos % BITS_PER_UNIT != 0)))
7535 /* If the type and the field are a constant size and the
7536 size of the type isn't the same size as the bitfield,
7537 we must use bitfield operations. */
7538 || (bitsize >= 0
7539 && TYPE_SIZE (TREE_TYPE (exp))
7540 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7541 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7542 bitsize)))
7543 {
7544 enum machine_mode ext_mode = mode;
7545
7546 if (ext_mode == BLKmode
7547 && ! (target != 0 && MEM_P (op0)
7548 && MEM_P (target)
7549 && bitpos % BITS_PER_UNIT == 0))
7550 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7551
7552 if (ext_mode == BLKmode)
7553 {
7554 if (target == 0)
7555 target = assign_temp (type, 0, 1, 1);
7556
7557 if (bitsize == 0)
7558 return target;
7559
7560 /* In this case, BITPOS must start at a byte boundary and
7561 TARGET, if specified, must be a MEM. */
7562 gcc_assert (MEM_P (op0)
7563 && (!target || MEM_P (target))
7564 && !(bitpos % BITS_PER_UNIT));
7565
7566 emit_block_move (target,
7567 adjust_address (op0, VOIDmode,
7568 bitpos / BITS_PER_UNIT),
7569 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7570 / BITS_PER_UNIT),
7571 (modifier == EXPAND_STACK_PARM
7572 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7573
7574 return target;
7575 }
7576
7577 op0 = validize_mem (op0);
7578
7579 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7580 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7581
7582 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7583 (modifier == EXPAND_STACK_PARM
7584 ? NULL_RTX : target),
7585 ext_mode, ext_mode);
7586
7587 /* If the result is a record type and BITSIZE is narrower than
7588 the mode of OP0, an integral mode, and this is a big-endian
7589 machine, we must put the field into the high-order bits. */
7590 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7591 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7592 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7593 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7594 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7595 - bitsize),
7596 op0, 1);
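 /* (E.g. a 24-bit record extracted into an SImode OP0:
 extract_bit_field leaves it in the low-order 24 bits, and the
 shift left by 32 - 24 == 8 moves it into the high-order bits,
 where big-endian conventions place it.) */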
7597
7598 /* If the result type is BLKmode, store the data into a temporary
7599 of the appropriate type, but with the mode corresponding to the
7600 mode for the data we have (op0's mode). It's tempting to make
7601 this a constant type, since we know it's only being stored once,
7602 but that can cause problems if we are taking the address of this
7603 COMPONENT_REF because the MEM of any reference via that address
7604 will have flags corresponding to the type, which will not
7605 necessarily be constant. */
7606 if (mode == BLKmode)
7607 {
7608 rtx new
7609 = assign_stack_temp_for_type
7610 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7611
7612 emit_move_insn (new, op0);
7613 op0 = copy_rtx (new);
7614 PUT_MODE (op0, BLKmode);
7615 set_mem_attributes (op0, exp, 1);
7616 }
7617
7618 return op0;
7619 }
7620
7621 /* If the result is BLKmode, use that to access the object
7622 now as well. */
7623 if (mode == BLKmode)
7624 mode1 = BLKmode;
7625
7626 /* Get a reference to just this component. */
7627 if (modifier == EXPAND_CONST_ADDRESS
7628 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7629 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7630 else
7631 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7632
7633 if (op0 == orig_op0)
7634 op0 = copy_rtx (op0);
7635
7636 set_mem_attributes (op0, exp, 0);
7637 if (REG_P (XEXP (op0, 0)))
7638 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7639
7640 MEM_VOLATILE_P (op0) |= volatilep;
7641 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7642 || modifier == EXPAND_CONST_ADDRESS
7643 || modifier == EXPAND_INITIALIZER)
7644 return op0;
7645 else if (target == 0)
7646 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7647
7648 convert_move (target, op0, unsignedp);
7649 return target;
7650 }
7651
7652 case OBJ_TYPE_REF:
7653 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7654
7655 case CALL_EXPR:
7656 /* Check for a built-in function. */
7657 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7658 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7659 == FUNCTION_DECL)
7660 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7661 {
7662 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7663 == BUILT_IN_FRONTEND)
7664 return lang_hooks.expand_expr (exp, original_target,
7665 tmode, modifier,
7666 alt_rtl);
7667 else
7668 return expand_builtin (exp, target, subtarget, tmode, ignore);
7669 }
7670
7671 return expand_call (exp, target, ignore);
7672
7673 case NON_LVALUE_EXPR:
7674 case NOP_EXPR:
7675 case CONVERT_EXPR:
7676 if (TREE_OPERAND (exp, 0) == error_mark_node)
7677 return const0_rtx;
7678
7679 if (TREE_CODE (type) == UNION_TYPE)
7680 {
7681 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7682
7683 /* If both input and output are BLKmode, this conversion isn't doing
7684 anything except possibly changing memory attributes. */
7685 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7686 {
7687 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7688 modifier);
7689
7690 result = copy_rtx (result);
7691 set_mem_attributes (result, exp, 0);
7692 return result;
7693 }
7694
7695 if (target == 0)
7696 {
7697 if (TYPE_MODE (type) != BLKmode)
7698 target = gen_reg_rtx (TYPE_MODE (type));
7699 else
7700 target = assign_temp (type, 0, 1, 1);
7701 }
7702
7703 if (MEM_P (target))
7704 /* Store data into beginning of memory target. */
7705 store_expr (TREE_OPERAND (exp, 0),
7706 adjust_address (target, TYPE_MODE (valtype), 0),
7707 modifier == EXPAND_STACK_PARM);
7708
7709 else
7710 {
7711 gcc_assert (REG_P (target));
7712
7713 /* Store this field into a union of the proper type. */
7714 store_field (target,
7715 MIN ((int_size_in_bytes (TREE_TYPE
7716 (TREE_OPERAND (exp, 0)))
7717 * BITS_PER_UNIT),
7718 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7719 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7720 type, 0);
7721 }
7722
7723 /* Return the entire union. */
7724 return target;
7725 }
7726
7727 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7728 {
7729 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7730 modifier);
7731
7732 /* If the signedness of the conversion differs and OP0 is
7733 a promoted SUBREG, clear that indication since we now
7734 have to do the proper extension. */
7735 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7736 && GET_CODE (op0) == SUBREG)
7737 SUBREG_PROMOTED_VAR_P (op0) = 0;
7738
7739 return REDUCE_BIT_FIELD (op0);
7740 }
7741
7742 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7743 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7744 if (GET_MODE (op0) == mode)
7745 ;
7746
7747 /* If OP0 is a constant, just convert it into the proper mode. */
7748 else if (CONSTANT_P (op0))
7749 {
7750 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7751 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7752
7753 if (modifier == EXPAND_INITIALIZER)
7754 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7755 subreg_lowpart_offset (mode,
7756 inner_mode));
7757 else
7758 op0 = convert_modes (mode, inner_mode, op0,
7759 TYPE_UNSIGNED (inner_type));
7760 }
7761
7762 else if (modifier == EXPAND_INITIALIZER)
7763 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7764
7765 else if (target == 0)
7766 op0 = convert_to_mode (mode, op0,
7767 TYPE_UNSIGNED (TREE_TYPE
7768 (TREE_OPERAND (exp, 0))));
7769 else
7770 {
7771 convert_move (target, op0,
7772 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7773 op0 = target;
7774 }
7775
7776 return REDUCE_BIT_FIELD (op0);
7777
7778 case VIEW_CONVERT_EXPR:
7779 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7780
7781 /* If the input and output modes are both the same, we are done. */
7782 if (TYPE_MODE (type) == GET_MODE (op0))
7783 ;
7784 /* If neither mode is BLKmode, and both modes are the same size
7785 then we can use gen_lowpart. */
7786 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7787 && GET_MODE_SIZE (TYPE_MODE (type))
7788 == GET_MODE_SIZE (GET_MODE (op0)))
7789 {
7790 if (GET_CODE (op0) == SUBREG)
7791 op0 = force_reg (GET_MODE (op0), op0);
7792 op0 = gen_lowpart (TYPE_MODE (type), op0);
7793 }
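 /* (The classic instance is a type pun between float and int:
 SFmode and SImode are normally both 4 bytes, so gen_lowpart
 can reinterpret the bits without a round trip through
 memory.) */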
7794 /* If both modes are integral, then we can convert from one to the
7795 other. */
7796 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7797 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7798 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7799 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7800 /* As a last resort, spill op0 to memory, and reload it in a
7801 different mode. */
7802 else if (!MEM_P (op0))
7803 {
7804 /* If the operand is not a MEM, force it into memory. Since we
7805 are going to be changing the mode of the MEM, don't call
7806 force_const_mem for constants because we don't allow pool
7807 constants to change mode. */
7808 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7809
7810 gcc_assert (!TREE_ADDRESSABLE (exp));
7811
7812 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7813 target
7814 = assign_stack_temp_for_type
7815 (TYPE_MODE (inner_type),
7816 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7817
7818 emit_move_insn (target, op0);
7819 op0 = target;
7820 }
7821
7822 /* At this point, OP0 is in the correct mode. If the output type is such
7823 that the operand is known to be aligned, indicate that it is.
7824 Otherwise, we need only be concerned about alignment for non-BLKmode
7825 results. */
7826 if (MEM_P (op0))
7827 {
7828 op0 = copy_rtx (op0);
7829
7830 if (TYPE_ALIGN_OK (type))
7831 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7832 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7833 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7834 {
7835 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7836 HOST_WIDE_INT temp_size
7837 = MAX (int_size_in_bytes (inner_type),
7838 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7839 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7840 temp_size, 0, type);
7841 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7842
7843 gcc_assert (!TREE_ADDRESSABLE (exp));
7844
7845 if (GET_MODE (op0) == BLKmode)
7846 emit_block_move (new_with_op0_mode, op0,
7847 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7848 (modifier == EXPAND_STACK_PARM
7849 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7850 else
7851 emit_move_insn (new_with_op0_mode, op0);
7852
7853 op0 = new;
7854 }
7855
7856 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7857 }
7858
7859 return op0;
7860
7861 case PLUS_EXPR:
7862 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7863 something else, make sure we add the register to the constant and
7864 then to the other thing. This case can occur during strength
7865 reduction and doing it this way will produce better code if the
7866 frame pointer or argument pointer is eliminated.
7867
7868 fold-const.c will ensure that the constant is always in the inner
7869 PLUS_EXPR, so the only case we need to do anything about is if
7870 sp, ap, or fp is our second argument, in which case we must swap
7871 the innermost first argument and our second argument. */
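 /* (Concretely, (x + 8) + fp is rewritten here as (fp + 8) + x,
 so that fp + 8 can later collapse into a single base-plus-offset
 address once the frame pointer is eliminated.) */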
7872
7873 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7874 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7875 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7876 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7877 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7878 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7879 {
7880 tree t = TREE_OPERAND (exp, 1);
7881
7882 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7883 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7884 }
7885
7886 /* If the result is to be ptr_mode and we are adding an integer to
7887 something, we might be forming a constant. So try to use
7888 plus_constant. If it produces a sum and we can't accept it,
7889 use force_operand. This allows P = &ARR[const] to generate
7890 efficient code on machines where a SYMBOL_REF is not a valid
7891 address.
7892
7893 If this is an EXPAND_SUM call, always return the sum. */
7894 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7895 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7896 {
7897 if (modifier == EXPAND_STACK_PARM)
7898 target = 0;
7899 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7900 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7901 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7902 {
7903 rtx constant_part;
7904
7905 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7906 EXPAND_SUM);
7907 /* Use immed_double_const to ensure that the constant is
7908 truncated according to the mode of OP1, then sign extended
7909 to a HOST_WIDE_INT. Using the constant directly can result
7910 in non-canonical RTL in a 64x32 cross compile. */
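 /* (Concrete case: with a 64-bit HOST_WIDE_INT and a 32-bit
 target, the tree constant 0xffffffff used directly would be a
 positive CONST_INT, whereas canonical SImode RTL requires the
 sign-extended value -1, which immed_double_const produces.) */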
7911 constant_part
7912 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7913 (HOST_WIDE_INT) 0,
7914 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7915 op1 = plus_constant (op1, INTVAL (constant_part));
7916 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7917 op1 = force_operand (op1, target);
7918 return REDUCE_BIT_FIELD (op1);
7919 }
7920
7921 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7922 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7923 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7924 {
7925 rtx constant_part;
7926
7927 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7928 (modifier == EXPAND_INITIALIZER
7929 ? EXPAND_INITIALIZER : EXPAND_SUM));
7930 if (! CONSTANT_P (op0))
7931 {
7932 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7933 VOIDmode, modifier);
7934 /* Return a PLUS if modifier says it's OK. */
7935 if (modifier == EXPAND_SUM
7936 || modifier == EXPAND_INITIALIZER)
7937 return simplify_gen_binary (PLUS, mode, op0, op1);
7938 goto binop2;
7939 }
7940 /* Use immed_double_const to ensure that the constant is
7941 truncated according to the mode of OP1, then sign extended
7942 to a HOST_WIDE_INT. Using the constant directly can result
7943 in non-canonical RTL in a 64x32 cross compile. */
7944 constant_part
7945 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7946 (HOST_WIDE_INT) 0,
7947 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7948 op0 = plus_constant (op0, INTVAL (constant_part));
7949 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7950 op0 = force_operand (op0, target);
7951 return REDUCE_BIT_FIELD (op0);
7952 }
7953 }
7954
7955 /* No sense saving up arithmetic to be done
7956 if it's all in the wrong mode to form part of an address.
7957 And force_operand won't know whether to sign-extend or
7958 zero-extend. */
7959 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7960 || mode != ptr_mode)
7961 {
7962 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7963 subtarget, &op0, &op1, 0);
7964 if (op0 == const0_rtx)
7965 return op1;
7966 if (op1 == const0_rtx)
7967 return op0;
7968 goto binop2;
7969 }
7970
7971 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7972 subtarget, &op0, &op1, modifier);
7973 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7974
7975 case MINUS_EXPR:
7976 /* For initializers, we are allowed to return a MINUS of two
7977 symbolic constants. Here we handle all cases when both operands
7978 are constant. */
7979 /* Handle difference of two symbolic constants,
7980 for the sake of an initializer. */
7981 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7982 && really_constant_p (TREE_OPERAND (exp, 0))
7983 && really_constant_p (TREE_OPERAND (exp, 1)))
7984 {
7985 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7986 NULL_RTX, &op0, &op1, modifier);
7987
7988 /* If the last operand is a CONST_INT, use plus_constant of
7989 the negated constant. Else make the MINUS. */
7990 if (GET_CODE (op1) == CONST_INT)
7991 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7992 else
7993 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7994 }
7995
7996 /* No sense saving up arithmetic to be done
7997 if it's all in the wrong mode to form part of an address.
7998 And force_operand won't know whether to sign-extend or
7999 zero-extend. */
8000 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8001 || mode != ptr_mode)
8002 goto binop;
8003
8004 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8005 subtarget, &op0, &op1, modifier);
8006
8007 /* Convert A - const to A + (-const). */
8008 if (GET_CODE (op1) == CONST_INT)
8009 {
8010 op1 = negate_rtx (mode, op1);
8011 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8012 }
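 /* (E.g. x - 5 becomes x + (-5); a PLUS with a negative constant
 is the canonical RTL form that later simplifications and
 address arithmetic expect.) */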
8013
8014 goto binop2;
8015
8016 case MULT_EXPR:
8017 /* If first operand is constant, swap them.
8018 Thus the following special case checks need only
8019 check the second operand. */
8020 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8021 {
8022 tree t1 = TREE_OPERAND (exp, 0);
8023 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8024 TREE_OPERAND (exp, 1) = t1;
8025 }
8026
8027 /* Attempt to return something suitable for generating an
8028 indexed address, for machines that support that. */
8029
8030 if (modifier == EXPAND_SUM && mode == ptr_mode
8031 && host_integerp (TREE_OPERAND (exp, 1), 0))
8032 {
8033 tree exp1 = TREE_OPERAND (exp, 1);
8034
8035 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8036 EXPAND_SUM);
8037
8038 if (!REG_P (op0))
8039 op0 = force_operand (op0, NULL_RTX);
8040 if (!REG_P (op0))
8041 op0 = copy_to_mode_reg (mode, op0);
8042
8043 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8044 gen_int_mode (tree_low_cst (exp1, 0),
8045 TYPE_MODE (TREE_TYPE (exp1)))));
8046 }
8047
8048 if (modifier == EXPAND_STACK_PARM)
8049 target = 0;
8050
8051 /* Check for multiplying things that have been extended
8052 from a narrower type. If this machine supports multiplying
8053 in that narrower type with a result in the desired type,
8054 do it that way, and avoid the explicit type-conversion. */
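 /* (For instance, (int) (short) a * (int) (short) b can use a
 HImode-to-SImode widening multiply, a mulhisi3-style pattern,
 instead of extending both operands and doing a full SImode
 multiply.) */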
8055
8056 subexp0 = TREE_OPERAND (exp, 0);
8057 subexp1 = TREE_OPERAND (exp, 1);
8058 /* First, check if we have a multiplication of one signed and one
8059 unsigned operand. */
8060 if (TREE_CODE (subexp0) == NOP_EXPR
8061 && TREE_CODE (subexp1) == NOP_EXPR
8062 && TREE_CODE (type) == INTEGER_TYPE
8063 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8064 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8065 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8066 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8067 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8068 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8069 {
8070 enum machine_mode innermode
8071 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8072 this_optab = usmul_widen_optab;
8073 if (mode == GET_MODE_WIDER_MODE (innermode))
8074 {
8075 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8076 {
8077 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8078 expand_operands (TREE_OPERAND (subexp0, 0),
8079 TREE_OPERAND (subexp1, 0),
8080 NULL_RTX, &op0, &op1, 0);
8081 else
8082 expand_operands (TREE_OPERAND (subexp0, 0),
8083 TREE_OPERAND (subexp1, 0),
8084 NULL_RTX, &op1, &op0, 0);
8085
8086 goto binop3;
8087 }
8088 }
8089 }
8090 /* Check for a multiplication with matching signedness. */
8091 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8092 && TREE_CODE (type) == INTEGER_TYPE
8093 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8094 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8095 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8096 && int_fits_type_p (TREE_OPERAND (exp, 1),
8097 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8098 /* Don't use a widening multiply if a shift will do. */
8099 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8100 > HOST_BITS_PER_WIDE_INT)
8101 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8102 ||
8103 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8104 && (TYPE_PRECISION (TREE_TYPE
8105 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8106 == TYPE_PRECISION (TREE_TYPE
8107 (TREE_OPERAND
8108 (TREE_OPERAND (exp, 0), 0))))
8109 /* If both operands are extended, they must either both
8110 be zero-extended or both be sign-extended. */
8111 && (TYPE_UNSIGNED (TREE_TYPE
8112 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8113 == TYPE_UNSIGNED (TREE_TYPE
8114 (TREE_OPERAND
8115 (TREE_OPERAND (exp, 0), 0)))))))
8116 {
8117 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8118 enum machine_mode innermode = TYPE_MODE (op0type);
8119 bool zextend_p = TYPE_UNSIGNED (op0type);
8120 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8121 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8122
8123 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8124 {
8125 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8126 {
8127 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8128 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8129 TREE_OPERAND (exp, 1),
8130 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8131 else
8132 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8133 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8134 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8135 goto binop3;
8136 }
8137 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8138 && innermode == word_mode)
8139 {
8140 rtx htem, hipart;
8141 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8142 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8143 op1 = convert_modes (innermode, mode,
8144 expand_normal (TREE_OPERAND (exp, 1)),
8145 unsignedp);
8146 else
8147 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8148 temp = expand_binop (mode, other_optab, op0, op1, target,
8149 unsignedp, OPTAB_LIB_WIDEN);
8150 hipart = gen_highpart (innermode, temp);
8151 htem = expand_mult_highpart_adjust (innermode, hipart,
8152 op0, op1, hipart,
8153 zextend_p);
8154 if (htem != hipart)
8155 emit_move_insn (hipart, htem);
8156 return REDUCE_BIT_FIELD (temp);
8157 }
8158 }
8159 }
8160 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8161 subtarget, &op0, &op1, 0);
8162 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8163
8164 case TRUNC_DIV_EXPR:
8165 case FLOOR_DIV_EXPR:
8166 case CEIL_DIV_EXPR:
8167 case ROUND_DIV_EXPR:
8168 case EXACT_DIV_EXPR:
8169 if (modifier == EXPAND_STACK_PARM)
8170 target = 0;
8171 /* Possible optimization: compute the dividend with EXPAND_SUM;
8172 then, if the divisor is constant, we can optimize the case
8173 where some terms of the dividend have coefficients divisible by it. */
8174 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8175 subtarget, &op0, &op1, 0);
8176 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8177
8178 case RDIV_EXPR:
8179 goto binop;
8180
8181 case TRUNC_MOD_EXPR:
8182 case FLOOR_MOD_EXPR:
8183 case CEIL_MOD_EXPR:
8184 case ROUND_MOD_EXPR:
8185 if (modifier == EXPAND_STACK_PARM)
8186 target = 0;
8187 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8188 subtarget, &op0, &op1, 0);
8189 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8190
8191 case FIX_TRUNC_EXPR:
8192 op0 = expand_normal (TREE_OPERAND (exp, 0));
8193 if (target == 0 || modifier == EXPAND_STACK_PARM)
8194 target = gen_reg_rtx (mode);
8195 expand_fix (target, op0, unsignedp);
8196 return target;
8197
8198 case FLOAT_EXPR:
8199 op0 = expand_normal (TREE_OPERAND (exp, 0));
8200 if (target == 0 || modifier == EXPAND_STACK_PARM)
8201 target = gen_reg_rtx (mode);
8202 /* expand_float can't figure out what to do if FROM has VOIDmode.
8203 So give it the correct mode. With -O, cse will optimize this. */
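 /* (A CONST_INT, for instance, is modeless, so an integer-constant
 operand would otherwise reach expand_float with VOIDmode.) */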
8204 if (GET_MODE (op0) == VOIDmode)
8205 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8206 op0);
8207 expand_float (target, op0,
8208 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8209 return target;
8210
8211 case NEGATE_EXPR:
8212 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8213 if (modifier == EXPAND_STACK_PARM)
8214 target = 0;
8215 temp = expand_unop (mode,
8216 optab_for_tree_code (NEGATE_EXPR, type),
8217 op0, target, 0);
8218 gcc_assert (temp);
8219 return REDUCE_BIT_FIELD (temp);
8220
8221 case ABS_EXPR:
8222 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8223 if (modifier == EXPAND_STACK_PARM)
8224 target = 0;
8225
8226 /* ABS_EXPR is not valid for complex arguments. */
8227 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8228 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8229
8230 /* Unsigned abs is simply the operand. Testing here means we don't
8231 risk generating incorrect code below. */
8232 if (TYPE_UNSIGNED (type))
8233 return op0;
8234
8235 return expand_abs (mode, op0, target, unsignedp,
8236 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8237
8238 case MAX_EXPR:
8239 case MIN_EXPR:
8240 target = original_target;
8241 if (target == 0
8242 || modifier == EXPAND_STACK_PARM
8243 || (MEM_P (target) && MEM_VOLATILE_P (target))
8244 || GET_MODE (target) != mode
8245 || (REG_P (target)
8246 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8247 target = gen_reg_rtx (mode);
8248 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8249 target, &op0, &op1, 0);
8250
8251 /* First try to do it with a special MIN or MAX instruction.
8252 If that does not win, use a conditional jump to select the proper
8253 value. */
8254 this_optab = optab_for_tree_code (code, type);
8255 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8256 OPTAB_WIDEN);
8257 if (temp != 0)
8258 return temp;
8259
8260 /* At this point, a MEM target is no longer useful; we will get better
8261 code without it. */
8262
8263 if (! REG_P (target))
8264 target = gen_reg_rtx (mode);
8265
8266 /* If op1 was placed in target, swap op0 and op1. */
8267 if (target != op0 && target == op1)
8268 {
8269 temp = op0;
8270 op0 = op1;
8271 op1 = temp;
8272 }
8273
8274 /* We generate better code and avoid problems with op1 mentioning
8275 target by forcing op1 into a pseudo if it isn't a constant. */
8276 if (! CONSTANT_P (op1))
8277 op1 = force_reg (mode, op1);
8278
8279 {
8280 enum rtx_code comparison_code;
8281 rtx cmpop1 = op1;
8282
8283 if (code == MAX_EXPR)
8284 comparison_code = unsignedp ? GEU : GE;
8285 else
8286 comparison_code = unsignedp ? LEU : LE;
8287
8288 /* Canonicalize to comparisons against 0. */
8289 if (op1 == const1_rtx)
8290 {
8291 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8292 or (a != 0 ? a : 1) for unsigned.
8293 For MIN we are safe converting (a <= 1 ? a : 1)
8294 into (a <= 0 ? a : 1) */
8295 cmpop1 = const0_rtx;
8296 if (code == MAX_EXPR)
8297 comparison_code = unsignedp ? NE : GT;
8298 }
8299 if (op1 == constm1_rtx && !unsignedp)
8300 {
8301 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8302 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8303 cmpop1 = const0_rtx;
8304 if (code == MIN_EXPR)
8305 comparison_code = LT;
8306 }
8307 #ifdef HAVE_conditional_move
8308 /* Use a conditional move if possible. */
8309 if (can_conditionally_move_p (mode))
8310 {
8311 rtx insn;
8312
8313 /* ??? Same problem as in expmed.c: emit_conditional_move
8314 forces a stack adjustment via compare_from_rtx, and we
8315 lose the stack adjustment if the sequence we are about
8316 to create is discarded. */
8317 do_pending_stack_adjust ();
8318
8319 start_sequence ();
8320
8321 /* Try to emit the conditional move. */
8322 insn = emit_conditional_move (target, comparison_code,
8323 op0, cmpop1, mode,
8324 op0, op1, mode,
8325 unsignedp);
8326
8327 /* If we could do the conditional move, emit the sequence,
8328 and return. */
8329 if (insn)
8330 {
8331 rtx seq = get_insns ();
8332 end_sequence ();
8333 emit_insn (seq);
8334 return target;
8335 }
8336
8337 /* Otherwise discard the sequence and fall back to code with
8338 branches. */
8339 end_sequence ();
8340 }
8341 #endif
8342 if (target != op0)
8343 emit_move_insn (target, op0);
8344
8345 temp = gen_label_rtx ();
8346 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8347 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8348 }
8349 emit_move_insn (target, op1);
8350 emit_label (temp);
8351 return target;
8352
8353 case BIT_NOT_EXPR:
8354 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8355 if (modifier == EXPAND_STACK_PARM)
8356 target = 0;
8357 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8358 gcc_assert (temp);
8359 return temp;
8360
8361 /* ??? Can optimize bitwise operations with one arg constant.
8362 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8363 and (a bitwise1 b) bitwise2 b (etc)
8364 but that is probably not worthwhile. */
8365
8366 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8367 boolean values when we want in all cases to compute both of them. In
8368 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8369 as actual zero-or-1 values and then bitwise anding. In cases where
8370 there cannot be any side effects, better code would be made by
8371 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8372 how to recognize those cases. */
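 /* (E.g. "a && b" expressed as TRUTH_AND_EXPR on zero-or-one
 operands evaluates both and bitwise-ANDs them, avoiding a
 branch; TRUTH_ANDIF_EXPR would instead skip evaluating "b"
 whenever "a" is zero.) */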
8373
8374 case TRUTH_AND_EXPR:
8375 code = BIT_AND_EXPR;
8376 case BIT_AND_EXPR:
8377 goto binop;
8378
8379 case TRUTH_OR_EXPR:
8380 code = BIT_IOR_EXPR;
8381 case BIT_IOR_EXPR:
8382 goto binop;
8383
8384 case TRUTH_XOR_EXPR:
8385 code = BIT_XOR_EXPR;
8386 case BIT_XOR_EXPR:
8387 goto binop;
8388
8389 case LSHIFT_EXPR:
8390 case RSHIFT_EXPR:
8391 case LROTATE_EXPR:
8392 case RROTATE_EXPR:
8393 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8394 subtarget = 0;
8395 if (modifier == EXPAND_STACK_PARM)
8396 target = 0;
8397 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8398 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8399 unsignedp);
8400
8401 /* Could determine the answer when only additive constants differ. Also,
8402 the addition of one can be handled by changing the condition. */
8403 case LT_EXPR:
8404 case LE_EXPR:
8405 case GT_EXPR:
8406 case GE_EXPR:
8407 case EQ_EXPR:
8408 case NE_EXPR:
8409 case UNORDERED_EXPR:
8410 case ORDERED_EXPR:
8411 case UNLT_EXPR:
8412 case UNLE_EXPR:
8413 case UNGT_EXPR:
8414 case UNGE_EXPR:
8415 case UNEQ_EXPR:
8416 case LTGT_EXPR:
8417 temp = do_store_flag (exp,
8418 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8419 tmode != VOIDmode ? tmode : mode, 0);
8420 if (temp != 0)
8421 return temp;
8422
8423 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8424 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8425 && original_target
8426 && REG_P (original_target)
8427 && (GET_MODE (original_target)
8428 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8429 {
8430 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8431 VOIDmode, 0);
8432
8433 /* If temp is constant, we can just compute the result. */
8434 if (GET_CODE (temp) == CONST_INT)
8435 {
8436 if (INTVAL (temp) != 0)
8437 emit_move_insn (target, const1_rtx);
8438 else
8439 emit_move_insn (target, const0_rtx);
8440
8441 return target;
8442 }
8443
8444 if (temp != original_target)
8445 {
8446 enum machine_mode mode1 = GET_MODE (temp);
8447 if (mode1 == VOIDmode)
8448 mode1 = tmode != VOIDmode ? tmode : mode;
8449
8450 temp = copy_to_mode_reg (mode1, temp);
8451 }
8452
8453 op1 = gen_label_rtx ();
8454 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8455 GET_MODE (temp), unsignedp, op1);
8456 emit_move_insn (temp, const1_rtx);
8457 emit_label (op1);
8458 return temp;
8459 }
8460
8461 /* If no set-flag instruction, must generate a conditional store
8462 into a temporary variable. Drop through and handle this
8463 like && and ||. */
8464
8465 if (! ignore
8466 && (target == 0
8467 || modifier == EXPAND_STACK_PARM
8468 || ! safe_from_p (target, exp, 1)
8469 /* Make sure we don't have a hard reg (such as the function's
8470 return value) live across basic blocks, if not optimizing. */
8471 || (!optimize && REG_P (target)
8472 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8473 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8474
8475 if (target)
8476 emit_move_insn (target, const0_rtx);
8477
8478 op1 = gen_label_rtx ();
8479 jumpifnot (exp, op1);
8480
8481 if (target)
8482 emit_move_insn (target, const1_rtx);
8483
8484 emit_label (op1);
8485 return ignore ? const0_rtx : target;
8486
8487 case TRUTH_NOT_EXPR:
8488 if (modifier == EXPAND_STACK_PARM)
8489 target = 0;
8490 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8491 /* The parser is careful to generate TRUTH_NOT_EXPR
8492 only with operands that are always zero or one. */
8493 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8494 target, 1, OPTAB_LIB_WIDEN);
8495 gcc_assert (temp);
8496 return temp;
8497
8498 case STATEMENT_LIST:
8499 {
8500 tree_stmt_iterator iter;
8501
8502 gcc_assert (ignore);
8503
8504 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8505 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8506 }
8507 return const0_rtx;
8508
8509 case COND_EXPR:
8510 /* A COND_EXPR with its type being VOID_TYPE represents a
8511 conditional jump and is handled in
8512 expand_gimple_cond_expr. */
8513 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8514
8515 /* Note that COND_EXPRs whose type is a structure or union
8516 are required to be constructed to contain assignments of
8517 a temporary variable, so that we can evaluate them here
8518 for side effect only. If type is void, we must do likewise. */
8519
8520 gcc_assert (!TREE_ADDRESSABLE (type)
8521 && !ignore
8522 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8523 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8524
8525 /* If we are not to produce a result, we have no target. Otherwise,
8526 if a target was specified use it; it will not be used as an
8527 intermediate target unless it is safe. If no target, use a
8528 temporary. */
8529
8530 if (modifier != EXPAND_STACK_PARM
8531 && original_target
8532 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8533 && GET_MODE (original_target) == mode
8534 #ifdef HAVE_conditional_move
8535 && (! can_conditionally_move_p (mode)
8536 || REG_P (original_target))
8537 #endif
8538 && !MEM_P (original_target))
8539 temp = original_target;
8540 else
8541 temp = assign_temp (type, 0, 0, 1);
8542
8543 do_pending_stack_adjust ();
8544 NO_DEFER_POP;
8545 op0 = gen_label_rtx ();
8546 op1 = gen_label_rtx ();
8547 jumpifnot (TREE_OPERAND (exp, 0), op0);
8548 store_expr (TREE_OPERAND (exp, 1), temp,
8549 modifier == EXPAND_STACK_PARM);
8550
8551 emit_jump_insn (gen_jump (op1));
8552 emit_barrier ();
8553 emit_label (op0);
8554 store_expr (TREE_OPERAND (exp, 2), temp,
8555 modifier == EXPAND_STACK_PARM);
8556
8557 emit_label (op1);
8558 OK_DEFER_POP;
8559 return temp;
8560
8561 case VEC_COND_EXPR:
8562 target = expand_vec_cond_expr (exp, target);
8563 return target;
8564
8565 case MODIFY_EXPR:
8566 {
8567 tree lhs = TREE_OPERAND (exp, 0);
8568 tree rhs = TREE_OPERAND (exp, 1);
8569
8570 gcc_assert (ignore);
8571
8572 /* Check for |= or &= of a bitfield of size one into another bitfield
8573 of size 1. In this case, (unless we need the result of the
8574 assignment) we can do this more efficiently with a
8575 test followed by an assignment, if necessary.
8576
8577 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8578 things change so we do, this code should be enhanced to
8579 support it. */
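 /* (The rewrite turns, e.g., "s.x |= s.y" on one-bit fields into
 "if (s.y) s.x = 1", and "s.x &= s.y" into "if (!s.y) s.x = 0",
 replacing a read-modify-write of the bitfield with a test and a
 conditional store.) */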
8580 if (TREE_CODE (lhs) == COMPONENT_REF
8581 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8582 || TREE_CODE (rhs) == BIT_AND_EXPR)
8583 && TREE_OPERAND (rhs, 0) == lhs
8584 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8585 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8586 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8587 {
8588 rtx label = gen_label_rtx ();
8589 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8590 do_jump (TREE_OPERAND (rhs, 1),
8591 value ? label : 0,
8592 value ? 0 : label);
8593 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
8594 do_pending_stack_adjust ();
8595 emit_label (label);
8596 return const0_rtx;
8597 }
8598
8599 expand_assignment (lhs, rhs);
8600
8601 return const0_rtx;
8602 }
8603
8604 case RETURN_EXPR:
8605 if (!TREE_OPERAND (exp, 0))
8606 expand_null_return ();
8607 else
8608 expand_return (TREE_OPERAND (exp, 0));
8609 return const0_rtx;
8610
8611 case ADDR_EXPR:
8612 return expand_expr_addr_expr (exp, target, tmode, modifier);
8613
8614 case COMPLEX_EXPR:
8615 /* Get the rtx code of the operands. */
8616 op0 = expand_normal (TREE_OPERAND (exp, 0));
8617 op1 = expand_normal (TREE_OPERAND (exp, 1));
8618
8619 if (!target)
8620 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8621
8622 /* Move the real (op0) and imaginary (op1) parts to their location. */
8623 write_complex_part (target, op0, false);
8624 write_complex_part (target, op1, true);
8625
8626 return target;
8627
8628 case REALPART_EXPR:
8629 op0 = expand_normal (TREE_OPERAND (exp, 0));
8630 return read_complex_part (op0, false);
8631
8632 case IMAGPART_EXPR:
8633 op0 = expand_normal (TREE_OPERAND (exp, 0));
8634 return read_complex_part (op0, true);
8635
8636 case RESX_EXPR:
8637 expand_resx_expr (exp);
8638 return const0_rtx;
8639
8640 case TRY_CATCH_EXPR:
8641 case CATCH_EXPR:
8642 case EH_FILTER_EXPR:
8643 case TRY_FINALLY_EXPR:
8644 /* Lowered by tree-eh.c. */
8645 gcc_unreachable ();
8646
8647 case WITH_CLEANUP_EXPR:
8648 case CLEANUP_POINT_EXPR:
8649 case TARGET_EXPR:
8650 case CASE_LABEL_EXPR:
8651 case VA_ARG_EXPR:
8652 case BIND_EXPR:
8653 case INIT_EXPR:
8654 case CONJ_EXPR:
8655 case COMPOUND_EXPR:
8656 case PREINCREMENT_EXPR:
8657 case PREDECREMENT_EXPR:
8658 case POSTINCREMENT_EXPR:
8659 case POSTDECREMENT_EXPR:
8660 case LOOP_EXPR:
8661 case EXIT_EXPR:
8662 case TRUTH_ANDIF_EXPR:
8663 case TRUTH_ORIF_EXPR:
8664 /* Lowered by gimplify.c. */
8665 gcc_unreachable ();
8666
8667 case EXC_PTR_EXPR:
8668 return get_exception_pointer (cfun);
8669
8670 case FILTER_EXPR:
8671 return get_exception_filter (cfun);
8672
8673 case FDESC_EXPR:
8674 /* Function descriptors are not valid except as
8675 initialization constants, and should not be expanded. */
8676 gcc_unreachable ();
8677
8678 case SWITCH_EXPR:
8679 expand_case (exp);
8680 return const0_rtx;
8681
8682 case LABEL_EXPR:
8683 expand_label (TREE_OPERAND (exp, 0));
8684 return const0_rtx;
8685
8686 case ASM_EXPR:
8687 expand_asm_expr (exp);
8688 return const0_rtx;
8689
8690 case WITH_SIZE_EXPR:
8691 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8692 have pulled out the size to use in whatever context it needed. */
8693 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8694 modifier, alt_rtl);
8695
8696 case REALIGN_LOAD_EXPR:
8697 {
8698 tree oprnd0 = TREE_OPERAND (exp, 0);
8699 tree oprnd1 = TREE_OPERAND (exp, 1);
8700 tree oprnd2 = TREE_OPERAND (exp, 2);
8701 rtx op2;
8702
8703 this_optab = optab_for_tree_code (code, type);
8704 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8705 op2 = expand_normal (oprnd2);
8706 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8707 target, unsignedp);
8708 gcc_assert (temp);
8709 return temp;
8710 }
8711
8712 case DOT_PROD_EXPR:
8713 {
8714 tree oprnd0 = TREE_OPERAND (exp, 0);
8715 tree oprnd1 = TREE_OPERAND (exp, 1);
8716 tree oprnd2 = TREE_OPERAND (exp, 2);
8717 rtx op2;
8718
8719 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8720 op2 = expand_normal (oprnd2);
8721 target = expand_widen_pattern_expr (exp, op0, op1, op2,
8722 target, unsignedp);
8723 return target;
8724 }
8725
8726 case WIDEN_SUM_EXPR:
8727 {
8728 tree oprnd0 = TREE_OPERAND (exp, 0);
8729 tree oprnd1 = TREE_OPERAND (exp, 1);
8730
8731 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8732 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8733 target, unsignedp);
8734 return target;
8735 }
8736
8737 case REDUC_MAX_EXPR:
8738 case REDUC_MIN_EXPR:
8739 case REDUC_PLUS_EXPR:
8740 {
8741 op0 = expand_normal (TREE_OPERAND (exp, 0));
8742 this_optab = optab_for_tree_code (code, type);
8743 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8744 gcc_assert (temp);
8745 return temp;
8746 }
8747
8748 case VEC_LSHIFT_EXPR:
8749 case VEC_RSHIFT_EXPR:
8750 {
8751 target = expand_vec_shift_expr (exp, target);
8752 return target;
8753 }
8754
8755 case VEC_UNPACK_HI_EXPR:
8756 case VEC_UNPACK_LO_EXPR:
8757 {
8758 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8759 this_optab = optab_for_tree_code (code, type);
8760 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
8761 target, unsignedp);
8762 gcc_assert (temp);
8763 return temp;
8764 }
8765
8766 case VEC_WIDEN_MULT_HI_EXPR:
8767 case VEC_WIDEN_MULT_LO_EXPR:
8768 {
8769 tree oprnd0 = TREE_OPERAND (exp, 0);
8770 tree oprnd1 = TREE_OPERAND (exp, 1);
8771
8772 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8773 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
8774 target, unsignedp);
8775 gcc_assert (target);
8776 return target;
8777 }
8778
8779 case VEC_PACK_MOD_EXPR:
8780 case VEC_PACK_SAT_EXPR:
8781 {
8782 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8783 goto binop;
8784 }
8785
8786 default:
8787 return lang_hooks.expand_expr (exp, original_target, tmode,
8788 modifier, alt_rtl);
8789 }
8790
8791 /* Here to do an ordinary binary operator. */
8792 binop:
8793 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8794 subtarget, &op0, &op1, 0);
8795 binop2:
8796 this_optab = optab_for_tree_code (code, type);
8797 binop3:
8798 if (modifier == EXPAND_STACK_PARM)
8799 target = 0;
8800 temp = expand_binop (mode, this_optab, op0, op1, target,
8801 unsignedp, OPTAB_LIB_WIDEN);
8802 gcc_assert (temp);
8803 return REDUCE_BIT_FIELD (temp);
8804 }
8805 #undef REDUCE_BIT_FIELD
8806 \f
8807 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8808 signedness of TYPE), possibly returning the result in TARGET. */
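/* For instance (illustrative numbers only): reducing to a precision of
3 bits in SImode, the unsigned path below masks with (1 << 3) - 1 == 7,
while the signed path shifts left by 32 - 3 == 29 and then
arithmetically right by 29, sign-extending from bit 2.  */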
8809 static rtx
8810 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8811 {
8812 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8813 if (target && GET_MODE (target) != GET_MODE (exp))
8814 target = 0;
8815 if (TYPE_UNSIGNED (type))
8816 {
8817 rtx mask;
8818 if (prec < HOST_BITS_PER_WIDE_INT)
8819 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8820 GET_MODE (exp));
8821 else
8822 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8823 ((unsigned HOST_WIDE_INT) 1
8824 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8825 GET_MODE (exp));
8826 return expand_and (GET_MODE (exp), exp, mask, target);
8827 }
8828 else
8829 {
8830 tree count = build_int_cst (NULL_TREE,
8831 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8832 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8833 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8834 }
8835 }
8836 \f
8837 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8838 when applied to the address of EXP produces an address known to be
8839 aligned more than BIGGEST_ALIGNMENT. */
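/* Informally, the shape recognized is the C idiom
"(-(uintptr_t) &EXP) & MASK" with MASK one less than a power of 2;
adding such an offset to the address of EXP rounds it up to the next
MASK + 1 boundary (an illustration; the checks below spell out the
exact tree form).  */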
8840
8841 static int
8842 is_aligning_offset (tree offset, tree exp)
8843 {
8844 /* Strip off any conversions. */
8845 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8846 || TREE_CODE (offset) == NOP_EXPR
8847 || TREE_CODE (offset) == CONVERT_EXPR)
8848 offset = TREE_OPERAND (offset, 0);
8849
8850 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8851 a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT.  */
8852 if (TREE_CODE (offset) != BIT_AND_EXPR
8853 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8854 || compare_tree_int (TREE_OPERAND (offset, 1),
8855 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8856 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8857 return 0;
8858
8859 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8860 It must be NEGATE_EXPR. Then strip any more conversions. */
8861 offset = TREE_OPERAND (offset, 0);
8862 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8863 || TREE_CODE (offset) == NOP_EXPR
8864 || TREE_CODE (offset) == CONVERT_EXPR)
8865 offset = TREE_OPERAND (offset, 0);
8866
8867 if (TREE_CODE (offset) != NEGATE_EXPR)
8868 return 0;
8869
8870 offset = TREE_OPERAND (offset, 0);
8871 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8872 || TREE_CODE (offset) == NOP_EXPR
8873 || TREE_CODE (offset) == CONVERT_EXPR)
8874 offset = TREE_OPERAND (offset, 0);
8875
8876 /* This must now be the address of EXP. */
8877 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8878 }
8879 \f
8880 /* Return the tree node if ARG corresponds to a string constant, or zero
8881 if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
8882 in bytes within the string that ARG is accessing. The type of the
8883 offset will be `sizetype'. */
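/* Illustrative cases handled below: ARG == &"hello", ARG == &var with
var a string-initialized VAR_DECL, ARG == &"hello"[2], and
ARG == &"hello" + 2 with the PLUS_EXPR operands in either order.  */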
8884
8885 tree
8886 string_constant (tree arg, tree *ptr_offset)
8887 {
8888 tree array, offset;
8889 STRIP_NOPS (arg);
8890
8891 if (TREE_CODE (arg) == ADDR_EXPR)
8892 {
8893 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8894 {
8895 *ptr_offset = size_zero_node;
8896 return TREE_OPERAND (arg, 0);
8897 }
8898 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8899 {
8900 array = TREE_OPERAND (arg, 0);
8901 offset = size_zero_node;
8902 }
8903 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8904 {
8905 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8906 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8907 if (TREE_CODE (array) != STRING_CST
8908 && TREE_CODE (array) != VAR_DECL)
8909 return 0;
8910 }
8911 else
8912 return 0;
8913 }
8914 else if (TREE_CODE (arg) == PLUS_EXPR)
8915 {
8916 tree arg0 = TREE_OPERAND (arg, 0);
8917 tree arg1 = TREE_OPERAND (arg, 1);
8918
8919 STRIP_NOPS (arg0);
8920 STRIP_NOPS (arg1);
8921
8922 if (TREE_CODE (arg0) == ADDR_EXPR
8923 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8924 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8925 {
8926 array = TREE_OPERAND (arg0, 0);
8927 offset = arg1;
8928 }
8929 else if (TREE_CODE (arg1) == ADDR_EXPR
8930 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8931 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8932 {
8933 array = TREE_OPERAND (arg1, 0);
8934 offset = arg0;
8935 }
8936 else
8937 return 0;
8938 }
8939 else
8940 return 0;
8941
8942 if (TREE_CODE (array) == STRING_CST)
8943 {
8944 *ptr_offset = fold_convert (sizetype, offset);
8945 return array;
8946 }
8947 else if (TREE_CODE (array) == VAR_DECL)
8948 {
8949 int length;
8950
8951 /* Variables initialized to string literals can be handled too. */
8952 if (DECL_INITIAL (array) == NULL_TREE
8953 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8954 return 0;
8955
8956 /* The array must be read-only, non-volatile, and bind locally.  */
8957 if (! TREE_READONLY (array)
8958 || TREE_SIDE_EFFECTS (array)
8959 || ! targetm.binds_local_p (array))
8960 return 0;
8961
8962 /* Avoid const char foo[4] = "abcde"; */
8963 if (DECL_SIZE_UNIT (array) == NULL_TREE
8964 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8965 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8966 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8967 return 0;
8968
8969 /* If the variable is bigger than the string literal, OFFSET must be
8970 constant and within the bounds of the string literal.  */
8971 offset = fold_convert (sizetype, offset);
8972 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8973 && (! host_integerp (offset, 1)
8974 || compare_tree_int (offset, length) >= 0))
8975 return 0;
8976
8977 *ptr_offset = offset;
8978 return DECL_INITIAL (array);
8979 }
8980
8981 return 0;
8982 }
8983 \f
8984 /* Generate code to calculate EXP using a store-flag instruction
8985 and return an rtx for the result. EXP is either a comparison
8986 or a TRUTH_NOT_EXPR whose operand is a comparison.
8987
8988 If TARGET is nonzero, store the result there if convenient.
8989
8990 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8991 cheap.
8992
8993 Return zero if there is no suitable set-flag instruction
8994 available on this machine.
8995
8996 Once expand_expr has been called on the arguments of the comparison,
8997 we are committed to doing the store flag, since it is not safe to
8998 re-evaluate the expression. We emit the store-flag insn by calling
8999 emit_store_flag, but only expand the arguments if we have a reason
9000 to believe that emit_store_flag will be successful. If we think that
9001 it will, but it isn't, we have to simulate the store-flag with a
9002 set/jump/set sequence. */
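/* For example (target-dependent, purely for illustration): with a
setcc-style insn, "r = (x < y)" becomes a compare plus a single
instruction that deposits 0 or 1 into r, with no branches; the
branching fallback at the end of this function is effectively
"r = 1; if (x < y) goto L; r = 0; L:".  */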
9003
9004 static rtx
9005 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9006 {
9007 enum rtx_code code;
9008 tree arg0, arg1, type;
9009 tree tem;
9010 enum machine_mode operand_mode;
9011 int invert = 0;
9012 int unsignedp;
9013 rtx op0, op1;
9014 enum insn_code icode;
9015 rtx subtarget = target;
9016 rtx result, label;
9017
9018 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9019 result at the end. We can't simply invert the test since it would
9020 have already been inverted if it were valid. This case occurs for
9021 some floating-point comparisons. */
9022
9023 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9024 invert = 1, exp = TREE_OPERAND (exp, 0);
9025
9026 arg0 = TREE_OPERAND (exp, 0);
9027 arg1 = TREE_OPERAND (exp, 1);
9028
9029 /* Don't crash if the comparison was erroneous. */
9030 if (arg0 == error_mark_node || arg1 == error_mark_node)
9031 return const0_rtx;
9032
9033 type = TREE_TYPE (arg0);
9034 operand_mode = TYPE_MODE (type);
9035 unsignedp = TYPE_UNSIGNED (type);
9036
9037 /* We won't bother with BLKmode store-flag operations because it would mean
9038 passing a lot of information to emit_store_flag. */
9039 if (operand_mode == BLKmode)
9040 return 0;
9041
9042 /* We won't bother with store-flag operations involving function pointers
9043 when function pointers must be canonicalized before comparisons. */
9044 #ifdef HAVE_canonicalize_funcptr_for_compare
9045 if (HAVE_canonicalize_funcptr_for_compare
9046 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9047 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9048 == FUNCTION_TYPE))
9049 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9050 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9051 == FUNCTION_TYPE))))
9052 return 0;
9053 #endif
9054
9055 STRIP_NOPS (arg0);
9056 STRIP_NOPS (arg1);
9057
9058 /* Get the rtx comparison code to use. We know that EXP is a comparison
9059 operation of some type. Some comparisons against 1 and -1 can be
9060 converted to comparisons with zero. Do so here so that the tests
9061 below will be aware that we have a comparison with zero. These
9062 tests will not catch constants in the first operand, but constants
9063 are rarely passed as the first operand. */
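/* E.g. (signed comparisons, for illustration): "x < 1" becomes
"x <= 0", "x >= 1" becomes "x > 0", "x <= -1" becomes "x < 0" and
"x > -1" becomes "x >= 0".  */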
9064
9065 switch (TREE_CODE (exp))
9066 {
9067 case EQ_EXPR:
9068 code = EQ;
9069 break;
9070 case NE_EXPR:
9071 code = NE;
9072 break;
9073 case LT_EXPR:
9074 if (integer_onep (arg1))
9075 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9076 else
9077 code = unsignedp ? LTU : LT;
9078 break;
9079 case LE_EXPR:
9080 if (! unsignedp && integer_all_onesp (arg1))
9081 arg1 = integer_zero_node, code = LT;
9082 else
9083 code = unsignedp ? LEU : LE;
9084 break;
9085 case GT_EXPR:
9086 if (! unsignedp && integer_all_onesp (arg1))
9087 arg1 = integer_zero_node, code = GE;
9088 else
9089 code = unsignedp ? GTU : GT;
9090 break;
9091 case GE_EXPR:
9092 if (integer_onep (arg1))
9093 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9094 else
9095 code = unsignedp ? GEU : GE;
9096 break;
9097
9098 case UNORDERED_EXPR:
9099 code = UNORDERED;
9100 break;
9101 case ORDERED_EXPR:
9102 code = ORDERED;
9103 break;
9104 case UNLT_EXPR:
9105 code = UNLT;
9106 break;
9107 case UNLE_EXPR:
9108 code = UNLE;
9109 break;
9110 case UNGT_EXPR:
9111 code = UNGT;
9112 break;
9113 case UNGE_EXPR:
9114 code = UNGE;
9115 break;
9116 case UNEQ_EXPR:
9117 code = UNEQ;
9118 break;
9119 case LTGT_EXPR:
9120 code = LTGT;
9121 break;
9122
9123 default:
9124 gcc_unreachable ();
9125 }
9126
9127 /* Put a constant second. */
9128 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9129 {
9130 tem = arg0; arg0 = arg1; arg1 = tem;
9131 code = swap_condition (code);
9132 }
9133
9134 /* If this is an equality or inequality test of a single bit, we can
9135 do this by shifting the bit being tested to the low-order bit and
9136 masking the result with the constant 1. If the condition was EQ,
9137 we xor it with 1. This does not require an scc insn and is faster
9138 than an scc insn even if we have it.
9139
9140 The code to make this transformation was moved into fold_single_bit_test,
9141 so we just call into the folder and expand its result. */
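/* Illustration: "(x & 4) != 0" folds to "(x >> 2) & 1", and
"(x & 4) == 0" to "((x >> 2) & 1) ^ 1"; the precise trees come from
fold_single_bit_test.  */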
9142
9143 if ((code == NE || code == EQ)
9144 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9145 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9146 {
9147 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9148 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9149 arg0, arg1, type),
9150 target, VOIDmode, EXPAND_NORMAL);
9151 }
9152
9153 /* Now see if we are likely to be able to do this. Return if not. */
9154 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9155 return 0;
9156
9157 icode = setcc_gen_code[(int) code];
9158
9159 if (icode == CODE_FOR_nothing)
9160 {
9161 enum machine_mode wmode;
9162
9163 for (wmode = operand_mode;
9164 icode == CODE_FOR_nothing && wmode != VOIDmode;
9165 wmode = GET_MODE_WIDER_MODE (wmode))
9166 icode = cstore_optab->handlers[(int) wmode].insn_code;
9167 }
9168
9169 if (icode == CODE_FOR_nothing
9170 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9171 {
9172 /* We can only do this if it is one of the special cases that
9173 can be handled without an scc insn. */
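/* E.g. "x < 0" is just the sign bit of x, and "x != 0" or "x == 0"
can be simulated via abs or ffs when one of those optabs exists
(illustrative; emit_store_flag has the details).  */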
9174 if ((code == LT && integer_zerop (arg1))
9175 || (! only_cheap && code == GE && integer_zerop (arg1)))
9176 ;
9177 else if (! only_cheap && (code == NE || code == EQ)
9178 && TREE_CODE (type) != REAL_TYPE
9179 && ((abs_optab->handlers[(int) operand_mode].insn_code
9180 != CODE_FOR_nothing)
9181 || (ffs_optab->handlers[(int) operand_mode].insn_code
9182 != CODE_FOR_nothing)))
9183 ;
9184 else
9185 return 0;
9186 }
9187
9188 if (! get_subtarget (target)
9189 || GET_MODE (subtarget) != operand_mode)
9190 subtarget = 0;
9191
9192 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9193
9194 if (target == 0)
9195 target = gen_reg_rtx (mode);
9196
9197 result = emit_store_flag (target, code, op0, op1,
9198 operand_mode, unsignedp, 1);
9199
9200 if (result)
9201 {
9202 if (invert)
9203 result = expand_binop (mode, xor_optab, result, const1_rtx,
9204 result, 0, OPTAB_LIB_WIDEN);
9205 return result;
9206 }
9207
9208 /* If this failed, we have to do this with set/compare/jump/set code. */
9209 if (!REG_P (target)
9210 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9211 target = gen_reg_rtx (GET_MODE (target));
9212
9213 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9214 label = gen_label_rtx ();
9215 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9216 NULL_RTX, label);
9217
9218 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9219 emit_label (label);
9220
9221 return target;
9222 }
9223 \f
9224
9225 /* Stubs in case we haven't got a casesi insn. */
9226 #ifndef HAVE_casesi
9227 # define HAVE_casesi 0
9228 # define gen_casesi(a, b, c, d, e) (0)
9229 # define CODE_FOR_casesi CODE_FOR_nothing
9230 #endif
9231
9232 /* If the machine does not have a case insn that compares the bounds,
9233 this means extra overhead for dispatch tables, which raises the
9234 threshold for using them. */
9235 #ifndef CASE_VALUES_THRESHOLD
9236 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9237 #endif /* CASE_VALUES_THRESHOLD */
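/* I.e. by default a switch needs at least 4 case values (5 when there
is no casesi insn) before a dispatch table is used; targets can
override this by defining CASE_VALUES_THRESHOLD.  */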
9238
9239 unsigned int
9240 case_values_threshold (void)
9241 {
9242 return CASE_VALUES_THRESHOLD;
9243 }
9244
9245 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9246 0 otherwise (i.e. if there is no casesi instruction). */
9247 int
9248 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9249 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9250 {
9251 enum machine_mode index_mode = SImode;
9252 int index_bits = GET_MODE_BITSIZE (index_mode);
9253 rtx op1, op2, index;
9254 enum machine_mode op_mode;
9255
9256 if (! HAVE_casesi)
9257 return 0;
9258
9259 /* Convert the index to SImode. */
9260 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9261 {
9262 enum machine_mode omode = TYPE_MODE (index_type);
9263 rtx rangertx = expand_normal (range);
9264
9265 /* We must handle the endpoints in the original mode. */
9266 index_expr = build2 (MINUS_EXPR, index_type,
9267 index_expr, minval);
9268 minval = integer_zero_node;
9269 index = expand_normal (index_expr);
9270 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9271 omode, 1, default_label);
9272 /* Now we can safely truncate. */
9273 index = convert_to_mode (index_mode, index, 0);
9274 }
9275 else
9276 {
9277 if (TYPE_MODE (index_type) != index_mode)
9278 {
9279 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9280 index_expr = fold_convert (index_type, index_expr);
9281 }
9282
9283 index = expand_normal (index_expr);
9284 }
9285
9286 do_pending_stack_adjust ();
9287
9288 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9289 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9290 (index, op_mode))
9291 index = copy_to_mode_reg (op_mode, index);
9292
9293 op1 = expand_normal (minval);
9294
9295 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9296 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9297 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9298 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9299 (op1, op_mode))
9300 op1 = copy_to_mode_reg (op_mode, op1);
9301
9302 op2 = expand_normal (range);
9303
9304 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9305 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9306 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9307 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9308 (op2, op_mode))
9309 op2 = copy_to_mode_reg (op_mode, op2);
9310
9311 emit_jump_insn (gen_casesi (index, op1, op2,
9312 table_label, default_label));
9313 return 1;
9314 }
9315
9316 /* Attempt to generate a tablejump instruction; same concept. */
9317 #ifndef HAVE_tablejump
9318 #define HAVE_tablejump 0
9319 #define gen_tablejump(x, y) (0)
9320 #endif
9321
9322 /* Subroutine of the next function.
9323
9324 INDEX is the value being switched on, with the lowest value
9325 in the table already subtracted.
9326 MODE is its expected mode (needed if INDEX is constant).
9327 RANGE is the length of the jump table.
9328 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9329
9330 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9331 index value is out of range. */
9332
9333 static void
9334 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9335 rtx default_label)
9336 {
9337 rtx temp, vector;
9338
9339 if (INTVAL (range) > cfun->max_jumptable_ents)
9340 cfun->max_jumptable_ents = INTVAL (range);
9341
9342 /* Do an unsigned comparison (in the proper mode) between the index
9343 expression and the value which represents the length of the range.
9344 Since we just finished subtracting the lower bound of the range
9345 from the index expression, this comparison allows us to simultaneously
9346 check that the original index expression value is both greater than
9347 or equal to the minimum value of the range and less than or equal to
9348 the maximum value of the range. */
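/* A worked example (illustrative): for case values 5 .. 10, RANGE is
5 and INDEX has already had 5 subtracted.  An original index of 3
yields (unsigned) -2, which compares GTU against 5, so indices below
the minimum reach DEFAULT_LABEL exactly like indices above the
maximum.  */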
9349
9350 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9351 default_label);
9352
9353 /* If index is in range, it must fit in Pmode.
9354 Convert to Pmode so we can index with it. */
9355 if (mode != Pmode)
9356 index = convert_to_mode (Pmode, index, 1);
9357
9358 /* Don't let a MEM slip through, because then INDEX that comes
9359 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9360 and break_out_memory_refs will go to work on it and mess it up. */
9361 #ifdef PIC_CASE_VECTOR_ADDRESS
9362 if (flag_pic && !REG_P (index))
9363 index = copy_to_mode_reg (Pmode, index);
9364 #endif
9365
9366 /* If flag_force_addr were to affect this address
9367 it could interfere with the tricky assumptions made
9368 about addresses that contain label-refs,
9369 which may be valid only very near the tablejump itself. */
9370 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9371 GET_MODE_SIZE, because this indicates how large insns are. The other
9372 uses should all be Pmode, because they are addresses. This code
9373 could fail if addresses and insns are not the same size. */
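/* The dispatch address computed below is
TABLE_LABEL + INDEX * entry size, i.e. an ordinary scaled index into
the jump table.  */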
9374 index = gen_rtx_PLUS (Pmode,
9375 gen_rtx_MULT (Pmode, index,
9376 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9377 gen_rtx_LABEL_REF (Pmode, table_label));
9378 #ifdef PIC_CASE_VECTOR_ADDRESS
9379 if (flag_pic)
9380 index = PIC_CASE_VECTOR_ADDRESS (index);
9381 else
9382 #endif
9383 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9384 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9385 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9386 convert_move (temp, vector, 0);
9387
9388 emit_jump_insn (gen_tablejump (temp, table_label));
9389
9390 /* If we are generating PIC code or if the table is PC-relative, the
9391 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9392 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9393 emit_barrier ();
9394 }
9395
9396 int
9397 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9398 rtx table_label, rtx default_label)
9399 {
9400 rtx index;
9401
9402 if (! HAVE_tablejump)
9403 return 0;
9404
9405 index_expr = fold_build2 (MINUS_EXPR, index_type,
9406 fold_convert (index_type, index_expr),
9407 fold_convert (index_type, minval));
9408 index = expand_normal (index_expr);
9409 do_pending_stack_adjust ();
9410
9411 do_tablejump (index, TYPE_MODE (index_type),
9412 convert_modes (TYPE_MODE (index_type),
9413 TYPE_MODE (TREE_TYPE (range)),
9414 expand_normal (range),
9415 TYPE_UNSIGNED (TREE_TYPE (range))),
9416 table_label, default_label);
9417 return 1;
9418 }
9419
9420 /* Nonzero if the mode is a valid vector mode for this architecture.
9421 This returns nonzero even if there is no hardware support for the
9422 vector mode, but we can emulate with narrower modes. */
9423
9424 int
9425 vector_mode_valid_p (enum machine_mode mode)
9426 {
9427 enum mode_class class = GET_MODE_CLASS (mode);
9428 enum machine_mode innermode;
9429
9430 /* Doh! What's going on? */
9431 if (class != MODE_VECTOR_INT
9432 && class != MODE_VECTOR_FLOAT)
9433 return 0;
9434
9435 /* Hardware support. Woo hoo! */
9436 if (targetm.vector_mode_supported_p (mode))
9437 return 1;
9438
9439 innermode = GET_MODE_INNER (mode);
9440
9441 /* We should probably return 1 if requesting V4DI and we have no DI
9442 but do have V2DI; that case is probably very unlikely, though.  */
9443
9444 /* If we have support for the inner mode, we can safely emulate it.
9445 We may not have V2DI, but we can emulate it with a pair of DIs.  */
9446 return targetm.scalar_mode_supported_p (innermode);
9447 }
9448
9449 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
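/* E.g. (illustrative) a V4SI VECTOR_CST { 1, 2, 3, 0 } becomes
(const_vector:V4SI [1 2 3 0]); elements missing from the tree's
element list are zero-filled below.  */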
9450 static rtx
9451 const_vector_from_tree (tree exp)
9452 {
9453 rtvec v;
9454 int units, i;
9455 tree link, elt;
9456 enum machine_mode inner, mode;
9457
9458 mode = TYPE_MODE (TREE_TYPE (exp));
9459
9460 if (initializer_zerop (exp))
9461 return CONST0_RTX (mode);
9462
9463 units = GET_MODE_NUNITS (mode);
9464 inner = GET_MODE_INNER (mode);
9465
9466 v = rtvec_alloc (units);
9467
9468 link = TREE_VECTOR_CST_ELTS (exp);
9469 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9470 {
9471 elt = TREE_VALUE (link);
9472
9473 if (TREE_CODE (elt) == REAL_CST)
9474 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9475 inner);
9476 else
9477 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9478 TREE_INT_CST_HIGH (elt),
9479 inner);
9480 }
9481
9482 /* Initialize remaining elements to 0. */
9483 for (; i < units; ++i)
9484 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9485
9486 return gen_rtx_CONST_VECTOR (mode, v);
9487 }
9488 #include "gt-expr.h"