gcc/expr.c
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif


/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
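
/* Illustrative sketch (not part of the original file): store_by_pieces
   drives its stores through the CONSTFUN callback above, which must
   return an rtx holding the constant piece that belongs at OFFSET, in
   MODE.  A hypothetical callback for a caller whose source bytes live
   in CONSTFUNDATA might look like:

     static rtx
     example_constfun (void *data, HOST_WIDE_INT offset,
                       enum machine_mode mode)
     {
       const char *bytes = (const char *) data;
       return c_readstr (bytes + offset, mode);  // pack the piece into an immediate
     }

   c_readstr lives in builtins.c; clear_by_pieces_1 below is the real
   in-file instance of this interface, and simply returns const0_rtx
   for every piece.  */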

static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, tree, int);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
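
/* Worked example (illustrative, with assumed target parameters): on a
   32-bit target with MOVE_MAX_PIECES == 4 and MOVE_RATIO == 5, a
   16-byte word-aligned copy costs move_by_pieces_ninsns (16, 32, 5)
   == 4 SImode moves, and 4 < 5, so MOVE_BY_PIECES_P is true and the
   copy is expanded inline.  A 64-byte copy would cost 16 moves and
   therefore falls through to a movmem pattern or a libcall.  */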

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
                  != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                          to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_cst (NULL_TREE,
                                        GET_MODE_BITSIZE (to_mode)
                                        - GET_MODE_BITSIZE (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
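
/* Usage sketch (illustrative, not from the original file): widening a
   SImode pseudo into DImode with zero-extension can be written

     rtx wide = convert_to_mode (DImode, narrow, 1);

   while convert_move is the entry point when the destination rtx
   already exists:

     convert_move (existing_di_reg, narrow, 1);

   Passing UNSIGNEDP == 0 in either call sign-extends instead.  */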

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }
  /* Converting from an integer constant into MODE is always equivalent
     to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
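
/* Worked example (illustrative, assuming a 64-bit HOST_WIDE_INT):
   convert_modes (HImode, QImode, GEN_INT (-1), 1) takes the CONST_INT
   path above: VAL = -1 is masked with ((HOST_WIDE_INT) 1 << 8) - 1,
   giving 0xff, and since UNSIGNEDP is nonzero no sign bits are
   restored, so the result is (const_int 255) rather than
   (const_int -1).  */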
\f
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
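
/* Worked example (illustrative, assuming MOVE_MAX_PIECES == 8 and all
   the mov patterns available at full alignment): an aligned 11-byte
   copy is expanded by the descending loop above as one DImode move
   (8 bytes), no SImode move, one HImode move (2 bytes), and one
   QImode move (1 byte).  With ENDP == 1 the caller gets back a QImode
   MEM just past the last byte written, matching mempcpy's return
   value; ENDP == 2 backs up one byte, matching stpcpy.  */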

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
                       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
                                       expected_align, expected_size))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
                                          method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
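
/* Usage sketch (illustrative): a caller holding two BLKmode MEMs and a
   byte count can emit a plain copy with

     emit_block_move (dst, src, GEN_INT (nbytes), BLOCK_OP_NORMAL);

   The dispatch above then tries, in order: an inline move_by_pieces
   expansion, the target's movmem pattern, a memcpy libcall (when
   METHOD allows it), and finally an explicit byte-copy loop.  */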

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
          return false;
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
                            unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          if (insn_data[(int) code].n_operands == 4)
            pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          else
            pat = GEN_FCN ((int) code) (x, y, op2, opalign,
                                        GEN_INT (expected_align),
                                        GEN_INT (expected_size));
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}
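
/* In C terms, the loop emitted above has this shape (illustrative):

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];   // one QImode move per iteration
     iter++;
   cmp:
     if (iter < size)
       goto top;

   Jumping to the test first means a zero SIZE copies nothing.  */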
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
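
/* For reference (illustrative): a register group describing a 16-byte
   value passed in two DImode registers might look like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   where each CONST_INT is the byte offset of that register's piece
   within the whole block.  gen_group_rtx above clones such a group,
   substituting fresh pseudos for the original registers.  */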
1613
1614 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1615 except that values are placed in TMPS[i], and must later be moved
1616 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1617
1618 static void
1619 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1620 {
1621 rtx src;
1622 int start, i;
1623 enum machine_mode m = GET_MODE (orig_src);
1624
1625 gcc_assert (GET_CODE (dst) == PARALLEL);
1626
1627 if (m != VOIDmode
1628 && !SCALAR_INT_MODE_P (m)
1629 && !MEM_P (orig_src)
1630 && GET_CODE (orig_src) != CONCAT)
1631 {
1632 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1633 if (imode == BLKmode)
1634 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1635 else
1636 src = gen_reg_rtx (imode);
1637 if (imode != BLKmode)
1638 src = gen_lowpart (GET_MODE (orig_src), src);
1639 emit_move_insn (src, orig_src);
1640 /* ...and back again. */
1641 if (imode != BLKmode)
1642 src = gen_lowpart (imode, src);
1643 emit_group_load_1 (tmps, dst, src, type, ssize);
1644 return;
1645 }
1646
1647 /* Check for a NULL entry, used to indicate that the parameter goes
1648 both on the stack and in registers. */
1649 if (XEXP (XVECEXP (dst, 0, 0), 0))
1650 start = 0;
1651 else
1652 start = 1;
1653
1654 /* Process the pieces. */
1655 for (i = start; i < XVECLEN (dst, 0); i++)
1656 {
1657 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1658 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1659 unsigned int bytelen = GET_MODE_SIZE (mode);
1660 int shift = 0;
1661
1662 /* Handle trailing fragments that run over the size of the struct. */
1663 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1664 {
1665 /* Arrange to shift the fragment to where it belongs.
1666 extract_bit_field loads to the lsb of the reg. */
1667 if (
1668 #ifdef BLOCK_REG_PADDING
1669 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1670 == (BYTES_BIG_ENDIAN ? upward : downward)
1671 #else
1672 BYTES_BIG_ENDIAN
1673 #endif
1674 )
1675 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1676 bytelen = ssize - bytepos;
1677 gcc_assert (bytelen > 0);
1678 }
1679
1680 /* If we won't be loading directly from memory, protect the real source
1681 from strange tricks we might play; but make sure that the source can
1682 be loaded directly into the destination. */
1683 src = orig_src;
1684 if (!MEM_P (orig_src)
1685 && (!CONSTANT_P (orig_src)
1686 || (GET_MODE (orig_src) != mode
1687 && GET_MODE (orig_src) != VOIDmode)))
1688 {
1689 if (GET_MODE (orig_src) == VOIDmode)
1690 src = gen_reg_rtx (mode);
1691 else
1692 src = gen_reg_rtx (GET_MODE (orig_src));
1693
1694 emit_move_insn (src, orig_src);
1695 }
1696
1697 /* Optimize the access just a bit. */
1698 if (MEM_P (src)
1699 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1700 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1701 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1702 && bytelen == GET_MODE_SIZE (mode))
1703 {
1704 tmps[i] = gen_reg_rtx (mode);
1705 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1706 }
1707 else if (COMPLEX_MODE_P (mode)
1708 && GET_MODE (src) == mode
1709 && bytelen == GET_MODE_SIZE (mode))
1710 /* Let emit_move_complex do the bulk of the work. */
1711 tmps[i] = src;
1712 else if (GET_CODE (src) == CONCAT)
1713 {
1714 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1715 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1716
1717 if ((bytepos == 0 && bytelen == slen0)
1718 || (bytepos != 0 && bytepos + bytelen <= slen))
1719 {
1720 /* The following assumes that the concatenated objects all
1721 have the same size. In this case, a simple calculation
1722 can be used to determine the object and the bit field
1723 to be extracted. */
1724 tmps[i] = XEXP (src, bytepos / slen0);
1725 if (! CONSTANT_P (tmps[i])
1726 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1727 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1728 (bytepos % slen0) * BITS_PER_UNIT,
1729 1, NULL_RTX, mode, mode);
1730 }
1731 else
1732 {
1733 rtx mem;
1734
1735 gcc_assert (!bytepos);
1736 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1737 emit_move_insn (mem, src);
1738 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1739 0, 1, NULL_RTX, mode, mode);
1740 }
1741 }
1742 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1743 SIMD register, which is currently broken. While we get GCC
1744 to emit proper RTL for these cases, let's dump to memory. */
1745 else if (VECTOR_MODE_P (GET_MODE (dst))
1746 && REG_P (src))
1747 {
1748 int slen = GET_MODE_SIZE (GET_MODE (src));
1749 rtx mem;
1750
1751 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1752 emit_move_insn (mem, src);
1753 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1754 }
1755 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1756 && XVECLEN (dst, 0) > 1)
1757 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1758 else if (CONSTANT_P (src)
1759 || (REG_P (src) && GET_MODE (src) == mode))
1760 tmps[i] = src;
1761 else
1762 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1763 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1764 mode, mode);
1765
1766 if (shift)
1767 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1768 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1769 }
1770 }
1771
1772 /* Emit code to move a block SRC of type TYPE to a block DST,
1773 where DST is non-consecutive registers represented by a PARALLEL.
1774	   SSIZE represents the total size of block SRC in bytes, or -1
1775 if not known. */
1776
1777 void
1778 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1779 {
1780 rtx *tmps;
1781 int i;
1782
1783 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1784 emit_group_load_1 (tmps, dst, src, type, ssize);
1785
1786 /* Copy the extracted pieces into the proper (probable) hard regs. */
1787 for (i = 0; i < XVECLEN (dst, 0); i++)
1788 {
1789 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1790 if (d == NULL)
1791 continue;
1792 emit_move_insn (d, tmps[i]);
1793 }
1794 }
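
/* Illustrative sketch: how a caller might describe a structure
   returned in two hypothetical 4-byte registers and load it with
   emit_group_load.  The register numbers, SRC_MEM and TYPE are
   placeholders, not taken from any real back end.  */
#if 0
  rtx par = gen_rtx_PARALLEL
    (BLKmode,
     gen_rtvec (2,
                gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (SImode, 0),
                                   GEN_INT (0)),
                gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (SImode, 1),
                                   GEN_INT (4))));

  emit_group_load (par, src_mem, type, 8);
#endif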
1795
1796 /* Similar, but load SRC into new pseudos in a format that looks like
1797 PARALLEL. This can later be fed to emit_group_move to get things
1798 in the right place. */
1799
1800 rtx
1801 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1802 {
1803 rtvec vec;
1804 int i;
1805
1806 vec = rtvec_alloc (XVECLEN (parallel, 0));
1807 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1808
1809 /* Convert the vector to look just like the original PARALLEL, except
1810 with the computed values. */
1811 for (i = 0; i < XVECLEN (parallel, 0); i++)
1812 {
1813 rtx e = XVECEXP (parallel, 0, i);
1814 rtx d = XEXP (e, 0);
1815
1816 if (d)
1817 {
1818 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1819 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1820 }
1821 RTVEC_ELT (vec, i) = e;
1822 }
1823
1824 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1825 }
1826
1827 /* Emit code to move a block SRC to block DST, where SRC and DST are
1828 non-consecutive groups of registers, each represented by a PARALLEL. */
1829
1830 void
1831 emit_group_move (rtx dst, rtx src)
1832 {
1833 int i;
1834
1835 gcc_assert (GET_CODE (src) == PARALLEL
1836 && GET_CODE (dst) == PARALLEL
1837 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1838
1839 /* Skip first entry if NULL. */
1840 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1841 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1842 XEXP (XVECEXP (src, 0, i), 0));
1843 }
1844
1845 /* Move a group of registers represented by a PARALLEL into pseudos. */
1846
1847 rtx
1848 emit_group_move_into_temps (rtx src)
1849 {
1850 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1851 int i;
1852
1853 for (i = 0; i < XVECLEN (src, 0); i++)
1854 {
1855 rtx e = XVECEXP (src, 0, i);
1856 rtx d = XEXP (e, 0);
1857
1858 if (d)
1859 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1860 RTVEC_ELT (vec, i) = e;
1861 }
1862
1863 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1864 }
1865
1866 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1867 where SRC is non-consecutive registers represented by a PARALLEL.
1868 SSIZE represents the total size of block ORIG_DST, or -1 if not
1869 known. */
1870
1871 void
1872 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1873 {
1874 rtx *tmps, dst;
1875 int start, finish, i;
1876 enum machine_mode m = GET_MODE (orig_dst);
1877
1878 gcc_assert (GET_CODE (src) == PARALLEL);
1879
1880 if (!SCALAR_INT_MODE_P (m)
1881 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1882 {
1883 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1884 if (imode == BLKmode)
1885 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1886 else
1887 dst = gen_reg_rtx (imode);
1888 emit_group_store (dst, src, type, ssize);
1889 if (imode != BLKmode)
1890 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1891 emit_move_insn (orig_dst, dst);
1892 return;
1893 }
1894
1895 /* Check for a NULL entry, used to indicate that the parameter goes
1896 both on the stack and in registers. */
1897 if (XEXP (XVECEXP (src, 0, 0), 0))
1898 start = 0;
1899 else
1900 start = 1;
1901 finish = XVECLEN (src, 0);
1902
1903 tmps = alloca (sizeof (rtx) * finish);
1904
1905 /* Copy the (probable) hard regs into pseudos. */
1906 for (i = start; i < finish; i++)
1907 {
1908 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1909 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1910 {
1911 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1912 emit_move_insn (tmps[i], reg);
1913 }
1914 else
1915 tmps[i] = reg;
1916 }
1917
1918 /* If we won't be storing directly into memory, protect the real destination
1919 from strange tricks we might play. */
1920 dst = orig_dst;
1921 if (GET_CODE (dst) == PARALLEL)
1922 {
1923 rtx temp;
1924
1925 /* We can get a PARALLEL dst if there is a conditional expression in
1926 a return statement. In that case, the dst and src are the same,
1927 so no action is necessary. */
1928 if (rtx_equal_p (dst, src))
1929 return;
1930
1931 /* It is unclear if we can ever reach here, but we may as well handle
1932 it. Allocate a temporary, and split this into a store/load to/from
1933 the temporary. */
1934
1935 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1936 emit_group_store (temp, src, type, ssize);
1937 emit_group_load (dst, temp, type, ssize);
1938 return;
1939 }
1940 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1941 {
1942 enum machine_mode outer = GET_MODE (dst);
1943 enum machine_mode inner;
1944 HOST_WIDE_INT bytepos;
1945 bool done = false;
1946 rtx temp;
1947
1948 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1949 dst = gen_reg_rtx (outer);
1950
1951 /* Make life a bit easier for combine. */
1952 /* If the first element of the vector is the low part
1953 of the destination mode, use a paradoxical subreg to
1954 initialize the destination. */
1955 if (start < finish)
1956 {
1957 inner = GET_MODE (tmps[start]);
1958 bytepos = subreg_lowpart_offset (inner, outer);
1959 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1960 {
1961 temp = simplify_gen_subreg (outer, tmps[start],
1962 inner, 0);
1963 if (temp)
1964 {
1965 emit_move_insn (dst, temp);
1966 done = true;
1967 start++;
1968 }
1969 }
1970 }
1971
1972 /* If the first element wasn't the low part, try the last. */
1973 if (!done
1974 && start < finish - 1)
1975 {
1976 inner = GET_MODE (tmps[finish - 1]);
1977 bytepos = subreg_lowpart_offset (inner, outer);
1978 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1979 {
1980 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1981 inner, 0);
1982 if (temp)
1983 {
1984 emit_move_insn (dst, temp);
1985 done = true;
1986 finish--;
1987 }
1988 }
1989 }
1990
1991 /* Otherwise, simply initialize the result to zero. */
1992 if (!done)
1993 emit_move_insn (dst, CONST0_RTX (outer));
1994 }
1995
1996 /* Process the pieces. */
1997 for (i = start; i < finish; i++)
1998 {
1999 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2000 enum machine_mode mode = GET_MODE (tmps[i]);
2001 unsigned int bytelen = GET_MODE_SIZE (mode);
2002 rtx dest = dst;
2003
2004 /* Handle trailing fragments that run over the size of the struct. */
2005 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2006 {
2007 /* store_bit_field always takes its value from the lsb.
2008 Move the fragment to the lsb if it's not already there. */
2009 if (
2010 #ifdef BLOCK_REG_PADDING
2011 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2012 == (BYTES_BIG_ENDIAN ? upward : downward)
2013 #else
2014 BYTES_BIG_ENDIAN
2015 #endif
2016 )
2017 {
2018 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2019 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2020 build_int_cst (NULL_TREE, shift),
2021 tmps[i], 0);
2022 }
2023 bytelen = ssize - bytepos;
2024 }
2025
2026 if (GET_CODE (dst) == CONCAT)
2027 {
2028 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2029 dest = XEXP (dst, 0);
2030 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2031 {
2032 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2033 dest = XEXP (dst, 1);
2034 }
2035 else
2036 {
2037 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2038 dest = assign_stack_temp (GET_MODE (dest),
2039 GET_MODE_SIZE (GET_MODE (dest)), 0);
2040 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2041 tmps[i]);
2042 dst = dest;
2043 break;
2044 }
2045 }
2046
2047 /* Optimize the access just a bit. */
2048 if (MEM_P (dest)
2049 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2050 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2051 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2052 && bytelen == GET_MODE_SIZE (mode))
2053 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2054 else
2055 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2056 mode, tmps[i]);
2057 }
2058
2059 /* Copy from the pseudo into the (probable) hard reg. */
2060 if (orig_dst != dst)
2061 emit_move_insn (orig_dst, dst);
2062 }
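
/* Illustrative sketch, mirroring the emit_group_load example above:
   scatter the same hypothetical two-register PARALLEL back into an
   8-byte block of memory.  PAR, DST_MEM and TYPE are placeholders.  */
#if 0
  emit_group_store (dst_mem, par, type, 8);
#endif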
2063
2064 /* Generate code to copy a BLKmode object of TYPE out of a
2065 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2066 is null, a stack temporary is created. TGTBLK is returned.
2067
2068 The purpose of this routine is to handle functions that return
2069 BLKmode structures in registers. Some machines (the PA for example)
2070 want to return all small structures in registers regardless of the
2071 structure's alignment. */
2072
2073 rtx
2074 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2075 {
2076 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2077 rtx src = NULL, dst = NULL;
2078 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2079 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2080
2081 if (tgtblk == 0)
2082 {
2083 tgtblk = assign_temp (build_qualified_type (type,
2084 (TYPE_QUALS (type)
2085 | TYPE_QUAL_CONST)),
2086 0, 1, 1);
2087 preserve_temp_slots (tgtblk);
2088 }
2089
2090 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2091 into a new pseudo which is a full word. */
2092
2093 if (GET_MODE (srcreg) != BLKmode
2094 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2095 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2096
2097 /* If the structure doesn't take up a whole number of words, see whether
2098 SRCREG is padded on the left or on the right. If it's on the left,
2099 set PADDING_CORRECTION to the number of bits to skip.
2100
2101	     In most ABIs, the structure will be returned at the least significant
2102	     end of the register, which translates to right padding on little-endian
2103 targets and left padding on big-endian targets. The opposite
2104 holds if the structure is returned at the most significant
2105 end of the register. */
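  /* Worked example: with UNITS_PER_WORD == 4 (BITS_PER_WORD == 32) and
     a 6-byte structure padded on the left, bytes % UNITS_PER_WORD == 2,
     so the correction below is 32 - 2 * BITS_PER_UNIT == 16 bits.  */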
2106 if (bytes % UNITS_PER_WORD != 0
2107 && (targetm.calls.return_in_msb (type)
2108 ? !BYTES_BIG_ENDIAN
2109 : BYTES_BIG_ENDIAN))
2110 padding_correction
2111 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2112
2113	  /* Copy the structure BITSIZE bits at a time.
2114
2115 We could probably emit more efficient code for machines which do not use
2116 strict alignment, but it doesn't seem worth the effort at the current
2117 time. */
2118 for (bitpos = 0, xbitpos = padding_correction;
2119 bitpos < bytes * BITS_PER_UNIT;
2120 bitpos += bitsize, xbitpos += bitsize)
2121 {
2122 /* We need a new source operand each time xbitpos is on a
2123 word boundary and when xbitpos == padding_correction
2124 (the first time through). */
2125 if (xbitpos % BITS_PER_WORD == 0
2126 || xbitpos == padding_correction)
2127 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2128 GET_MODE (srcreg));
2129
2130 /* We need a new destination operand each time bitpos is on
2131 a word boundary. */
2132 if (bitpos % BITS_PER_WORD == 0)
2133 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2134
2135 /* Use xbitpos for the source extraction (right justified) and
2136	         bitpos for the destination store (left justified).  */
2137 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2138 extract_bit_field (src, bitsize,
2139 xbitpos % BITS_PER_WORD, 1,
2140 NULL_RTX, word_mode, word_mode));
2141 }
2142
2143 return tgtblk;
2144 }
2145
2146 /* Add a USE expression for REG to the (possibly empty) list pointed
2147 to by CALL_FUSAGE. REG must denote a hard register. */
2148
2149 void
2150 use_reg (rtx *call_fusage, rtx reg)
2151 {
2152 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2153
2154 *call_fusage
2155 = gen_rtx_EXPR_LIST (VOIDmode,
2156 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2157 }
2158
2159 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2160 starting at REGNO. All of these registers must be hard registers. */
2161
2162 void
2163 use_regs (rtx *call_fusage, int regno, int nregs)
2164 {
2165 int i;
2166
2167 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2168
2169 for (i = 0; i < nregs; i++)
2170 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2171 }
2172
2173 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2174 PARALLEL REGS. This is for calls that pass values in multiple
2175 non-contiguous locations. The Irix 6 ABI has examples of this. */
2176
2177 void
2178 use_group_regs (rtx *call_fusage, rtx regs)
2179 {
2180 int i;
2181
2182 for (i = 0; i < XVECLEN (regs, 0); i++)
2183 {
2184 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2185
2186 /* A NULL entry means the parameter goes both on the stack and in
2187 registers. This can also be a MEM for targets that pass values
2188 partially on the stack and partially in registers. */
2189 if (reg != 0 && REG_P (reg))
2190 use_reg (call_fusage, reg);
2191 }
2192 }
2193 \f
2194
2195 /* Determine whether the LEN bytes generated by CONSTFUN can be
2196 stored to memory using several move instructions. CONSTFUNDATA is
2197 a pointer which will be passed as argument in every CONSTFUN call.
2198 ALIGN is maximum alignment we can assume. Return nonzero if a
2199 call to store_by_pieces should succeed. */
2200
2201 int
2202 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2203 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2204 void *constfundata, unsigned int align)
2205 {
2206 unsigned HOST_WIDE_INT l;
2207 unsigned int max_size;
2208 HOST_WIDE_INT offset = 0;
2209 enum machine_mode mode, tmode;
2210 enum insn_code icode;
2211 int reverse;
2212 rtx cst;
2213
2214 if (len == 0)
2215 return 1;
2216
2217 if (! STORE_BY_PIECES_P (len, align))
2218 return 0;
2219
2220 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2221 if (align >= GET_MODE_ALIGNMENT (tmode))
2222 align = GET_MODE_ALIGNMENT (tmode);
2223 else
2224 {
2225 enum machine_mode xmode;
2226
2227 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2228 tmode != VOIDmode;
2229 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2230 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2231 || SLOW_UNALIGNED_ACCESS (tmode, align))
2232 break;
2233
2234 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2235 }
2236
2237 /* We would first store what we can in the largest integer mode, then go to
2238 successively smaller modes. */
2239
2240 for (reverse = 0;
2241 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2242 reverse++)
2243 {
2244 l = len;
2245 mode = VOIDmode;
2246 max_size = STORE_MAX_PIECES + 1;
2247 while (max_size > 1)
2248 {
2249 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2250 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2251 if (GET_MODE_SIZE (tmode) < max_size)
2252 mode = tmode;
2253
2254 if (mode == VOIDmode)
2255 break;
2256
2257 icode = mov_optab->handlers[(int) mode].insn_code;
2258 if (icode != CODE_FOR_nothing
2259 && align >= GET_MODE_ALIGNMENT (mode))
2260 {
2261 unsigned int size = GET_MODE_SIZE (mode);
2262
2263 while (l >= size)
2264 {
2265 if (reverse)
2266 offset -= size;
2267
2268 cst = (*constfun) (constfundata, offset, mode);
2269 if (!LEGITIMATE_CONSTANT_P (cst))
2270 return 0;
2271
2272 if (!reverse)
2273 offset += size;
2274
2275 l -= size;
2276 }
2277 }
2278
2279 max_size = GET_MODE_SIZE (mode);
2280 }
2281
2282 /* The code above should have handled everything. */
2283 gcc_assert (!l);
2284 }
2285
2286 return 1;
2287 }
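
/* Illustrative CONSTFUN sketch: broadcast a single fill byte across
   MODE, much as builtins.c does when expanding memset of a constant
   value.  The function name is hypothetical.  */
#if 0
static rtx
example_fill_byte (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode)
{
  unsigned HOST_WIDE_INT c = *(unsigned char *) data;
  unsigned HOST_WIDE_INT val = 0;
  unsigned int i;

  /* Repeat the byte once for each byte of MODE.  */
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val = (val << BITS_PER_UNIT) | c;
  return gen_int_mode (val, mode);
}
#endif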
2288
2289 /* Generate several move instructions to store LEN bytes generated by
2290 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2291 pointer which will be passed as argument in every CONSTFUN call.
2292 ALIGN is maximum alignment we can assume.
2293	   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2294	   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2295 stpcpy. */
2296
2297 rtx
2298 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2299 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2300 void *constfundata, unsigned int align, int endp)
2301 {
2302 struct store_by_pieces data;
2303
2304 if (len == 0)
2305 {
2306 gcc_assert (endp != 2);
2307 return to;
2308 }
2309
2310 gcc_assert (STORE_BY_PIECES_P (len, align));
2311 data.constfun = constfun;
2312 data.constfundata = constfundata;
2313 data.len = len;
2314 data.to = to;
2315 store_by_pieces_1 (&data, align);
2316 if (endp)
2317 {
2318 rtx to1;
2319
2320 gcc_assert (!data.reverse);
2321 if (data.autinc_to)
2322 {
2323 if (endp == 2)
2324 {
2325 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2326 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2327 else
2328 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2329 -1));
2330 }
2331 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2332 data.offset);
2333 }
2334 else
2335 {
2336 if (endp == 2)
2337 --data.offset;
2338 to1 = adjust_address (data.to, QImode, data.offset);
2339 }
2340 return to1;
2341 }
2342 else
2343 return data.to;
2344 }
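
/* Illustrative use of the CONSTFUN sketched after can_store_by_pieces:
   fill LEN bytes of DEST_MEM with the byte 0xab, by pieces, guarded by
   the feasibility check.  LEN, DEST_MEM and ALIGN are placeholders.  */
#if 0
  unsigned char c = 0xab;

  if (can_store_by_pieces (len, example_fill_byte, &c, align))
    store_by_pieces (dest_mem, len, example_fill_byte, &c, align, 0);
#endif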
2345
2346 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2347 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2348
2349 static void
2350 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2351 {
2352 struct store_by_pieces data;
2353
2354 if (len == 0)
2355 return;
2356
2357 data.constfun = clear_by_pieces_1;
2358 data.constfundata = NULL;
2359 data.len = len;
2360 data.to = to;
2361 store_by_pieces_1 (&data, align);
2362 }
2363
2364 /* Callback routine for clear_by_pieces.
2365 Return const0_rtx unconditionally. */
2366
2367 static rtx
2368 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2369 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2370 enum machine_mode mode ATTRIBUTE_UNUSED)
2371 {
2372 return const0_rtx;
2373 }
2374
2375 /* Subroutine of clear_by_pieces and store_by_pieces.
2376 Generate several move instructions to store LEN bytes of block TO. (A MEM
2377 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2378
2379 static void
2380 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2381 unsigned int align ATTRIBUTE_UNUSED)
2382 {
2383 rtx to_addr = XEXP (data->to, 0);
2384 unsigned int max_size = STORE_MAX_PIECES + 1;
2385 enum machine_mode mode = VOIDmode, tmode;
2386 enum insn_code icode;
2387
2388 data->offset = 0;
2389 data->to_addr = to_addr;
2390 data->autinc_to
2391 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2392 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2393
2394 data->explicit_inc_to = 0;
2395 data->reverse
2396 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2397 if (data->reverse)
2398 data->offset = data->len;
2399
2400 /* If storing requires more than two move insns,
2401 copy addresses to registers (to make displacements shorter)
2402 and use post-increment if available. */
2403 if (!data->autinc_to
2404 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2405 {
2406 /* Determine the main mode we'll be using. */
2407 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2408 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2409 if (GET_MODE_SIZE (tmode) < max_size)
2410 mode = tmode;
2411
2412 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2413 {
2414 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2415 data->autinc_to = 1;
2416 data->explicit_inc_to = -1;
2417 }
2418
2419 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2420 && ! data->autinc_to)
2421 {
2422 data->to_addr = copy_addr_to_reg (to_addr);
2423 data->autinc_to = 1;
2424 data->explicit_inc_to = 1;
2425 }
2426
2427	      if (!data->autinc_to && CONSTANT_P (to_addr))
2428 data->to_addr = copy_addr_to_reg (to_addr);
2429 }
2430
2431 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2432 if (align >= GET_MODE_ALIGNMENT (tmode))
2433 align = GET_MODE_ALIGNMENT (tmode);
2434 else
2435 {
2436 enum machine_mode xmode;
2437
2438 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2439 tmode != VOIDmode;
2440 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2441 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2442 || SLOW_UNALIGNED_ACCESS (tmode, align))
2443 break;
2444
2445 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2446 }
2447
2448 /* First store what we can in the largest integer mode, then go to
2449 successively smaller modes. */
2450
2451 while (max_size > 1)
2452 {
2453 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2454 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2455 if (GET_MODE_SIZE (tmode) < max_size)
2456 mode = tmode;
2457
2458 if (mode == VOIDmode)
2459 break;
2460
2461 icode = mov_optab->handlers[(int) mode].insn_code;
2462 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2463 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2464
2465 max_size = GET_MODE_SIZE (mode);
2466 }
2467
2468 /* The code above should have handled everything. */
2469 gcc_assert (!data->len);
2470 }
2471
2472 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2473 with move instructions for mode MODE. GENFUN is the gen_... function
2474 to make a move insn for that mode. DATA has all the other info. */
2475
2476 static void
2477 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2478 struct store_by_pieces *data)
2479 {
2480 unsigned int size = GET_MODE_SIZE (mode);
2481 rtx to1, cst;
2482
2483 while (data->len >= size)
2484 {
2485 if (data->reverse)
2486 data->offset -= size;
2487
2488 if (data->autinc_to)
2489 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2490 data->offset);
2491 else
2492 to1 = adjust_address (data->to, mode, data->offset);
2493
2494 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2495 emit_insn (gen_add2_insn (data->to_addr,
2496 GEN_INT (-(HOST_WIDE_INT) size)));
2497
2498 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2499 emit_insn ((*genfun) (to1, cst));
2500
2501 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2502 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2503
2504 if (! data->reverse)
2505 data->offset += size;
2506
2507 data->len -= size;
2508 }
2509 }
2510 \f
2511 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2512 its length in bytes. */
2513
2514 rtx
2515 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2516 unsigned int expected_align, HOST_WIDE_INT expected_size)
2517 {
2518 enum machine_mode mode = GET_MODE (object);
2519 unsigned int align;
2520
2521 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2522
2523 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2524 just move a zero. Otherwise, do this a piece at a time. */
2525 if (mode != BLKmode
2526 && GET_CODE (size) == CONST_INT
2527 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2528 {
2529 rtx zero = CONST0_RTX (mode);
2530 if (zero != NULL)
2531 {
2532 emit_move_insn (object, zero);
2533 return NULL;
2534 }
2535
2536 if (COMPLEX_MODE_P (mode))
2537 {
2538 zero = CONST0_RTX (GET_MODE_INNER (mode));
2539 if (zero != NULL)
2540 {
2541 write_complex_part (object, zero, 0);
2542 write_complex_part (object, zero, 1);
2543 return NULL;
2544 }
2545 }
2546 }
2547
2548 if (size == const0_rtx)
2549 return NULL;
2550
2551 align = MEM_ALIGN (object);
2552
2553 if (GET_CODE (size) == CONST_INT
2554 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2555 clear_by_pieces (object, INTVAL (size), align);
2556 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2557 expected_align, expected_size))
2558 ;
2559 else
2560 return set_storage_via_libcall (object, size, const0_rtx,
2561 method == BLOCK_OP_TAILCALL);
2562
2563 return NULL;
2564 }
2565
2566 rtx
2567 clear_storage (rtx object, rtx size, enum block_op_methods method)
2568 {
2569 return clear_storage_hints (object, size, method, 0, -1);
2570 }
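
/* Illustrative sketch: zero a 32-byte BLKmode object through the plain
   entry point above.  OBJ_MEM is a hypothetical MEM rtx.  */
#if 0
  clear_storage (obj_mem, GEN_INT (32), BLOCK_OP_NORMAL);
#endif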
2571
2572
2573 /* A subroutine of clear_storage. Expand a call to memset.
2574	   Return the return value of memset.  */
2575
2576 rtx
2577 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2578 {
2579 tree call_expr, arg_list, fn, object_tree, size_tree, val_tree;
2580 enum machine_mode size_mode;
2581 rtx retval;
2582
2583 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2584	     place those new pseudos into a VAR_DECL and use them later.  */
2585
2586 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2587
2588 size_mode = TYPE_MODE (sizetype);
2589 size = convert_to_mode (size_mode, size, 1);
2590 size = copy_to_mode_reg (size_mode, size);
2591
2592 /* It is incorrect to use the libcall calling conventions to call
2593 memset in this context. This could be a user call to memset and
2594 the user may wish to examine the return value from memset. For
2595 targets where libcalls and normal calls have different conventions
2596 for returning pointers, we could end up generating incorrect code. */
2597
2598 object_tree = make_tree (ptr_type_node, object);
2599 if (GET_CODE (val) != CONST_INT)
2600 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2601 size_tree = make_tree (sizetype, size);
2602 val_tree = make_tree (integer_type_node, val);
2603
2604 fn = clear_storage_libcall_fn (true);
2605 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2606 arg_list = tree_cons (NULL_TREE, val_tree, arg_list);
2607 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2608
2609 /* Now we have to build up the CALL_EXPR itself. */
2610 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2611 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2612 call_expr, arg_list, NULL_TREE);
2613 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2614
2615 retval = expand_normal (call_expr);
2616
2617 return retval;
2618 }
2619
2620 /* A subroutine of set_storage_via_libcall. Create the tree node
2621 for the function we use for block clears. The first time FOR_CALL
2622 is true, we call assemble_external. */
2623
2624 static GTY(()) tree block_clear_fn;
2625
2626 void
2627 init_block_clear_fn (const char *asmspec)
2628 {
2629 if (!block_clear_fn)
2630 {
2631 tree fn, args;
2632
2633 fn = get_identifier ("memset");
2634 args = build_function_type_list (ptr_type_node, ptr_type_node,
2635 integer_type_node, sizetype,
2636 NULL_TREE);
2637
2638 fn = build_decl (FUNCTION_DECL, fn, args);
2639 DECL_EXTERNAL (fn) = 1;
2640 TREE_PUBLIC (fn) = 1;
2641 DECL_ARTIFICIAL (fn) = 1;
2642 TREE_NOTHROW (fn) = 1;
2643 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2644 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2645
2646 block_clear_fn = fn;
2647 }
2648
2649 if (asmspec)
2650 set_user_assembler_name (block_clear_fn, asmspec);
2651 }
2652
2653 static tree
2654 clear_storage_libcall_fn (int for_call)
2655 {
2656 static bool emitted_extern;
2657
2658 if (!block_clear_fn)
2659 init_block_clear_fn (NULL);
2660
2661 if (for_call && !emitted_extern)
2662 {
2663 emitted_extern = true;
2664 make_decl_rtl (block_clear_fn);
2665 assemble_external (block_clear_fn);
2666 }
2667
2668 return block_clear_fn;
2669 }
2670 \f
2671 /* Expand a setmem pattern; return true if successful. */
2672
2673 bool
2674 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2675 unsigned int expected_align, HOST_WIDE_INT expected_size)
2676 {
2677 /* Try the most limited insn first, because there's no point
2678 including more than one in the machine description unless
2679 the more limited one has some advantage. */
2680
2681 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2682 enum machine_mode mode;
2683
2684 if (expected_align < align)
2685 expected_align = align;
2686
2687 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2688 mode = GET_MODE_WIDER_MODE (mode))
2689 {
2690 enum insn_code code = setmem_optab[(int) mode];
2691 insn_operand_predicate_fn pred;
2692
2693 if (code != CODE_FOR_nothing
2694 /* We don't need MODE to be narrower than
2695 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2696 the mode mask, as it is returned by the macro, it will
2697 definitely be less than the actual mode mask. */
2698 && ((GET_CODE (size) == CONST_INT
2699 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2700 <= (GET_MODE_MASK (mode) >> 1)))
2701 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2702 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2703 || (*pred) (object, BLKmode))
2704 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2705 || (*pred) (opalign, VOIDmode)))
2706 {
2707 rtx opsize, opchar;
2708 enum machine_mode char_mode;
2709 rtx last = get_last_insn ();
2710 rtx pat;
2711
2712 opsize = convert_to_mode (mode, size, 1);
2713 pred = insn_data[(int) code].operand[1].predicate;
2714 if (pred != 0 && ! (*pred) (opsize, mode))
2715 opsize = copy_to_mode_reg (mode, opsize);
2716
2717 opchar = val;
2718 char_mode = insn_data[(int) code].operand[2].mode;
2719 if (char_mode != VOIDmode)
2720 {
2721 opchar = convert_to_mode (char_mode, opchar, 1);
2722 pred = insn_data[(int) code].operand[2].predicate;
2723 if (pred != 0 && ! (*pred) (opchar, char_mode))
2724 opchar = copy_to_mode_reg (char_mode, opchar);
2725 }
2726
2727 if (insn_data[(int) code].n_operands == 4)
2728 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2729 else
2730 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2731 GEN_INT (expected_align),
2732 GEN_INT (expected_size));
2733 if (pat)
2734 {
2735 emit_insn (pat);
2736 return true;
2737 }
2738 else
2739 delete_insns_since (last);
2740 }
2741 }
2742
2743 return false;
2744 }
2745
2746 \f
2747 /* Write to one of the components of the complex value CPLX. Write VAL to
2748	   the real part if IMAG_P is false, and the imaginary part if it's true.  */
2749
2750 static void
2751 write_complex_part (rtx cplx, rtx val, bool imag_p)
2752 {
2753 enum machine_mode cmode;
2754 enum machine_mode imode;
2755 unsigned ibitsize;
2756
2757 if (GET_CODE (cplx) == CONCAT)
2758 {
2759 emit_move_insn (XEXP (cplx, imag_p), val);
2760 return;
2761 }
2762
2763 cmode = GET_MODE (cplx);
2764 imode = GET_MODE_INNER (cmode);
2765 ibitsize = GET_MODE_BITSIZE (imode);
2766
2767 /* For MEMs simplify_gen_subreg may generate an invalid new address
2768 because, e.g., the original address is considered mode-dependent
2769 by the target, which restricts simplify_subreg from invoking
2770 adjust_address_nv. Instead of preparing fallback support for an
2771 invalid address, we call adjust_address_nv directly. */
2772 if (MEM_P (cplx))
2773 {
2774 emit_move_insn (adjust_address_nv (cplx, imode,
2775 imag_p ? GET_MODE_SIZE (imode) : 0),
2776 val);
2777 return;
2778 }
2779
2780 /* If the sub-object is at least word sized, then we know that subregging
2781 will work. This special case is important, since store_bit_field
2782 wants to operate on integer modes, and there's rarely an OImode to
2783 correspond to TCmode. */
2784 if (ibitsize >= BITS_PER_WORD
2785 /* For hard regs we have exact predicates. Assume we can split
2786 the original object if it spans an even number of hard regs.
2787 This special case is important for SCmode on 64-bit platforms
2788 where the natural size of floating-point regs is 32-bit. */
2789 || (REG_P (cplx)
2790 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2791 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2792 {
2793 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2794 imag_p ? GET_MODE_SIZE (imode) : 0);
2795 if (part)
2796 {
2797 emit_move_insn (part, val);
2798 return;
2799 }
2800 else
2801 /* simplify_gen_subreg may fail for sub-word MEMs. */
2802 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2803 }
2804
2805 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2806 }
2807
2808 /* Extract one of the components of the complex value CPLX. Extract the
2809 real part if IMAG_P is false, and the imaginary part if it's true. */
2810
2811 static rtx
2812 read_complex_part (rtx cplx, bool imag_p)
2813 {
2814 enum machine_mode cmode, imode;
2815 unsigned ibitsize;
2816
2817 if (GET_CODE (cplx) == CONCAT)
2818 return XEXP (cplx, imag_p);
2819
2820 cmode = GET_MODE (cplx);
2821 imode = GET_MODE_INNER (cmode);
2822 ibitsize = GET_MODE_BITSIZE (imode);
2823
2824 /* Special case reads from complex constants that got spilled to memory. */
2825 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2826 {
2827 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2828 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2829 {
2830 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2831 if (CONSTANT_CLASS_P (part))
2832 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2833 }
2834 }
2835
2836 /* For MEMs simplify_gen_subreg may generate an invalid new address
2837 because, e.g., the original address is considered mode-dependent
2838 by the target, which restricts simplify_subreg from invoking
2839 adjust_address_nv. Instead of preparing fallback support for an
2840 invalid address, we call adjust_address_nv directly. */
2841 if (MEM_P (cplx))
2842 return adjust_address_nv (cplx, imode,
2843 imag_p ? GET_MODE_SIZE (imode) : 0);
2844
2845 /* If the sub-object is at least word sized, then we know that subregging
2846 will work. This special case is important, since extract_bit_field
2847 wants to operate on integer modes, and there's rarely an OImode to
2848 correspond to TCmode. */
2849 if (ibitsize >= BITS_PER_WORD
2850 /* For hard regs we have exact predicates. Assume we can split
2851 the original object if it spans an even number of hard regs.
2852 This special case is important for SCmode on 64-bit platforms
2853 where the natural size of floating-point regs is 32-bit. */
2854 || (REG_P (cplx)
2855 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2856 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2857 {
2858 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2859 imag_p ? GET_MODE_SIZE (imode) : 0);
2860 if (ret)
2861 return ret;
2862 else
2863 /* simplify_gen_subreg may fail for sub-word MEMs. */
2864 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2865 }
2866
2867 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2868 true, NULL_RTX, imode, imode);
2869 }
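
/* Illustrative sketch: assemble a complex value from two scalar parts,
   much as expand_expr does for a COMPLEX_EXPR.  RE_VAL and IM_VAL are
   hypothetical SFmode operands.  */
#if 0
  rtx c = gen_reg_rtx (SCmode);

  write_complex_part (c, re_val, false);
  write_complex_part (c, im_val, true);
#endif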
2870 \f
2871 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2872 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2873 represented in NEW_MODE. If FORCE is true, this will never happen, as
2874 we'll force-create a SUBREG if needed. */
2875
2876 static rtx
2877 emit_move_change_mode (enum machine_mode new_mode,
2878 enum machine_mode old_mode, rtx x, bool force)
2879 {
2880 rtx ret;
2881
2882 if (MEM_P (x))
2883 {
2884 /* We don't have to worry about changing the address since the
2885 size in bytes is supposed to be the same. */
2886 if (reload_in_progress)
2887 {
2888 /* Copy the MEM to change the mode and move any
2889 substitutions from the old MEM to the new one. */
2890 ret = adjust_address_nv (x, new_mode, 0);
2891 copy_replacements (x, ret);
2892 }
2893 else
2894 ret = adjust_address (x, new_mode, 0);
2895 }
2896 else
2897 {
2898 /* Note that we do want simplify_subreg's behavior of validating
2899 that the new mode is ok for a hard register. If we were to use
2900 simplify_gen_subreg, we would create the subreg, but would
2901 probably run into the target not being able to implement it. */
2902 /* Except, of course, when FORCE is true, when this is exactly what
2903 we want. Which is needed for CCmodes on some targets. */
2904 if (force)
2905 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2906 else
2907 ret = simplify_subreg (new_mode, x, old_mode, 0);
2908 }
2909
2910 return ret;
2911 }
2912
2913 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2914 an integer mode of the same size as MODE. Returns the instruction
2915 emitted, or NULL if such a move could not be generated. */
2916
2917 static rtx
2918 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2919 {
2920 enum machine_mode imode;
2921 enum insn_code code;
2922
2923 /* There must exist a mode of the exact size we require. */
2924 imode = int_mode_for_mode (mode);
2925 if (imode == BLKmode)
2926 return NULL_RTX;
2927
2928 /* The target must support moves in this mode. */
2929 code = mov_optab->handlers[imode].insn_code;
2930 if (code == CODE_FOR_nothing)
2931 return NULL_RTX;
2932
2933 x = emit_move_change_mode (imode, mode, x, force);
2934 if (x == NULL_RTX)
2935 return NULL_RTX;
2936 y = emit_move_change_mode (imode, mode, y, force);
2937 if (y == NULL_RTX)
2938 return NULL_RTX;
2939 return emit_insn (GEN_FCN (code) (x, y));
2940 }
2941
2942 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2943 Return an equivalent MEM that does not use an auto-increment. */
2944
2945 static rtx
2946 emit_move_resolve_push (enum machine_mode mode, rtx x)
2947 {
2948 enum rtx_code code = GET_CODE (XEXP (x, 0));
2949 HOST_WIDE_INT adjust;
2950 rtx temp;
2951
2952 adjust = GET_MODE_SIZE (mode);
2953 #ifdef PUSH_ROUNDING
2954 adjust = PUSH_ROUNDING (adjust);
2955 #endif
2956 if (code == PRE_DEC || code == POST_DEC)
2957 adjust = -adjust;
2958 else if (code == PRE_MODIFY || code == POST_MODIFY)
2959 {
2960 rtx expr = XEXP (XEXP (x, 0), 1);
2961 HOST_WIDE_INT val;
2962
2963 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2964 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2965 val = INTVAL (XEXP (expr, 1));
2966 if (GET_CODE (expr) == MINUS)
2967 val = -val;
2968 gcc_assert (adjust == val || adjust == -val);
2969 adjust = val;
2970 }
2971
2972 /* Do not use anti_adjust_stack, since we don't want to update
2973 stack_pointer_delta. */
2974 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2975 GEN_INT (adjust), stack_pointer_rtx,
2976 0, OPTAB_LIB_WIDEN);
2977 if (temp != stack_pointer_rtx)
2978 emit_move_insn (stack_pointer_rtx, temp);
2979
2980 switch (code)
2981 {
2982 case PRE_INC:
2983 case PRE_DEC:
2984 case PRE_MODIFY:
2985 temp = stack_pointer_rtx;
2986 break;
2987 case POST_INC:
2988 case POST_DEC:
2989 case POST_MODIFY:
2990 temp = plus_constant (stack_pointer_rtx, -adjust);
2991 break;
2992 default:
2993 gcc_unreachable ();
2994 }
2995
2996 return replace_equiv_address (x, temp);
2997 }
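
/* For instance, on a stack that grows downward, emit_move_resolve_push
   turns a (pre_dec) push of a 4-byte value into an explicit
   stack-pointer decrement (by 4, or by PUSH_ROUNDING (4) where the
   target rounds pushes) followed by an ordinary store through the
   stack pointer.  */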
2998
2999 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3000 X is known to satisfy push_operand, and MODE is known to be complex.
3001 Returns the last instruction emitted. */
3002
3003 static rtx
3004 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3005 {
3006 enum machine_mode submode = GET_MODE_INNER (mode);
3007 bool imag_first;
3008
3009 #ifdef PUSH_ROUNDING
3010 unsigned int submodesize = GET_MODE_SIZE (submode);
3011
3012 /* In case we output to the stack, but the size is smaller than the
3013 machine can push exactly, we need to use move instructions. */
3014 if (PUSH_ROUNDING (submodesize) != submodesize)
3015 {
3016 x = emit_move_resolve_push (mode, x);
3017 return emit_move_insn (x, y);
3018 }
3019 #endif
3020
3021 /* Note that the real part always precedes the imag part in memory
3022 regardless of machine's endianness. */
3023 switch (GET_CODE (XEXP (x, 0)))
3024 {
3025 case PRE_DEC:
3026 case POST_DEC:
3027 imag_first = true;
3028 break;
3029 case PRE_INC:
3030 case POST_INC:
3031 imag_first = false;
3032 break;
3033 default:
3034 gcc_unreachable ();
3035 }
3036
3037 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3038 read_complex_part (y, imag_first));
3039 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3040 read_complex_part (y, !imag_first));
3041 }
3042
3043 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3044 MODE is known to be complex. Returns the last instruction emitted. */
3045
3046 static rtx
3047 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3048 {
3049 bool try_int;
3050
3051 /* Need to take special care for pushes, to maintain proper ordering
3052 of the data, and possibly extra padding. */
3053 if (push_operand (x, mode))
3054 return emit_move_complex_push (mode, x, y);
3055
3056 /* See if we can coerce the target into moving both values at once. */
3057
3058 /* Move floating point as parts. */
3059 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3060 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3061 try_int = false;
3062 /* Not possible if the values are inherently not adjacent. */
3063 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3064 try_int = false;
3065 /* Is possible if both are registers (or subregs of registers). */
3066 else if (register_operand (x, mode) && register_operand (y, mode))
3067 try_int = true;
3068 /* If one of the operands is a memory, and alignment constraints
3069 are friendly enough, we may be able to do combined memory operations.
3070 We do not attempt this if Y is a constant because that combination is
3071 usually better with the by-parts thing below. */
3072 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3073 && (!STRICT_ALIGNMENT
3074 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3075 try_int = true;
3076 else
3077 try_int = false;
3078
3079 if (try_int)
3080 {
3081 rtx ret;
3082
3083 /* For memory to memory moves, optimal behavior can be had with the
3084 existing block move logic. */
3085 if (MEM_P (x) && MEM_P (y))
3086 {
3087 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3088 BLOCK_OP_NO_LIBCALL);
3089 return get_last_insn ();
3090 }
3091
3092 ret = emit_move_via_integer (mode, x, y, true);
3093 if (ret)
3094 return ret;
3095 }
3096
3097 /* Show the output dies here. This is necessary for SUBREGs
3098 of pseudos since we cannot track their lifetimes correctly;
3099 hard regs shouldn't appear here except as return values. */
3100 if (!reload_completed && !reload_in_progress
3101 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3102 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3103
3104 write_complex_part (x, read_complex_part (y, false), false);
3105 write_complex_part (x, read_complex_part (y, true), true);
3106 return get_last_insn ();
3107 }
3108
3109 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3110 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3111
3112 static rtx
3113 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3114 {
3115 rtx ret;
3116
3117 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3118 if (mode != CCmode)
3119 {
3120 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3121 if (code != CODE_FOR_nothing)
3122 {
3123 x = emit_move_change_mode (CCmode, mode, x, true);
3124 y = emit_move_change_mode (CCmode, mode, y, true);
3125 return emit_insn (GEN_FCN (code) (x, y));
3126 }
3127 }
3128
3129 /* Otherwise, find the MODE_INT mode of the same width. */
3130 ret = emit_move_via_integer (mode, x, y, false);
3131 gcc_assert (ret != NULL);
3132 return ret;
3133 }
3134
3135 /* Return true if word I of OP lies entirely in the
3136 undefined bits of a paradoxical subreg. */
3137
3138 static bool
3139 undefined_operand_subword_p (rtx op, int i)
3140 {
3141 enum machine_mode innermode, innermostmode;
3142 int offset;
3143 if (GET_CODE (op) != SUBREG)
3144 return false;
3145 innermode = GET_MODE (op);
3146 innermostmode = GET_MODE (SUBREG_REG (op));
3147 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3148 /* The SUBREG_BYTE represents offset, as if the value were stored in
3149 memory, except for a paradoxical subreg where we define
3150 SUBREG_BYTE to be 0; undo this exception as in
3151 simplify_subreg. */
3152 if (SUBREG_BYTE (op) == 0
3153 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3154 {
3155 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3156 if (WORDS_BIG_ENDIAN)
3157 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3158 if (BYTES_BIG_ENDIAN)
3159 offset += difference % UNITS_PER_WORD;
3160 }
3161 if (offset >= GET_MODE_SIZE (innermostmode)
3162 || offset <= -GET_MODE_SIZE (word_mode))
3163 return true;
3164 return false;
3165 }
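
/* Example: on a 32-bit little-endian target, (subreg:DI (reg:SI x) 0)
   is paradoxical; word 0 holds the SImode value while word 1 lies
   entirely in the undefined bits, so the predicate above returns true
   for I == 1.  */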
3166
3167 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3168 MODE is any multi-word or full-word mode that lacks a move_insn
3169 pattern. Note that you will get better code if you define such
3170 patterns, even if they must turn into multiple assembler instructions. */
3171
3172 static rtx
3173 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3174 {
3175 rtx last_insn = 0;
3176 rtx seq, inner;
3177 bool need_clobber;
3178 int i;
3179
3180 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3181
3182 /* If X is a push on the stack, do the push now and replace
3183 X with a reference to the stack pointer. */
3184 if (push_operand (x, mode))
3185 x = emit_move_resolve_push (mode, x);
3186
3187 /* If we are in reload, see if either operand is a MEM whose address
3188 is scheduled for replacement. */
3189 if (reload_in_progress && MEM_P (x)
3190 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3191 x = replace_equiv_address_nv (x, inner);
3192 if (reload_in_progress && MEM_P (y)
3193 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3194 y = replace_equiv_address_nv (y, inner);
3195
3196 start_sequence ();
3197
3198 need_clobber = false;
3199 for (i = 0;
3200 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3201 i++)
3202 {
3203 rtx xpart = operand_subword (x, i, 1, mode);
3204 rtx ypart;
3205
3206 /* Do not generate code for a move if it would come entirely
3207 from the undefined bits of a paradoxical subreg. */
3208 if (undefined_operand_subword_p (y, i))
3209 continue;
3210
3211 ypart = operand_subword (y, i, 1, mode);
3212
3213 /* If we can't get a part of Y, put Y into memory if it is a
3214 constant. Otherwise, force it into a register. Then we must
3215 be able to get a part of Y. */
3216 if (ypart == 0 && CONSTANT_P (y))
3217 {
3218 y = use_anchored_address (force_const_mem (mode, y));
3219 ypart = operand_subword (y, i, 1, mode);
3220 }
3221 else if (ypart == 0)
3222 ypart = operand_subword_force (y, i, mode);
3223
3224 gcc_assert (xpart && ypart);
3225
3226 need_clobber |= (GET_CODE (xpart) == SUBREG);
3227
3228 last_insn = emit_move_insn (xpart, ypart);
3229 }
3230
3231 seq = get_insns ();
3232 end_sequence ();
3233
3234 /* Show the output dies here. This is necessary for SUBREGs
3235 of pseudos since we cannot track their lifetimes correctly;
3236 hard regs shouldn't appear here except as return values.
3237 We never want to emit such a clobber after reload. */
3238 if (x != y
3239 && ! (reload_in_progress || reload_completed)
3240 && need_clobber != 0)
3241 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3242
3243 emit_insn (seq);
3244
3245 return last_insn;
3246 }
3247
3248 /* Low level part of emit_move_insn.
3249 Called just like emit_move_insn, but assumes X and Y
3250 are basically valid. */
3251
3252 rtx
3253 emit_move_insn_1 (rtx x, rtx y)
3254 {
3255 enum machine_mode mode = GET_MODE (x);
3256 enum insn_code code;
3257
3258 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3259
3260 code = mov_optab->handlers[mode].insn_code;
3261 if (code != CODE_FOR_nothing)
3262 return emit_insn (GEN_FCN (code) (x, y));
3263
3264 /* Expand complex moves by moving real part and imag part. */
3265 if (COMPLEX_MODE_P (mode))
3266 return emit_move_complex (mode, x, y);
3267
3268 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3269 {
3270 rtx result = emit_move_via_integer (mode, x, y, true);
3271
3272 /* If we can't find an integer mode, use multi words. */
3273 if (result)
3274 return result;
3275 else
3276 return emit_move_multi_word (mode, x, y);
3277 }
3278
3279 if (GET_MODE_CLASS (mode) == MODE_CC)
3280 return emit_move_ccmode (mode, x, y);
3281
3282 /* Try using a move pattern for the corresponding integer mode. This is
3283 only safe when simplify_subreg can convert MODE constants into integer
3284 constants. At present, it can only do this reliably if the value
3285 fits within a HOST_WIDE_INT. */
3286 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3287 {
3288 rtx ret = emit_move_via_integer (mode, x, y, false);
3289 if (ret)
3290 return ret;
3291 }
3292
3293 return emit_move_multi_word (mode, x, y);
3294 }
3295
3296 /* Generate code to copy Y into X.
3297 Both Y and X must have the same mode, except that
3298 Y can be a constant with VOIDmode.
3299 This mode cannot be BLKmode; use emit_block_move for that.
3300
3301 Return the last instruction emitted. */
3302
3303 rtx
3304 emit_move_insn (rtx x, rtx y)
3305 {
3306 enum machine_mode mode = GET_MODE (x);
3307 rtx y_cst = NULL_RTX;
3308 rtx last_insn, set;
3309
3310 gcc_assert (mode != BLKmode
3311 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3312
3313 if (CONSTANT_P (y))
3314 {
3315 if (optimize
3316 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3317 && (last_insn = compress_float_constant (x, y)))
3318 return last_insn;
3319
3320 y_cst = y;
3321
3322 if (!LEGITIMATE_CONSTANT_P (y))
3323 {
3324 y = force_const_mem (mode, y);
3325
3326 /* If the target's cannot_force_const_mem prevented the spill,
3327 assume that the target's move expanders will also take care
3328 of the non-legitimate constant. */
3329 if (!y)
3330 y = y_cst;
3331 else
3332 y = use_anchored_address (y);
3333 }
3334 }
3335
3336 /* If X or Y are memory references, verify that their addresses are valid
3337 for the machine. */
3338 if (MEM_P (x)
3339 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3340 && ! push_operand (x, GET_MODE (x)))
3341 || (flag_force_addr
3342 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3343 x = validize_mem (x);
3344
3345 if (MEM_P (y)
3346 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3347 || (flag_force_addr
3348 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3349 y = validize_mem (y);
3350
3351 gcc_assert (mode != BLKmode);
3352
3353 last_insn = emit_move_insn_1 (x, y);
3354
3355 if (y_cst && REG_P (x)
3356 && (set = single_set (last_insn)) != NULL_RTX
3357 && SET_DEST (set) == x
3358 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3359 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3360
3361 return last_insn;
3362 }
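
/* Illustrative sketch: the usual way to materialize a constant in a
   fresh pseudo via the entry point above.  */
#if 0
  rtx tmp = gen_reg_rtx (SImode);

  emit_move_insn (tmp, GEN_INT (42));
#endif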
3363
3364 /* If Y is representable exactly in a narrower mode, and the target can
3365 perform the extension directly from constant or memory, then emit the
3366 move as an extension. */
3367
3368 static rtx
3369 compress_float_constant (rtx x, rtx y)
3370 {
3371 enum machine_mode dstmode = GET_MODE (x);
3372 enum machine_mode orig_srcmode = GET_MODE (y);
3373 enum machine_mode srcmode;
3374 REAL_VALUE_TYPE r;
3375 int oldcost, newcost;
3376
3377 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3378
3379 if (LEGITIMATE_CONSTANT_P (y))
3380 oldcost = rtx_cost (y, SET);
3381 else
3382 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3383
3384 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3385 srcmode != orig_srcmode;
3386 srcmode = GET_MODE_WIDER_MODE (srcmode))
3387 {
3388 enum insn_code ic;
3389 rtx trunc_y, last_insn;
3390
3391 /* Skip if the target can't extend this way. */
3392 ic = can_extend_p (dstmode, srcmode, 0);
3393 if (ic == CODE_FOR_nothing)
3394 continue;
3395
3396 /* Skip if the narrowed value isn't exact. */
3397 if (! exact_real_truncate (srcmode, &r))
3398 continue;
3399
3400 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3401
3402 if (LEGITIMATE_CONSTANT_P (trunc_y))
3403 {
3404 /* Skip if the target needs extra instructions to perform
3405 the extension. */
3406 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3407 continue;
3408 /* This is valid, but may not be cheaper than the original. */
3409 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3410 if (oldcost < newcost)
3411 continue;
3412 }
3413 else if (float_extend_from_mem[dstmode][srcmode])
3414 {
3415 trunc_y = force_const_mem (srcmode, trunc_y);
3416 /* This is valid, but may not be cheaper than the original. */
3417 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3418 if (oldcost < newcost)
3419 continue;
3420 trunc_y = validize_mem (trunc_y);
3421 }
3422 else
3423 continue;
3424
3425 /* For CSE's benefit, force the compressed constant pool entry
3426 into a new pseudo. This constant may be used in different modes,
3427 and if not, combine will put things back together for us. */
3428 trunc_y = force_reg (srcmode, trunc_y);
3429 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3430 last_insn = get_last_insn ();
3431
3432 if (REG_P (x))
3433 set_unique_reg_note (last_insn, REG_EQUAL, y);
3434
3435 return last_insn;
3436 }
3437
3438 return NULL_RTX;
3439 }
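
/* Example: on a target whose SFmode-to-DFmode extension can load from
   memory, the DFmode constant 1.0 truncates exactly to SFmode, so it
   can live in the constant pool as 4 bytes and be float-extended on
   load rather than fetched as a full 8-byte DFmode constant.  */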
3440 \f
3441 /* Pushing data onto the stack. */
3442
3443 /* Push a block of length SIZE (perhaps variable)
3444 and return an rtx to address the beginning of the block.
3445 The value may be virtual_outgoing_args_rtx.
3446
3447 EXTRA is the number of bytes of padding to push in addition to SIZE.
3448 BELOW nonzero means this padding comes at low addresses;
3449 otherwise, the padding comes at high addresses. */
3450
3451 rtx
3452 push_block (rtx size, int extra, int below)
3453 {
3454 rtx temp;
3455
3456 size = convert_modes (Pmode, ptr_mode, size, 1);
3457 if (CONSTANT_P (size))
3458 anti_adjust_stack (plus_constant (size, extra));
3459 else if (REG_P (size) && extra == 0)
3460 anti_adjust_stack (size);
3461 else
3462 {
3463 temp = copy_to_mode_reg (Pmode, size);
3464 if (extra != 0)
3465 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3466 temp, 0, OPTAB_LIB_WIDEN);
3467 anti_adjust_stack (temp);
3468 }
3469
3470 #ifndef STACK_GROWS_DOWNWARD
3471 if (0)
3472 #else
3473 if (1)
3474 #endif
3475 {
3476 temp = virtual_outgoing_args_rtx;
3477 if (extra != 0 && below)
3478 temp = plus_constant (temp, extra);
3479 }
3480 else
3481 {
3482 if (GET_CODE (size) == CONST_INT)
3483 temp = plus_constant (virtual_outgoing_args_rtx,
3484 -INTVAL (size) - (below ? 0 : extra));
3485 else if (extra != 0 && !below)
3486 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3487 negate_rtx (Pmode, plus_constant (size, extra)));
3488 else
3489 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3490 negate_rtx (Pmode, size));
3491 }
3492
3493 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3494 }
3495
3496 #ifdef PUSH_ROUNDING
3497
3498 /* Emit single push insn. */
3499
3500 static void
3501 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3502 {
3503 rtx dest_addr;
3504 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3505 rtx dest;
3506 enum insn_code icode;
3507 insn_operand_predicate_fn pred;
3508
3509 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3510	  /* If there is a push pattern, use it.  Otherwise try the old way of
3511	     throwing a MEM representing the push operation to the move expander.  */
3512 icode = push_optab->handlers[(int) mode].insn_code;
3513 if (icode != CODE_FOR_nothing)
3514 {
3515 if (((pred = insn_data[(int) icode].operand[0].predicate)
3516 && !((*pred) (x, mode))))
3517 x = force_reg (mode, x);
3518 emit_insn (GEN_FCN (icode) (x));
3519 return;
3520 }
3521 if (GET_MODE_SIZE (mode) == rounded_size)
3522 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3523 /* If we are to pad downward, adjust the stack pointer first and
3524 then store X into the stack location using an offset. This is
3525 because emit_move_insn does not know how to pad; it does not have
3526 access to type. */
3527 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3528 {
3529 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3530 HOST_WIDE_INT offset;
3531
3532 emit_move_insn (stack_pointer_rtx,
3533 expand_binop (Pmode,
3534 #ifdef STACK_GROWS_DOWNWARD
3535 sub_optab,
3536 #else
3537 add_optab,
3538 #endif
3539 stack_pointer_rtx,
3540 GEN_INT (rounded_size),
3541 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3542
3543 offset = (HOST_WIDE_INT) padding_size;
3544 #ifdef STACK_GROWS_DOWNWARD
3545 if (STACK_PUSH_CODE == POST_DEC)
3546 /* We have already decremented the stack pointer, so get the
3547 previous value. */
3548 offset += (HOST_WIDE_INT) rounded_size;
3549 #else
3550 if (STACK_PUSH_CODE == POST_INC)
3551 /* We have already incremented the stack pointer, so get the
3552 previous value. */
3553 offset -= (HOST_WIDE_INT) rounded_size;
3554 #endif
3555 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3556 }
3557 else
3558 {
3559 #ifdef STACK_GROWS_DOWNWARD
3560 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3561 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3562 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3563 #else
3564 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3565 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3566 GEN_INT (rounded_size));
3567 #endif
3568 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3569 }
3570
3571 dest = gen_rtx_MEM (mode, dest_addr);
3572
3573 if (type != 0)
3574 {
3575 set_mem_attributes (dest, type, 1);
3576
3577 if (flag_optimize_sibling_calls)
3578 /* Function incoming arguments may overlap with sibling call
3579 outgoing arguments and we cannot allow reordering of reads
3580 from function arguments with stores to outgoing arguments
3581 of sibling calls. */
3582 set_mem_alias_set (dest, 0);
3583 }
3584 emit_move_insn (dest, x);
3585 }
3586 #endif
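/* Illustrative sketch (added annotation): on a STACK_GROWS_DOWNWARD
   target with STACK_PUSH_CODE == PRE_DEC and no SImode push pattern,
   the fallback above builds a move of the form

     (set (mem:SI (pre_dec:SI (reg sp))) x)

   and leaves it to the move expander to emit the actual push.  */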
3587
3588 /* Generate code to push X onto the stack, assuming it has mode MODE and
3589 type TYPE.
3590 MODE is redundant except when X is a CONST_INT (since they don't
3591 carry mode info).
3592 SIZE is an rtx for the size of data to be copied (in bytes),
3593 needed only if X is BLKmode.
3594
3595 ALIGN (in bits) is maximum alignment we can assume.
3596
3597 If PARTIAL and REG are both nonzero, then copy that many of the first
3598 bytes of X into registers starting with REG, and push the rest of X.
3599 The amount of space pushed is decreased by PARTIAL bytes.
3600 REG must be a hard register in this case.
3601 If REG is zero but PARTIAL is not, take all other actions for an
3602 argument partially in registers, but do not actually load any
3603 registers.
3604
3605 EXTRA is the amount in bytes of extra space to leave next to this arg.
3606 This is ignored if an argument block has already been allocated.
3607
3608 On a machine that lacks real push insns, ARGS_ADDR is the address of
3609 the bottom of the argument block for this call. We use indexing off there
3610 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3611 argument block has not been preallocated.
3612
3613 ARGS_SO_FAR is the size of args previously pushed for this call.
3614
3615 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3616 for arguments passed in registers. If nonzero, it will be the number
3617 of bytes required. */
3618
3619 void
3620 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3621 unsigned int align, int partial, rtx reg, int extra,
3622 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3623 rtx alignment_pad)
3624 {
3625 rtx xinner;
3626 enum direction stack_direction
3627 #ifdef STACK_GROWS_DOWNWARD
3628 = downward;
3629 #else
3630 = upward;
3631 #endif
3632
3633 /* Decide where to pad the argument: `downward' for below,
3634 `upward' for above, or `none' for don't pad it.
3635 Default is below for small data on big-endian machines; else above. */
3636 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3637
3638 /* Invert direction if stack is post-decrement.
3639 FIXME: why? */
3640 if (STACK_PUSH_CODE == POST_DEC)
3641 if (where_pad != none)
3642 where_pad = (where_pad == downward ? upward : downward);
3643
3644 xinner = x;
3645
3646 if (mode == BLKmode
3647 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3648 {
3649 /* Copy a block into the stack, entirely or partially. */
3650
3651 rtx temp;
3652 int used;
3653 int offset;
3654 int skip;
3655
3656 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3657 used = partial - offset;
3658
3659 if (mode != BLKmode)
3660 {
3661 /* A value is to be stored in an insufficiently aligned
3662 stack slot; copy via a suitably aligned slot if
3663 necessary. */
3664 size = GEN_INT (GET_MODE_SIZE (mode));
3665 if (!MEM_P (xinner))
3666 {
3667 temp = assign_temp (type, 0, 1, 1);
3668 emit_move_insn (temp, xinner);
3669 xinner = temp;
3670 }
3671 }
3672
3673 gcc_assert (size);
3674
3675 /* USED is now the # of bytes we need not copy to the stack
3676 because registers will take care of them. */
3677
3678 if (partial != 0)
3679 xinner = adjust_address (xinner, BLKmode, used);
3680
3681 /* If the partial register-part of the arg counts in its stack size,
3682 skip the part of stack space corresponding to the registers.
3683 Otherwise, start copying to the beginning of the stack space,
3684 by setting SKIP to 0. */
3685 skip = (reg_parm_stack_space == 0) ? 0 : used;
3686
3687 #ifdef PUSH_ROUNDING
3688 /* Do it with several push insns if that doesn't take lots of insns
3689 and if there is no difficulty with push insns that skip bytes
3690 on the stack for alignment purposes. */
3691 if (args_addr == 0
3692 && PUSH_ARGS
3693 && GET_CODE (size) == CONST_INT
3694 && skip == 0
3695 && MEM_ALIGN (xinner) >= align
3696 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3697 /* Here we avoid the case of a structure whose weak alignment
3698 forces many pushes of a small amount of data,
3699 and such small pushes do rounding that causes trouble. */
3700 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3701 || align >= BIGGEST_ALIGNMENT
3702 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3703 == (align / BITS_PER_UNIT)))
3704 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3705 {
3706 /* Push padding now if padding above and stack grows down,
3707 or if padding below and stack grows up.
3708 But if space already allocated, this has already been done. */
3709 if (extra && args_addr == 0
3710 && where_pad != none && where_pad != stack_direction)
3711 anti_adjust_stack (GEN_INT (extra));
3712
3713 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3714 }
3715 else
3716 #endif /* PUSH_ROUNDING */
3717 {
3718 rtx target;
3719
3720 /* Otherwise make space on the stack and copy the data
3721 to the address of that space. */
3722
3723 /* Deduct words put into registers from the size we must copy. */
3724 if (partial != 0)
3725 {
3726 if (GET_CODE (size) == CONST_INT)
3727 size = GEN_INT (INTVAL (size) - used);
3728 else
3729 size = expand_binop (GET_MODE (size), sub_optab, size,
3730 GEN_INT (used), NULL_RTX, 0,
3731 OPTAB_LIB_WIDEN);
3732 }
3733
3734 /* Get the address of the stack space.
3735 In this case, we do not deal with EXTRA separately.
3736 A single stack adjust will do. */
3737 if (! args_addr)
3738 {
3739 temp = push_block (size, extra, where_pad == downward);
3740 extra = 0;
3741 }
3742 else if (GET_CODE (args_so_far) == CONST_INT)
3743 temp = memory_address (BLKmode,
3744 plus_constant (args_addr,
3745 skip + INTVAL (args_so_far)));
3746 else
3747 temp = memory_address (BLKmode,
3748 plus_constant (gen_rtx_PLUS (Pmode,
3749 args_addr,
3750 args_so_far),
3751 skip));
3752
3753 if (!ACCUMULATE_OUTGOING_ARGS)
3754 {
3755 /* If the source is referenced relative to the stack pointer,
3756 copy it to another register to stabilize it. We do not need
3757 to do this if we know that we won't be changing sp. */
3758
3759 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3760 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3761 temp = copy_to_reg (temp);
3762 }
3763
3764 target = gen_rtx_MEM (BLKmode, temp);
3765
3766 /* We do *not* set_mem_attributes here, because incoming arguments
3767 may overlap with sibling call outgoing arguments and we cannot
3768 allow reordering of reads from function arguments with stores
3769 to outgoing arguments of sibling calls. We do, however, want
3770 to record the alignment of the stack slot. */
3771 /* ALIGN may well be better aligned than TYPE, e.g. due to
3772 PARM_BOUNDARY. Assume the caller isn't lying. */
3773 set_mem_align (target, align);
3774
3775 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3776 }
3777 }
3778 else if (partial > 0)
3779 {
3780 /* Scalar partly in registers. */
3781
3782 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3783 int i;
3784 int not_stack;
3785 /* Number of bytes at the start of the argument
3786 that we must make space for but need not store. */
3787 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3788 int args_offset = INTVAL (args_so_far);
3789 int skip;
3790
3791 /* Push padding now if padding above and stack grows down,
3792 or if padding below and stack grows up.
3793 But if space already allocated, this has already been done. */
3794 if (extra && args_addr == 0
3795 && where_pad != none && where_pad != stack_direction)
3796 anti_adjust_stack (GEN_INT (extra));
3797
3798 /* If we make space by pushing it, we might as well push
3799 the real data. Otherwise, we can leave OFFSET nonzero
3800 and leave the space uninitialized. */
3801 if (args_addr == 0)
3802 offset = 0;
3803
3804 /* Now NOT_STACK gets the number of words that we don't need to
3805 allocate on the stack. Convert OFFSET to words too. */
3806 not_stack = (partial - offset) / UNITS_PER_WORD;
3807 offset /= UNITS_PER_WORD;
3808
3809 /* If the partial register-part of the arg counts in its stack size,
3810 skip the part of stack space corresponding to the registers.
3811 Otherwise, start copying to the beginning of the stack space,
3812 by setting SKIP to 0. */
3813 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3814
3815 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3816 x = validize_mem (force_const_mem (mode, x));
3817
3818 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3819 SUBREGs of such registers are not allowed. */
3820 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3821 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3822 x = copy_to_reg (x);
3823
3824 /* Loop over all the words allocated on the stack for this arg. */
3825 /* We can do it by words, because any scalar bigger than a word
3826 has a size that is a multiple of a word. */
3827 #ifndef PUSH_ARGS_REVERSED
3828 for (i = not_stack; i < size; i++)
3829 #else
3830 for (i = size - 1; i >= not_stack; i--)
3831 #endif
3832 if (i >= not_stack + offset)
3833 emit_push_insn (operand_subword_force (x, i, mode),
3834 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3835 0, args_addr,
3836 GEN_INT (args_offset + ((i - not_stack + skip)
3837 * UNITS_PER_WORD)),
3838 reg_parm_stack_space, alignment_pad);
3839 }
3840 else
3841 {
3842 rtx addr;
3843 rtx dest;
3844
3845 /* Push padding now if padding above and stack grows down,
3846 or if padding below and stack grows up.
3847 But if space already allocated, this has already been done. */
3848 if (extra && args_addr == 0
3849 && where_pad != none && where_pad != stack_direction)
3850 anti_adjust_stack (GEN_INT (extra));
3851
3852 #ifdef PUSH_ROUNDING
3853 if (args_addr == 0 && PUSH_ARGS)
3854 emit_single_push_insn (mode, x, type);
3855 else
3856 #endif
3857 {
3858 if (GET_CODE (args_so_far) == CONST_INT)
3859 addr
3860 = memory_address (mode,
3861 plus_constant (args_addr,
3862 INTVAL (args_so_far)));
3863 else
3864 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3865 args_so_far));
3866 dest = gen_rtx_MEM (mode, addr);
3867
3868 /* We do *not* set_mem_attributes here, because incoming arguments
3869 may overlap with sibling call outgoing arguments and we cannot
3870 allow reordering of reads from function arguments with stores
3871 to outgoing arguments of sibling calls. We do, however, want
3872 to record the alignment of the stack slot. */
3873 /* ALIGN may well be better aligned than TYPE, e.g. due to
3874 PARM_BOUNDARY. Assume the caller isn't lying. */
3875 set_mem_align (dest, align);
3876
3877 emit_move_insn (dest, x);
3878 }
3879 }
3880
3881 /* If part should go in registers, copy that part
3882 into the appropriate registers. Do this now, at the end,
3883 since mem-to-mem copies above may do function calls. */
3884 if (partial > 0 && reg != 0)
3885 {
3886 /* Handle calls that pass values in multiple non-contiguous locations.
3887 The Irix 6 ABI has examples of this. */
3888 if (GET_CODE (reg) == PARALLEL)
3889 emit_group_load (reg, x, type, -1);
3890 else
3891 {
3892 gcc_assert (partial % UNITS_PER_WORD == 0);
3893 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3894 }
3895 }
3896
3897 if (extra && args_addr == 0 && where_pad == stack_direction)
3898 anti_adjust_stack (GEN_INT (extra));
3899
3900 if (alignment_pad && args_addr == 0)
3901 anti_adjust_stack (alignment_pad);
3902 }
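/* Illustrative sketch (added annotation, hypothetical values): for a
   12-byte BLKmode argument with PARTIAL == 8, REG a hard register and
   UNITS_PER_WORD == 4, the code above pushes only the last 4 bytes;
   the final

     move_block_to_reg (REGNO (reg), x, 8 / UNITS_PER_WORD, mode);

   then loads the first two words into registers, deliberately after
   any mem-to-mem copies, since those may perform function calls.  */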
3903 \f
3904 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3905 operations. */
3906
3907 static rtx
3908 get_subtarget (rtx x)
3909 {
3910 return (optimize
3911 || x == 0
3912 /* Only registers can be subtargets. */
3913 || !REG_P (x)
3914 /* Don't use hard regs to avoid extending their life. */
3915 || REGNO (x) < FIRST_PSEUDO_REGISTER
3916 ? 0 : x);
3917 }
3918
3919 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3920 FIELD is a bitfield. Returns true if the optimization was successful,
3921 and there's nothing else to do. */
3922
3923 static bool
3924 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3925 unsigned HOST_WIDE_INT bitpos,
3926 enum machine_mode mode1, rtx str_rtx,
3927 tree to, tree src)
3928 {
3929 enum machine_mode str_mode = GET_MODE (str_rtx);
3930 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3931 tree op0, op1;
3932 rtx value, result;
3933 optab binop;
3934
3935 if (mode1 != VOIDmode
3936 || bitsize >= BITS_PER_WORD
3937 || str_bitsize > BITS_PER_WORD
3938 || TREE_SIDE_EFFECTS (to)
3939 || TREE_THIS_VOLATILE (to))
3940 return false;
3941
3942 STRIP_NOPS (src);
3943 if (!BINARY_CLASS_P (src)
3944 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3945 return false;
3946
3947 op0 = TREE_OPERAND (src, 0);
3948 op1 = TREE_OPERAND (src, 1);
3949 STRIP_NOPS (op0);
3950
3951 if (!operand_equal_p (to, op0, 0))
3952 return false;
3953
3954 if (MEM_P (str_rtx))
3955 {
3956 unsigned HOST_WIDE_INT offset1;
3957
3958 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3959 str_mode = word_mode;
3960 str_mode = get_best_mode (bitsize, bitpos,
3961 MEM_ALIGN (str_rtx), str_mode, 0);
3962 if (str_mode == VOIDmode)
3963 return false;
3964 str_bitsize = GET_MODE_BITSIZE (str_mode);
3965
3966 offset1 = bitpos;
3967 bitpos %= str_bitsize;
3968 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3969 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3970 }
3971 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3972 return false;
3973
3974 /* If the bit field covers the whole REG/MEM, store_field
3975 will likely generate better code. */
3976 if (bitsize >= str_bitsize)
3977 return false;
3978
3979 /* We can't handle fields split across multiple entities. */
3980 if (bitpos + bitsize > str_bitsize)
3981 return false;
3982
3983 if (BYTES_BIG_ENDIAN)
3984 bitpos = str_bitsize - bitpos - bitsize;
3985
3986 switch (TREE_CODE (src))
3987 {
3988 case PLUS_EXPR:
3989 case MINUS_EXPR:
3990 /* For now, just optimize the case of the topmost bitfield,
3991 where we don't need to do any masking, and also
3992 1-bit bitfields, where xor can be used.
3993 We might win by one instruction for the other bitfields
3994 too if insv/extv instructions aren't used, so that
3995 can be added later. */
3996 if (bitpos + bitsize != str_bitsize
3997 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3998 break;
3999
4000 value = expand_expr (op1, NULL_RTX, str_mode, 0);
4001 value = convert_modes (str_mode,
4002 TYPE_MODE (TREE_TYPE (op1)), value,
4003 TYPE_UNSIGNED (TREE_TYPE (op1)));
4004
4005 /* We may be accessing data outside the field, which means
4006 we can alias adjacent data. */
4007 if (MEM_P (str_rtx))
4008 {
4009 str_rtx = shallow_copy_rtx (str_rtx);
4010 set_mem_alias_set (str_rtx, 0);
4011 set_mem_expr (str_rtx, 0);
4012 }
4013
4014 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4015 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4016 {
4017 value = expand_and (str_mode, value, const1_rtx, NULL);
4018 binop = xor_optab;
4019 }
4020 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4021 build_int_cst (NULL_TREE, bitpos),
4022 NULL_RTX, 1);
4023 result = expand_binop (str_mode, binop, str_rtx,
4024 value, str_rtx, 1, OPTAB_WIDEN);
4025 if (result != str_rtx)
4026 emit_move_insn (str_rtx, result);
4027 return true;
4028
4029 case BIT_IOR_EXPR:
4030 case BIT_XOR_EXPR:
4031 if (TREE_CODE (op1) != INTEGER_CST)
4032 break;
4033 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
4034 value = convert_modes (GET_MODE (str_rtx),
4035 TYPE_MODE (TREE_TYPE (op1)), value,
4036 TYPE_UNSIGNED (TREE_TYPE (op1)));
4037
4038 /* We may be accessing data outside the field, which means
4039 we can alias adjacent data. */
4040 if (MEM_P (str_rtx))
4041 {
4042 str_rtx = shallow_copy_rtx (str_rtx);
4043 set_mem_alias_set (str_rtx, 0);
4044 set_mem_expr (str_rtx, 0);
4045 }
4046
4047 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4048 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4049 {
4050 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4051 - 1);
4052 value = expand_and (GET_MODE (str_rtx), value, mask,
4053 NULL_RTX);
4054 }
4055 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4056 build_int_cst (NULL_TREE, bitpos),
4057 NULL_RTX, 1);
4058 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4059 value, str_rtx, 1, OPTAB_WIDEN);
4060 if (result != str_rtx)
4061 emit_move_insn (str_rtx, result);
4062 return true;
4063
4064 default:
4065 break;
4066 }
4067
4068 return false;
4069 }
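/* Illustrative sketch (added annotation): given

     struct { unsigned a : 1; unsigned rest : 31; } s;
     s.a = s.a + 1;

   the PLUS_EXPR case above sees a 1-bit field with a constant operand,
   masks the constant down to one bit, switches the optab to xor, and
   emits a single word-mode expand_binop instead of an extract, add and
   insert sequence.  */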
4070
4071
4072 /* Expand an assignment that stores the value of FROM into TO. */
4073
4074 void
4075 expand_assignment (tree to, tree from)
4076 {
4077 rtx to_rtx = 0;
4078 rtx result;
4079
4080 /* Don't crash if the lhs of the assignment was erroneous. */
4081 if (TREE_CODE (to) == ERROR_MARK)
4082 {
4083 result = expand_normal (from);
4084 return;
4085 }
4086
4087 /* Optimize away no-op moves without side-effects. */
4088 if (operand_equal_p (to, from, 0))
4089 return;
4090
4091 /* Assignment of a structure component needs special treatment
4092 if the structure component's rtx is not simply a MEM.
4093 Assignment of an array element at a constant index, and assignment of
4094 an array element in an unaligned packed structure field, has the same
4095 problem. */
4096 if (handled_component_p (to)
4097 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4098 {
4099 enum machine_mode mode1;
4100 HOST_WIDE_INT bitsize, bitpos;
4101 tree offset;
4102 int unsignedp;
4103 int volatilep = 0;
4104 tree tem;
4105
4106 push_temp_slots ();
4107 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4108 &unsignedp, &volatilep, true);
4109
4110 /* If we are going to use store_bit_field and extract_bit_field,
4111 make sure to_rtx will be safe for multiple use. */
4112
4113 to_rtx = expand_normal (tem);
4114
4115 if (offset != 0)
4116 {
4117 rtx offset_rtx;
4118
4119 if (!MEM_P (to_rtx))
4120 {
4121 /* We can get constant negative offsets into arrays with broken
4122 user code. Translate this to a trap instead of ICEing. */
4123 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4124 expand_builtin_trap ();
4125 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4126 }
4127
4128 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4129 #ifdef POINTERS_EXTEND_UNSIGNED
4130 if (GET_MODE (offset_rtx) != Pmode)
4131 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4132 #else
4133 if (GET_MODE (offset_rtx) != ptr_mode)
4134 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4135 #endif
4136
4137 /* A constant address in TO_RTX can have VOIDmode; we must not
4138 try to call force_reg for that case, so avoid it. */
4139 if (MEM_P (to_rtx)
4140 && GET_MODE (to_rtx) == BLKmode
4141 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4142 && bitsize > 0
4143 && (bitpos % bitsize) == 0
4144 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4145 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4146 {
4147 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4148 bitpos = 0;
4149 }
4150
4151 to_rtx = offset_address (to_rtx, offset_rtx,
4152 highest_pow2_factor_for_target (to,
4153 offset));
4154 }
4155
4156 /* Handle expand_expr of a complex value returning a CONCAT. */
4157 if (GET_CODE (to_rtx) == CONCAT)
4158 {
4159 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4160 {
4161 gcc_assert (bitpos == 0);
4162 result = store_expr (from, to_rtx, false);
4163 }
4164 else
4165 {
4166 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4167 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4168 }
4169 }
4170 else
4171 {
4172 if (MEM_P (to_rtx))
4173 {
4174 /* If the field is at offset zero, we could have been given the
4175 DECL_RTX of the parent struct. Don't munge it. */
4176 to_rtx = shallow_copy_rtx (to_rtx);
4177
4178 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4179
4180 /* Deal with volatile and readonly fields. The former is only
4181 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4182 if (volatilep)
4183 MEM_VOLATILE_P (to_rtx) = 1;
4184 if (component_uses_parent_alias_set (to))
4185 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4186 }
4187
4188 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4189 to_rtx, to, from))
4190 result = NULL;
4191 else
4192 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4193 TREE_TYPE (tem), get_alias_set (to));
4194 }
4195
4196 if (result)
4197 preserve_temp_slots (result);
4198 free_temp_slots ();
4199 pop_temp_slots ();
4200 return;
4201 }
4202
4203 /* If the rhs is a function call and its value is not an aggregate,
4204 call the function before we start to compute the lhs.
4205 This is needed for correct code for cases such as
4206 val = setjmp (buf) on machines where reference to val
4207 requires loading up part of an address in a separate insn.
4208
4209 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4210 since it might be a promoted variable where the zero- or sign- extension
4211 needs to be done. Handling this in the normal way is safe because no
4212 computation is done before the call. */
4213 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4214 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4215 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4216 && REG_P (DECL_RTL (to))))
4217 {
4218 rtx value;
4219
4220 push_temp_slots ();
4221 value = expand_normal (from);
4222 if (to_rtx == 0)
4223 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4224
4225 /* Handle calls that return values in multiple non-contiguous locations.
4226 The Irix 6 ABI has examples of this. */
4227 if (GET_CODE (to_rtx) == PARALLEL)
4228 emit_group_load (to_rtx, value, TREE_TYPE (from),
4229 int_size_in_bytes (TREE_TYPE (from)));
4230 else if (GET_MODE (to_rtx) == BLKmode)
4231 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4232 else
4233 {
4234 if (POINTER_TYPE_P (TREE_TYPE (to)))
4235 value = convert_memory_address (GET_MODE (to_rtx), value);
4236 emit_move_insn (to_rtx, value);
4237 }
4238 preserve_temp_slots (to_rtx);
4239 free_temp_slots ();
4240 pop_temp_slots ();
4241 return;
4242 }
4243
4244 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4245 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4246
4247 if (to_rtx == 0)
4248 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4249
4250 /* Don't move directly into a return register. */
4251 if (TREE_CODE (to) == RESULT_DECL
4252 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4253 {
4254 rtx temp;
4255
4256 push_temp_slots ();
4257 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4258
4259 if (GET_CODE (to_rtx) == PARALLEL)
4260 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4261 int_size_in_bytes (TREE_TYPE (from)));
4262 else
4263 emit_move_insn (to_rtx, temp);
4264
4265 preserve_temp_slots (to_rtx);
4266 free_temp_slots ();
4267 pop_temp_slots ();
4268 return;
4269 }
4270
4271 /* In case we are returning the contents of an object which overlaps
4272 the place the value is being stored, use a safe function when copying
4273 a value through a pointer into a structure value return block. */
4274 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4275 && current_function_returns_struct
4276 && !current_function_returns_pcc_struct)
4277 {
4278 rtx from_rtx, size;
4279
4280 push_temp_slots ();
4281 size = expr_size (from);
4282 from_rtx = expand_normal (from);
4283
4284 emit_library_call (memmove_libfunc, LCT_NORMAL,
4285 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4286 XEXP (from_rtx, 0), Pmode,
4287 convert_to_mode (TYPE_MODE (sizetype),
4288 size, TYPE_UNSIGNED (sizetype)),
4289 TYPE_MODE (sizetype));
4290
4291 preserve_temp_slots (to_rtx);
4292 free_temp_slots ();
4293 pop_temp_slots ();
4294 return;
4295 }
4296
4297 /* Compute FROM and store the value in the rtx we got. */
4298
4299 push_temp_slots ();
4300 result = store_expr (from, to_rtx, 0);
4301 preserve_temp_slots (result);
4302 free_temp_slots ();
4303 pop_temp_slots ();
4304 return;
4305 }
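/* Illustrative sketch (added annotation): for a complex lvalue such as

     __real__ z = x;

   get_inner_reference reports bitpos 0 within z, expand_normal yields
   a CONCAT for z, and the CONCAT branch above stores straight through
   XEXP (to_rtx, 0), the real part, with no bitfield machinery.  */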
4306
4307 /* Generate code for computing expression EXP,
4308 and storing the value into TARGET.
4309
4310 If the mode is BLKmode then we may return TARGET itself.
4311 It turns out that in BLKmode it doesn't cause a problem,
4312 because C has no operators that could combine two different
4313 assignments into the same BLKmode object with different values
4314 with no sequence point. Will other languages need this to
4315 be more thorough?
4316
4317 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4318 stack, and block moves may need to be treated specially. */
4319
4320 rtx
4321 store_expr (tree exp, rtx target, int call_param_p)
4322 {
4323 rtx temp;
4324 rtx alt_rtl = NULL_RTX;
4325 int dont_return_target = 0;
4326
4327 if (VOID_TYPE_P (TREE_TYPE (exp)))
4328 {
4329 /* C++ can generate ?: expressions with a throw expression in one
4330 branch and an rvalue in the other. Here, we resolve attempts to
4331 store the throw expression's nonexistent result. */
4332 gcc_assert (!call_param_p);
4333 expand_expr (exp, const0_rtx, VOIDmode, 0);
4334 return NULL_RTX;
4335 }
4336 if (TREE_CODE (exp) == COMPOUND_EXPR)
4337 {
4338 /* Perform first part of compound expression, then assign from second
4339 part. */
4340 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4341 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4342 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4343 }
4344 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4345 {
4346 /* For conditional expression, get safe form of the target. Then
4347 test the condition, doing the appropriate assignment on either
4348 side. This avoids the creation of unnecessary temporaries.
4349 For non-BLKmode, it is more efficient not to do this. */
4350
4351 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4352
4353 do_pending_stack_adjust ();
4354 NO_DEFER_POP;
4355 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4356 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4357 emit_jump_insn (gen_jump (lab2));
4358 emit_barrier ();
4359 emit_label (lab1);
4360 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4361 emit_label (lab2);
4362 OK_DEFER_POP;
4363
4364 return NULL_RTX;
4365 }
4366 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4367 /* If this is a scalar in a register that is stored in a wider mode
4368 than the declared mode, compute the result into its declared mode
4369 and then convert to the wider mode. Our value is the computed
4370 expression. */
4371 {
4372 rtx inner_target = 0;
4373
4374 /* We can do the conversion inside EXP, which will often result
4375 in some optimizations. Do the conversion in two steps: first
4376 change the signedness, if needed, then the extend. But don't
4377 do this if the type of EXP is a subtype of something else
4378 since then the conversion might involve more than just
4379 converting modes. */
4380 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4381 && TREE_TYPE (TREE_TYPE (exp)) == 0
4382 && (!lang_hooks.reduce_bit_field_operations
4383 || (GET_MODE_PRECISION (GET_MODE (target))
4384 == TYPE_PRECISION (TREE_TYPE (exp)))))
4385 {
4386 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4387 != SUBREG_PROMOTED_UNSIGNED_P (target))
4388 exp = fold_convert
4389 (lang_hooks.types.signed_or_unsigned_type
4390 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4391
4392 exp = fold_convert (lang_hooks.types.type_for_mode
4393 (GET_MODE (SUBREG_REG (target)),
4394 SUBREG_PROMOTED_UNSIGNED_P (target)),
4395 exp);
4396
4397 inner_target = SUBREG_REG (target);
4398 }
4399
4400 temp = expand_expr (exp, inner_target, VOIDmode,
4401 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4402
4403 /* If TEMP is a VOIDmode constant, use convert_modes to make
4404 sure that we properly convert it. */
4405 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4406 {
4407 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4408 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4409 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4410 GET_MODE (target), temp,
4411 SUBREG_PROMOTED_UNSIGNED_P (target));
4412 }
4413
4414 convert_move (SUBREG_REG (target), temp,
4415 SUBREG_PROMOTED_UNSIGNED_P (target));
4416
4417 return NULL_RTX;
4418 }
4419 else
4420 {
4421 temp = expand_expr_real (exp, target, GET_MODE (target),
4422 (call_param_p
4423 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4424 &alt_rtl);
4425 /* Return TARGET if it's a specified hardware register.
4426 If TARGET is a volatile mem ref, either return TARGET
4427 or return a reg copied *from* TARGET; ANSI requires this.
4428
4429 Otherwise, if TEMP is not TARGET, return TEMP
4430 if it is constant (for efficiency),
4431 or if we really want the correct value. */
4432 if (!(target && REG_P (target)
4433 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4434 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4435 && ! rtx_equal_p (temp, target)
4436 && CONSTANT_P (temp))
4437 dont_return_target = 1;
4438 }
4439
4440 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4441 the same as that of TARGET, adjust the constant. This is needed, for
4442 example, in case it is a CONST_DOUBLE and we want only a word-sized
4443 value. */
4444 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4445 && TREE_CODE (exp) != ERROR_MARK
4446 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4447 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4448 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4449
4450 /* If value was not generated in the target, store it there.
4451 Convert the value to TARGET's type first if necessary and emit the
4452 pending incrementations that have been queued when expanding EXP.
4453 Note that we cannot emit the whole queue blindly because this will
4454 effectively disable the POST_INC optimization later.
4455
4456 If TEMP and TARGET compare equal according to rtx_equal_p, but
4457 one or both of them are volatile memory refs, we have to distinguish
4458 two cases:
4459 - expand_expr has used TARGET. In this case, we must not generate
4460 another copy. This can be detected by TARGET being equal according
4461 to == .
4462 - expand_expr has not used TARGET - that means that the source just
4463 happens to have the same RTX form. Since temp will have been created
4464 by expand_expr, it will compare unequal according to == .
4465 We must generate a copy in this case, to reach the correct number
4466 of volatile memory references. */
4467
4468 if ((! rtx_equal_p (temp, target)
4469 || (temp != target && (side_effects_p (temp)
4470 || side_effects_p (target))))
4471 && TREE_CODE (exp) != ERROR_MARK
4472 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4473 but TARGET is not valid memory reference, TEMP will differ
4474 from TARGET although it is really the same location. */
4475 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4476 /* If there's nothing to copy, don't bother. Don't call
4477 expr_size unless necessary, because some front-ends (C++)
4478 expr_size-hook must not be given objects that are not
4479 supposed to be bit-copied or bit-initialized. */
4480 && expr_size (exp) != const0_rtx)
4481 {
4482 if (GET_MODE (temp) != GET_MODE (target)
4483 && GET_MODE (temp) != VOIDmode)
4484 {
4485 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4486 if (dont_return_target)
4487 {
4488 /* In this case, we will return TEMP,
4489 so make sure it has the proper mode.
4490 But don't forget to store the value into TARGET. */
4491 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4492 emit_move_insn (target, temp);
4493 }
4494 else
4495 convert_move (target, temp, unsignedp);
4496 }
4497
4498 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4499 {
4500 /* Handle copying a string constant into an array. The string
4501 constant may be shorter than the array. So copy just the string's
4502 actual length, and clear the rest. First get the size of the data
4503 type of the string, which is actually the size of the target. */
4504 rtx size = expr_size (exp);
4505
4506 if (GET_CODE (size) == CONST_INT
4507 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4508 emit_block_move (target, temp, size,
4509 (call_param_p
4510 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4511 else
4512 {
4513 /* Compute the size of the data to copy from the string. */
4514 tree copy_size
4515 = size_binop (MIN_EXPR,
4516 make_tree (sizetype, size),
4517 size_int (TREE_STRING_LENGTH (exp)));
4518 rtx copy_size_rtx
4519 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4520 (call_param_p
4521 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4522 rtx label = 0;
4523
4524 /* Copy that much. */
4525 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4526 TYPE_UNSIGNED (sizetype));
4527 emit_block_move (target, temp, copy_size_rtx,
4528 (call_param_p
4529 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4530
4531 /* Figure out how much is left in TARGET that we have to clear.
4532 Do all calculations in ptr_mode. */
4533 if (GET_CODE (copy_size_rtx) == CONST_INT)
4534 {
4535 size = plus_constant (size, -INTVAL (copy_size_rtx));
4536 target = adjust_address (target, BLKmode,
4537 INTVAL (copy_size_rtx));
4538 }
4539 else
4540 {
4541 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4542 copy_size_rtx, NULL_RTX, 0,
4543 OPTAB_LIB_WIDEN);
4544
4545 #ifdef POINTERS_EXTEND_UNSIGNED
4546 if (GET_MODE (copy_size_rtx) != Pmode)
4547 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4548 TYPE_UNSIGNED (sizetype));
4549 #endif
4550
4551 target = offset_address (target, copy_size_rtx,
4552 highest_pow2_factor (copy_size));
4553 label = gen_label_rtx ();
4554 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4555 GET_MODE (size), 0, label);
4556 }
4557
4558 if (size != const0_rtx)
4559 clear_storage (target, size, BLOCK_OP_NORMAL);
4560
4561 if (label)
4562 emit_label (label);
4563 }
4564 }
4565 /* Handle calls that return values in multiple non-contiguous locations.
4566 The Irix 6 ABI has examples of this. */
4567 else if (GET_CODE (target) == PARALLEL)
4568 emit_group_load (target, temp, TREE_TYPE (exp),
4569 int_size_in_bytes (TREE_TYPE (exp)));
4570 else if (GET_MODE (temp) == BLKmode)
4571 emit_block_move (target, temp, expr_size (exp),
4572 (call_param_p
4573 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4574 else
4575 {
4576 temp = force_operand (temp, target);
4577 if (temp != target)
4578 emit_move_insn (target, temp);
4579 }
4580 }
4581
4582 return NULL_RTX;
4583 }
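/* Illustrative sketch (added annotation): for

     char buf[8] = "hi";

   the STRING_CST branch above copies MIN (8, 3) == 3 bytes (the string
   plus its terminating NUL) with emit_block_move, then uses
   clear_storage to zero the remaining 5 bytes of the array.  */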
4584 \f
4585 /* Helper for categorize_ctor_elements. Identical interface. */
4586
4587 static bool
4588 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4589 HOST_WIDE_INT *p_elt_count,
4590 bool *p_must_clear)
4591 {
4592 unsigned HOST_WIDE_INT idx;
4593 HOST_WIDE_INT nz_elts, elt_count;
4594 tree value, purpose;
4595
4596 /* Whether CTOR is a valid constant initializer, in accordance with what
4597 initializer_constant_valid_p does. If inferred from the constructor
4598 elements, true until proven otherwise. */
4599 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4600 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4601
4602 nz_elts = 0;
4603 elt_count = 0;
4604
4605 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4606 {
4607 HOST_WIDE_INT mult;
4608
4609 mult = 1;
4610 if (TREE_CODE (purpose) == RANGE_EXPR)
4611 {
4612 tree lo_index = TREE_OPERAND (purpose, 0);
4613 tree hi_index = TREE_OPERAND (purpose, 1);
4614
4615 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4616 mult = (tree_low_cst (hi_index, 1)
4617 - tree_low_cst (lo_index, 1) + 1);
4618 }
4619
4620 switch (TREE_CODE (value))
4621 {
4622 case CONSTRUCTOR:
4623 {
4624 HOST_WIDE_INT nz = 0, ic = 0;
4625
4626 bool const_elt_p
4627 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4628
4629 nz_elts += mult * nz;
4630 elt_count += mult * ic;
4631
4632 if (const_from_elts_p && const_p)
4633 const_p = const_elt_p;
4634 }
4635 break;
4636
4637 case INTEGER_CST:
4638 case REAL_CST:
4639 if (!initializer_zerop (value))
4640 nz_elts += mult;
4641 elt_count += mult;
4642 break;
4643
4644 case STRING_CST:
4645 nz_elts += mult * TREE_STRING_LENGTH (value);
4646 elt_count += mult * TREE_STRING_LENGTH (value);
4647 break;
4648
4649 case COMPLEX_CST:
4650 if (!initializer_zerop (TREE_REALPART (value)))
4651 nz_elts += mult;
4652 if (!initializer_zerop (TREE_IMAGPART (value)))
4653 nz_elts += mult;
4654 elt_count += mult;
4655 break;
4656
4657 case VECTOR_CST:
4658 {
4659 tree v;
4660 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4661 {
4662 if (!initializer_zerop (TREE_VALUE (v)))
4663 nz_elts += mult;
4664 elt_count += mult;
4665 }
4666 }
4667 break;
4668
4669 default:
4670 nz_elts += mult;
4671 elt_count += mult;
4672
4673 if (const_from_elts_p && const_p)
4674 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4675 != NULL_TREE;
4676 break;
4677 }
4678 }
4679
4680 if (!*p_must_clear
4681 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4682 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4683 {
4684 tree init_sub_type;
4685 bool clear_this = true;
4686
4687 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4688 {
4689 /* We don't expect more than one element of the union to be
4690 initialized. Not sure what we should do otherwise... */
4691 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4692 == 1);
4693
4694 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4695 CONSTRUCTOR_ELTS (ctor),
4696 0)->value);
4697
4698 /* ??? We could look at each element of the union, and find the
4699 largest element, which would avoid comparing the size of the
4700 initialized element against any tail padding in the union.
4701 Doesn't seem worth the effort... */
4702 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4703 TYPE_SIZE (init_sub_type)) == 1)
4704 {
4705 /* And now we have to find out if the element itself is fully
4706 constructed. E.g. for union { struct { int a, b; } s; } u
4707 = { .s = { .a = 1 } }. */
4708 if (elt_count == count_type_elements (init_sub_type, false))
4709 clear_this = false;
4710 }
4711 }
4712
4713 *p_must_clear = clear_this;
4714 }
4715
4716 *p_nz_elts += nz_elts;
4717 *p_elt_count += elt_count;
4718
4719 return const_p;
4720 }
4721
4722 /* Examine CTOR to discover:
4723 * how many scalar fields are set to nonzero values,
4724 and place it in *P_NZ_ELTS;
4725 * how many scalar fields in total are in CTOR,
4726 and place it in *P_ELT_COUNT.
4727 * if a type is a union, and the initializer from the constructor
4728 is not the largest element in the union, then set *p_must_clear.
4729
4730 Return whether or not CTOR is a valid static constant initializer, the same
4731 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4732
4733 bool
4734 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4735 HOST_WIDE_INT *p_elt_count,
4736 bool *p_must_clear)
4737 {
4738 *p_nz_elts = 0;
4739 *p_elt_count = 0;
4740 *p_must_clear = false;
4741
4742 return
4743 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4744 }
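/* Illustrative sketch (added annotation): for

     struct { int a, b, c; } x = { 1, 0, 2 };

   categorize_ctor_elements sets *P_NZ_ELTS to 2, *P_ELT_COUNT to 3,
   leaves *P_MUST_CLEAR false (the type is not a union), and returns
   true, since every element is a valid static initializer.  */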
4745
4746 /* Count the number of scalars in TYPE. Return -1 on overflow or
4747 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
4748 flexible array member at the end of the structure. */
4749
4750 HOST_WIDE_INT
4751 count_type_elements (tree type, bool allow_flexarr)
4752 {
4753 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4754 switch (TREE_CODE (type))
4755 {
4756 case ARRAY_TYPE:
4757 {
4758 tree telts = array_type_nelts (type);
4759 if (telts && host_integerp (telts, 1))
4760 {
4761 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4762 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4763 if (n == 0)
4764 return 0;
4765 else if (max / n > m)
4766 return n * m;
4767 }
4768 return -1;
4769 }
4770
4771 case RECORD_TYPE:
4772 {
4773 HOST_WIDE_INT n = 0, t;
4774 tree f;
4775
4776 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4777 if (TREE_CODE (f) == FIELD_DECL)
4778 {
4779 t = count_type_elements (TREE_TYPE (f), false);
4780 if (t < 0)
4781 {
4782 /* Check for structures with flexible array member. */
4783 tree tf = TREE_TYPE (f);
4784 if (allow_flexarr
4785 && TREE_CHAIN (f) == NULL
4786 && TREE_CODE (tf) == ARRAY_TYPE
4787 && TYPE_DOMAIN (tf)
4788 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4789 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4790 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4791 && int_size_in_bytes (type) >= 0)
4792 break;
4793
4794 return -1;
4795 }
4796 n += t;
4797 }
4798
4799 return n;
4800 }
4801
4802 case UNION_TYPE:
4803 case QUAL_UNION_TYPE:
4804 {
4805 /* Ho hum. How in the world do we guess here? Clearly it isn't
4806 right to count the fields. Guess based on the number of words. */
4807 HOST_WIDE_INT n = int_size_in_bytes (type);
4808 if (n < 0)
4809 return -1;
4810 return n / UNITS_PER_WORD;
4811 }
4812
4813 case COMPLEX_TYPE:
4814 return 2;
4815
4816 case VECTOR_TYPE:
4817 return TYPE_VECTOR_SUBPARTS (type);
4818
4819 case INTEGER_TYPE:
4820 case REAL_TYPE:
4821 case ENUMERAL_TYPE:
4822 case BOOLEAN_TYPE:
4823 case POINTER_TYPE:
4824 case OFFSET_TYPE:
4825 case REFERENCE_TYPE:
4826 return 1;
4827
4828 case VOID_TYPE:
4829 case METHOD_TYPE:
4830 case FUNCTION_TYPE:
4831 case LANG_TYPE:
4832 default:
4833 gcc_unreachable ();
4834 }
4835 }
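/* Illustrative sketch (added annotation): scalars are counted
   recursively, so for

     struct { _Complex double z; int v[4]; } s;

   count_type_elements returns 2 + 4 == 6, while an array with variable
   bounds (or one whose element count overflows) yields -1.  */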
4836
4837 /* Return 1 if EXP contains mostly (3/4) zeros. */
4838
4839 static int
4840 mostly_zeros_p (tree exp)
4841 {
4842 if (TREE_CODE (exp) == CONSTRUCTOR)
4843
4844 {
4845 HOST_WIDE_INT nz_elts, count, elts;
4846 bool must_clear;
4847
4848 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4849 if (must_clear)
4850 return 1;
4851
4852 elts = count_type_elements (TREE_TYPE (exp), false);
4853
4854 return nz_elts < elts / 4;
4855 }
4856
4857 return initializer_zerop (exp);
4858 }
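/* Illustrative sketch (added annotation): for

     int v[8] = { 1 };

   nz_elts is 1 while count_type_elements reports 8 elements, so
   1 < 8 / 4 holds and mostly_zeros_p returns 1; store_constructor
   will then clear the whole array and store only the nonzero entry.  */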
4859
4860 /* Return 1 if EXP contains all zeros. */
4861
4862 static int
4863 all_zeros_p (tree exp)
4864 {
4865 if (TREE_CODE (exp) == CONSTRUCTOR)
4866
4867 {
4868 HOST_WIDE_INT nz_elts, count;
4869 bool must_clear;
4870
4871 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4872 return nz_elts == 0;
4873 }
4874
4875 return initializer_zerop (exp);
4876 }
4877 \f
4878 /* Helper function for store_constructor.
4879 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4880 TYPE is the type of the CONSTRUCTOR, not the element type.
4881 CLEARED is as for store_constructor.
4882 ALIAS_SET is the alias set to use for any stores.
4883
4884 This provides a recursive shortcut back to store_constructor when it isn't
4885 necessary to go through store_field. This is so that we can pass through
4886 the cleared field to let store_constructor know that we may not have to
4887 clear a substructure if the outer structure has already been cleared. */
4888
4889 static void
4890 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4891 HOST_WIDE_INT bitpos, enum machine_mode mode,
4892 tree exp, tree type, int cleared, int alias_set)
4893 {
4894 if (TREE_CODE (exp) == CONSTRUCTOR
4895 /* We can only call store_constructor recursively if the size and
4896 bit position are on a byte boundary. */
4897 && bitpos % BITS_PER_UNIT == 0
4898 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4899 /* If we have a nonzero bitpos for a register target, then we just
4900 let store_field do the bitfield handling. This is unlikely to
4901 generate unnecessary clear instructions anyway. */
4902 && (bitpos == 0 || MEM_P (target)))
4903 {
4904 if (MEM_P (target))
4905 target
4906 = adjust_address (target,
4907 GET_MODE (target) == BLKmode
4908 || 0 != (bitpos
4909 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4910 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4911
4912
4913 /* Update the alias set, if required. */
4914 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4915 && MEM_ALIAS_SET (target) != 0)
4916 {
4917 target = copy_rtx (target);
4918 set_mem_alias_set (target, alias_set);
4919 }
4920
4921 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4922 }
4923 else
4924 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4925 }
4926
4927 /* Store the value of constructor EXP into the rtx TARGET.
4928 TARGET is either a REG or a MEM; we know it cannot conflict, since
4929 safe_from_p has been called.
4930 CLEARED is true if TARGET is known to have been zero'd.
4931 SIZE is the number of bytes of TARGET we are allowed to modify: this
4932 may not be the same as the size of EXP if we are assigning to a field
4933 which has been packed to exclude padding bits. */
4934
4935 static void
4936 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4937 {
4938 tree type = TREE_TYPE (exp);
4939 #ifdef WORD_REGISTER_OPERATIONS
4940 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4941 #endif
4942
4943 switch (TREE_CODE (type))
4944 {
4945 case RECORD_TYPE:
4946 case UNION_TYPE:
4947 case QUAL_UNION_TYPE:
4948 {
4949 unsigned HOST_WIDE_INT idx;
4950 tree field, value;
4951
4952 /* If size is zero or the target is already cleared, do nothing. */
4953 if (size == 0 || cleared)
4954 cleared = 1;
4955 /* We either clear the aggregate or indicate the value is dead. */
4956 else if ((TREE_CODE (type) == UNION_TYPE
4957 || TREE_CODE (type) == QUAL_UNION_TYPE)
4958 && ! CONSTRUCTOR_ELTS (exp))
4959 /* If the constructor is empty, clear the union. */
4960 {
4961 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4962 cleared = 1;
4963 }
4964
4965 /* If we are building a static constructor into a register,
4966 set the initial value as zero so we can fold the value into
4967 a constant. But if more than one register is involved,
4968 this probably loses. */
4969 else if (REG_P (target) && TREE_STATIC (exp)
4970 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4971 {
4972 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4973 cleared = 1;
4974 }
4975
4976 /* If the constructor has fewer fields than the structure or
4977 if we are initializing the structure to mostly zeros, clear
4978 the whole structure first. Don't do this if TARGET is a
4979 register whose mode size isn't equal to SIZE since
4980 clear_storage can't handle this case. */
4981 else if (size > 0
4982 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4983 != fields_length (type))
4984 || mostly_zeros_p (exp))
4985 && (!REG_P (target)
4986 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4987 == size)))
4988 {
4989 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4990 cleared = 1;
4991 }
4992
4993 if (! cleared)
4994 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4995
4996 /* Store each element of the constructor into the
4997 corresponding field of TARGET. */
4998 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4999 {
5000 enum machine_mode mode;
5001 HOST_WIDE_INT bitsize;
5002 HOST_WIDE_INT bitpos = 0;
5003 tree offset;
5004 rtx to_rtx = target;
5005
5006 /* Just ignore missing fields. We cleared the whole
5007 structure, above, if any fields are missing. */
5008 if (field == 0)
5009 continue;
5010
5011 if (cleared && initializer_zerop (value))
5012 continue;
5013
5014 if (host_integerp (DECL_SIZE (field), 1))
5015 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5016 else
5017 bitsize = -1;
5018
5019 mode = DECL_MODE (field);
5020 if (DECL_BIT_FIELD (field))
5021 mode = VOIDmode;
5022
5023 offset = DECL_FIELD_OFFSET (field);
5024 if (host_integerp (offset, 0)
5025 && host_integerp (bit_position (field), 0))
5026 {
5027 bitpos = int_bit_position (field);
5028 offset = 0;
5029 }
5030 else
5031 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5032
5033 if (offset)
5034 {
5035 rtx offset_rtx;
5036
5037 offset
5038 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5039 make_tree (TREE_TYPE (exp),
5040 target));
5041
5042 offset_rtx = expand_normal (offset);
5043 gcc_assert (MEM_P (to_rtx));
5044
5045 #ifdef POINTERS_EXTEND_UNSIGNED
5046 if (GET_MODE (offset_rtx) != Pmode)
5047 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5048 #else
5049 if (GET_MODE (offset_rtx) != ptr_mode)
5050 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5051 #endif
5052
5053 to_rtx = offset_address (to_rtx, offset_rtx,
5054 highest_pow2_factor (offset));
5055 }
5056
5057 #ifdef WORD_REGISTER_OPERATIONS
5058 /* If this initializes a field that is smaller than a
5059 word, at the start of a word, try to widen it to a full
5060 word. This special case allows us to output C++ member
5061 function initializations in a form that the optimizers
5062 can understand. */
5063 if (REG_P (target)
5064 && bitsize < BITS_PER_WORD
5065 && bitpos % BITS_PER_WORD == 0
5066 && GET_MODE_CLASS (mode) == MODE_INT
5067 && TREE_CODE (value) == INTEGER_CST
5068 && exp_size >= 0
5069 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5070 {
5071 tree type = TREE_TYPE (value);
5072
5073 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5074 {
5075 type = lang_hooks.types.type_for_size
5076 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5077 value = fold_convert (type, value);
5078 }
5079
5080 if (BYTES_BIG_ENDIAN)
5081 value
5082 = fold_build2 (LSHIFT_EXPR, type, value,
5083 build_int_cst (type,
5084 BITS_PER_WORD - bitsize));
5085 bitsize = BITS_PER_WORD;
5086 mode = word_mode;
5087 }
5088 #endif
5089
5090 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5091 && DECL_NONADDRESSABLE_P (field))
5092 {
5093 to_rtx = copy_rtx (to_rtx);
5094 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5095 }
5096
5097 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5098 value, type, cleared,
5099 get_alias_set (TREE_TYPE (field)));
5100 }
5101 break;
5102 }
5103 case ARRAY_TYPE:
5104 {
5105 tree value, index;
5106 unsigned HOST_WIDE_INT i;
5107 int need_to_clear;
5108 tree domain;
5109 tree elttype = TREE_TYPE (type);
5110 int const_bounds_p;
5111 HOST_WIDE_INT minelt = 0;
5112 HOST_WIDE_INT maxelt = 0;
5113
5114 domain = TYPE_DOMAIN (type);
5115 const_bounds_p = (TYPE_MIN_VALUE (domain)
5116 && TYPE_MAX_VALUE (domain)
5117 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5118 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5119
5120 /* If we have constant bounds for the range of the type, get them. */
5121 if (const_bounds_p)
5122 {
5123 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5124 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5125 }
5126
5127 /* If the constructor has fewer elements than the array, clear
5128 the whole array first. Similarly if this is a static
5129 constructor of a non-BLKmode object. */
5130 if (cleared)
5131 need_to_clear = 0;
5132 else if (REG_P (target) && TREE_STATIC (exp))
5133 need_to_clear = 1;
5134 else
5135 {
5136 unsigned HOST_WIDE_INT idx;
5137 tree index, value;
5138 HOST_WIDE_INT count = 0, zero_count = 0;
5139 need_to_clear = ! const_bounds_p;
5140
5141 /* This loop is a more accurate version of the loop in
5142 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5143 is also needed to check for missing elements. */
5144 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5145 {
5146 HOST_WIDE_INT this_node_count;
5147
5148 if (need_to_clear)
5149 break;
5150
5151 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5152 {
5153 tree lo_index = TREE_OPERAND (index, 0);
5154 tree hi_index = TREE_OPERAND (index, 1);
5155
5156 if (! host_integerp (lo_index, 1)
5157 || ! host_integerp (hi_index, 1))
5158 {
5159 need_to_clear = 1;
5160 break;
5161 }
5162
5163 this_node_count = (tree_low_cst (hi_index, 1)
5164 - tree_low_cst (lo_index, 1) + 1);
5165 }
5166 else
5167 this_node_count = 1;
5168
5169 count += this_node_count;
5170 if (mostly_zeros_p (value))
5171 zero_count += this_node_count;
5172 }
5173
5174 /* Clear the entire array first if there are any missing
5175 elements, or if the incidence of zero elements is >=
5176 75%. */
5177 if (! need_to_clear
5178 && (count < maxelt - minelt + 1
5179 || 4 * zero_count >= 3 * count))
5180 need_to_clear = 1;
5181 }
5182
5183 if (need_to_clear && size > 0)
5184 {
5185 if (REG_P (target))
5186 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5187 else
5188 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5189 cleared = 1;
5190 }
5191
5192 if (!cleared && REG_P (target))
5193 /* Inform later passes that the old value is dead. */
5194 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5195
5196 /* Store each element of the constructor into the
5197 corresponding element of TARGET, determined by counting the
5198 elements. */
5199 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5200 {
5201 enum machine_mode mode;
5202 HOST_WIDE_INT bitsize;
5203 HOST_WIDE_INT bitpos;
5204 int unsignedp;
5205 rtx xtarget = target;
5206
5207 if (cleared && initializer_zerop (value))
5208 continue;
5209
5210 unsignedp = TYPE_UNSIGNED (elttype);
5211 mode = TYPE_MODE (elttype);
5212 if (mode == BLKmode)
5213 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5214 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5215 : -1);
5216 else
5217 bitsize = GET_MODE_BITSIZE (mode);
5218
5219 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5220 {
5221 tree lo_index = TREE_OPERAND (index, 0);
5222 tree hi_index = TREE_OPERAND (index, 1);
5223 rtx index_r, pos_rtx;
5224 HOST_WIDE_INT lo, hi, count;
5225 tree position;
5226
5227 /* If the range is constant and "small", unroll the loop. */
5228 if (const_bounds_p
5229 && host_integerp (lo_index, 0)
5230 && host_integerp (hi_index, 0)
5231 && (lo = tree_low_cst (lo_index, 0),
5232 hi = tree_low_cst (hi_index, 0),
5233 count = hi - lo + 1,
5234 (!MEM_P (target)
5235 || count <= 2
5236 || (host_integerp (TYPE_SIZE (elttype), 1)
5237 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5238 <= 40 * 8)))))
5239 {
5240 lo -= minelt; hi -= minelt;
5241 for (; lo <= hi; lo++)
5242 {
5243 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5244
5245 if (MEM_P (target)
5246 && !MEM_KEEP_ALIAS_SET_P (target)
5247 && TREE_CODE (type) == ARRAY_TYPE
5248 && TYPE_NONALIASED_COMPONENT (type))
5249 {
5250 target = copy_rtx (target);
5251 MEM_KEEP_ALIAS_SET_P (target) = 1;
5252 }
5253
5254 store_constructor_field
5255 (target, bitsize, bitpos, mode, value, type, cleared,
5256 get_alias_set (elttype));
5257 }
5258 }
5259 else
5260 {
5261 rtx loop_start = gen_label_rtx ();
5262 rtx loop_end = gen_label_rtx ();
5263 tree exit_cond;
5264
5265 expand_normal (hi_index);
5266 unsignedp = TYPE_UNSIGNED (domain);
5267
5268 index = build_decl (VAR_DECL, NULL_TREE, domain);
5269
5270 index_r
5271 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5272 &unsignedp, 0));
5273 SET_DECL_RTL (index, index_r);
5274 store_expr (lo_index, index_r, 0);
5275
5276 /* Build the head of the loop. */
5277 do_pending_stack_adjust ();
5278 emit_label (loop_start);
5279
5280 /* Assign value to element index. */
5281 position =
5282 fold_convert (ssizetype,
5283 fold_build2 (MINUS_EXPR,
5284 TREE_TYPE (index),
5285 index,
5286 TYPE_MIN_VALUE (domain)));
5287
5288 position =
5289 size_binop (MULT_EXPR, position,
5290 fold_convert (ssizetype,
5291 TYPE_SIZE_UNIT (elttype)));
5292
5293 pos_rtx = expand_normal (position);
5294 xtarget = offset_address (target, pos_rtx,
5295 highest_pow2_factor (position));
5296 xtarget = adjust_address (xtarget, mode, 0);
5297 if (TREE_CODE (value) == CONSTRUCTOR)
5298 store_constructor (value, xtarget, cleared,
5299 bitsize / BITS_PER_UNIT);
5300 else
5301 store_expr (value, xtarget, 0);
5302
5303 /* Generate a conditional jump to exit the loop. */
5304 exit_cond = build2 (GE_EXPR, integer_type_node,
5305 index, hi_index);
5306 jumpif (exit_cond, loop_end);
5307
5308 /* Update the loop counter, and jump to the head of
5309 the loop. */
5310 expand_assignment (index,
5311 build2 (PLUS_EXPR, TREE_TYPE (index),
5312 index, integer_one_node));
5313
5314 emit_jump (loop_start);
5315
5316 /* Build the end of the loop. */
5317 emit_label (loop_end);
5318 }
5319 }
5320 else if ((index != 0 && ! host_integerp (index, 0))
5321 || ! host_integerp (TYPE_SIZE (elttype), 1))
5322 {
5323 tree position;
5324
5325 if (index == 0)
5326 index = ssize_int (1);
5327
5328 if (minelt)
5329 index = fold_convert (ssizetype,
5330 fold_build2 (MINUS_EXPR,
5331 TREE_TYPE (index),
5332 index,
5333 TYPE_MIN_VALUE (domain)));
5334
5335 position =
5336 size_binop (MULT_EXPR, index,
5337 fold_convert (ssizetype,
5338 TYPE_SIZE_UNIT (elttype)));
5339 xtarget = offset_address (target,
5340 expand_normal (position),
5341 highest_pow2_factor (position));
5342 xtarget = adjust_address (xtarget, mode, 0);
5343 store_expr (value, xtarget, 0);
5344 }
5345 else
5346 {
5347 if (index != 0)
5348 bitpos = ((tree_low_cst (index, 0) - minelt)
5349 * tree_low_cst (TYPE_SIZE (elttype), 1));
5350 else
5351 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5352
5353 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5354 && TREE_CODE (type) == ARRAY_TYPE
5355 && TYPE_NONALIASED_COMPONENT (type))
5356 {
5357 target = copy_rtx (target);
5358 MEM_KEEP_ALIAS_SET_P (target) = 1;
5359 }
5360 store_constructor_field (target, bitsize, bitpos, mode, value,
5361 type, cleared, get_alias_set (elttype));
5362 }
5363 }
5364 break;
5365 }
5366
5367 case VECTOR_TYPE:
5368 {
5369 unsigned HOST_WIDE_INT idx;
5370 constructor_elt *ce;
5371 int i;
5372 int need_to_clear;
5373 int icode = 0;
5374 tree elttype = TREE_TYPE (type);
5375 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5376 enum machine_mode eltmode = TYPE_MODE (elttype);
5377 HOST_WIDE_INT bitsize;
5378 HOST_WIDE_INT bitpos;
5379 rtvec vector = NULL;
5380 unsigned n_elts;
5381
5382 gcc_assert (eltmode != BLKmode);
5383
5384 n_elts = TYPE_VECTOR_SUBPARTS (type);
5385 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5386 {
5387 enum machine_mode mode = GET_MODE (target);
5388
5389 icode = (int) vec_init_optab->handlers[mode].insn_code;
5390 if (icode != CODE_FOR_nothing)
5391 {
5392 unsigned int i;
5393
5394 vector = rtvec_alloc (n_elts);
5395 for (i = 0; i < n_elts; i++)
5396 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5397 }
5398 }
5399
5400 /* If the constructor has fewer elements than the vector,
5401 clear the whole vector first. Similarly if this is a static
5402 constructor of a non-BLKmode object. */
5403 if (cleared)
5404 need_to_clear = 0;
5405 else if (REG_P (target) && TREE_STATIC (exp))
5406 need_to_clear = 1;
5407 else
5408 {
5409 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5410 tree value;
5411
5412 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5413 {
5414 int n_elts_here = tree_low_cst
5415 (int_const_binop (TRUNC_DIV_EXPR,
5416 TYPE_SIZE (TREE_TYPE (value)),
5417 TYPE_SIZE (elttype), 0), 1);
5418
5419 count += n_elts_here;
5420 if (mostly_zeros_p (value))
5421 zero_count += n_elts_here;
5422 }
5423
5424 /* Clear the entire vector first if there are any missing elements,
5425 or if the incidence of zero elements is >= 75%. */
5426 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5427 }
5428
5429 if (need_to_clear && size > 0 && !vector)
5430 {
5431 if (REG_P (target))
5432 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5433 else
5434 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5435 cleared = 1;
5436 }
5437
5438 /* Inform later passes that the old value is dead. */
5439 if (!cleared && !vector && REG_P (target))
5440 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5441
5442 /* Store each element of the constructor into the corresponding
5443 element of TARGET, determined by counting the elements. */
5444 for (idx = 0, i = 0;
5445 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5446 idx++, i += bitsize / elt_size)
5447 {
5448 HOST_WIDE_INT eltpos;
5449 tree value = ce->value;
5450
5451 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5452 if (cleared && initializer_zerop (value))
5453 continue;
5454
5455 if (ce->index)
5456 eltpos = tree_low_cst (ce->index, 1);
5457 else
5458 eltpos = i;
5459
5460 if (vector)
5461 {
5462 /* Vector CONSTRUCTORs should only be built from smaller
5463 vectors in the case of BLKmode vectors. */
5464 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5465 RTVEC_ELT (vector, eltpos)
5466 = expand_normal (value);
5467 }
5468 else
5469 {
5470 enum machine_mode value_mode =
5471 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5472 ? TYPE_MODE (TREE_TYPE (value))
5473 : eltmode;
5474 bitpos = eltpos * elt_size;
5475 store_constructor_field (target, bitsize, bitpos,
5476 value_mode, value, type,
5477 cleared, get_alias_set (elttype));
5478 }
5479 }
5480
5481 if (vector)
5482 emit_insn (GEN_FCN (icode)
5483 (target,
5484 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5485 break;
5486 }
5487
5488 default:
5489 gcc_unreachable ();
5490 }
5491 }
5492
5493 /* Store the value of EXP (an expression tree)
5494 into a subfield of TARGET which has mode MODE and occupies
5495 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5496 If MODE is VOIDmode, it means that we are storing into a bit-field.
5497
5498 Always return const0_rtx unless we have something particular to
5499 return.
5500
5501 TYPE is the type of the underlying object,
5502
5503 ALIAS_SET is the alias set for the destination. This value will
5504 (in general) be different from that for TARGET, since TARGET is a
5505 reference to the containing structure. */
5506
5507 static rtx
5508 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5509 enum machine_mode mode, tree exp, tree type, int alias_set)
5510 {
5511 HOST_WIDE_INT width_mask = 0;
5512
5513 if (TREE_CODE (exp) == ERROR_MARK)
5514 return const0_rtx;
5515
5516 /* If we have nothing to store, do nothing unless the expression has
5517 side-effects. */
5518 if (bitsize == 0)
5519 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5520 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5521 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5522
5523 /* If we are storing into an unaligned field of an aligned union that is
5524 in a register, we may have the mode of TARGET being an integer mode but
5525 MODE == BLKmode. In that case, get an aligned object whose size and
5526 alignment are the same as TARGET and store TARGET into it (we can avoid
5527 the store if the field being stored is the entire width of TARGET). Then
5528 call ourselves recursively to store the field into a BLKmode version of
5529 that object. Finally, load from the object into TARGET. This is not
5530 very efficient in general, but should only be slightly more expensive
5531 than the otherwise-required unaligned accesses. Perhaps this can be
5532 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5533 twice, once with emit_move_insn and once via store_field. */
5534
5535 if (mode == BLKmode
5536 && (REG_P (target) || GET_CODE (target) == SUBREG))
5537 {
5538 rtx object = assign_temp (type, 0, 1, 1);
5539 rtx blk_object = adjust_address (object, BLKmode, 0);
5540
5541 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5542 emit_move_insn (object, target);
5543
5544 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5545
5546 emit_move_insn (target, object);
5547
5548 /* We want to return the BLKmode version of the data. */
5549 return blk_object;
5550 }
5551
5552 if (GET_CODE (target) == CONCAT)
5553 {
5554 /* We're storing into a struct containing a single __complex. */
5555
5556 gcc_assert (!bitpos);
5557 return store_expr (exp, target, 0);
5558 }
5559
5560 /* If the structure is in a register or if the component
5561 is a bit field, we cannot use addressing to access it.
5562 Use bit-field techniques or SUBREG to store in it. */
5563
5564 if (mode == VOIDmode
5565 || (mode != BLKmode && ! direct_store[(int) mode]
5566 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5567 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5568 || REG_P (target)
5569 || GET_CODE (target) == SUBREG
5570 /* If the field isn't aligned enough to store as an ordinary memref,
5571 store it as a bit field. */
5572 || (mode != BLKmode
5573 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5574 || bitpos % GET_MODE_ALIGNMENT (mode))
5575 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5576 || (bitpos % BITS_PER_UNIT != 0)))
5577 /* If the RHS and field are a constant size and the size of the
5578 RHS isn't the same size as the bitfield, we must use bitfield
5579 operations. */
5580 || (bitsize >= 0
5581 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5582 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5583 {
5584 rtx temp;
5585
5586 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5587 implies a mask operation. If the precision is the same size as
5588 the field we're storing into, that mask is redundant. This is
5589 particularly common with bit field assignments generated by the
5590 C front end. */
5591 if (TREE_CODE (exp) == NOP_EXPR)
5592 {
5593 tree type = TREE_TYPE (exp);
5594 if (INTEGRAL_TYPE_P (type)
5595 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5596 && bitsize == TYPE_PRECISION (type))
5597 {
5598 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5599 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5600 exp = TREE_OPERAND (exp, 0);
5601 }
5602 }
5603
5604 temp = expand_normal (exp);
5605
5606 /* If BITSIZE is narrower than the size of the type of EXP
5607 we will be narrowing TEMP. Normally, what's wanted are the
5608 low-order bits. However, if EXP's type is a record and this is
5609 a big-endian machine, we want the upper BITSIZE bits. */
5610 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5611 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5612 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5613 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5614 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5615 - bitsize),
5616 NULL_RTX, 1);
5617
5618 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5619 MODE. */
5620 if (mode != VOIDmode && mode != BLKmode
5621 && mode != TYPE_MODE (TREE_TYPE (exp)))
5622 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5623
5624 /* If the modes of TARGET and TEMP are both BLKmode, both
5625 must be in memory and BITPOS must be aligned on a byte
5626 boundary. If so, we simply do a block copy. */
5627 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5628 {
5629 gcc_assert (MEM_P (target) && MEM_P (temp)
5630 && !(bitpos % BITS_PER_UNIT));
5631
5632 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5633 emit_block_move (target, temp,
5634 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5635 / BITS_PER_UNIT),
5636 BLOCK_OP_NORMAL);
5637
5638 return const0_rtx;
5639 }
5640
5641 /* Store the value in the bitfield. */
5642 store_bit_field (target, bitsize, bitpos, mode, temp);
5643
5644 return const0_rtx;
5645 }
5646 else
5647 {
5648 /* Now build a reference to just the desired component. */
5649 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5650
5651 if (to_rtx == target)
5652 to_rtx = copy_rtx (to_rtx);
5653
5654 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5655 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5656 set_mem_alias_set (to_rtx, alias_set);
5657
5658 return store_expr (exp, to_rtx, 0);
5659 }
5660 }
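
/* A usage sketch (illustrative only, not an actual call site): to store
   the value of a tree expression VAL into a 16-bit field that starts
   32 bits into a MEM target TO_RTX, a caller could write

     store_field (to_rtx, 16, 32, HImode, val, TREE_TYPE (val),
                  get_alias_set (TREE_TYPE (val)));

   assuming HImode is the 16-bit integer mode on the target and that
   VAL's type fits in it; TO_RTX and VAL are hypothetical names.  */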
5661 \f
5662 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5663 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5664 codes and find the ultimate containing object, which we return.
5665
5666 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5667 bit position, and *PUNSIGNEDP to the signedness of the field.
5668 If the position of the field is variable, we store a tree
5669 giving the variable offset (in units) in *POFFSET.
5670 This offset is in addition to the bit position.
5671 If the position is not variable, we store 0 in *POFFSET.
5672
5673 If any of the extraction expressions is volatile,
5674 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5675
5676 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5677 is a mode that can be used to access the field. In that case, *PBITSIZE
5678 is redundant.
5679
5680 If the field describes a variable-sized object, *PMODE is set to
5681 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5682 this case, but the address of the object can be found.
5683
5684 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5685 look through nodes that serve as markers of a greater alignment than
5686 the one that can be deduced from the expression. These nodes make it
5687 possible for front-ends to prevent temporaries from being created by
5688 the middle-end on alignment considerations. For that purpose, the
5689 normal operating mode in high-level code is to always pass FALSE so that
5690 the ultimate containing object is really returned; moreover, the
5691 associated predicate handled_component_p will always return TRUE
5692 on these nodes, thus indicating that they are essentially handled
5693 by get_inner_reference. TRUE should only be passed when the caller
5694 is scanning the expression in order to build another representation
5695 and specifically knows how to handle these nodes; as such, this is
5696 the normal operating mode in the RTL expanders. */
5697
5698 tree
5699 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5700 HOST_WIDE_INT *pbitpos, tree *poffset,
5701 enum machine_mode *pmode, int *punsignedp,
5702 int *pvolatilep, bool keep_aligning)
5703 {
5704 tree size_tree = 0;
5705 enum machine_mode mode = VOIDmode;
5706 tree offset = size_zero_node;
5707 tree bit_offset = bitsize_zero_node;
5708 tree tem;
5709
5710 /* First get the mode, signedness, and size. We do this from just the
5711 outermost expression. */
5712 if (TREE_CODE (exp) == COMPONENT_REF)
5713 {
5714 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5715 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5716 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5717
5718 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5719 }
5720 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5721 {
5722 size_tree = TREE_OPERAND (exp, 1);
5723 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5724
5725 /* For vector types, with the correct size of access, use the mode
5726 of the inner type. */
5727 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5728 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5729 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5730 mode = TYPE_MODE (TREE_TYPE (exp));
5731 }
5732 else
5733 {
5734 mode = TYPE_MODE (TREE_TYPE (exp));
5735 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5736
5737 if (mode == BLKmode)
5738 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5739 else
5740 *pbitsize = GET_MODE_BITSIZE (mode);
5741 }
5742
5743 if (size_tree != 0)
5744 {
5745 if (! host_integerp (size_tree, 1))
5746 mode = BLKmode, *pbitsize = -1;
5747 else
5748 *pbitsize = tree_low_cst (size_tree, 1);
5749 }
5750
5751 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5752 and find the ultimate containing object. */
5753 while (1)
5754 {
5755 switch (TREE_CODE (exp))
5756 {
5757 case BIT_FIELD_REF:
5758 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5759 TREE_OPERAND (exp, 2));
5760 break;
5761
5762 case COMPONENT_REF:
5763 {
5764 tree field = TREE_OPERAND (exp, 1);
5765 tree this_offset = component_ref_field_offset (exp);
5766
5767 /* If this field hasn't been filled in yet, don't go past it.
5768 This should only happen when folding expressions made during
5769 type construction. */
5770 if (this_offset == 0)
5771 break;
5772
5773 offset = size_binop (PLUS_EXPR, offset, this_offset);
5774 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5775 DECL_FIELD_BIT_OFFSET (field));
5776
5777 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5778 }
5779 break;
5780
5781 case ARRAY_REF:
5782 case ARRAY_RANGE_REF:
5783 {
5784 tree index = TREE_OPERAND (exp, 1);
5785 tree low_bound = array_ref_low_bound (exp);
5786 tree unit_size = array_ref_element_size (exp);
5787
5788 /* We assume all arrays have sizes that are a multiple of a byte.
5789 First subtract the lower bound, if any, in the type of the
5790 index, then convert to sizetype and multiply by the size of
5791 the array element. */
5792 if (! integer_zerop (low_bound))
5793 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5794 index, low_bound);
5795
5796 offset = size_binop (PLUS_EXPR, offset,
5797 size_binop (MULT_EXPR,
5798 fold_convert (sizetype, index),
5799 unit_size));
5800 }
5801 break;
5802
5803 case REALPART_EXPR:
5804 break;
5805
5806 case IMAGPART_EXPR:
5807 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5808 bitsize_int (*pbitsize));
5809 break;
5810
5811 case VIEW_CONVERT_EXPR:
5812 if (keep_aligning && STRICT_ALIGNMENT
5813 && (TYPE_ALIGN (TREE_TYPE (exp))
5814 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5815 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5816 < BIGGEST_ALIGNMENT)
5817 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5818 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5819 goto done;
5820 break;
5821
5822 default:
5823 goto done;
5824 }
5825
5826 /* If any reference in the chain is volatile, the effect is volatile. */
5827 if (TREE_THIS_VOLATILE (exp))
5828 *pvolatilep = 1;
5829
5830 exp = TREE_OPERAND (exp, 0);
5831 }
5832 done:
5833
5834 /* If OFFSET is constant, see if we can return the whole thing as a
5835 constant bit position. Otherwise, split it up. */
5836 if (host_integerp (offset, 0)
5837 && 0 != (tem = size_binop (MULT_EXPR,
5838 fold_convert (bitsizetype, offset),
5839 bitsize_unit_node))
5840 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5841 && host_integerp (tem, 0))
5842 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5843 else
5844 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5845
5846 *pmode = mode;
5847 return exp;
5848 }
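
/* A usage sketch (hypothetical caller, assuming EXP is a reference such
   as a.b[i].c): decompose the reference before expanding it to RTL.

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                      &mode1, &unsignedp, &volatilep,
                                      false);

   BASE is then the ultimate containing object (here A), BITPOS and
   OFFSET give the constant and variable parts of the displacement, and
   passing FALSE looks through alignment-marking nodes, the normal
   operating mode in the RTL expanders as described above.  */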
5849
5850 /* Return a tree of sizetype representing the size, in bytes, of the element
5851 of EXP, an ARRAY_REF. */
5852
5853 tree
5854 array_ref_element_size (tree exp)
5855 {
5856 tree aligned_size = TREE_OPERAND (exp, 3);
5857 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5858
5859 /* If a size was specified in the ARRAY_REF, it's the size measured
5860 in alignment units of the element type. So multiply by that value. */
5861 if (aligned_size)
5862 {
5863 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5864 sizetype from another type of the same width and signedness. */
5865 if (TREE_TYPE (aligned_size) != sizetype)
5866 aligned_size = fold_convert (sizetype, aligned_size);
5867 return size_binop (MULT_EXPR, aligned_size,
5868 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5869 }
5870
5871 /* Otherwise, take the size from that of the element type. Substitute
5872 any PLACEHOLDER_EXPR that we have. */
5873 else
5874 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5875 }
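
/* For example (assuming a target where int is 4 bytes): for an
   ARRAY_REF a[i] with A of type int[10] and no aligned size recorded in
   operand 3, this returns the sizetype constant 4.  */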
5876
5877 /* Return a tree representing the lower bound of the array mentioned in
5878 EXP, an ARRAY_REF. */
5879
5880 tree
5881 array_ref_low_bound (tree exp)
5882 {
5883 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5884
5885 /* If a lower bound is specified in EXP, use it. */
5886 if (TREE_OPERAND (exp, 2))
5887 return TREE_OPERAND (exp, 2);
5888
5889 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5890 substituting for a PLACEHOLDER_EXPR as needed. */
5891 if (domain_type && TYPE_MIN_VALUE (domain_type))
5892 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5893
5894 /* Otherwise, return a zero of the appropriate type. */
5895 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5896 }
5897
5898 /* Return a tree representing the upper bound of the array mentioned in
5899 EXP, an ARRAY_REF. */
5900
5901 tree
5902 array_ref_up_bound (tree exp)
5903 {
5904 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5905
5906 /* If there is a domain type and it has an upper bound, use it, substituting
5907 for a PLACEHOLDER_EXPR as needed. */
5908 if (domain_type && TYPE_MAX_VALUE (domain_type))
5909 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5910
5911 /* Otherwise fail. */
5912 return NULL_TREE;
5913 }
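
/* For example: for a C array declared as int a[10], whose domain is
   0 .. 9, array_ref_low_bound returns 0 and array_ref_up_bound returns
   9; for a language whose array domain is 1 .. N, the lower bound is 1
   and the upper bound is the (possibly non-constant) tree for N.  */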
5914
5915 /* Return a tree representing the offset, in bytes, of the field referenced
5916 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5917
5918 tree
5919 component_ref_field_offset (tree exp)
5920 {
5921 tree aligned_offset = TREE_OPERAND (exp, 2);
5922 tree field = TREE_OPERAND (exp, 1);
5923
5924 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5925 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5926 value. */
5927 if (aligned_offset)
5928 {
5929 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5930 sizetype from another type of the same width and signedness. */
5931 if (TREE_TYPE (aligned_offset) != sizetype)
5932 aligned_offset = fold_convert (sizetype, aligned_offset);
5933 return size_binop (MULT_EXPR, aligned_offset,
5934 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5935 }
5936
5937 /* Otherwise, take the offset from that of the field. Substitute
5938 any PLACEHOLDER_EXPR that we have. */
5939 else
5940 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5941 }
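
/* For example (hypothetical layout): for the COMPONENT_REF s.f where F
   is placed 8 bytes into struct S, this returns the sizetype constant
   8; any residual bit displacement is reported separately through
   DECL_FIELD_BIT_OFFSET, as noted above.  */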
5942
5943 /* Return 1 if T is an expression that get_inner_reference handles. */
5944
5945 int
5946 handled_component_p (tree t)
5947 {
5948 switch (TREE_CODE (t))
5949 {
5950 case BIT_FIELD_REF:
5951 case COMPONENT_REF:
5952 case ARRAY_REF:
5953 case ARRAY_RANGE_REF:
5954 case VIEW_CONVERT_EXPR:
5955 case REALPART_EXPR:
5956 case IMAGPART_EXPR:
5957 return 1;
5958
5959 default:
5960 return 0;
5961 }
5962 }
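
/* A typical use (sketch): strip a chain of component references to
   reach the base object that get_inner_reference would return.

     tree base = exp;
     while (handled_component_p (base))
       base = TREE_OPERAND (base, 0);
*/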
5963 \f
5964 /* Given an rtx VALUE that may contain additions and multiplications, return
5965 an equivalent value that just refers to a register, memory, or constant.
5966 This is done by generating instructions to perform the arithmetic and
5967 returning a pseudo-register containing the value.
5968
5969 The returned value may be a REG, SUBREG, MEM or constant. */
5970
5971 rtx
5972 force_operand (rtx value, rtx target)
5973 {
5974 rtx op1, op2;
5975 /* Use subtarget as the target for operand 0 of a binary operation. */
5976 rtx subtarget = get_subtarget (target);
5977 enum rtx_code code = GET_CODE (value);
5978
5979 /* Check for a SUBREG applied to an expression produced by the loop optimizer. */
5980 if (code == SUBREG
5981 && !REG_P (SUBREG_REG (value))
5982 && !MEM_P (SUBREG_REG (value)))
5983 {
5984 value = simplify_gen_subreg (GET_MODE (value),
5985 force_reg (GET_MODE (SUBREG_REG (value)),
5986 force_operand (SUBREG_REG (value),
5987 NULL_RTX)),
5988 GET_MODE (SUBREG_REG (value)),
5989 SUBREG_BYTE (value));
5990 code = GET_CODE (value);
5991 }
5992
5993 /* Check for a PIC address load. */
5994 if ((code == PLUS || code == MINUS)
5995 && XEXP (value, 0) == pic_offset_table_rtx
5996 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5997 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5998 || GET_CODE (XEXP (value, 1)) == CONST))
5999 {
6000 if (!subtarget)
6001 subtarget = gen_reg_rtx (GET_MODE (value));
6002 emit_move_insn (subtarget, value);
6003 return subtarget;
6004 }
6005
6006 if (ARITHMETIC_P (value))
6007 {
6008 op2 = XEXP (value, 1);
6009 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6010 subtarget = 0;
6011 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6012 {
6013 code = PLUS;
6014 op2 = negate_rtx (GET_MODE (value), op2);
6015 }
6016
6017 /* Check for an addition with OP2 a constant integer and our first
6018 operand a PLUS of a virtual register and something else. In that
6019 case, we want to emit the sum of the virtual register and the
6020 constant first and then add the other value. This allows virtual
6021 register instantiation to simply modify the constant rather than
6022 creating another one around this addition. */
6023 if (code == PLUS && GET_CODE (op2) == CONST_INT
6024 && GET_CODE (XEXP (value, 0)) == PLUS
6025 && REG_P (XEXP (XEXP (value, 0), 0))
6026 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6027 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6028 {
6029 rtx temp = expand_simple_binop (GET_MODE (value), code,
6030 XEXP (XEXP (value, 0), 0), op2,
6031 subtarget, 0, OPTAB_LIB_WIDEN);
6032 return expand_simple_binop (GET_MODE (value), code, temp,
6033 force_operand (XEXP (XEXP (value,
6034 0), 1), 0),
6035 target, 0, OPTAB_LIB_WIDEN);
6036 }
6037
6038 op1 = force_operand (XEXP (value, 0), subtarget);
6039 op2 = force_operand (op2, NULL_RTX);
6040 switch (code)
6041 {
6042 case MULT:
6043 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6044 case DIV:
6045 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6046 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6047 target, 1, OPTAB_LIB_WIDEN);
6048 else
6049 return expand_divmod (0,
6050 FLOAT_MODE_P (GET_MODE (value))
6051 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6052 GET_MODE (value), op1, op2, target, 0);
6053 break;
6054 case MOD:
6055 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6056 target, 0);
6057 break;
6058 case UDIV:
6059 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6060 target, 1);
6061 break;
6062 case UMOD:
6063 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6064 target, 1);
6065 break;
6066 case ASHIFTRT:
6067 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6068 target, 0, OPTAB_LIB_WIDEN);
6069 break;
6070 default:
6071 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6072 target, 1, OPTAB_LIB_WIDEN);
6073 }
6074 }
6075 if (UNARY_P (value))
6076 {
6077 if (!target)
6078 target = gen_reg_rtx (GET_MODE (value));
6079 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6080 switch (code)
6081 {
6082 case ZERO_EXTEND:
6083 case SIGN_EXTEND:
6084 case TRUNCATE:
6085 case FLOAT_EXTEND:
6086 case FLOAT_TRUNCATE:
6087 convert_move (target, op1, code == ZERO_EXTEND);
6088 return target;
6089
6090 case FIX:
6091 case UNSIGNED_FIX:
6092 expand_fix (target, op1, code == UNSIGNED_FIX);
6093 return target;
6094
6095 case FLOAT:
6096 case UNSIGNED_FLOAT:
6097 expand_float (target, op1, code == UNSIGNED_FLOAT);
6098 return target;
6099
6100 default:
6101 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6102 }
6103 }
6104
6105 #ifdef INSN_SCHEDULING
6106 /* On machines that have insn scheduling, we want all memory references to be
6107 explicit, so we need to deal with such paradoxical SUBREGs. */
6108 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6109 && (GET_MODE_SIZE (GET_MODE (value))
6110 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6111 value
6112 = simplify_gen_subreg (GET_MODE (value),
6113 force_reg (GET_MODE (SUBREG_REG (value)),
6114 force_operand (SUBREG_REG (value),
6115 NULL_RTX)),
6116 GET_MODE (SUBREG_REG (value)),
6117 SUBREG_BYTE (value));
6118 #endif
6119
6120 return value;
6121 }
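
/* A usage sketch (hypothetical RTL): given an address computation
   (plus (reg R) (const_int 4)) that must become a general operand,
   a caller can write

     rtx op = force_operand (gen_rtx_PLUS (Pmode, r, GEN_INT (4)),
                             NULL_RTX);

   and receive a pseudo-register holding R + 4, with the addition
   emitted as real insns.  */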
6122 \f
6123 /* Subroutine of expand_expr: return nonzero iff there is no way that
6124 EXP can reference X, which is being modified. TOP_P is nonzero if this
6125 call is going to be used to determine whether we need a temporary
6126 for EXP, as opposed to a recursive call to this function.
6127
6128 It is always safe for this routine to return zero since it merely
6129 searches for optimization opportunities. */
6130
6131 int
6132 safe_from_p (rtx x, tree exp, int top_p)
6133 {
6134 rtx exp_rtl = 0;
6135 int i, nops;
6136
6137 if (x == 0
6138 /* If EXP has varying size, we MUST use a target since we currently
6139 have no way of allocating temporaries of variable size
6140 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6141 So we assume here that something at a higher level has prevented a
6142 clash. This is somewhat bogus, but the best we can do. Only
6143 do this when X is BLKmode and when we are at the top level. */
6144 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6145 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6146 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6147 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6148 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6149 != INTEGER_CST)
6150 && GET_MODE (x) == BLKmode)
6151 /* If X is in the outgoing argument area, it is always safe. */
6152 || (MEM_P (x)
6153 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6154 || (GET_CODE (XEXP (x, 0)) == PLUS
6155 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6156 return 1;
6157
6158 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6159 find the underlying pseudo. */
6160 if (GET_CODE (x) == SUBREG)
6161 {
6162 x = SUBREG_REG (x);
6163 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6164 return 0;
6165 }
6166
6167 /* Now look at our tree code and possibly recurse. */
6168 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6169 {
6170 case tcc_declaration:
6171 exp_rtl = DECL_RTL_IF_SET (exp);
6172 break;
6173
6174 case tcc_constant:
6175 return 1;
6176
6177 case tcc_exceptional:
6178 if (TREE_CODE (exp) == TREE_LIST)
6179 {
6180 while (1)
6181 {
6182 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6183 return 0;
6184 exp = TREE_CHAIN (exp);
6185 if (!exp)
6186 return 1;
6187 if (TREE_CODE (exp) != TREE_LIST)
6188 return safe_from_p (x, exp, 0);
6189 }
6190 }
6191 else if (TREE_CODE (exp) == CONSTRUCTOR)
6192 {
6193 constructor_elt *ce;
6194 unsigned HOST_WIDE_INT idx;
6195
6196 for (idx = 0;
6197 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6198 idx++)
6199 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6200 || !safe_from_p (x, ce->value, 0))
6201 return 0;
6202 return 1;
6203 }
6204 else if (TREE_CODE (exp) == ERROR_MARK)
6205 return 1; /* An already-visited SAVE_EXPR? */
6206 else
6207 return 0;
6208
6209 case tcc_statement:
6210 /* The only case we look at here is the DECL_INITIAL inside a
6211 DECL_EXPR. */
6212 return (TREE_CODE (exp) != DECL_EXPR
6213 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6214 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6215 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6216
6217 case tcc_binary:
6218 case tcc_comparison:
6219 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6220 return 0;
6221 /* Fall through. */
6222
6223 case tcc_unary:
6224 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6225
6226 case tcc_expression:
6227 case tcc_reference:
6228 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6229 the expression. If it is set, we conflict iff we are that rtx or
6230 both are in memory. Otherwise, we check all operands of the
6231 expression recursively. */
6232
6233 switch (TREE_CODE (exp))
6234 {
6235 case ADDR_EXPR:
6236 /* If the operand is static or we are static, we can't conflict.
6237 Likewise if we don't conflict with the operand at all. */
6238 if (staticp (TREE_OPERAND (exp, 0))
6239 || TREE_STATIC (exp)
6240 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6241 return 1;
6242
6243 /* Otherwise, the only way this can conflict is if we are taking
6244 the address of a DECL whose address is part of X, which is
6245 very rare. */
6246 exp = TREE_OPERAND (exp, 0);
6247 if (DECL_P (exp))
6248 {
6249 if (!DECL_RTL_SET_P (exp)
6250 || !MEM_P (DECL_RTL (exp)))
6251 return 0;
6252 else
6253 exp_rtl = XEXP (DECL_RTL (exp), 0);
6254 }
6255 break;
6256
6257 case MISALIGNED_INDIRECT_REF:
6258 case ALIGN_INDIRECT_REF:
6259 case INDIRECT_REF:
6260 if (MEM_P (x)
6261 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6262 get_alias_set (exp)))
6263 return 0;
6264 break;
6265
6266 case CALL_EXPR:
6267 /* Assume that the call will clobber all hard registers and
6268 all of memory. */
6269 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6270 || MEM_P (x))
6271 return 0;
6272 break;
6273
6274 case WITH_CLEANUP_EXPR:
6275 case CLEANUP_POINT_EXPR:
6276 /* Lowered by gimplify.c. */
6277 gcc_unreachable ();
6278
6279 case SAVE_EXPR:
6280 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6281
6282 default:
6283 break;
6284 }
6285
6286 /* If we have an rtx, we do not need to scan our operands. */
6287 if (exp_rtl)
6288 break;
6289
6290 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6291 for (i = 0; i < nops; i++)
6292 if (TREE_OPERAND (exp, i) != 0
6293 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6294 return 0;
6295
6296 /* If this is a language-specific tree code, it may require
6297 special handling. */
6298 if ((unsigned int) TREE_CODE (exp)
6299 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6300 && !lang_hooks.safe_from_p (x, exp))
6301 return 0;
6302 break;
6303
6304 case tcc_type:
6305 /* Should never get a type here. */
6306 gcc_unreachable ();
6307
6308 case tcc_gimple_stmt:
6309 gcc_unreachable ();
6310 }
6311
6312 /* If we have an rtl, find any enclosed object. Then see if we conflict
6313 with it. */
6314 if (exp_rtl)
6315 {
6316 if (GET_CODE (exp_rtl) == SUBREG)
6317 {
6318 exp_rtl = SUBREG_REG (exp_rtl);
6319 if (REG_P (exp_rtl)
6320 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6321 return 0;
6322 }
6323
6324 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6325 are memory and they conflict. */
6326 return ! (rtx_equal_p (x, exp_rtl)
6327 || (MEM_P (x) && MEM_P (exp_rtl)
6328 && true_dependence (exp_rtl, VOIDmode, x,
6329 rtx_addr_varies_p)));
6330 }
6331
6332 /* If we reach here, it is safe. */
6333 return 1;
6334 }
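
/* A typical use (sketch): before reusing TARGET while expanding a
   second operand, verify that the operand cannot reference it.

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   This is exactly how expand_operands below drops an unsafe target.  */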
6335
6336 \f
6337 /* Return the highest power of two that EXP is known to be a multiple of.
6338 This is used in updating alignment of MEMs in array references. */
6339
6340 unsigned HOST_WIDE_INT
6341 highest_pow2_factor (tree exp)
6342 {
6343 unsigned HOST_WIDE_INT c0, c1;
6344
6345 switch (TREE_CODE (exp))
6346 {
6347 case INTEGER_CST:
6348 /* We can find the lowest bit that's a one. If the low
6349 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6350 We need to handle this case since we can find it in a COND_EXPR,
6351 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6352 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6353 later ICE. */
6354 if (TREE_OVERFLOW (exp))
6355 return BIGGEST_ALIGNMENT;
6356 else
6357 {
6358 /* Note: tree_low_cst is intentionally not used here, since
6359 we don't care about the upper bits. */
6360 c0 = TREE_INT_CST_LOW (exp);
6361 c0 &= -c0;
6362 return c0 ? c0 : BIGGEST_ALIGNMENT;
6363 }
6364 break;
6365
6366 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6367 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6368 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6369 return MIN (c0, c1);
6370
6371 case MULT_EXPR:
6372 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6373 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6374 return c0 * c1;
6375
6376 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6377 case CEIL_DIV_EXPR:
6378 if (integer_pow2p (TREE_OPERAND (exp, 1))
6379 && host_integerp (TREE_OPERAND (exp, 1), 1))
6380 {
6381 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6382 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6383 return MAX (1, c0 / c1);
6384 }
6385 break;
6386
6387 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6388 case SAVE_EXPR:
6389 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6390
6391 case COMPOUND_EXPR:
6392 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6393
6394 case COND_EXPR:
6395 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6396 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6397 return MIN (c0, c1);
6398
6399 default:
6400 break;
6401 }
6402
6403 return 1;
6404 }
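
/* A worked example: for the expression i * 4 + 8, the MULT_EXPR
   contributes 1 * 4 = 4, the constant 8 contributes 8, and the
   PLUS_EXPR takes the minimum of the two, so the result is 4:
   i * 4 + 8 is a multiple of 4 whatever i is.  */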
6405
6406 /* Similar, except that the alignment requirements of TARGET are
6407 taken into account. Assume it is at least as aligned as its
6408 type, unless it is a COMPONENT_REF in which case the layout of
6409 the structure gives the alignment. */
6410
6411 static unsigned HOST_WIDE_INT
6412 highest_pow2_factor_for_target (tree target, tree exp)
6413 {
6414 unsigned HOST_WIDE_INT target_align, factor;
6415
6416 factor = highest_pow2_factor (exp);
6417 if (TREE_CODE (target) == COMPONENT_REF)
6418 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6419 else
6420 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6421 return MAX (factor, target_align);
6422 }
6423 \f
6424 /* Return &VAR expression for emulated thread local VAR. */
6425
6426 static tree
6427 emutls_var_address (tree var)
6428 {
6429 tree emuvar = emutls_decl (var);
6430 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6431 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6432 tree arglist = build_tree_list (NULL_TREE, arg);
6433 tree call = build_function_call_expr (fn, arglist);
6434 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6435 }
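
/* For example (sketch): for "__thread int x;" on a target without
   native TLS support, the address &x becomes, in tree form, roughly

     (int *) __emutls_get_address (&_emutls.x)

   following the _emutls.VAR notation used in the comments below; the
   control variable is the one created by emutls_decl.  */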
6436 \f
6437 /* Expands variable VAR. */
6438
6439 void
6440 expand_var (tree var)
6441 {
6442 if (DECL_EXTERNAL (var))
6443 return;
6444
6445 if (TREE_STATIC (var))
6446 /* If this is an inlined copy of a static local variable,
6447 look up the original decl. */
6448 var = DECL_ORIGIN (var);
6449
6450 if (TREE_STATIC (var)
6451 ? !TREE_ASM_WRITTEN (var)
6452 : !DECL_RTL_SET_P (var))
6453 {
6454 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6455 /* Should be ignored. */;
6456 else if (lang_hooks.expand_decl (var))
6457 /* OK. */;
6458 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6459 expand_decl (var);
6460 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6461 rest_of_decl_compilation (var, 0, 0);
6462 else
6463 /* No expansion needed. */
6464 gcc_assert (TREE_CODE (var) == TYPE_DECL
6465 || TREE_CODE (var) == CONST_DECL
6466 || TREE_CODE (var) == FUNCTION_DECL
6467 || TREE_CODE (var) == LABEL_DECL);
6468 }
6469 }
6470
6471 /* Subroutine of expand_expr. Expand the two operands of a binary
6472 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6473 The value may be stored in TARGET if TARGET is nonzero. The
6474 MODIFIER argument is as documented by expand_expr. */
6475
6476 static void
6477 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6478 enum expand_modifier modifier)
6479 {
6480 if (! safe_from_p (target, exp1, 1))
6481 target = 0;
6482 if (operand_equal_p (exp0, exp1, 0))
6483 {
6484 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6485 *op1 = copy_rtx (*op0);
6486 }
6487 else
6488 {
6489 /* If we need to preserve evaluation order, copy exp0 into its own
6490 temporary variable so that it can't be clobbered by exp1. */
6491 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6492 exp0 = save_expr (exp0);
6493 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6494 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6495 }
6496 }
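
/* A usage sketch (hypothetical binary tree EXP): expand both operands
   of a PLUS_EXPR before emitting the addition.

     rtx op0, op1;
     expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                      subtarget, &op0, &op1, EXPAND_NORMAL);

   This mirrors how the binary-operator cases of expand_expr_real_1 use
   it; SUBTARGET here is whatever target is safe for operand 0.  */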
6497
6498 \f
6499 /* Return a MEM that contains constant EXP. DEFER is as for
6500 output_constant_def and MODIFIER is as for expand_expr. */
6501
6502 static rtx
6503 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6504 {
6505 rtx mem;
6506
6507 mem = output_constant_def (exp, defer);
6508 if (modifier != EXPAND_INITIALIZER)
6509 mem = use_anchored_address (mem);
6510 return mem;
6511 }
6512
6513 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6514 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6515
6516 static rtx
6517 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6518 enum expand_modifier modifier)
6519 {
6520 rtx result, subtarget;
6521 tree inner, offset;
6522 HOST_WIDE_INT bitsize, bitpos;
6523 int volatilep, unsignedp;
6524 enum machine_mode mode1;
6525
6526 /* If we are taking the address of a constant and are at the top level,
6527 we have to use output_constant_def since we can't call force_const_mem
6528 at top level. */
6529 /* ??? This should be considered a front-end bug. We should not be
6530 generating ADDR_EXPR of something that isn't an LVALUE. The only
6531 exception here is STRING_CST. */
6532 if (TREE_CODE (exp) == CONSTRUCTOR
6533 || CONSTANT_CLASS_P (exp))
6534 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6535
6536 /* Everything must be something allowed by is_gimple_addressable. */
6537 switch (TREE_CODE (exp))
6538 {
6539 case INDIRECT_REF:
6540 /* This case will happen via recursion for &a->b. */
6541 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6542
6543 case CONST_DECL:
6544 /* Recurse and make the output_constant_def clause above handle this. */
6545 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6546 tmode, modifier);
6547
6548 case REALPART_EXPR:
6549 /* The real part of the complex number is always first, therefore
6550 the address is the same as the address of the parent object. */
6551 offset = 0;
6552 bitpos = 0;
6553 inner = TREE_OPERAND (exp, 0);
6554 break;
6555
6556 case IMAGPART_EXPR:
6557 /* The imaginary part of the complex number is always second.
6558 The expression is therefore always offset by the size of the
6559 scalar type. */
6560 offset = 0;
6561 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6562 inner = TREE_OPERAND (exp, 0);
6563 break;
6564
6565 case VAR_DECL:
6566 /* TLS emulation hook - replace __thread VAR's &VAR with
6567 __emutls_get_address (&_emutls.VAR). */
6568 if (! targetm.have_tls
6569 && TREE_CODE (exp) == VAR_DECL
6570 && DECL_THREAD_LOCAL_P (exp))
6571 {
6572 exp = emutls_var_address (exp);
6573 return expand_expr (exp, target, tmode, modifier);
6574 }
6575 /* Fall through. */
6576
6577 default:
6578 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6579 expand_expr, as that can have various side effects; LABEL_DECLs, for
6580 example, may not have their DECL_RTL set yet. Assume language
6581 specific tree nodes can be expanded in some interesting way. */
6582 if (DECL_P (exp)
6583 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6584 {
6585 result = expand_expr (exp, target, tmode,
6586 modifier == EXPAND_INITIALIZER
6587 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6588
6589 /* If the DECL isn't in memory, then the DECL wasn't properly
6590 marked TREE_ADDRESSABLE, which will be either a front-end
6591 or a tree optimizer bug. */
6592 gcc_assert (MEM_P (result));
6593 result = XEXP (result, 0);
6594
6595 /* ??? Is this needed anymore? */
6596 if (DECL_P (exp) && ! TREE_USED (exp))
6597 {
6598 assemble_external (exp);
6599 TREE_USED (exp) = 1;
6600 }
6601
6602 if (modifier != EXPAND_INITIALIZER
6603 && modifier != EXPAND_CONST_ADDRESS)
6604 result = force_operand (result, target);
6605 return result;
6606 }
6607
6608 /* Pass FALSE as the last argument to get_inner_reference although
6609 we are expanding to RTL. The rationale is that we know how to
6610 handle "aligning nodes" here: we can just bypass them because
6611 they won't change the final object whose address will be returned
6612 (they actually exist only for that purpose). */
6613 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6614 &mode1, &unsignedp, &volatilep, false);
6615 break;
6616 }
6617
6618 /* We must have made progress. */
6619 gcc_assert (inner != exp);
6620
6621 subtarget = offset || bitpos ? NULL_RTX : target;
6622 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6623
6624 if (offset)
6625 {
6626 rtx tmp;
6627
6628 if (modifier != EXPAND_NORMAL)
6629 result = force_operand (result, NULL);
6630 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6631
6632 result = convert_memory_address (tmode, result);
6633 tmp = convert_memory_address (tmode, tmp);
6634
6635 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6636 result = gen_rtx_PLUS (tmode, result, tmp);
6637 else
6638 {
6639 subtarget = bitpos ? NULL_RTX : target;
6640 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6641 1, OPTAB_LIB_WIDEN);
6642 }
6643 }
6644
6645 if (bitpos)
6646 {
6647 /* Someone beforehand should have rejected taking the address
6648 of such an object. */
6649 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6650
6651 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6652 if (modifier < EXPAND_SUM)
6653 result = force_operand (result, target);
6654 }
6655
6656 return result;
6657 }
6658
6659 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6660 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6661
6662 static rtx
6663 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6664 enum expand_modifier modifier)
6665 {
6666 enum machine_mode rmode;
6667 rtx result;
6668
6669 /* Target mode of VOIDmode says "whatever's natural". */
6670 if (tmode == VOIDmode)
6671 tmode = TYPE_MODE (TREE_TYPE (exp));
6672
6673 /* We can get called with some Weird Things if the user does silliness
6674 like "(short) &a". In that case, convert_memory_address won't do
6675 the right thing, so ignore the given target mode. */
6676 if (tmode != Pmode && tmode != ptr_mode)
6677 tmode = Pmode;
6678
6679 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6680 tmode, modifier);
6681
6682 /* Despite expand_expr's claims concerning ignoring TMODE when not
6683 strictly convenient, stuff breaks if we don't honor it. Note
6684 that combined with the above, we only do this for pointer modes. */
6685 rmode = GET_MODE (result);
6686 if (rmode == VOIDmode)
6687 rmode = tmode;
6688 if (rmode != tmode)
6689 result = convert_memory_address (tmode, result);
6690
6691 return result;
6692 }
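
/* For example: expanding the C expression &a[i] reaches this function
   with an ADDR_EXPR of an ARRAY_REF.  expand_expr_addr_expr_1 then
   yields the address of A plus I times the element size; under
   EXPAND_SUM or EXPAND_INITIALIZER the sum may be returned as a bare
   (PLUS ...) rather than forced into a register.  */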
6693
6694
6695 /* expand_expr: generate code for computing expression EXP.
6696 An rtx for the computed value is returned. The value is never null.
6697 In the case of a void EXP, const0_rtx is returned.
6698
6699 The value may be stored in TARGET if TARGET is nonzero.
6700 TARGET is just a suggestion; callers must assume that
6701 the rtx returned may not be the same as TARGET.
6702
6703 If TARGET is CONST0_RTX, it means that the value will be ignored.
6704
6705 If TMODE is not VOIDmode, it suggests generating the
6706 result in mode TMODE. But this is done only when convenient.
6707 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6708 TMODE is just a suggestion; callers must assume that
6709 the rtx returned may not have mode TMODE.
6710
6711 Note that TARGET may have neither TMODE nor MODE. In that case, it
6712 probably will not be used.
6713
6714 If MODIFIER is EXPAND_SUM then when EXP is an addition
6715 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6716 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6717 products as above, or REG or MEM, or constant.
6718 Ordinarily in such cases we would output mul or add instructions
6719 and then return a pseudo reg containing the sum.
6720
6721 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6722 it also marks a label as absolutely required (it can't be dead).
6723 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6724 This is used for outputting expressions used in initializers.
6725
6726 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6727 with a constant address even if that address is not normally legitimate.
6728 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6729
6730 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6731 a call parameter. Such targets require special care as we haven't yet
6732 marked TARGET so that it's safe from being trashed by libcalls. We
6733 don't want to use TARGET for anything but the final result;
6734 intermediate values must go elsewhere. Additionally, calls to
6735 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6736
6737 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6738 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6739 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6740 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6741 recursively. */
6742
6743 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6744 enum expand_modifier, rtx *);
6745
6746 rtx
6747 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6748 enum expand_modifier modifier, rtx *alt_rtl)
6749 {
6750 int rn = -1;
6751 rtx ret, last = NULL;
6752
6753 /* Handle ERROR_MARK before anybody tries to access its type. */
6754 if (TREE_CODE (exp) == ERROR_MARK
6755 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
6756 {
6757 ret = CONST0_RTX (tmode);
6758 return ret ? ret : const0_rtx;
6759 }
6760
6761 if (flag_non_call_exceptions)
6762 {
6763 rn = lookup_stmt_eh_region (exp);
6764 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6765 if (rn >= 0)
6766 last = get_last_insn ();
6767 }
6768
6769 /* If this is an expression of some kind and it has an associated line
6770 number, then emit the line number before expanding the expression.
6771
6772 We need to save and restore the file and line information so that
6773 errors discovered during expansion are emitted with the right
6774 information. It would be better of the diagnostic routines
6775 used the file/line information embedded in the tree nodes rather
6776 than globals. */
6777 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6778 {
6779 location_t saved_location = input_location;
6780 input_location = EXPR_LOCATION (exp);
6781 emit_line_note (input_location);
6782
6783 /* Record where the insns produced belong. */
6784 record_block_change (TREE_BLOCK (exp));
6785
6786 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6787
6788 input_location = saved_location;
6789 }
6790 else
6791 {
6792 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6793 }
6794
6795 /* If using non-call exceptions, mark all insns that may trap.
6796 expand_call() will mark CALL_INSNs before we get to this code,
6797 but it doesn't handle libcalls, and these may trap. */
6798 if (rn >= 0)
6799 {
6800 rtx insn;
6801 for (insn = next_real_insn (last); insn;
6802 insn = next_real_insn (insn))
6803 {
6804 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6805 /* If we want exceptions for non-call insns, any
6806 may_trap_p instruction may throw. */
6807 && GET_CODE (PATTERN (insn)) != CLOBBER
6808 && GET_CODE (PATTERN (insn)) != USE
6809 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6810 {
6811 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6812 REG_NOTES (insn));
6813 }
6814 }
6815 }
6816
6817 return ret;
6818 }
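
/* A usage note (sketch): most callers go through the expand_expr
   wrapper declared in expr.h, so

     rtx x = expand_expr (exp, target, mode, EXPAND_NORMAL);

   is the common spelling of expand_expr_real (exp, target, mode,
   EXPAND_NORMAL, NULL).  */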
6819
6820 static rtx
6821 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6822 enum expand_modifier modifier, rtx *alt_rtl)
6823 {
6824 rtx op0, op1, temp, decl_rtl;
6825 tree type;
6826 int unsignedp;
6827 enum machine_mode mode;
6828 enum tree_code code = TREE_CODE (exp);
6829 optab this_optab;
6830 rtx subtarget, original_target;
6831 int ignore;
6832 tree context, subexp0, subexp1;
6833 bool reduce_bit_field = false;
6834 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6835 ? reduce_to_bit_field_precision ((expr), \
6836 target, \
6837 type) \
6838 : (expr))
6839
6840 if (GIMPLE_STMT_P (exp))
6841 {
6842 type = void_type_node;
6843 mode = VOIDmode;
6844 unsignedp = 0;
6845 }
6846 else
6847 {
6848 type = TREE_TYPE (exp);
6849 mode = TYPE_MODE (type);
6850 unsignedp = TYPE_UNSIGNED (type);
6851 }
6852 if (lang_hooks.reduce_bit_field_operations
6853 && TREE_CODE (type) == INTEGER_TYPE
6854 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6855 {
6856 /* An operation in what may be a bit-field type needs the
6857 result to be reduced to the precision of the bit-field type,
6858 which is narrower than that of the type's mode. */
6859 reduce_bit_field = true;
6860 if (modifier == EXPAND_STACK_PARM)
6861 target = 0;
6862 }
6863
6864 /* Use subtarget as the target for operand 0 of a binary operation. */
6865 subtarget = get_subtarget (target);
6866 original_target = target;
6867 ignore = (target == const0_rtx
6868 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6869 || code == CONVERT_EXPR || code == COND_EXPR
6870 || code == VIEW_CONVERT_EXPR)
6871 && TREE_CODE (type) == VOID_TYPE));
6872
6873 /* If we are going to ignore this result, we need only do something
6874 if there is a side-effect somewhere in the expression. If there
6875 is, short-circuit the most common cases here. Note that we must
6876 not call expand_expr with anything but const0_rtx in case this
6877 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6878
6879 if (ignore)
6880 {
6881 if (! TREE_SIDE_EFFECTS (exp))
6882 return const0_rtx;
6883
6884 /* Ensure we reference a volatile object even if the value is ignored, but
6885 don't do this if all we are doing is taking its address. */
6886 if (TREE_THIS_VOLATILE (exp)
6887 && TREE_CODE (exp) != FUNCTION_DECL
6888 && mode != VOIDmode && mode != BLKmode
6889 && modifier != EXPAND_CONST_ADDRESS)
6890 {
6891 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6892 if (MEM_P (temp))
6893 temp = copy_to_reg (temp);
6894 return const0_rtx;
6895 }
6896
6897 if (TREE_CODE_CLASS (code) == tcc_unary
6898 || code == COMPONENT_REF || code == INDIRECT_REF)
6899 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6900 modifier);
6901
6902 else if (TREE_CODE_CLASS (code) == tcc_binary
6903 || TREE_CODE_CLASS (code) == tcc_comparison
6904 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6905 {
6906 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6907 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6908 return const0_rtx;
6909 }
6910 else if (code == BIT_FIELD_REF)
6911 {
6912 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6913 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6914 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6915 return const0_rtx;
6916 }
6917
6918 target = 0;
6919 }
6920
6921
6922 switch (code)
6923 {
6924 case LABEL_DECL:
6925 {
6926 tree function = decl_function_context (exp);
6927
6928 temp = label_rtx (exp);
6929 temp = gen_rtx_LABEL_REF (Pmode, temp);
6930
6931 if (function != current_function_decl
6932 && function != 0)
6933 LABEL_REF_NONLOCAL_P (temp) = 1;
6934
6935 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6936 return temp;
6937 }
6938
6939 case SSA_NAME:
6940 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6941 NULL);
6942
6943 case PARM_DECL:
6944 case VAR_DECL:
6945 /* If a static var's type was incomplete when the decl was written,
6946 but the type is complete now, lay out the decl now. */
6947 if (DECL_SIZE (exp) == 0
6948 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6949 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6950 layout_decl (exp, 0);
6951
6952 /* TLS emulation hook - replace __thread vars with
6953 *__emutls_get_address (&_emutls.var). */
6954 if (! targetm.have_tls
6955 && TREE_CODE (exp) == VAR_DECL
6956 && DECL_THREAD_LOCAL_P (exp))
6957 {
6958 exp = build_fold_indirect_ref (emutls_var_address (exp));
6959 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
6960 }
6961
6962 /* ... fall through ... */
6963
6964 case FUNCTION_DECL:
6965 case RESULT_DECL:
6966 decl_rtl = DECL_RTL (exp);
6967 gcc_assert (decl_rtl);
6968
6969 /* Ensure the variable is marked as used even if it doesn't go through
6970 a parser. If it hasn't been used yet, write out an external
6971 definition. */
6972 if (! TREE_USED (exp))
6973 {
6974 assemble_external (exp);
6975 TREE_USED (exp) = 1;
6976 }
6977
6978 /* Show we haven't gotten RTL for this yet. */
6979 temp = 0;
6980
6981 /* Variables inherited from containing functions should have
6982 been lowered by this point. */
6983 context = decl_function_context (exp);
6984 gcc_assert (!context
6985 || context == current_function_decl
6986 || TREE_STATIC (exp)
6987 /* ??? C++ creates functions that are not TREE_STATIC. */
6988 || TREE_CODE (exp) == FUNCTION_DECL);
6989
6990 /* This is the case of an array whose size is to be determined
6991 from its initializer, while the initializer is still being parsed.
6992 See expand_decl. */
6993
6994 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6995 temp = validize_mem (decl_rtl);
6996
6997 /* If DECL_RTL is memory, we are in the normal case: if either
6998 the address is not valid, or it is not a register and -fforce-addr
6999 is specified, get the address into a register. */
7000
7001 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7002 {
7003 if (alt_rtl)
7004 *alt_rtl = decl_rtl;
7005 decl_rtl = use_anchored_address (decl_rtl);
7006 if (modifier != EXPAND_CONST_ADDRESS
7007 && modifier != EXPAND_SUM
7008 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
7009 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
7010 temp = replace_equiv_address (decl_rtl,
7011 copy_rtx (XEXP (decl_rtl, 0)));
7012 }
7013
7014 /* If we got something, return it. But first, set the alignment
7015 if the address is a register. */
7016 if (temp != 0)
7017 {
7018 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7019 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7020
7021 return temp;
7022 }
7023
7024 /* If the mode of DECL_RTL does not match that of the decl, it
7025 must be a promoted value. We return a SUBREG of the wanted mode,
7026 but mark it so that we know that it was already extended. */
7027
7028 if (REG_P (decl_rtl)
7029 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7030 {
7031 enum machine_mode pmode;
7032
7033 /* Get the signedness used for this variable. Ensure we get the
7034 same mode we got when the variable was declared. */
7035 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7036 (TREE_CODE (exp) == RESULT_DECL
7037 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7038 gcc_assert (GET_MODE (decl_rtl) == pmode);
7039
7040 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7041 SUBREG_PROMOTED_VAR_P (temp) = 1;
7042 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7043 return temp;
7044 }
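/* For example, on a target whose PROMOTE_MODE widens QImode to
   SImode, a 'char' PARM_DECL lives in an SImode pseudo and the code
   above hands back roughly
     (subreg:QI (reg:SI n) 0)
   with SUBREG_PROMOTED_VAR_P set, telling later consumers that the
   upper bits already hold a valid extension.  (Illustrative sketch;
   the subreg offset depends on endianness.)  */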
7045
7046 return decl_rtl;
7047
7048 case INTEGER_CST:
7049 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7050 TREE_INT_CST_HIGH (exp), mode);
7051
7052 /* ??? If overflow is set, fold will have done an incomplete job,
7053 which can result in (plus xx (const_int 0)), which can get
7054 simplified by validate_replace_rtx during virtual register
7055 instantiation, which can result in unrecognizable insns.
7056 Avoid this by forcing all overflows into registers. */
7057 if (TREE_OVERFLOW (exp)
7058 && modifier != EXPAND_INITIALIZER)
7059 temp = force_reg (mode, temp);
7060
7061 return temp;
7062
7063 case VECTOR_CST:
7064 {
7065 tree tmp = NULL_TREE;
7066 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7067 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7068 return const_vector_from_tree (exp);
7069 if (GET_MODE_CLASS (mode) == MODE_INT)
7070 {
7071 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7072 if (type_for_mode)
7073 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7074 }
7075 if (!tmp)
7076 tmp = build_constructor_from_list (type,
7077 TREE_VECTOR_CST_ELTS (exp));
7078 return expand_expr (tmp, ignore ? const0_rtx : target,
7079 tmode, modifier);
7080 }
7081
7082 case CONST_DECL:
7083 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7084
7085 case REAL_CST:
7086 /* If optimized, generate immediate CONST_DOUBLE
7087 which will be turned into memory by reload if necessary.
7088
7089 We used to force a register so that loop.c could see it. But
7090 this does not allow gen_* patterns to perform optimizations with
7091 the constants. It also produces two insns in cases like "x = 1.0;".
7092 On most machines, floating-point constants are not permitted in
7093 many insns, so we'd end up copying it to a register in any case.
7094
7095 Now, we do the copying in expand_binop, if appropriate. */
7096 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7097 TYPE_MODE (TREE_TYPE (exp)));
7098
7099 case COMPLEX_CST:
7100 /* Handle evaluating a complex constant in a CONCAT target. */
7101 if (original_target && GET_CODE (original_target) == CONCAT)
7102 {
7103 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7104 rtx rtarg, itarg;
7105
7106 rtarg = XEXP (original_target, 0);
7107 itarg = XEXP (original_target, 1);
7108
7109 /* Move the real and imaginary parts separately. */
7110 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
7111 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
7112
7113 if (op0 != rtarg)
7114 emit_move_insn (rtarg, op0);
7115 if (op1 != itarg)
7116 emit_move_insn (itarg, op1);
7117
7118 return original_target;
7119 }
7120
7121 /* ... fall through ... */
7122
7123 case STRING_CST:
7124 temp = expand_expr_constant (exp, 1, modifier);
7125
7126 /* temp contains a constant address.
7127 On RISC machines where a constant address isn't valid,
7128 make some insns to get that address into a register. */
7129 if (modifier != EXPAND_CONST_ADDRESS
7130 && modifier != EXPAND_INITIALIZER
7131 && modifier != EXPAND_SUM
7132 && (! memory_address_p (mode, XEXP (temp, 0))
7133 || flag_force_addr))
7134 return replace_equiv_address (temp,
7135 copy_rtx (XEXP (temp, 0)));
7136 return temp;
7137
7138 case SAVE_EXPR:
7139 {
7140 tree val = TREE_OPERAND (exp, 0);
7141 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7142
7143 if (!SAVE_EXPR_RESOLVED_P (exp))
7144 {
7145 /* We can indeed still hit this case, typically via builtin
7146 expanders calling save_expr immediately before expanding
7147 something. Assume this means that we only have to deal
7148 with non-BLKmode values. */
7149 gcc_assert (GET_MODE (ret) != BLKmode);
7150
7151 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7152 DECL_ARTIFICIAL (val) = 1;
7153 DECL_IGNORED_P (val) = 1;
7154 TREE_OPERAND (exp, 0) = val;
7155 SAVE_EXPR_RESOLVED_P (exp) = 1;
7156
7157 if (!CONSTANT_P (ret))
7158 ret = copy_to_reg (ret);
7159 SET_DECL_RTL (val, ret);
7160 }
7161
7162 return ret;
7163 }
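/* Illustrative sketch: for SAVE_EXPR <a + b>, the first expansion
   computes a + b, copies it into a pseudo, and records that pseudo
   as the DECL_RTL of the artificial VAR_DECL built above; any later
   expansion of the same SAVE_EXPR simply reuses the pseudo instead
   of recomputing a + b.  */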
7164
7165 case GOTO_EXPR:
7166 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7167 expand_goto (TREE_OPERAND (exp, 0));
7168 else
7169 expand_computed_goto (TREE_OPERAND (exp, 0));
7170 return const0_rtx;
7171
7172 case CONSTRUCTOR:
7173 /* If we don't need the result, just ensure we evaluate any
7174 subexpressions. */
7175 if (ignore)
7176 {
7177 unsigned HOST_WIDE_INT idx;
7178 tree value;
7179
7180 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7181 expand_expr (value, const0_rtx, VOIDmode, 0);
7182
7183 return const0_rtx;
7184 }
7185
7186 /* Try to avoid creating a temporary at all. This is possible
7187 if all of the initializer is zero.
7188 FIXME: try to handle all [0..255] initializers we can handle
7189 with memset. */
7190 else if (TREE_STATIC (exp)
7191 && !TREE_ADDRESSABLE (exp)
7192 && target != 0 && mode == BLKmode
7193 && all_zeros_p (exp))
7194 {
7195 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7196 return target;
7197 }
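/* E.g. for BLKmode code such as
     struct S s = { 0 };
   with a usable TARGET, the constructor is emitted as a single
   clear_storage (memset-style) block clear rather than a
   field-by-field store sequence.  (Illustrative sketch.)  */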
7198
7199 /* All elts simple constants => refer to a constant in memory. But
7200 if this is a non-BLKmode mode, let it store a field at a time
7201 since that should make a CONST_INT or CONST_DOUBLE when we
7202 fold. Likewise, if we have a target we can use, it is best to
7203 store directly into the target unless the type is large enough
7204 that memcpy will be used. If we are making an initializer and
7205 all operands are constant, put it in memory as well.
7206
7207 FIXME: Avoid trying to fill vector constructors piecemeal.
7208 Output them with output_constant_def below unless we're sure
7209 they're zeros. This should go away when vector initializers
7210 are treated like VECTOR_CST instead of arrays.
7211 */
7212 else if ((TREE_STATIC (exp)
7213 && ((mode == BLKmode
7214 && ! (target != 0 && safe_from_p (target, exp, 1)))
7215 || TREE_ADDRESSABLE (exp)
7216 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7217 && (! MOVE_BY_PIECES_P
7218 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7219 TYPE_ALIGN (type)))
7220 && ! mostly_zeros_p (exp))))
7221 || ((modifier == EXPAND_INITIALIZER
7222 || modifier == EXPAND_CONST_ADDRESS)
7223 && TREE_CONSTANT (exp)))
7224 {
7225 rtx constructor = expand_expr_constant (exp, 1, modifier);
7226
7227 if (modifier != EXPAND_CONST_ADDRESS
7228 && modifier != EXPAND_INITIALIZER
7229 && modifier != EXPAND_SUM)
7230 constructor = validize_mem (constructor);
7231
7232 return constructor;
7233 }
7234 else
7235 {
7236 /* Handle calls that pass values in multiple non-contiguous
7237 locations. The Irix 6 ABI has examples of this. */
7238 if (target == 0 || ! safe_from_p (target, exp, 1)
7239 || GET_CODE (target) == PARALLEL
7240 || modifier == EXPAND_STACK_PARM)
7241 target
7242 = assign_temp (build_qualified_type (type,
7243 (TYPE_QUALS (type)
7244 | (TREE_READONLY (exp)
7245 * TYPE_QUAL_CONST))),
7246 0, TREE_ADDRESSABLE (exp), 1);
7247
7248 store_constructor (exp, target, 0, int_expr_size (exp));
7249 return target;
7250 }
7251
7252 case MISALIGNED_INDIRECT_REF:
7253 case ALIGN_INDIRECT_REF:
7254 case INDIRECT_REF:
7255 {
7256 tree exp1 = TREE_OPERAND (exp, 0);
7257
7258 if (modifier != EXPAND_WRITE)
7259 {
7260 tree t;
7261
7262 t = fold_read_from_constant_string (exp);
7263 if (t)
7264 return expand_expr (t, target, tmode, modifier);
7265 }
7266
7267 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7268 op0 = memory_address (mode, op0);
7269
7270 if (code == ALIGN_INDIRECT_REF)
7271 {
7272 int align = TYPE_ALIGN_UNIT (type);
7273 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7274 op0 = memory_address (mode, op0);
7275 }
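/* Worked example: with TYPE_ALIGN_UNIT (type) == 16 the mask is
   GEN_INT (-16), i.e. all-ones except the low four bits, so the AND
   clears those bits and rounds the address down to a 16-byte
   boundary.  */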
7276
7277 temp = gen_rtx_MEM (mode, op0);
7278
7279 set_mem_attributes (temp, exp, 0);
7280
7281 /* Resolve the misalignment now, so that we don't have to remember
7282 to resolve it later. Of course, this only works for reads. */
7283 /* ??? When we get around to supporting writes, we'll have to handle
7284 this in store_expr directly. The vectorizer isn't generating
7285 those yet, however. */
7286 if (code == MISALIGNED_INDIRECT_REF)
7287 {
7288 int icode;
7289 rtx reg, insn;
7290
7291 gcc_assert (modifier == EXPAND_NORMAL
7292 || modifier == EXPAND_STACK_PARM);
7293
7294 /* The vectorizer should have already checked the mode. */
7295 icode = movmisalign_optab->handlers[mode].insn_code;
7296 gcc_assert (icode != CODE_FOR_nothing);
7297
7298 /* We've already validated the memory, and we're creating a
7299 new pseudo destination. The predicates really can't fail. */
7300 reg = gen_reg_rtx (mode);
7301
7302 /* Nor can the insn generator. */
7303 insn = GEN_FCN (icode) (reg, temp);
7304 emit_insn (insn);
7305
7306 return reg;
7307 }
7308
7309 return temp;
7310 }
7311
7312 case TARGET_MEM_REF:
7313 {
7314 struct mem_address addr;
7315
7316 get_address_description (exp, &addr);
7317 op0 = addr_for_mem_ref (&addr, true);
7318 op0 = memory_address (mode, op0);
7319 temp = gen_rtx_MEM (mode, op0);
7320 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7321 }
7322 return temp;
7323
7324 case ARRAY_REF:
7325
7326 {
7327 tree array = TREE_OPERAND (exp, 0);
7328 tree index = TREE_OPERAND (exp, 1);
7329
7330 /* Fold an expression like: "foo"[2].
7331 This is not done in fold so it won't happen inside &.
7332 Don't fold if this is for wide characters since it's too
7333 difficult to do correctly and this is a very rare case. */
7334
7335 if (modifier != EXPAND_CONST_ADDRESS
7336 && modifier != EXPAND_INITIALIZER
7337 && modifier != EXPAND_MEMORY)
7338 {
7339 tree t = fold_read_from_constant_string (exp);
7340
7341 if (t)
7342 return expand_expr (t, target, tmode, modifier);
7343 }
7344
7345 /* If this is a constant index into a constant array,
7346 just get the value from the array. Handle both the cases when
7347 we have an explicit constructor and when our operand is a variable
7348 that was declared const. */
7349
7350 if (modifier != EXPAND_CONST_ADDRESS
7351 && modifier != EXPAND_INITIALIZER
7352 && modifier != EXPAND_MEMORY
7353 && TREE_CODE (array) == CONSTRUCTOR
7354 && ! TREE_SIDE_EFFECTS (array)
7355 && TREE_CODE (index) == INTEGER_CST)
7356 {
7357 unsigned HOST_WIDE_INT ix;
7358 tree field, value;
7359
7360 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7361 field, value)
7362 if (tree_int_cst_equal (field, index))
7363 {
7364 if (!TREE_SIDE_EFFECTS (value))
7365 return expand_expr (fold (value), target, tmode, modifier);
7366 break;
7367 }
7368 }
7369
7370 else if (optimize >= 1
7371 && modifier != EXPAND_CONST_ADDRESS
7372 && modifier != EXPAND_INITIALIZER
7373 && modifier != EXPAND_MEMORY
7374 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7375 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7376 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7377 && targetm.binds_local_p (array))
7378 {
7379 if (TREE_CODE (index) == INTEGER_CST)
7380 {
7381 tree init = DECL_INITIAL (array);
7382
7383 if (TREE_CODE (init) == CONSTRUCTOR)
7384 {
7385 unsigned HOST_WIDE_INT ix;
7386 tree field, value;
7387
7388 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7389 field, value)
7390 if (tree_int_cst_equal (field, index))
7391 {
7392 if (!TREE_SIDE_EFFECTS (value))
7393 return expand_expr (fold (value), target, tmode,
7394 modifier);
7395 break;
7396 }
7397 }
7398 else if (TREE_CODE (init) == STRING_CST)
7399 {
7400 tree index1 = index;
7401 tree low_bound = array_ref_low_bound (exp);
7402 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7403
7404 /* Optimize the special case of a zero lower bound.
7405
7406 We convert the low_bound to sizetype to avoid some problems
7407 with constant folding. (E.g. suppose the lower bound is 1,
7408 and its mode is QI. Without the conversion, (ARRAY
7409 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7410 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7411
7412 if (! integer_zerop (low_bound))
7413 index1 = size_diffop (index1, fold_convert (sizetype,
7414 low_bound));
7415
7416 if (0 > compare_tree_int (index1,
7417 TREE_STRING_LENGTH (init)))
7418 {
7419 tree type = TREE_TYPE (TREE_TYPE (init));
7420 enum machine_mode mode = TYPE_MODE (type);
7421
7422 if (GET_MODE_CLASS (mode) == MODE_INT
7423 && GET_MODE_SIZE (mode) == 1)
7424 return gen_int_mode (TREE_STRING_POINTER (init)
7425 [TREE_INT_CST_LOW (index1)],
7426 mode);
7427 }
7428 }
7429 }
7430 }
7431 }
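/* Illustrative sketch of the STRING_CST path above: with
   optimization enabled, for
     static const char greeting[] = "hi";
     ... greeting[1] ...
   the constant in-range index lets the whole ARRAY_REF fold to
   gen_int_mode ('i', QImode), so no memory reference is emitted.  */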
7432 goto normal_inner_ref;
7433
7434 case COMPONENT_REF:
7435 /* If the operand is a CONSTRUCTOR, we can just extract the
7436 appropriate field if it is present. */
7437 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7438 {
7439 unsigned HOST_WIDE_INT idx;
7440 tree field, value;
7441
7442 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7443 idx, field, value)
7444 if (field == TREE_OPERAND (exp, 1)
7445 /* We can normally use the value of the field in the
7446 CONSTRUCTOR. However, if this is a bitfield in
7447 an integral mode that we can fit in a HOST_WIDE_INT,
7448 we must mask only the number of bits in the bitfield,
7449 since this is done implicitly by the constructor. If
7450 the bitfield does not meet either of those conditions,
7451 we can't do this optimization. */
7452 && (! DECL_BIT_FIELD (field)
7453 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7454 && (GET_MODE_BITSIZE (DECL_MODE (field))
7455 <= HOST_BITS_PER_WIDE_INT))))
7456 {
7457 if (DECL_BIT_FIELD (field)
7458 && modifier == EXPAND_STACK_PARM)
7459 target = 0;
7460 op0 = expand_expr (value, target, tmode, modifier);
7461 if (DECL_BIT_FIELD (field))
7462 {
7463 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7464 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7465
7466 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7467 {
7468 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7469 op0 = expand_and (imode, op0, op1, target);
7470 }
7471 else
7472 {
7473 tree count
7474 = build_int_cst (NULL_TREE,
7475 GET_MODE_BITSIZE (imode) - bitsize);
7476
7477 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7478 target, 0);
7479 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7480 target, 0);
7481 }
7482 }
7483
7484 return op0;
7485 }
7486 }
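/* Worked example for the signed-bitfield path above: extracting a
   3-bit signed field whose value arrives in SImode uses
   count = 32 - 3 = 29, so the left shift moves the field's sign bit
   into bit 31 and the arithmetic right shift by 29 brings the value
   back sign-extended.  (Assumes 32-bit SImode.)  */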
7487 goto normal_inner_ref;
7488
7489 case BIT_FIELD_REF:
7490 case ARRAY_RANGE_REF:
7491 normal_inner_ref:
7492 {
7493 enum machine_mode mode1;
7494 HOST_WIDE_INT bitsize, bitpos;
7495 tree offset;
7496 int volatilep = 0;
7497 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7498 &mode1, &unsignedp, &volatilep, true);
7499 rtx orig_op0;
7500
7501 /* If we got back the original object, something is wrong. Perhaps
7502 we are evaluating an expression too early. In any event, don't
7503 infinitely recurse. */
7504 gcc_assert (tem != exp);
7505
7506 /* If TEM's type is a union of variable size, pass TARGET to the inner
7507 computation, since it will need a temporary and TARGET is known
7508 to be safe to use. This occurs in unchecked conversion in Ada. */
7509
7510 orig_op0 = op0
7511 = expand_expr (tem,
7512 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7513 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7514 != INTEGER_CST)
7515 && modifier != EXPAND_STACK_PARM
7516 ? target : NULL_RTX),
7517 VOIDmode,
7518 (modifier == EXPAND_INITIALIZER
7519 || modifier == EXPAND_CONST_ADDRESS
7520 || modifier == EXPAND_STACK_PARM)
7521 ? modifier : EXPAND_NORMAL);
7522
7523 /* If this is a constant, put it into a register if it is a legitimate
7524 constant, OFFSET is 0, and we won't try to extract outside the
7525 register (in case we were passed a partially uninitialized object
7526 or a view_conversion to a larger size). Force the constant to
7527 memory otherwise. */
7528 if (CONSTANT_P (op0))
7529 {
7530 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7531 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7532 && offset == 0
7533 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7534 op0 = force_reg (mode, op0);
7535 else
7536 op0 = validize_mem (force_const_mem (mode, op0));
7537 }
7538
7539 /* Otherwise, if this object is not in memory and we either have an
7540 offset, a BLKmode result, or a reference outside the object, put it
7541 there. Such cases can occur in Ada if we have unchecked conversion
7542 of an expression from a scalar type to an array or record type or
7543 for an ARRAY_RANGE_REF whose type is BLKmode. */
7544 else if (!MEM_P (op0)
7545 && (offset != 0
7546 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7547 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7548 {
7549 tree nt = build_qualified_type (TREE_TYPE (tem),
7550 (TYPE_QUALS (TREE_TYPE (tem))
7551 | TYPE_QUAL_CONST));
7552 rtx memloc = assign_temp (nt, 1, 1, 1);
7553
7554 emit_move_insn (memloc, op0);
7555 op0 = memloc;
7556 }
7557
7558 if (offset != 0)
7559 {
7560 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7561 EXPAND_SUM);
7562
7563 gcc_assert (MEM_P (op0));
7564
7565 #ifdef POINTERS_EXTEND_UNSIGNED
7566 if (GET_MODE (offset_rtx) != Pmode)
7567 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7568 #else
7569 if (GET_MODE (offset_rtx) != ptr_mode)
7570 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7571 #endif
7572
7573 if (GET_MODE (op0) == BLKmode
7574 /* A constant address in OP0 can have VOIDmode; we must
7575 not try to call force_reg in that case. */
7576 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7577 && bitsize != 0
7578 && (bitpos % bitsize) == 0
7579 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7580 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7581 {
7582 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7583 bitpos = 0;
7584 }
7585
7586 op0 = offset_address (op0, offset_rtx,
7587 highest_pow2_factor (offset));
7588 }
7589
7590 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7591 record its alignment as BIGGEST_ALIGNMENT. */
7592 if (MEM_P (op0) && bitpos == 0 && offset != 0
7593 && is_aligning_offset (offset, tem))
7594 set_mem_align (op0, BIGGEST_ALIGNMENT);
7595
7596 /* Don't forget about volatility even if this is a bitfield. */
7597 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7598 {
7599 if (op0 == orig_op0)
7600 op0 = copy_rtx (op0);
7601
7602 MEM_VOLATILE_P (op0) = 1;
7603 }
7604
7605 /* The following code doesn't handle CONCAT.
7606 Assume only bitpos == 0 can be used for CONCAT, due to
7607 one-element arrays having the same mode as their element. */
7608 if (GET_CODE (op0) == CONCAT)
7609 {
7610 gcc_assert (bitpos == 0
7611 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7612 return op0;
7613 }
7614
7615 /* In cases where an aligned union has an unaligned object
7616 as a field, we might be extracting a BLKmode value from
7617 an integer-mode (e.g., SImode) object. Handle this case
7618 by doing the extract into an object as wide as the field
7619 (which we know to be the width of a basic mode), then
7620 storing into memory, and changing the mode to BLKmode. */
7621 if (mode1 == VOIDmode
7622 || REG_P (op0) || GET_CODE (op0) == SUBREG
7623 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7624 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7625 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7626 && modifier != EXPAND_CONST_ADDRESS
7627 && modifier != EXPAND_INITIALIZER)
7628 /* If the field isn't aligned enough to fetch as a memref,
7629 fetch it as a bit field. */
7630 || (mode1 != BLKmode
7631 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7632 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7633 || (MEM_P (op0)
7634 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7635 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7636 && ((modifier == EXPAND_CONST_ADDRESS
7637 || modifier == EXPAND_INITIALIZER)
7638 ? STRICT_ALIGNMENT
7639 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7640 || (bitpos % BITS_PER_UNIT != 0)))
7641 /* If the type and the field are a constant size and the
7642 size of the type isn't the same size as the bitfield,
7643 we must use bitfield operations. */
7644 || (bitsize >= 0
7645 && TYPE_SIZE (TREE_TYPE (exp))
7646 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7647 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7648 bitsize)))
7649 {
7650 enum machine_mode ext_mode = mode;
7651
7652 if (ext_mode == BLKmode
7653 && ! (target != 0 && MEM_P (op0)
7654 && MEM_P (target)
7655 && bitpos % BITS_PER_UNIT == 0))
7656 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7657
7658 if (ext_mode == BLKmode)
7659 {
7660 if (target == 0)
7661 target = assign_temp (type, 0, 1, 1);
7662
7663 if (bitsize == 0)
7664 return target;
7665
7666 /* In this case, BITPOS must start at a byte boundary and
7667 TARGET, if specified, must be a MEM. */
7668 gcc_assert (MEM_P (op0)
7669 && (!target || MEM_P (target))
7670 && !(bitpos % BITS_PER_UNIT));
7671
7672 emit_block_move (target,
7673 adjust_address (op0, VOIDmode,
7674 bitpos / BITS_PER_UNIT),
7675 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7676 / BITS_PER_UNIT),
7677 (modifier == EXPAND_STACK_PARM
7678 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7679
7680 return target;
7681 }
7682
7683 op0 = validize_mem (op0);
7684
7685 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7686 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7687
7688 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7689 (modifier == EXPAND_STACK_PARM
7690 ? NULL_RTX : target),
7691 ext_mode, ext_mode);
7692
7693 /* If the result is a record type and BITSIZE is narrower than
7694 the mode of OP0, an integral mode, and this is a big endian
7695 machine, we must put the field into the high-order bits. */
7696 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7697 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7698 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7699 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7700 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7701 - bitsize),
7702 op0, 1);
7703
7704 /* If the result type is BLKmode, store the data into a temporary
7705 of the appropriate type, but with the mode corresponding to the
7706 mode for the data we have (op0's mode). It's tempting to make
7707 this a constant type, since we know it's only being stored once,
7708 but that can cause problems if we are taking the address of this
7709 COMPONENT_REF because the MEM of any reference via that address
7710 will have flags corresponding to the type, which will not
7711 necessarily be constant. */
7712 if (mode == BLKmode)
7713 {
7714 rtx new
7715 = assign_stack_temp_for_type
7716 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7717
7718 emit_move_insn (new, op0);
7719 op0 = copy_rtx (new);
7720 PUT_MODE (op0, BLKmode);
7721 set_mem_attributes (op0, exp, 1);
7722 }
7723
7724 return op0;
7725 }
7726
7727 /* If the result is BLKmode, use that to access the object
7728 now as well. */
7729 if (mode == BLKmode)
7730 mode1 = BLKmode;
7731
7732 /* Get a reference to just this component. */
7733 if (modifier == EXPAND_CONST_ADDRESS
7734 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7735 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7736 else
7737 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7738
7739 if (op0 == orig_op0)
7740 op0 = copy_rtx (op0);
7741
7742 set_mem_attributes (op0, exp, 0);
7743 if (REG_P (XEXP (op0, 0)))
7744 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7745
7746 MEM_VOLATILE_P (op0) |= volatilep;
7747 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7748 || modifier == EXPAND_CONST_ADDRESS
7749 || modifier == EXPAND_INITIALIZER)
7750 return op0;
7751 else if (target == 0)
7752 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7753
7754 convert_move (target, op0, unsignedp);
7755 return target;
7756 }
7757
7758 case OBJ_TYPE_REF:
7759 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7760
7761 case CALL_EXPR:
7762 /* Check for a built-in function. */
7763 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7764 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7765 == FUNCTION_DECL)
7766 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7767 {
7768 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7769 == BUILT_IN_FRONTEND)
7770 return lang_hooks.expand_expr (exp, original_target,
7771 tmode, modifier,
7772 alt_rtl);
7773 else
7774 return expand_builtin (exp, target, subtarget, tmode, ignore);
7775 }
7776
7777 return expand_call (exp, target, ignore);
7778
7779 case NON_LVALUE_EXPR:
7780 case NOP_EXPR:
7781 case CONVERT_EXPR:
7782 if (TREE_OPERAND (exp, 0) == error_mark_node)
7783 return const0_rtx;
7784
7785 if (TREE_CODE (type) == UNION_TYPE)
7786 {
7787 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7788
7789 /* If both input and output are BLKmode, this conversion isn't doing
7790 anything except possibly changing memory attributes. */
7791 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7792 {
7793 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7794 modifier);
7795
7796 result = copy_rtx (result);
7797 set_mem_attributes (result, exp, 0);
7798 return result;
7799 }
7800
7801 if (target == 0)
7802 {
7803 if (TYPE_MODE (type) != BLKmode)
7804 target = gen_reg_rtx (TYPE_MODE (type));
7805 else
7806 target = assign_temp (type, 0, 1, 1);
7807 }
7808
7809 if (MEM_P (target))
7810 /* Store data into beginning of memory target. */
7811 store_expr (TREE_OPERAND (exp, 0),
7812 adjust_address (target, TYPE_MODE (valtype), 0),
7813 modifier == EXPAND_STACK_PARM);
7814
7815 else
7816 {
7817 gcc_assert (REG_P (target));
7818
7819 /* Store this field into a union of the proper type. */
7820 store_field (target,
7821 MIN ((int_size_in_bytes (TREE_TYPE
7822 (TREE_OPERAND (exp, 0)))
7823 * BITS_PER_UNIT),
7824 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7825 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7826 type, 0);
7827 }
7828
7829 /* Return the entire union. */
7830 return target;
7831 }
7832
7833 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7834 {
7835 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7836 modifier);
7837
7838 /* If the signedness of the conversion differs and OP0 is
7839 a promoted SUBREG, clear that indication since we now
7840 have to do the proper extension. */
7841 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7842 && GET_CODE (op0) == SUBREG)
7843 SUBREG_PROMOTED_VAR_P (op0) = 0;
7844
7845 return REDUCE_BIT_FIELD (op0);
7846 }
7847
7848 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7849 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7850 if (GET_MODE (op0) == mode)
7851 ;
7852
7853 /* If OP0 is a constant, just convert it into the proper mode. */
7854 else if (CONSTANT_P (op0))
7855 {
7856 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7857 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7858
7859 if (modifier == EXPAND_INITIALIZER)
7860 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7861 subreg_lowpart_offset (mode,
7862 inner_mode));
7863 else
7864 op0 = convert_modes (mode, inner_mode, op0,
7865 TYPE_UNSIGNED (inner_type));
7866 }
7867
7868 else if (modifier == EXPAND_INITIALIZER)
7869 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7870
7871 else if (target == 0)
7872 op0 = convert_to_mode (mode, op0,
7873 TYPE_UNSIGNED (TREE_TYPE
7874 (TREE_OPERAND (exp, 0))));
7875 else
7876 {
7877 convert_move (target, op0,
7878 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7879 op0 = target;
7880 }
7881
7882 return REDUCE_BIT_FIELD (op0);
7883
7884 case VIEW_CONVERT_EXPR:
7885 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7886
7887 /* If the input and output modes are both the same, we are done. */
7888 if (TYPE_MODE (type) == GET_MODE (op0))
7889 ;
7890 /* If neither mode is BLKmode, and both modes are the same size
7891 then we can use gen_lowpart. */
7892 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7893 && GET_MODE_SIZE (TYPE_MODE (type))
7894 == GET_MODE_SIZE (GET_MODE (op0)))
7895 {
7896 if (GET_CODE (op0) == SUBREG)
7897 op0 = force_reg (GET_MODE (op0), op0);
7898 op0 = gen_lowpart (TYPE_MODE (type), op0);
7899 }
7900 /* If both modes are integral, then we can convert from one to the
7901 other. */
7902 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7903 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7904 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7905 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7906 /* As a last resort, spill op0 to memory, and reload it in a
7907 different mode. */
7908 else if (!MEM_P (op0))
7909 {
7910 /* If the operand is not a MEM, force it into memory. Since we
7911 are going to be changing the mode of the MEM, don't call
7912 force_const_mem for constants because we don't allow pool
7913 constants to change mode. */
7914 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7915
7916 gcc_assert (!TREE_ADDRESSABLE (exp));
7917
7918 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7919 target
7920 = assign_stack_temp_for_type
7921 (TYPE_MODE (inner_type),
7922 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7923
7924 emit_move_insn (target, op0);
7925 op0 = target;
7926 }
7927
7928 /* At this point, OP0 is in the correct mode. If the output type is such
7929 that the operand is known to be aligned, indicate that it is.
7930 Otherwise, we need only be concerned about alignment for non-BLKmode
7931 results. */
7932 if (MEM_P (op0))
7933 {
7934 op0 = copy_rtx (op0);
7935
7936 if (TYPE_ALIGN_OK (type))
7937 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7938 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7939 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7940 {
7941 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7942 HOST_WIDE_INT temp_size
7943 = MAX (int_size_in_bytes (inner_type),
7944 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7945 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7946 temp_size, 0, type);
7947 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7948
7949 gcc_assert (!TREE_ADDRESSABLE (exp));
7950
7951 if (GET_MODE (op0) == BLKmode)
7952 emit_block_move (new_with_op0_mode, op0,
7953 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7954 (modifier == EXPAND_STACK_PARM
7955 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7956 else
7957 emit_move_insn (new_with_op0_mode, op0);
7958
7959 op0 = new;
7960 }
7961
7962 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7963 }
7964
7965 return op0;
7966
7967 case PLUS_EXPR:
7968 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7969 something else, make sure we add the register to the constant and
7970 then to the other thing. This case can occur during strength
7971 reduction and doing it this way will produce better code if the
7972 frame pointer or argument pointer is eliminated.
7973
7974 fold-const.c will ensure that the constant is always in the inner
7975 PLUS_EXPR, so the only case we need to do anything about is if
7976 sp, ap, or fp is our second argument, in which case we must swap
7977 the innermost first argument and our second argument. */
7978
7979 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7980 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7981 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7982 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7983 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7984 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7985 {
7986 tree t = TREE_OPERAND (exp, 1);
7987
7988 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7989 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7990 }
7991
7992 /* If the result is to be ptr_mode and we are adding an integer to
7993 something, we might be forming a constant. So try to use
7994 plus_constant. If it produces a sum and we can't accept it,
7995 use force_operand. This allows P = &ARR[const] to generate
7996 efficient code on machines where a SYMBOL_REF is not a valid
7997 address.
7998
7999 If this is an EXPAND_SUM call, always return the sum. */
8000 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8001 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8002 {
8003 if (modifier == EXPAND_STACK_PARM)
8004 target = 0;
8005 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8006 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8007 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8008 {
8009 rtx constant_part;
8010
8011 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8012 EXPAND_SUM);
8013 /* Use immed_double_const to ensure that the constant is
8014 truncated according to the mode of OP1, then sign extended
8015 to a HOST_WIDE_INT. Using the constant directly can result
8016 in non-canonical RTL in a 64x32 cross compile. */
8017 constant_part
8018 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8019 (HOST_WIDE_INT) 0,
8020 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8021 op1 = plus_constant (op1, INTVAL (constant_part));
8022 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8023 op1 = force_operand (op1, target);
8024 return REDUCE_BIT_FIELD (op1);
8025 }
8026
8027 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8028 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8029 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8030 {
8031 rtx constant_part;
8032
8033 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8034 (modifier == EXPAND_INITIALIZER
8035 ? EXPAND_INITIALIZER : EXPAND_SUM));
8036 if (! CONSTANT_P (op0))
8037 {
8038 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8039 VOIDmode, modifier);
8040 /* Return a PLUS if modifier says it's OK. */
8041 if (modifier == EXPAND_SUM
8042 || modifier == EXPAND_INITIALIZER)
8043 return simplify_gen_binary (PLUS, mode, op0, op1);
8044 goto binop2;
8045 }
8046 /* Use immed_double_const to ensure that the constant is
8047 truncated according to the mode of OP0, then sign extended
8048 to a HOST_WIDE_INT. Using the constant directly can result
8049 in non-canonical RTL in a 64x32 cross compile. */
8050 constant_part
8051 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8052 (HOST_WIDE_INT) 0,
8053 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8054 op0 = plus_constant (op0, INTVAL (constant_part));
8055 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8056 op0 = force_operand (op0, target);
8057 return REDUCE_BIT_FIELD (op0);
8058 }
8059 }
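/* Illustrative sketch: under EXPAND_SUM, expanding &arr[3] for an
   'int' array (4-byte elements) can come back from plus_constant as
     (const (plus (symbol_ref "arr") (const_int 12)))
   leaving it to the caller to decide whether the address still
   needs to be legitimized.  */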
8060
8061 /* No sense saving up arithmetic to be done
8062 if it's all in the wrong mode to form part of an address.
8063 And force_operand won't know whether to sign-extend or
8064 zero-extend. */
8065 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8066 || mode != ptr_mode)
8067 {
8068 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8069 subtarget, &op0, &op1, 0);
8070 if (op0 == const0_rtx)
8071 return op1;
8072 if (op1 == const0_rtx)
8073 return op0;
8074 goto binop2;
8075 }
8076
8077 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8078 subtarget, &op0, &op1, modifier);
8079 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8080
8081 case MINUS_EXPR:
8082 /* For initializers, we are allowed to return a MINUS of two
8083 symbolic constants. Here we handle all cases when both operands
8084 are constant. */
8085 /* Handle difference of two symbolic constants,
8086 for the sake of an initializer. */
8087 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8088 && really_constant_p (TREE_OPERAND (exp, 0))
8089 && really_constant_p (TREE_OPERAND (exp, 1)))
8090 {
8091 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8092 NULL_RTX, &op0, &op1, modifier);
8093
8094 /* If the last operand is a CONST_INT, use plus_constant of
8095 the negated constant. Else make the MINUS. */
8096 if (GET_CODE (op1) == CONST_INT)
8097 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8098 else
8099 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8100 }
8101
8102 /* No sense saving up arithmetic to be done
8103 if it's all in the wrong mode to form part of an address.
8104 And force_operand won't know whether to sign-extend or
8105 zero-extend. */
8106 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8107 || mode != ptr_mode)
8108 goto binop;
8109
8110 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8111 subtarget, &op0, &op1, modifier);
8112
8113 /* Convert A - const to A + (-const). */
8114 if (GET_CODE (op1) == CONST_INT)
8115 {
8116 op1 = negate_rtx (mode, op1);
8117 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8118 }
8119
8120 goto binop2;
8121
8122 case MULT_EXPR:
8123 /* If first operand is constant, swap them.
8124 Thus the following special case checks need only
8125 check the second operand. */
8126 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8127 {
8128 tree t1 = TREE_OPERAND (exp, 0);
8129 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8130 TREE_OPERAND (exp, 1) = t1;
8131 }
8132
8133 /* Attempt to return something suitable for generating an
8134 indexed address, for machines that support that. */
8135
8136 if (modifier == EXPAND_SUM && mode == ptr_mode
8137 && host_integerp (TREE_OPERAND (exp, 1), 0))
8138 {
8139 tree exp1 = TREE_OPERAND (exp, 1);
8140
8141 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8142 EXPAND_SUM);
8143
8144 if (!REG_P (op0))
8145 op0 = force_operand (op0, NULL_RTX);
8146 if (!REG_P (op0))
8147 op0 = copy_to_mode_reg (mode, op0);
8148
8149 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8150 gen_int_mode (tree_low_cst (exp1, 0),
8151 TYPE_MODE (TREE_TYPE (exp1)))));
8152 }
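/* Illustrative sketch: for an index computation like i * 4 reached
   with EXPAND_SUM, this returns the bare
     (mult (reg i) (const_int 4))
   so the caller can fold it into an indexed address on targets that
   support one.  */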
8153
8154 if (modifier == EXPAND_STACK_PARM)
8155 target = 0;
8156
8157 /* Check for multiplying things that have been extended
8158 from a narrower type. If this machine supports multiplying
8159 in that narrower type with a result in the desired type,
8160 do it that way, and avoid the explicit type-conversion. */
8161
8162 subexp0 = TREE_OPERAND (exp, 0);
8163 subexp1 = TREE_OPERAND (exp, 1);
8164 /* First, check if we have a multiplication of one signed and one
8165 unsigned operand. */
8166 if (TREE_CODE (subexp0) == NOP_EXPR
8167 && TREE_CODE (subexp1) == NOP_EXPR
8168 && TREE_CODE (type) == INTEGER_TYPE
8169 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8170 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8171 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8172 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8173 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8174 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8175 {
8176 enum machine_mode innermode
8177 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8178 this_optab = usmul_widen_optab;
8179 if (mode == GET_MODE_WIDER_MODE (innermode))
8180 {
8181 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8182 {
8183 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8184 expand_operands (TREE_OPERAND (subexp0, 0),
8185 TREE_OPERAND (subexp1, 0),
8186 NULL_RTX, &op0, &op1, 0);
8187 else
8188 expand_operands (TREE_OPERAND (subexp0, 0),
8189 TREE_OPERAND (subexp1, 0),
8190 NULL_RTX, &op1, &op0, 0);
8191
8192 goto binop3;
8193 }
8194 }
8195 }
8196 /* Check for a multiplication with matching signedness. */
8197 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8198 && TREE_CODE (type) == INTEGER_TYPE
8199 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8200 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8201 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8202 && int_fits_type_p (TREE_OPERAND (exp, 1),
8203 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8204 /* Don't use a widening multiply if a shift will do. */
8205 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8206 > HOST_BITS_PER_WIDE_INT)
8207 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8208 ||
8209 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8210 && (TYPE_PRECISION (TREE_TYPE
8211 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8212 == TYPE_PRECISION (TREE_TYPE
8213 (TREE_OPERAND
8214 (TREE_OPERAND (exp, 0), 0))))
8215 /* If both operands are extended, they must either both
8216 be zero-extended or both be sign-extended. */
8217 && (TYPE_UNSIGNED (TREE_TYPE
8218 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8219 == TYPE_UNSIGNED (TREE_TYPE
8220 (TREE_OPERAND
8221 (TREE_OPERAND (exp, 0), 0)))))))
8222 {
8223 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8224 enum machine_mode innermode = TYPE_MODE (op0type);
8225 bool zextend_p = TYPE_UNSIGNED (op0type);
8226 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8227 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8228
8229 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8230 {
8231 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8232 {
8233 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8234 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8235 TREE_OPERAND (exp, 1),
8236 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8237 else
8238 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8239 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8240 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8241 goto binop3;
8242 }
8243 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8244 && innermode == word_mode)
8245 {
8246 rtx htem, hipart;
8247 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8248 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8249 op1 = convert_modes (innermode, mode,
8250 expand_normal (TREE_OPERAND (exp, 1)),
8251 unsignedp);
8252 else
8253 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8254 temp = expand_binop (mode, other_optab, op0, op1, target,
8255 unsignedp, OPTAB_LIB_WIDEN);
8256 hipart = gen_highpart (innermode, temp);
8257 htem = expand_mult_highpart_adjust (innermode, hipart,
8258 op0, op1, hipart,
8259 zextend_p);
8260 if (htem != hipart)
8261 emit_move_insn (hipart, htem);
8262 return REDUCE_BIT_FIELD (temp);
8263 }
8264 }
8265 }
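/* Illustrative sketch of the widening cases above: for
     (int) (short) a * (int) (short) b
   with SImode == GET_MODE_2XWIDER_MODE (HImode), the operands are
   expanded in HImode and handed to smul_widen_optab (a mulhisi3
   pattern, when the target provides one) instead of extending both
   to SImode and doing a full SImode multiply.  */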
8266 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8267 subtarget, &op0, &op1, 0);
8268 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8269
8270 case TRUNC_DIV_EXPR:
8271 case FLOOR_DIV_EXPR:
8272 case CEIL_DIV_EXPR:
8273 case ROUND_DIV_EXPR:
8274 case EXACT_DIV_EXPR:
8275 if (modifier == EXPAND_STACK_PARM)
8276 target = 0;
8277 /* Possible optimization: compute the dividend with EXPAND_SUM;
8278 then, if the divisor is constant, we can optimize the case where
8279 some terms of the dividend have coefficients divisible by it. */
8280 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8281 subtarget, &op0, &op1, 0);
8282 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8283
8284 case RDIV_EXPR:
8285 goto binop;
8286
8287 case TRUNC_MOD_EXPR:
8288 case FLOOR_MOD_EXPR:
8289 case CEIL_MOD_EXPR:
8290 case ROUND_MOD_EXPR:
8291 if (modifier == EXPAND_STACK_PARM)
8292 target = 0;
8293 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8294 subtarget, &op0, &op1, 0);
8295 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8296
8297 case FIX_TRUNC_EXPR:
8298 op0 = expand_normal (TREE_OPERAND (exp, 0));
8299 if (target == 0 || modifier == EXPAND_STACK_PARM)
8300 target = gen_reg_rtx (mode);
8301 expand_fix (target, op0, unsignedp);
8302 return target;
8303
8304 case FLOAT_EXPR:
8305 op0 = expand_normal (TREE_OPERAND (exp, 0));
8306 if (target == 0 || modifier == EXPAND_STACK_PARM)
8307 target = gen_reg_rtx (mode);
8308 /* expand_float can't figure out what to do if FROM has VOIDmode.
8309 So give it the correct mode. With -O, cse will optimize this. */
8310 if (GET_MODE (op0) == VOIDmode)
8311 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8312 op0);
8313 expand_float (target, op0,
8314 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8315 return target;
8316
8317 case NEGATE_EXPR:
8318 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8319 if (modifier == EXPAND_STACK_PARM)
8320 target = 0;
8321 temp = expand_unop (mode,
8322 optab_for_tree_code (NEGATE_EXPR, type),
8323 op0, target, 0);
8324 gcc_assert (temp);
8325 return REDUCE_BIT_FIELD (temp);
8326
8327 case ABS_EXPR:
8328 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8329 if (modifier == EXPAND_STACK_PARM)
8330 target = 0;
8331
8332 /* ABS_EXPR is not valid for complex arguments. */
8333 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8334 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8335
8336 /* Unsigned abs is simply the operand. Testing here means we don't
8337 risk generating incorrect code below. */
8338 if (TYPE_UNSIGNED (type))
8339 return op0;
8340
8341 return expand_abs (mode, op0, target, unsignedp,
8342 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8343
8344 case MAX_EXPR:
8345 case MIN_EXPR:
8346 target = original_target;
8347 if (target == 0
8348 || modifier == EXPAND_STACK_PARM
8349 || (MEM_P (target) && MEM_VOLATILE_P (target))
8350 || GET_MODE (target) != mode
8351 || (REG_P (target)
8352 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8353 target = gen_reg_rtx (mode);
8354 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8355 target, &op0, &op1, 0);
8356
8357 /* First try to do it with a special MIN or MAX instruction.
8358 If that does not win, use a conditional jump to select the proper
8359 value. */
8360 this_optab = optab_for_tree_code (code, type);
8361 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8362 OPTAB_WIDEN);
8363 if (temp != 0)
8364 return temp;
8365
8366 /* At this point, a MEM target is no longer useful; we will get better
8367 code without it. */
8368
8369 if (! REG_P (target))
8370 target = gen_reg_rtx (mode);
8371
8372 /* If op1 was placed in target, swap op0 and op1. */
8373 if (target != op0 && target == op1)
8374 {
8375 temp = op0;
8376 op0 = op1;
8377 op1 = temp;
8378 }
8379
8380 /* We generate better code and avoid problems with op1 mentioning
8381 target by forcing op1 into a pseudo if it isn't a constant. */
8382 if (! CONSTANT_P (op1))
8383 op1 = force_reg (mode, op1);
8384
8385 {
8386 enum rtx_code comparison_code;
8387 rtx cmpop1 = op1;
8388
8389 if (code == MAX_EXPR)
8390 comparison_code = unsignedp ? GEU : GE;
8391 else
8392 comparison_code = unsignedp ? LEU : LE;
8393
8394 /* Canonicalize to comparisons against 0. */
8395 if (op1 == const1_rtx)
8396 {
8397 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8398 or (a != 0 ? a : 1) for unsigned.
8399 For MIN we are safe converting (a <= 1 ? a : 1)
8400 into (a <= 0 ? a : 1) */
8401 cmpop1 = const0_rtx;
8402 if (code == MAX_EXPR)
8403 comparison_code = unsignedp ? NE : GT;
8404 }
8405 if (op1 == constm1_rtx && !unsignedp)
8406 {
8407 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8408 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8409 cmpop1 = const0_rtx;
8410 if (code == MIN_EXPR)
8411 comparison_code = LT;
8412 }
8413 #ifdef HAVE_conditional_move
8414 /* Use a conditional move if possible. */
8415 if (can_conditionally_move_p (mode))
8416 {
8417 rtx insn;
8418
8419 /* ??? Same problem as in expmed.c: emit_conditional_move
8420 forces a stack adjustment via compare_from_rtx, and we
8421 lose the stack adjustment if the sequence we are about
8422 to create is discarded. */
8423 do_pending_stack_adjust ();
8424
8425 start_sequence ();
8426
8427 /* Try to emit the conditional move. */
8428 insn = emit_conditional_move (target, comparison_code,
8429 op0, cmpop1, mode,
8430 op0, op1, mode,
8431 unsignedp);
8432
8433 /* If we could do the conditional move, emit the sequence,
8434 and return. */
8435 if (insn)
8436 {
8437 rtx seq = get_insns ();
8438 end_sequence ();
8439 emit_insn (seq);
8440 return target;
8441 }
8442
8443 /* Otherwise discard the sequence and fall back to code with
8444 branches. */
8445 end_sequence ();
8446 }
8447 #endif
8448 if (target != op0)
8449 emit_move_insn (target, op0);
8450
8451 temp = gen_label_rtx ();
8452 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8453 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8454 }
8455 emit_move_insn (target, op1);
8456 emit_label (temp);
8457 return target;
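/* The branching fallback above emits, schematically (using GE/GEU
   for MAX and LE/LEU or LT for MIN):
     target = op0;
     if (target <cmp> cmpop1) goto lab;
     target = op1;
   lab:
   i.e. a compare-and-branch rather than a real min/max insn.
   (Illustrative sketch.)  */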
8458
8459 case BIT_NOT_EXPR:
8460 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8461 if (modifier == EXPAND_STACK_PARM)
8462 target = 0;
8463 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8464 gcc_assert (temp);
8465 return temp;
8466
8467 /* ??? Can optimize bitwise operations with one arg constant.
8468 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8469 and (a bitwise1 b) bitwise2 b (etc)
8470 but that is probably not worthwhile. */
8471
8472 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8473 boolean values when we want in all cases to compute both of them. In
8474 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8475 as actual zero-or-1 values and then bitwise anding. In cases where
8476 there cannot be any side effects, better code would be made by
8477 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8478 how to recognize those cases. */
8479
8480 case TRUTH_AND_EXPR:
8481 code = BIT_AND_EXPR;
8482 case BIT_AND_EXPR:
8483 goto binop;
8484
8485 case TRUTH_OR_EXPR:
8486 code = BIT_IOR_EXPR;
8487 case BIT_IOR_EXPR:
8488 goto binop;
8489
8490 case TRUTH_XOR_EXPR:
8491 code = BIT_XOR_EXPR;
8492 case BIT_XOR_EXPR:
8493 goto binop;
8494
8495 case LSHIFT_EXPR:
8496 case RSHIFT_EXPR:
8497 case LROTATE_EXPR:
8498 case RROTATE_EXPR:
8499 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8500 subtarget = 0;
8501 if (modifier == EXPAND_STACK_PARM)
8502 target = 0;
8503 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8504 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8505 unsignedp);
8506
8507 /* Could determine the answer when only additive constants differ. Also,
8508 the addition of one can be handled by changing the condition. */
8509 case LT_EXPR:
8510 case LE_EXPR:
8511 case GT_EXPR:
8512 case GE_EXPR:
8513 case EQ_EXPR:
8514 case NE_EXPR:
8515 case UNORDERED_EXPR:
8516 case ORDERED_EXPR:
8517 case UNLT_EXPR:
8518 case UNLE_EXPR:
8519 case UNGT_EXPR:
8520 case UNGE_EXPR:
8521 case UNEQ_EXPR:
8522 case LTGT_EXPR:
8523 temp = do_store_flag (exp,
8524 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8525 tmode != VOIDmode ? tmode : mode, 0);
8526 if (temp != 0)
8527 return temp;
8528
8529 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8530 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8531 && original_target
8532 && REG_P (original_target)
8533 && (GET_MODE (original_target)
8534 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8535 {
8536 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8537 VOIDmode, 0);
8538
8539 /* If temp is constant, we can just compute the result. */
8540 if (GET_CODE (temp) == CONST_INT)
8541 {
8542 if (INTVAL (temp) != 0)
8543 emit_move_insn (target, const1_rtx);
8544 else
8545 emit_move_insn (target, const0_rtx);
8546
8547 return target;
8548 }
8549
8550 if (temp != original_target)
8551 {
8552 enum machine_mode mode1 = GET_MODE (temp);
8553 if (mode1 == VOIDmode)
8554 mode1 = tmode != VOIDmode ? tmode : mode;
8555
8556 temp = copy_to_mode_reg (mode1, temp);
8557 }
8558
8559 op1 = gen_label_rtx ();
8560 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8561 GET_MODE (temp), unsignedp, op1);
8562 emit_move_insn (temp, const1_rtx);
8563 emit_label (op1);
8564 return temp;
8565 }
8566
8567 /* If no set-flag instruction, must generate a conditional store
8568 into a temporary variable. Drop through and handle this
8569 like && and ||. */
8570
8571 if (! ignore
8572 && (target == 0
8573 || modifier == EXPAND_STACK_PARM
8574 || ! safe_from_p (target, exp, 1)
8575 /* Make sure we don't have a hard reg (such as function's return
8576 value) live across basic blocks, if not optimizing. */
8577 || (!optimize && REG_P (target)
8578 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8579 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8580
8581 if (target)
8582 emit_move_insn (target, const0_rtx);
8583
8584 op1 = gen_label_rtx ();
8585 jumpifnot (exp, op1);
8586
8587 if (target)
8588 emit_move_insn (target, const1_rtx);
8589
8590 emit_label (op1);
8591 return ignore ? const0_rtx : target;
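/* Schematically, the fallback just emitted stores the comparison
   result without a set-flag insn:
     target = 0;
     if (!<cond>) goto lab;
     target = 1;
   lab:
   (Illustrative sketch.)  */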
8592
8593 case TRUTH_NOT_EXPR:
8594 if (modifier == EXPAND_STACK_PARM)
8595 target = 0;
8596 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8597 /* The parser is careful to generate TRUTH_NOT_EXPR
8598 only with operands that are always zero or one. */
8599 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8600 target, 1, OPTAB_LIB_WIDEN);
8601 gcc_assert (temp);
8602 return temp;
8603
8604 case STATEMENT_LIST:
8605 {
8606 tree_stmt_iterator iter;
8607
8608 gcc_assert (ignore);
8609
8610 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8611 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8612 }
8613 return const0_rtx;
8614
8615 case COND_EXPR:
8616 /* A COND_EXPR with its type being VOID_TYPE represents a
8617 conditional jump and is handled in
8618 expand_gimple_cond_expr. */
8619 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8620
8621 /* Note that COND_EXPRs whose type is a structure or union
8622 are required to be constructed to contain assignments of
8623 a temporary variable, so that we can evaluate them here
8624 for side effect only. If type is void, we must do likewise. */
8625
8626 gcc_assert (!TREE_ADDRESSABLE (type)
8627 && !ignore
8628 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8629 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8630
8631 /* If we are not to produce a result, we have no target. Otherwise,
8632 if a target was specified use it; it will not be used as an
8633 intermediate target unless it is safe. If no target, use a
8634 temporary. */
8635
8636 if (modifier != EXPAND_STACK_PARM
8637 && original_target
8638 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8639 && GET_MODE (original_target) == mode
8640 #ifdef HAVE_conditional_move
8641 && (! can_conditionally_move_p (mode)
8642 || REG_P (original_target))
8643 #endif
8644 && !MEM_P (original_target))
8645 temp = original_target;
8646 else
8647 temp = assign_temp (type, 0, 0, 1);
8648
8649 do_pending_stack_adjust ();
8650 NO_DEFER_POP;
8651 op0 = gen_label_rtx ();
8652 op1 = gen_label_rtx ();
8653 jumpifnot (TREE_OPERAND (exp, 0), op0);
8654 store_expr (TREE_OPERAND (exp, 1), temp,
8655 modifier == EXPAND_STACK_PARM);
8656
8657 emit_jump_insn (gen_jump (op1));
8658 emit_barrier ();
8659 emit_label (op0);
8660 store_expr (TREE_OPERAND (exp, 2), temp,
8661 modifier == EXPAND_STACK_PARM);
8662
8663 emit_label (op1);
8664 OK_DEFER_POP;
8665 return temp;
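/* The sequence just emitted is the classic diamond, roughly
     if (!<cond>) goto op0;
     temp = <then-arm>; goto op1;
   op0:
     temp = <else-arm>;
   op1:
   with the result left in TEMP.  (Illustrative sketch.)  */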
8666
8667 case VEC_COND_EXPR:
8668 target = expand_vec_cond_expr (exp, target);
8669 return target;
8670
8671 case GIMPLE_MODIFY_STMT:
8672 {
8673 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
8674 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
8675
8676 gcc_assert (ignore);
8677
8678 /* Check for |= or &= of a bitfield of size one into another bitfield
8679 of size 1. In this case, (unless we need the result of the
8680 assignment) we can do this more efficiently with a
8681 test followed by an assignment, if necessary.
8682
8683 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8684 things change so we do, this code should be enhanced to
8685 support it. */
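	/* For instance, "s.a |= s.b" with one-bit fields expands to a
	   jump past the store when s.b is clear, then a store of 1 into
	   s.a; similarly "s.a &= s.b" stores 0 only when s.b is clear.  */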
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label);
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs);

	return const0_rtx;
      }

    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);

    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      op1 = expand_normal (TREE_OPERAND (exp, 1));

      if (!target)
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;

    case REALPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      return read_complex_part (op0, true);

    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case SWITCH_EXPR:
      expand_case (exp);
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
			       modifier, alt_rtl);

    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	this_optab = optab_for_tree_code (code, type);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case DOT_PROD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (exp, op0, op1, op2,
					    target, unsignedp);
	return target;
      }

    case WIDEN_SUM_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
	target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
					    target, unsignedp);
	return target;
      }

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
	op0 = expand_normal (TREE_OPERAND (exp, 0));
	this_optab = optab_for_tree_code (code, type);
	temp = expand_unop (mode, this_optab, op0, target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
      {
	expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			 NULL_RTX, &op0, &op1, 0);
	this_optab = optab_for_tree_code (code, type);
	temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			     OPTAB_WIDEN);
	gcc_assert (temp);
	return temp;
      }

    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:
      {
	expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			 NULL_RTX, &op0, &op1, 0);
	this_optab = optab_for_tree_code (code, type);
	temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			     OPTAB_WIDEN);
	gcc_assert (temp);
	return temp;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	target = expand_vec_shift_expr (exp, target);
	return target;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
      {
	op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
	this_optab = optab_for_tree_code (code, type);
	temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
					  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
	target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
					    target, unsignedp);
	gcc_assert (target);
	return target;
      }

    case VEC_PACK_MOD_EXPR:
    case VEC_PACK_SAT_EXPR:
      {
	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	goto binop;
      }

    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }

  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
 binop2:
  this_optab = optab_for_tree_code (code, type);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
\f
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */
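/* For instance, for an unsigned 3-bit type the value is masked with
   (1 << 3) - 1 = 7; for a signed 3-bit type in SImode it is shifted
   left by 29 bits and then arithmetic-shifted right by 29 bits, which
   sign-extends bit 2 through the upper bits.  */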
static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
\f
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
   when applied to the address of EXP, produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */
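/* For example, with BIGGEST_ALIGNMENT of 128 bits (16 bytes) this
   recognizes an OFFSET of the form (-(int) &exp) & 31, i.e. the bias
   that rounds the address of EXP up to a 32-byte boundary.  */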

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
\f
/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */
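/* E.g., for ARG of the form &"hello"[2] or "hello" + 2, the STRING_CST
   for "hello" is returned and *PTR_OFFSET is set to 2.  */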

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;

	  /* Check if the array has a nonzero lower bound.  */
	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
	  if (!integer_zerop (lower_bound))
	    {
	      /* If the offset and base aren't both constants, return 0.  */
	      if (TREE_CODE (lower_bound) != INTEGER_CST)
		return 0;
	      if (TREE_CODE (offset) != INTEGER_CST)
		return 0;
	      /* Adjust offset by the lower bound.  */
	      offset = size_diffop (fold_convert (sizetype, offset),
				    fold_convert (sizetype, lower_bound));
	    }
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* Only use the initializer if the array is read-only, has no
	 side effects and binds locally.  */
      if (! TREE_READONLY (array)
	  || TREE_SIDE_EFFECTS (array)
	  || ! targetm.binds_local_p (array))
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be
	 constant and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
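
/* Concretely, that fallback stores 1 (or 0 when inverting) into TARGET,
   emits a compare-and-jump to a label that is taken when the comparison
   holds so the value survives, stores the opposite value on the
   fall-through path, and then emits the label.  */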

static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
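
/* For example, (x & 8) != 0 becomes (x >> 3) & 1, and (x & 8) == 0
   becomes ((x >> 3) & 1) ^ 1.  */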

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];

  if (icode == CODE_FOR_nothing)
    {
      enum machine_mode wmode;

      for (wmode = operand_mode;
	   icode == CODE_FOR_nothing && wmode != VOIDmode;
	   wmode = GET_MODE_WIDER_MODE (wmode))
	icode = cstore_optab->handlers[(int) wmode].insn_code;
    }

  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  label = gen_label_rtx ();
  do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
			   NULL_RTX, label);

  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_size (index_bits, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_normal (minval);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_normal (range);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
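  /* For instance, with a lower bound of 10 and a range of 5, an original
     index of 9 wraps to (unsigned) -1 after the subtraction, which
     exceeds 5, so the single GTU test also rejects values below the
     lower bound.  */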

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but do have V2DI, though that case is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}
#include "gt-expr.h"