/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
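
/* Illustrative sketch (an assumption for exposition, not part of GCC):
   with STACK_PUSH_CODE == PRE_DEC, a push of an SImode value expands
   to RTL of the shape

     (set (mem:SI (pre_dec:P (reg:P sp))) (reg:SI x))

   while an upward-growing stack would use (pre_inc:P ...) instead.  */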

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, tree, int);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
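
/* Worked example for the cost tests above (illustrative; the ratios
   and MOVE_MAX_PIECES are target-dependent assumptions): on a 32-bit
   target with MOVE_MAX_PIECES == 4 and MOVE_RATIO == 3, an 8-byte
   word-aligned copy needs two SImode moves, so MOVE_BY_PIECES_P tests
   2 < 3 and the copy is done inline; a 64-byte copy (16 moves) fails
   the test and is left to a movmem pattern or a libcall.  */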

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
                  != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                          to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_cst (NULL_TREE,
                                        GET_MODE_BITSIZE (to_mode)
                                        - GET_MODE_BITSIZE (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
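
/* Usage sketch (illustrative, not part of GCC): widening a SImode
   pseudo into a DImode pseudo.

     rtx src = gen_reg_rtx (SImode);
     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 1);

   UNSIGNEDP == 1 requests zero-extension; 0 would request
   sign-extension, as described in the comment above.  */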

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do the
     wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into MODE is always equivalent
     to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
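
/* Example of the CONST_INT handling above (illustrative): converting
   (const_int -1) from QImode to SImode with UNSIGNEDP set masks the
   value down to its 8 low-order bits and returns
   gen_int_mode (0xff, SImode), rather than letting gen_lowpart
   sign-extend the constant to 0xffffffff.  */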
\f
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is the maximum alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse)
    data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
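
/* Usage sketch: emit_block_move below invokes this routine as

     if (GET_CODE (size) == CONST_INT
         && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   passing ENDP == 0 because the plain block-move interface has no use
   for a mempcpy/stpcpy-style end pointer.  */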

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
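
/* Worked example (illustrative, target-dependent numbers assumed):
   on a 32-bit target with word-aligned operands and MAX_SIZE == 5,
   L == 11 decomposes as 11 = 2*4 + 1*2 + 1*1, i.e. two SImode moves,
   one HImode move and one QImode move, so the function returns 4.  */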

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT) size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
                                          method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
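
/* Usage sketch (illustrative, not part of GCC): a caller expanding a
   structure assignment might emit

     emit_block_move (x, y, GEN_INT (nbytes), BLOCK_OP_NORMAL);

   where X and Y are BLKmode MEMs and NBYTES is a hypothetical byte
   count.  The return value is nonzero only when the copy went through
   the memcpy libcall and its result is wanted.  */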

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
          return false;
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}
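
/* The RTL emitted above corresponds to this C sketch (illustrative):

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];    (one QImode move per iteration)
     iter = iter + 1;
   cmp:
     if (iter < size) goto top;
*/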
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
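
/* Illustrative example (assumed register numbering, not part of GCC):
   a 16-byte block spread across two DImode hard registers might be
   described by

     (parallel [(expr_list (reg:DI 0) (const_int 0))
                (expr_list (reg:DI 1) (const_int 8))])

   where each EXPR_LIST pairs a register with its byte offset into the
   block.  gen_group_rtx above returns the same PARALLEL shape with
   each hard register replaced by a fresh DImode pseudo.  */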

/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
               || (REG_P (src) && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}

/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
1778
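/* Illustrative example (not from this file's sources): a target that
returns a 16-byte structure in two DImode hard registers might describe
the destination as

(parallel [(expr_list (reg:DI 3) (const_int 0))
(expr_list (reg:DI 4) (const_int 8))])

so that

emit_group_load (dst, src_mem, type, 16);

loads bytes 0-7 of SRC_MEM into (reg:DI 3) and bytes 8-15 into
(reg:DI 4). */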
1779 /* Similar, but load SRC into new pseudos in a format that looks like
1780 PARALLEL. This can later be fed to emit_group_move to get things
1781 in the right place. */
1782
1783 rtx
1784 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1785 {
1786 rtvec vec;
1787 int i;
1788
1789 vec = rtvec_alloc (XVECLEN (parallel, 0));
1790 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1791
1792 /* Convert the vector to look just like the original PARALLEL, except
1793 with the computed values. */
1794 for (i = 0; i < XVECLEN (parallel, 0); i++)
1795 {
1796 rtx e = XVECEXP (parallel, 0, i);
1797 rtx d = XEXP (e, 0);
1798
1799 if (d)
1800 {
1801 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1802 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1803 }
1804 RTVEC_ELT (vec, i) = e;
1805 }
1806
1807 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1808 }
1809
1810 /* Emit code to move a block SRC to block DST, where SRC and DST are
1811 non-consecutive groups of registers, each represented by a PARALLEL. */
1812
1813 void
1814 emit_group_move (rtx dst, rtx src)
1815 {
1816 int i;
1817
1818 gcc_assert (GET_CODE (src) == PARALLEL
1819 && GET_CODE (dst) == PARALLEL
1820 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1821
1822 /* Skip first entry if NULL. */
1823 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1824 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1825 XEXP (XVECEXP (src, 0, i), 0));
1826 }
1827
1828 /* Move a group of registers represented by a PARALLEL into pseudos. */
1829
1830 rtx
1831 emit_group_move_into_temps (rtx src)
1832 {
1833 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1834 int i;
1835
1836 for (i = 0; i < XVECLEN (src, 0); i++)
1837 {
1838 rtx e = XVECEXP (src, 0, i);
1839 rtx d = XEXP (e, 0);
1840
1841 if (d)
1842 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1843 RTVEC_ELT (vec, i) = e;
1844 }
1845
1846 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1847 }
1848
1849 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1850 where SRC is non-consecutive registers represented by a PARALLEL.
1851 SSIZE represents the total size of block ORIG_DST, or -1 if not
1852 known. */
1853
1854 void
1855 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1856 {
1857 rtx *tmps, dst;
1858 int start, finish, i;
1859 enum machine_mode m = GET_MODE (orig_dst);
1860
1861 gcc_assert (GET_CODE (src) == PARALLEL);
1862
1863 if (!SCALAR_INT_MODE_P (m)
1864 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1865 {
1866 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1867 if (imode == BLKmode)
1868 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1869 else
1870 dst = gen_reg_rtx (imode);
1871 emit_group_store (dst, src, type, ssize);
1872 if (imode != BLKmode)
1873 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1874 emit_move_insn (orig_dst, dst);
1875 return;
1876 }
1877
1878 /* Check for a NULL entry, used to indicate that the parameter goes
1879 both on the stack and in registers. */
1880 if (XEXP (XVECEXP (src, 0, 0), 0))
1881 start = 0;
1882 else
1883 start = 1;
1884 finish = XVECLEN (src, 0);
1885
1886 tmps = alloca (sizeof (rtx) * finish);
1887
1888 /* Copy the (probable) hard regs into pseudos. */
1889 for (i = start; i < finish; i++)
1890 {
1891 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1892 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1893 {
1894 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1895 emit_move_insn (tmps[i], reg);
1896 }
1897 else
1898 tmps[i] = reg;
1899 }
1900
1901 /* If we won't be storing directly into memory, protect the real destination
1902 from strange tricks we might play. */
1903 dst = orig_dst;
1904 if (GET_CODE (dst) == PARALLEL)
1905 {
1906 rtx temp;
1907
1908 /* We can get a PARALLEL dst if there is a conditional expression in
1909 a return statement. In that case, the dst and src are the same,
1910 so no action is necessary. */
1911 if (rtx_equal_p (dst, src))
1912 return;
1913
1914 /* It is unclear if we can ever reach here, but we may as well handle
1915 it. Allocate a temporary, and split this into a store/load to/from
1916 the temporary. */
1917
1918 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1919 emit_group_store (temp, src, type, ssize);
1920 emit_group_load (dst, temp, type, ssize);
1921 return;
1922 }
1923 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1924 {
1925 enum machine_mode outer = GET_MODE (dst);
1926 enum machine_mode inner;
1927 HOST_WIDE_INT bytepos;
1928 bool done = false;
1929 rtx temp;
1930
1931 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1932 dst = gen_reg_rtx (outer);
1933
1934 /* Make life a bit easier for combine. */
1935 /* If the first element of the vector is the low part
1936 of the destination mode, use a paradoxical subreg to
1937 initialize the destination. */
1938 if (start < finish)
1939 {
1940 inner = GET_MODE (tmps[start]);
1941 bytepos = subreg_lowpart_offset (inner, outer);
1942 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1943 {
1944 temp = simplify_gen_subreg (outer, tmps[start],
1945 inner, 0);
1946 if (temp)
1947 {
1948 emit_move_insn (dst, temp);
1949 done = true;
1950 start++;
1951 }
1952 }
1953 }
1954
1955 /* If the first element wasn't the low part, try the last. */
1956 if (!done
1957 && start < finish - 1)
1958 {
1959 inner = GET_MODE (tmps[finish - 1]);
1960 bytepos = subreg_lowpart_offset (inner, outer);
1961 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1962 {
1963 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1964 inner, 0);
1965 if (temp)
1966 {
1967 emit_move_insn (dst, temp);
1968 done = true;
1969 finish--;
1970 }
1971 }
1972 }
1973
1974 /* Otherwise, simply initialize the result to zero. */
1975 if (!done)
1976 emit_move_insn (dst, CONST0_RTX (outer));
1977 }
1978
1979 /* Process the pieces. */
1980 for (i = start; i < finish; i++)
1981 {
1982 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1983 enum machine_mode mode = GET_MODE (tmps[i]);
1984 unsigned int bytelen = GET_MODE_SIZE (mode);
1985 rtx dest = dst;
1986
1987 /* Handle trailing fragments that run over the size of the struct. */
1988 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1989 {
1990 /* store_bit_field always takes its value from the lsb.
1991 Move the fragment to the lsb if it's not already there. */
1992 if (
1993 #ifdef BLOCK_REG_PADDING
1994 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1995 == (BYTES_BIG_ENDIAN ? upward : downward)
1996 #else
1997 BYTES_BIG_ENDIAN
1998 #endif
1999 )
2000 {
2001 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2002 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2003 build_int_cst (NULL_TREE, shift),
2004 tmps[i], 0);
2005 }
2006 bytelen = ssize - bytepos;
2007 }
2008
2009 if (GET_CODE (dst) == CONCAT)
2010 {
2011 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2012 dest = XEXP (dst, 0);
2013 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2014 {
2015 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2016 dest = XEXP (dst, 1);
2017 }
2018 else
2019 {
2020 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2021 dest = assign_stack_temp (GET_MODE (dest),
2022 GET_MODE_SIZE (GET_MODE (dest)), 0);
2023 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2024 tmps[i]);
2025 dst = dest;
2026 break;
2027 }
2028 }
2029
2030 /* Optimize the access just a bit. */
2031 if (MEM_P (dest)
2032 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2033 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2034 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2035 && bytelen == GET_MODE_SIZE (mode))
2036 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2037 else
2038 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2039 mode, tmps[i]);
2040 }
2041
2042 /* Copy from the pseudo into the (probable) hard reg. */
2043 if (orig_dst != dst)
2044 emit_move_insn (orig_dst, dst);
2045 }
2046
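/* Illustrative example: the converse of emit_group_load. If a value
arrives in a PARALLEL such as

(parallel [(expr_list (reg:DI 3) (const_int 0))
(expr_list (reg:DI 4) (const_int 8))])

then

emit_group_store (dst_mem, src_parallel, type, 16);

scatters (reg:DI 3) to bytes 0-7 of DST_MEM and (reg:DI 4) to
bytes 8-15. */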
2047 /* Generate code to copy a BLKmode object of TYPE out of a
2048 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2049 is null, a stack temporary is created. TGTBLK is returned.
2050
2051 The purpose of this routine is to handle functions that return
2052 BLKmode structures in registers. Some machines (the PA for example)
2053 want to return all small structures in registers regardless of the
2054 structure's alignment. */
2055
2056 rtx
2057 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2058 {
2059 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2060 rtx src = NULL, dst = NULL;
2061 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2062 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2063
2064 if (tgtblk == 0)
2065 {
2066 tgtblk = assign_temp (build_qualified_type (type,
2067 (TYPE_QUALS (type)
2068 | TYPE_QUAL_CONST)),
2069 0, 1, 1);
2070 preserve_temp_slots (tgtblk);
2071 }
2072
2073 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2074 into a new pseudo which is a full word. */
2075
2076 if (GET_MODE (srcreg) != BLKmode
2077 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2078 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2079
2080 /* If the structure doesn't take up a whole number of words, see whether
2081 SRCREG is padded on the left or on the right. If it's on the left,
2082 set PADDING_CORRECTION to the number of bits to skip.
2083
2084 In most ABIs, the structure will be returned at the least significant
2085 end of the register, which translates to right padding on little-endian
2086 targets and left padding on big-endian targets. The opposite
2087 holds if the structure is returned at the most significant
2088 end of the register. */
2089 if (bytes % UNITS_PER_WORD != 0
2090 && (targetm.calls.return_in_msb (type)
2091 ? !BYTES_BIG_ENDIAN
2092 : BYTES_BIG_ENDIAN))
2093 padding_correction
2094 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
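
/* Worked example (illustrative): for a 6-byte structure on a
32-bit big-endian target that does not return in the MSB,
bytes % UNITS_PER_WORD == 2, so padding_correction is
32 - 16 == 16, and the extraction loop below starts 16 bits
into SRCREG, skipping the left padding. */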
2095
2096 /* Copy the structure BITSIZE bits at a time.
2097
2098 We could probably emit more efficient code for machines which do not use
2099 strict alignment, but it doesn't seem worth the effort at the current
2100 time. */
2101 for (bitpos = 0, xbitpos = padding_correction;
2102 bitpos < bytes * BITS_PER_UNIT;
2103 bitpos += bitsize, xbitpos += bitsize)
2104 {
2105 /* We need a new source operand each time xbitpos is on a
2106 word boundary and when xbitpos == padding_correction
2107 (the first time through). */
2108 if (xbitpos % BITS_PER_WORD == 0
2109 || xbitpos == padding_correction)
2110 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2111 GET_MODE (srcreg));
2112
2113 /* We need a new destination operand each time bitpos is on
2114 a word boundary. */
2115 if (bitpos % BITS_PER_WORD == 0)
2116 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2117
2118 /* Use xbitpos for the source extraction (right justified) and
2119 bitpos for the destination store (left justified). */
2120 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2121 extract_bit_field (src, bitsize,
2122 xbitpos % BITS_PER_WORD, 1,
2123 NULL_RTX, word_mode, word_mode));
2124 }
2125
2126 return tgtblk;
2127 }
2128
2129 /* Add a USE expression for REG to the (possibly empty) list pointed
2130 to by CALL_FUSAGE. REG must denote a hard register. */
2131
2132 void
2133 use_reg (rtx *call_fusage, rtx reg)
2134 {
2135 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2136
2137 *call_fusage
2138 = gen_rtx_EXPR_LIST (VOIDmode,
2139 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2140 }
2141
2142 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2143 starting at REGNO. All of these registers must be hard registers. */
2144
2145 void
2146 use_regs (rtx *call_fusage, int regno, int nregs)
2147 {
2148 int i;
2149
2150 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2151
2152 for (i = 0; i < nregs; i++)
2153 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2154 }
2155
2156 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2157 PARALLEL REGS. This is for calls that pass values in multiple
2158 non-contiguous locations. The Irix 6 ABI has examples of this. */
2159
2160 void
2161 use_group_regs (rtx *call_fusage, rtx regs)
2162 {
2163 int i;
2164
2165 for (i = 0; i < XVECLEN (regs, 0); i++)
2166 {
2167 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2168
2169 /* A NULL entry means the parameter goes both on the stack and in
2170 registers. This can also be a MEM for targets that pass values
2171 partially on the stack and partially in registers. */
2172 if (reg != 0 && REG_P (reg))
2173 use_reg (call_fusage, reg);
2174 }
2175 }
2176 \f
2177
2178 /* Determine whether the LEN bytes generated by CONSTFUN can be
2179 stored to memory using several move instructions. CONSTFUNDATA is
2180 a pointer which will be passed as argument in every CONSTFUN call.
2181 ALIGN is maximum alignment we can assume. Return nonzero if a
2182 call to store_by_pieces should succeed. */
2183
2184 int
2185 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2186 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2187 void *constfundata, unsigned int align)
2188 {
2189 unsigned HOST_WIDE_INT l;
2190 unsigned int max_size;
2191 HOST_WIDE_INT offset = 0;
2192 enum machine_mode mode, tmode;
2193 enum insn_code icode;
2194 int reverse;
2195 rtx cst;
2196
2197 if (len == 0)
2198 return 1;
2199
2200 if (! STORE_BY_PIECES_P (len, align))
2201 return 0;
2202
2203 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2204 if (align >= GET_MODE_ALIGNMENT (tmode))
2205 align = GET_MODE_ALIGNMENT (tmode);
2206 else
2207 {
2208 enum machine_mode xmode;
2209
2210 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2211 tmode != VOIDmode;
2212 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2213 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2214 || SLOW_UNALIGNED_ACCESS (tmode, align))
2215 break;
2216
2217 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2218 }
2219
2220 /* We would first store what we can in the largest integer mode, then go to
2221 successively smaller modes. */
2222
2223 for (reverse = 0;
2224 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2225 reverse++)
2226 {
2227 l = len;
2228 mode = VOIDmode;
2229 max_size = STORE_MAX_PIECES + 1;
2230 while (max_size > 1)
2231 {
2232 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2233 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2234 if (GET_MODE_SIZE (tmode) < max_size)
2235 mode = tmode;
2236
2237 if (mode == VOIDmode)
2238 break;
2239
2240 icode = mov_optab->handlers[(int) mode].insn_code;
2241 if (icode != CODE_FOR_nothing
2242 && align >= GET_MODE_ALIGNMENT (mode))
2243 {
2244 unsigned int size = GET_MODE_SIZE (mode);
2245
2246 while (l >= size)
2247 {
2248 if (reverse)
2249 offset -= size;
2250
2251 cst = (*constfun) (constfundata, offset, mode);
2252 if (!LEGITIMATE_CONSTANT_P (cst))
2253 return 0;
2254
2255 if (!reverse)
2256 offset += size;
2257
2258 l -= size;
2259 }
2260 }
2261
2262 max_size = GET_MODE_SIZE (mode);
2263 }
2264
2265 /* The code above should have handled everything. */
2266 gcc_assert (!l);
2267 }
2268
2269 return 1;
2270 }
2271
2272 /* Generate several move instructions to store LEN bytes generated by
2273 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2274 pointer which will be passed as argument in every CONSTFUN call.
2275 ALIGN is maximum alignment we can assume.
2276 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2277 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2278 stpcpy. */
2279
2280 rtx
2281 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2282 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2283 void *constfundata, unsigned int align, int endp)
2284 {
2285 struct store_by_pieces data;
2286
2287 if (len == 0)
2288 {
2289 gcc_assert (endp != 2);
2290 return to;
2291 }
2292
2293 gcc_assert (STORE_BY_PIECES_P (len, align));
2294 data.constfun = constfun;
2295 data.constfundata = constfundata;
2296 data.len = len;
2297 data.to = to;
2298 store_by_pieces_1 (&data, align);
2299 if (endp)
2300 {
2301 rtx to1;
2302
2303 gcc_assert (!data.reverse);
2304 if (data.autinc_to)
2305 {
2306 if (endp == 2)
2307 {
2308 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2309 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2310 else
2311 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2312 -1));
2313 }
2314 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2315 data.offset);
2316 }
2317 else
2318 {
2319 if (endp == 2)
2320 --data.offset;
2321 to1 = adjust_address (data.to, QImode, data.offset);
2322 }
2323 return to1;
2324 }
2325 else
2326 return data.to;
2327 }
2328
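/* Illustrative use, in the style of builtins.c (READ_STR is a
hypothetical helper, not defined here): a callback that returns
successive pieces of a host string can drive store_by_pieces.

static rtx
read_str (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
{
return c_readstr ((const char *) data + offset, mode);
}

A caller would guard the store with the matching predicate:

if (can_store_by_pieces (len, read_str, (void *) str, align))
store_by_pieces (dest_mem, len, read_str, (void *) str, align, 0); */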
2329 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2330 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2331
2332 static void
2333 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2334 {
2335 struct store_by_pieces data;
2336
2337 if (len == 0)
2338 return;
2339
2340 data.constfun = clear_by_pieces_1;
2341 data.constfundata = NULL;
2342 data.len = len;
2343 data.to = to;
2344 store_by_pieces_1 (&data, align);
2345 }
2346
2347 /* Callback routine for clear_by_pieces.
2348 Return const0_rtx unconditionally. */
2349
2350 static rtx
2351 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2352 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2353 enum machine_mode mode ATTRIBUTE_UNUSED)
2354 {
2355 return const0_rtx;
2356 }
2357
2358 /* Subroutine of clear_by_pieces and store_by_pieces.
2359 Generate several move instructions to store LEN bytes of block TO. (A MEM
2360 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2361
2362 static void
2363 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2364 unsigned int align ATTRIBUTE_UNUSED)
2365 {
2366 rtx to_addr = XEXP (data->to, 0);
2367 unsigned int max_size = STORE_MAX_PIECES + 1;
2368 enum machine_mode mode = VOIDmode, tmode;
2369 enum insn_code icode;
2370
2371 data->offset = 0;
2372 data->to_addr = to_addr;
2373 data->autinc_to
2374 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2375 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2376
2377 data->explicit_inc_to = 0;
2378 data->reverse
2379 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2380 if (data->reverse)
2381 data->offset = data->len;
2382
2383 /* If storing requires more than two move insns,
2384 copy addresses to registers (to make displacements shorter)
2385 and use post-increment if available. */
2386 if (!data->autinc_to
2387 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2388 {
2389 /* Determine the main mode we'll be using. */
2390 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2391 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2392 if (GET_MODE_SIZE (tmode) < max_size)
2393 mode = tmode;
2394
2395 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2396 {
2397 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2398 data->autinc_to = 1;
2399 data->explicit_inc_to = -1;
2400 }
2401
2402 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2403 && ! data->autinc_to)
2404 {
2405 data->to_addr = copy_addr_to_reg (to_addr);
2406 data->autinc_to = 1;
2407 data->explicit_inc_to = 1;
2408 }
2409
2410 if (!data->autinc_to && CONSTANT_P (to_addr))
2411 data->to_addr = copy_addr_to_reg (to_addr);
2412 }
2413
2414 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2415 if (align >= GET_MODE_ALIGNMENT (tmode))
2416 align = GET_MODE_ALIGNMENT (tmode);
2417 else
2418 {
2419 enum machine_mode xmode;
2420
2421 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2422 tmode != VOIDmode;
2423 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2424 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2425 || SLOW_UNALIGNED_ACCESS (tmode, align))
2426 break;
2427
2428 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2429 }
2430
2431 /* First store what we can in the largest integer mode, then go to
2432 successively smaller modes. */
2433
2434 while (max_size > 1)
2435 {
2436 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2437 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2438 if (GET_MODE_SIZE (tmode) < max_size)
2439 mode = tmode;
2440
2441 if (mode == VOIDmode)
2442 break;
2443
2444 icode = mov_optab->handlers[(int) mode].insn_code;
2445 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2446 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2447
2448 max_size = GET_MODE_SIZE (mode);
2449 }
2450
2451 /* The code above should have handled everything. */
2452 gcc_assert (!data->len);
2453 }
2454
2455 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2456 with move instructions for mode MODE. GENFUN is the gen_... function
2457 to make a move insn for that mode. DATA has all the other info. */
2458
2459 static void
2460 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2461 struct store_by_pieces *data)
2462 {
2463 unsigned int size = GET_MODE_SIZE (mode);
2464 rtx to1, cst;
2465
2466 while (data->len >= size)
2467 {
2468 if (data->reverse)
2469 data->offset -= size;
2470
2471 if (data->autinc_to)
2472 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2473 data->offset);
2474 else
2475 to1 = adjust_address (data->to, mode, data->offset);
2476
2477 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2478 emit_insn (gen_add2_insn (data->to_addr,
2479 GEN_INT (-(HOST_WIDE_INT) size)));
2480
2481 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2482 emit_insn ((*genfun) (to1, cst));
2483
2484 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2485 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2486
2487 if (! data->reverse)
2488 data->offset += size;
2489
2490 data->len -= size;
2491 }
2492 }
2493 \f
2494 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2495 its length in bytes. */
2496
2497 rtx
2498 clear_storage (rtx object, rtx size, enum block_op_methods method)
2499 {
2500 enum machine_mode mode = GET_MODE (object);
2501 unsigned int align;
2502
2503 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2504
2505 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2506 just move a zero. Otherwise, do this a piece at a time. */
2507 if (mode != BLKmode
2508 && GET_CODE (size) == CONST_INT
2509 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2510 {
2511 rtx zero = CONST0_RTX (mode);
2512 if (zero != NULL)
2513 {
2514 emit_move_insn (object, zero);
2515 return NULL;
2516 }
2517
2518 if (COMPLEX_MODE_P (mode))
2519 {
2520 zero = CONST0_RTX (GET_MODE_INNER (mode));
2521 if (zero != NULL)
2522 {
2523 write_complex_part (object, zero, 0);
2524 write_complex_part (object, zero, 1);
2525 return NULL;
2526 }
2527 }
2528 }
2529
2530 if (size == const0_rtx)
2531 return NULL;
2532
2533 align = MEM_ALIGN (object);
2534
2535 if (GET_CODE (size) == CONST_INT
2536 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2537 clear_by_pieces (object, INTVAL (size), align);
2538 else if (set_storage_via_setmem (object, size, const0_rtx, align))
2539 ;
2540 else
2541 return set_storage_via_libcall (object, size, const0_rtx,
2542 method == BLOCK_OP_TAILCALL);
2543
2544 return NULL;
2545 }
2546
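/* For example (illustrative), clearing a 32-byte BLKmode object:

clear_storage (dest_mem, GEN_INT (32), BLOCK_OP_NORMAL);

tries clear_by_pieces first, then a setmem pattern, and finally
falls back to a memset libcall. */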
2547 /* A subroutine of clear_storage. Expand a call to memset.
2548 Return the return value of memset, 0 otherwise. */
2549
2550 rtx
2551 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2552 {
2553 tree call_expr, arg_list, fn, object_tree, size_tree, val_tree;
2554 enum machine_mode size_mode;
2555 rtx retval;
2556
2557 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2558 place those new pseudos into a VAR_DECL and use them later. */
2559
2560 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2561
2562 size_mode = TYPE_MODE (sizetype);
2563 size = convert_to_mode (size_mode, size, 1);
2564 size = copy_to_mode_reg (size_mode, size);
2565
2566 /* It is incorrect to use the libcall calling conventions to call
2567 memset in this context. This could be a user call to memset and
2568 the user may wish to examine the return value from memset. For
2569 targets where libcalls and normal calls have different conventions
2570 for returning pointers, we could end up generating incorrect code. */
2571
2572 object_tree = make_tree (ptr_type_node, object);
2573 if (GET_CODE (val) != CONST_INT)
2574 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2575 size_tree = make_tree (sizetype, size);
2576 val_tree = make_tree (integer_type_node, val);
2577
2578 fn = clear_storage_libcall_fn (true);
2579 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2580 arg_list = tree_cons (NULL_TREE, val_tree, arg_list);
2581 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2582
2583 /* Now we have to build up the CALL_EXPR itself. */
2584 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2585 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2586 call_expr, arg_list, NULL_TREE);
2587 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2588
2589 retval = expand_normal (call_expr);
2590
2591 return retval;
2592 }
2593
2594 /* A subroutine of set_storage_via_libcall. Create the tree node
2595 for the function we use for block clears. The first time FOR_CALL
2596 is true, we call assemble_external. */
2597
2598 static GTY(()) tree block_clear_fn;
2599
2600 void
2601 init_block_clear_fn (const char *asmspec)
2602 {
2603 if (!block_clear_fn)
2604 {
2605 tree fn, args;
2606
2607 fn = get_identifier ("memset");
2608 args = build_function_type_list (ptr_type_node, ptr_type_node,
2609 integer_type_node, sizetype,
2610 NULL_TREE);
2611
2612 fn = build_decl (FUNCTION_DECL, fn, args);
2613 DECL_EXTERNAL (fn) = 1;
2614 TREE_PUBLIC (fn) = 1;
2615 DECL_ARTIFICIAL (fn) = 1;
2616 TREE_NOTHROW (fn) = 1;
2617 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2618 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2619
2620 block_clear_fn = fn;
2621 }
2622
2623 if (asmspec)
2624 set_user_assembler_name (block_clear_fn, asmspec);
2625 }
2626
2627 static tree
2628 clear_storage_libcall_fn (int for_call)
2629 {
2630 static bool emitted_extern;
2631
2632 if (!block_clear_fn)
2633 init_block_clear_fn (NULL);
2634
2635 if (for_call && !emitted_extern)
2636 {
2637 emitted_extern = true;
2638 make_decl_rtl (block_clear_fn);
2639 assemble_external (block_clear_fn);
2640 }
2641
2642 return block_clear_fn;
2643 }
2644 \f
2645 /* Expand a setmem pattern; return true if successful. */
2646
2647 bool
2648 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2649 {
2650 /* Try the most limited insn first, because there's no point
2651 including more than one in the machine description unless
2652 the more limited one has some advantage. */
2653
2654 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2655 enum machine_mode mode;
2656
2657 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2658 mode = GET_MODE_WIDER_MODE (mode))
2659 {
2660 enum insn_code code = setmem_optab[(int) mode];
2661 insn_operand_predicate_fn pred;
2662
2663 if (code != CODE_FOR_nothing
2664 /* We don't need MODE to be narrower than
2665 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2666 the mode mask, as it is returned by the macro, it will
2667 definitely be less than the actual mode mask. */
2668 && ((GET_CODE (size) == CONST_INT
2669 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2670 <= (GET_MODE_MASK (mode) >> 1)))
2671 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2672 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2673 || (*pred) (object, BLKmode))
2674 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2675 || (*pred) (opalign, VOIDmode)))
2676 {
2677 rtx opsize, opchar;
2678 enum machine_mode char_mode;
2679 rtx last = get_last_insn ();
2680 rtx pat;
2681
2682 opsize = convert_to_mode (mode, size, 1);
2683 pred = insn_data[(int) code].operand[1].predicate;
2684 if (pred != 0 && ! (*pred) (opsize, mode))
2685 opsize = copy_to_mode_reg (mode, opsize);
2686
2687 opchar = val;
2688 char_mode = insn_data[(int) code].operand[2].mode;
2689 if (char_mode != VOIDmode)
2690 {
2691 opchar = convert_to_mode (char_mode, opchar, 1);
2692 pred = insn_data[(int) code].operand[2].predicate;
2693 if (pred != 0 && ! (*pred) (opchar, char_mode))
2694 opchar = copy_to_mode_reg (char_mode, opchar);
2695 }
2696
2697 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2698 if (pat)
2699 {
2700 emit_insn (pat);
2701 return true;
2702 }
2703 else
2704 delete_insns_since (last);
2705 }
2706 }
2707
2708 return false;
2709 }
2710
2711 \f
2712 /* Write to one of the components of the complex value CPLX. Write VAL to
2713 the real part if IMAG_P is false, and the imaginary part if it's true. */
2714
2715 static void
2716 write_complex_part (rtx cplx, rtx val, bool imag_p)
2717 {
2718 enum machine_mode cmode;
2719 enum machine_mode imode;
2720 unsigned ibitsize;
2721
2722 if (GET_CODE (cplx) == CONCAT)
2723 {
2724 emit_move_insn (XEXP (cplx, imag_p), val);
2725 return;
2726 }
2727
2728 cmode = GET_MODE (cplx);
2729 imode = GET_MODE_INNER (cmode);
2730 ibitsize = GET_MODE_BITSIZE (imode);
2731
2732 /* For MEMs simplify_gen_subreg may generate an invalid new address
2733 because, e.g., the original address is considered mode-dependent
2734 by the target, which restricts simplify_subreg from invoking
2735 adjust_address_nv. Instead of preparing fallback support for an
2736 invalid address, we call adjust_address_nv directly. */
2737 if (MEM_P (cplx))
2738 {
2739 emit_move_insn (adjust_address_nv (cplx, imode,
2740 imag_p ? GET_MODE_SIZE (imode) : 0),
2741 val);
2742 return;
2743 }
2744
2745 /* If the sub-object is at least word sized, then we know that subregging
2746 will work. This special case is important, since store_bit_field
2747 wants to operate on integer modes, and there's rarely an OImode to
2748 correspond to TCmode. */
2749 if (ibitsize >= BITS_PER_WORD
2750 /* For hard regs we have exact predicates. Assume we can split
2751 the original object if it spans an even number of hard regs.
2752 This special case is important for SCmode on 64-bit platforms
2753 where the natural size of floating-point regs is 32-bit. */
2754 || (REG_P (cplx)
2755 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2756 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2757 {
2758 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2759 imag_p ? GET_MODE_SIZE (imode) : 0);
2760 if (part)
2761 {
2762 emit_move_insn (part, val);
2763 return;
2764 }
2765 else
2766 /* simplify_gen_subreg may fail for sub-word MEMs. */
2767 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2768 }
2769
2770 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2771 }
2772
2773 /* Extract one of the components of the complex value CPLX. Extract the
2774 real part if IMAG_P is false, and the imaginary part if it's true. */
2775
2776 static rtx
2777 read_complex_part (rtx cplx, bool imag_p)
2778 {
2779 enum machine_mode cmode, imode;
2780 unsigned ibitsize;
2781
2782 if (GET_CODE (cplx) == CONCAT)
2783 return XEXP (cplx, imag_p);
2784
2785 cmode = GET_MODE (cplx);
2786 imode = GET_MODE_INNER (cmode);
2787 ibitsize = GET_MODE_BITSIZE (imode);
2788
2789 /* Special case reads from complex constants that got spilled to memory. */
2790 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2791 {
2792 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2793 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2794 {
2795 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2796 if (CONSTANT_CLASS_P (part))
2797 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2798 }
2799 }
2800
2801 /* For MEMs simplify_gen_subreg may generate an invalid new address
2802 because, e.g., the original address is considered mode-dependent
2803 by the target, which restricts simplify_subreg from invoking
2804 adjust_address_nv. Instead of preparing fallback support for an
2805 invalid address, we call adjust_address_nv directly. */
2806 if (MEM_P (cplx))
2807 return adjust_address_nv (cplx, imode,
2808 imag_p ? GET_MODE_SIZE (imode) : 0);
2809
2810 /* If the sub-object is at least word sized, then we know that subregging
2811 will work. This special case is important, since extract_bit_field
2812 wants to operate on integer modes, and there's rarely an OImode to
2813 correspond to TCmode. */
2814 if (ibitsize >= BITS_PER_WORD
2815 /* For hard regs we have exact predicates. Assume we can split
2816 the original object if it spans an even number of hard regs.
2817 This special case is important for SCmode on 64-bit platforms
2818 where the natural size of floating-point regs is 32-bit. */
2819 || (REG_P (cplx)
2820 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2821 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2822 {
2823 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2824 imag_p ? GET_MODE_SIZE (imode) : 0);
2825 if (ret)
2826 return ret;
2827 else
2828 /* simplify_gen_subreg may fail for sub-word MEMs. */
2829 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2830 }
2831
2832 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2833 true, NULL_RTX, imode, imode);
2834 }
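/* Illustrative use: given CPLX of mode SCmode, the imaginary part
can be read and written back with

rtx im = read_complex_part (cplx, true);
write_complex_part (cplx, im, true);

whether CPLX is a CONCAT, a MEM, or a pseudo register. */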
2835 \f
2836 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2837 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2838 represented in NEW_MODE. If FORCE is true, this will never happen, as
2839 we'll force-create a SUBREG if needed. */
2840
2841 static rtx
2842 emit_move_change_mode (enum machine_mode new_mode,
2843 enum machine_mode old_mode, rtx x, bool force)
2844 {
2845 rtx ret;
2846
2847 if (MEM_P (x))
2848 {
2849 /* We don't have to worry about changing the address since the
2850 size in bytes is supposed to be the same. */
2851 if (reload_in_progress)
2852 {
2853 /* Copy the MEM to change the mode and move any
2854 substitutions from the old MEM to the new one. */
2855 ret = adjust_address_nv (x, new_mode, 0);
2856 copy_replacements (x, ret);
2857 }
2858 else
2859 ret = adjust_address (x, new_mode, 0);
2860 }
2861 else
2862 {
2863 /* Note that we do want simplify_subreg's behavior of validating
2864 that the new mode is ok for a hard register. If we were to use
2865 simplify_gen_subreg, we would create the subreg, but would
2866 probably run into the target not being able to implement it. */
2867 /* Except, of course, when FORCE is true, when this is exactly what
2868 we want. Which is needed for CCmodes on some targets. */
2869 if (force)
2870 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2871 else
2872 ret = simplify_subreg (new_mode, x, old_mode, 0);
2873 }
2874
2875 return ret;
2876 }
2877
2878 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2879 an integer mode of the same size as MODE. Returns the instruction
2880 emitted, or NULL if such a move could not be generated. */
2881
2882 static rtx
2883 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2884 {
2885 enum machine_mode imode;
2886 enum insn_code code;
2887
2888 /* There must exist a mode of the exact size we require. */
2889 imode = int_mode_for_mode (mode);
2890 if (imode == BLKmode)
2891 return NULL_RTX;
2892
2893 /* The target must support moves in this mode. */
2894 code = mov_optab->handlers[imode].insn_code;
2895 if (code == CODE_FOR_nothing)
2896 return NULL_RTX;
2897
2898 x = emit_move_change_mode (imode, mode, x, force);
2899 if (x == NULL_RTX)
2900 return NULL_RTX;
2901 y = emit_move_change_mode (imode, mode, y, force);
2902 if (y == NULL_RTX)
2903 return NULL_RTX;
2904 return emit_insn (GEN_FCN (code) (x, y));
2905 }
2906
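/* For instance (illustrative): on a target lacking a movsf pattern
but providing movsi, emit_move_via_integer (SFmode, x, y, false)
rewrites both operands to SImode and emits the SImode move. */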
2907 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2908 Return an equivalent MEM that does not use an auto-increment. */
2909
2910 static rtx
2911 emit_move_resolve_push (enum machine_mode mode, rtx x)
2912 {
2913 enum rtx_code code = GET_CODE (XEXP (x, 0));
2914 HOST_WIDE_INT adjust;
2915 rtx temp;
2916
2917 adjust = GET_MODE_SIZE (mode);
2918 #ifdef PUSH_ROUNDING
2919 adjust = PUSH_ROUNDING (adjust);
2920 #endif
2921 if (code == PRE_DEC || code == POST_DEC)
2922 adjust = -adjust;
2923 else if (code == PRE_MODIFY || code == POST_MODIFY)
2924 {
2925 rtx expr = XEXP (XEXP (x, 0), 1);
2926 HOST_WIDE_INT val;
2927
2928 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2929 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2930 val = INTVAL (XEXP (expr, 1));
2931 if (GET_CODE (expr) == MINUS)
2932 val = -val;
2933 gcc_assert (adjust == val || adjust == -val);
2934 adjust = val;
2935 }
2936
2937 /* Do not use anti_adjust_stack, since we don't want to update
2938 stack_pointer_delta. */
2939 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2940 GEN_INT (adjust), stack_pointer_rtx,
2941 0, OPTAB_LIB_WIDEN);
2942 if (temp != stack_pointer_rtx)
2943 emit_move_insn (stack_pointer_rtx, temp);
2944
2945 switch (code)
2946 {
2947 case PRE_INC:
2948 case PRE_DEC:
2949 case PRE_MODIFY:
2950 temp = stack_pointer_rtx;
2951 break;
2952 case POST_INC:
2953 case POST_DEC:
2954 case POST_MODIFY:
2955 temp = plus_constant (stack_pointer_rtx, -adjust);
2956 break;
2957 default:
2958 gcc_unreachable ();
2959 }
2960
2961 return replace_equiv_address (x, temp);
2962 }
2963
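/* E.g. (illustrative): with a downward-growing stack, a push such as

(set (mem:SI (pre_dec:SI (reg sp))) (reg:SI 0))

is resolved into an explicit 4-byte stack-pointer decrement followed
by a store through the plain (mem:SI (reg sp)). */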
2964 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2965 X is known to satisfy push_operand, and MODE is known to be complex.
2966 Returns the last instruction emitted. */
2967
2968 static rtx
2969 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2970 {
2971 enum machine_mode submode = GET_MODE_INNER (mode);
2972 bool imag_first;
2973
2974 #ifdef PUSH_ROUNDING
2975 unsigned int submodesize = GET_MODE_SIZE (submode);
2976
2977 /* If we are pushing to the stack but the size is smaller than what
2978 the machine can push exactly, we need to use plain move instructions. */
2979 if (PUSH_ROUNDING (submodesize) != submodesize)
2980 {
2981 x = emit_move_resolve_push (mode, x);
2982 return emit_move_insn (x, y);
2983 }
2984 #endif
2985
2986 /* Note that the real part always precedes the imag part in memory
2987 regardless of machine's endianness. */
2988 switch (GET_CODE (XEXP (x, 0)))
2989 {
2990 case PRE_DEC:
2991 case POST_DEC:
2992 imag_first = true;
2993 break;
2994 case PRE_INC:
2995 case POST_INC:
2996 imag_first = false;
2997 break;
2998 default:
2999 gcc_unreachable ();
3000 }
3001
3002 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3003 read_complex_part (y, imag_first));
3004 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3005 read_complex_part (y, !imag_first));
3006 }
3007
3008 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3009 MODE is known to be complex. Returns the last instruction emitted. */
3010
3011 static rtx
3012 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3013 {
3014 bool try_int;
3015
3016 /* Need to take special care for pushes, to maintain proper ordering
3017 of the data, and possibly extra padding. */
3018 if (push_operand (x, mode))
3019 return emit_move_complex_push (mode, x, y);
3020
3021 /* See if we can coerce the target into moving both values at once. */
3022
3023 /* Move floating point as parts. */
3024 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3025 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3026 try_int = false;
3027 /* Not possible if the values are inherently not adjacent. */
3028 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3029 try_int = false;
3030 /* Is possible if both are registers (or subregs of registers). */
3031 else if (register_operand (x, mode) && register_operand (y, mode))
3032 try_int = true;
3033 /* If one of the operands is a memory, and alignment constraints
3034 are friendly enough, we may be able to do combined memory operations.
3035 We do not attempt this if Y is a constant because that combination is
3036 usually better with the by-parts thing below. */
3037 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3038 && (!STRICT_ALIGNMENT
3039 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3040 try_int = true;
3041 else
3042 try_int = false;
3043
3044 if (try_int)
3045 {
3046 rtx ret;
3047
3048 /* For memory to memory moves, optimal behavior can be had with the
3049 existing block move logic. */
3050 if (MEM_P (x) && MEM_P (y))
3051 {
3052 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3053 BLOCK_OP_NO_LIBCALL);
3054 return get_last_insn ();
3055 }
3056
3057 ret = emit_move_via_integer (mode, x, y, true);
3058 if (ret)
3059 return ret;
3060 }
3061
3062 /* Show the output dies here. This is necessary for SUBREGs
3063 of pseudos since we cannot track their lifetimes correctly;
3064 hard regs shouldn't appear here except as return values. */
3065 if (!reload_completed && !reload_in_progress
3066 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3067 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3068
3069 write_complex_part (x, read_complex_part (y, false), false);
3070 write_complex_part (x, read_complex_part (y, true), true);
3071 return get_last_insn ();
3072 }
3073
3074 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3075 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3076
3077 static rtx
3078 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3079 {
3080 rtx ret;
3081
3082 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3083 if (mode != CCmode)
3084 {
3085 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3086 if (code != CODE_FOR_nothing)
3087 {
3088 x = emit_move_change_mode (CCmode, mode, x, true);
3089 y = emit_move_change_mode (CCmode, mode, y, true);
3090 return emit_insn (GEN_FCN (code) (x, y));
3091 }
3092 }
3093
3094 /* Otherwise, find the MODE_INT mode of the same width. */
3095 ret = emit_move_via_integer (mode, x, y, false);
3096 gcc_assert (ret != NULL);
3097 return ret;
3098 }
3099
3100 /* Return true if word I of OP lies entirely in the
3101 undefined bits of a paradoxical subreg. */
3102
3103 static bool
3104 undefined_operand_subword_p (rtx op, int i)
3105 {
3106 enum machine_mode innermode, innermostmode;
3107 int offset;
3108 if (GET_CODE (op) != SUBREG)
3109 return false;
3110 innermode = GET_MODE (op);
3111 innermostmode = GET_MODE (SUBREG_REG (op));
3112 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3113 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3114 memory, except for a paradoxical subreg where we define
3115 SUBREG_BYTE to be 0; undo this exception as in
3116 simplify_subreg. */
3117 if (SUBREG_BYTE (op) == 0
3118 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3119 {
3120 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3121 if (WORDS_BIG_ENDIAN)
3122 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3123 if (BYTES_BIG_ENDIAN)
3124 offset += difference % UNITS_PER_WORD;
3125 }
3126 if (offset >= GET_MODE_SIZE (innermostmode)
3127 || offset <= -GET_MODE_SIZE (word_mode))
3128 return true;
3129 return false;
3130 }
3131
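/* Example (illustrative): on a little-endian 32-bit target, word 1 of
(subreg:DI (reg:SI 100) 0) lies entirely in the undefined bits of the
paradoxical subreg, so emit_move_multi_word need not copy it. */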
3132 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3133 MODE is any multi-word or full-word mode that lacks a move_insn
3134 pattern. Note that you will get better code if you define such
3135 patterns, even if they must turn into multiple assembler instructions. */
3136
3137 static rtx
3138 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3139 {
3140 rtx last_insn = 0;
3141 rtx seq, inner;
3142 bool need_clobber;
3143 int i;
3144
3145 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3146
3147 /* If X is a push on the stack, do the push now and replace
3148 X with a reference to the stack pointer. */
3149 if (push_operand (x, mode))
3150 x = emit_move_resolve_push (mode, x);
3151
3152 /* If we are in reload, see if either operand is a MEM whose address
3153 is scheduled for replacement. */
3154 if (reload_in_progress && MEM_P (x)
3155 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3156 x = replace_equiv_address_nv (x, inner);
3157 if (reload_in_progress && MEM_P (y)
3158 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3159 y = replace_equiv_address_nv (y, inner);
3160
3161 start_sequence ();
3162
3163 need_clobber = false;
3164 for (i = 0;
3165 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3166 i++)
3167 {
3168 rtx xpart = operand_subword (x, i, 1, mode);
3169 rtx ypart;
3170
3171 /* Do not generate code for a move if it would come entirely
3172 from the undefined bits of a paradoxical subreg. */
3173 if (undefined_operand_subword_p (y, i))
3174 continue;
3175
3176 ypart = operand_subword (y, i, 1, mode);
3177
3178 /* If we can't get a part of Y, put Y into memory if it is a
3179 constant. Otherwise, force it into a register. Then we must
3180 be able to get a part of Y. */
3181 if (ypart == 0 && CONSTANT_P (y))
3182 {
3183 y = use_anchored_address (force_const_mem (mode, y));
3184 ypart = operand_subword (y, i, 1, mode);
3185 }
3186 else if (ypart == 0)
3187 ypart = operand_subword_force (y, i, mode);
3188
3189 gcc_assert (xpart && ypart);
3190
3191 need_clobber |= (GET_CODE (xpart) == SUBREG);
3192
3193 last_insn = emit_move_insn (xpart, ypart);
3194 }
3195
3196 seq = get_insns ();
3197 end_sequence ();
3198
3199 /* Show the output dies here. This is necessary for SUBREGs
3200 of pseudos since we cannot track their lifetimes correctly;
3201 hard regs shouldn't appear here except as return values.
3202 We never want to emit such a clobber after reload. */
3203 if (x != y
3204 && ! (reload_in_progress || reload_completed)
3205 && need_clobber != 0)
3206 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3207
3208 emit_insn (seq);
3209
3210 return last_insn;
3211 }
3212
3213 /* Low level part of emit_move_insn.
3214 Called just like emit_move_insn, but assumes X and Y
3215 are basically valid. */
3216
3217 rtx
3218 emit_move_insn_1 (rtx x, rtx y)
3219 {
3220 enum machine_mode mode = GET_MODE (x);
3221 enum insn_code code;
3222
3223 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3224
3225 code = mov_optab->handlers[mode].insn_code;
3226 if (code != CODE_FOR_nothing)
3227 return emit_insn (GEN_FCN (code) (x, y));
3228
3229 /* Expand complex moves by moving real part and imag part. */
3230 if (COMPLEX_MODE_P (mode))
3231 return emit_move_complex (mode, x, y);
3232
3233 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3234 {
3235 rtx result = emit_move_via_integer (mode, x, y, true);
3236
3237 /* If we can't find an integer mode, use multi words. */
3238 if (result)
3239 return result;
3240 else
3241 return emit_move_multi_word (mode, x, y);
3242 }
3243
3244 if (GET_MODE_CLASS (mode) == MODE_CC)
3245 return emit_move_ccmode (mode, x, y);
3246
3247 /* Try using a move pattern for the corresponding integer mode. This is
3248 only safe when simplify_subreg can convert MODE constants into integer
3249 constants. At present, it can only do this reliably if the value
3250 fits within a HOST_WIDE_INT. */
3251 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3252 {
3253 rtx ret = emit_move_via_integer (mode, x, y, false);
3254 if (ret)
3255 return ret;
3256 }
3257
3258 return emit_move_multi_word (mode, x, y);
3259 }
3260
3261 /* Generate code to copy Y into X.
3262 Both Y and X must have the same mode, except that
3263 Y can be a constant with VOIDmode.
3264 This mode cannot be BLKmode; use emit_block_move for that.
3265
3266 Return the last instruction emitted. */
3267
3268 rtx
3269 emit_move_insn (rtx x, rtx y)
3270 {
3271 enum machine_mode mode = GET_MODE (x);
3272 rtx y_cst = NULL_RTX;
3273 rtx last_insn, set;
3274
3275 gcc_assert (mode != BLKmode
3276 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3277
3278 if (CONSTANT_P (y))
3279 {
3280 if (optimize
3281 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3282 && (last_insn = compress_float_constant (x, y)))
3283 return last_insn;
3284
3285 y_cst = y;
3286
3287 if (!LEGITIMATE_CONSTANT_P (y))
3288 {
3289 y = force_const_mem (mode, y);
3290
3291 /* If the target's cannot_force_const_mem prevented the spill,
3292 assume that the target's move expanders will also take care
3293 of the non-legitimate constant. */
3294 if (!y)
3295 y = y_cst;
3296 else
3297 y = use_anchored_address (y);
3298 }
3299 }
3300
3301 /* If X or Y are memory references, verify that their addresses are valid
3302 for the machine. */
3303 if (MEM_P (x)
3304 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3305 && ! push_operand (x, GET_MODE (x)))
3306 || (flag_force_addr
3307 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3308 x = validize_mem (x);
3309
3310 if (MEM_P (y)
3311 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3312 || (flag_force_addr
3313 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3314 y = validize_mem (y);
3315
3316 gcc_assert (mode != BLKmode);
3317
3318 last_insn = emit_move_insn_1 (x, y);
3319
3320 if (y_cst && REG_P (x)
3321 && (set = single_set (last_insn)) != NULL_RTX
3322 && SET_DEST (set) == x
3323 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3324 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3325
3326 return last_insn;
3327 }
3328
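/* Typical use (illustrative):

rtx tmp = gen_reg_rtx (GET_MODE (src));
emit_move_insn (tmp, src);

A VOIDmode constant such as (const_int 42) is also accepted as the
source, since CONST_INTs carry no mode of their own. */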
3329 /* If Y is representable exactly in a narrower mode, and the target can
3330 perform the extension directly from constant or memory, then emit the
3331 move as an extension. */
3332
3333 static rtx
3334 compress_float_constant (rtx x, rtx y)
3335 {
3336 enum machine_mode dstmode = GET_MODE (x);
3337 enum machine_mode orig_srcmode = GET_MODE (y);
3338 enum machine_mode srcmode;
3339 REAL_VALUE_TYPE r;
3340 int oldcost, newcost;
3341
3342 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3343
3344 if (LEGITIMATE_CONSTANT_P (y))
3345 oldcost = rtx_cost (y, SET);
3346 else
3347 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3348
3349 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3350 srcmode != orig_srcmode;
3351 srcmode = GET_MODE_WIDER_MODE (srcmode))
3352 {
3353 enum insn_code ic;
3354 rtx trunc_y, last_insn;
3355
3356 /* Skip if the target can't extend this way. */
3357 ic = can_extend_p (dstmode, srcmode, 0);
3358 if (ic == CODE_FOR_nothing)
3359 continue;
3360
3361 /* Skip if the narrowed value isn't exact. */
3362 if (! exact_real_truncate (srcmode, &r))
3363 continue;
3364
3365 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3366
3367 if (LEGITIMATE_CONSTANT_P (trunc_y))
3368 {
3369 /* Skip if the target needs extra instructions to perform
3370 the extension. */
3371 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3372 continue;
3373 /* This is valid, but may not be cheaper than the original. */
3374 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3375 if (oldcost < newcost)
3376 continue;
3377 }
3378 else if (float_extend_from_mem[dstmode][srcmode])
3379 {
3380 trunc_y = force_const_mem (srcmode, trunc_y);
3381 /* This is valid, but may not be cheaper than the original. */
3382 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3383 if (oldcost < newcost)
3384 continue;
3385 trunc_y = validize_mem (trunc_y);
3386 }
3387 else
3388 continue;
3389
3390 /* For CSE's benefit, force the compressed constant pool entry
3391 into a new pseudo. This constant may be used in different modes,
3392 and if not, combine will put things back together for us. */
3393 trunc_y = force_reg (srcmode, trunc_y);
3394 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3395 last_insn = get_last_insn ();
3396
3397 if (REG_P (x))
3398 set_unique_reg_note (last_insn, REG_EQUAL, y);
3399
3400 return last_insn;
3401 }
3402
3403 return NULL_RTX;
3404 }
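/* Example (illustrative): on a target with a cheap extendsfdf2, the
DFmode constant 1.0 is exactly representable in SFmode, so

(set (reg:DF 100) (const_double:DF 1.0))

may be emitted as an SFmode constant load followed by a FLOAT_EXTEND
rather than a DFmode constant-pool reference. */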
3405 \f
3406 /* Pushing data onto the stack. */
3407
3408 /* Push a block of length SIZE (perhaps variable)
3409 and return an rtx to address the beginning of the block.
3410 The value may be virtual_outgoing_args_rtx.
3411
3412 EXTRA is the number of bytes of padding to push in addition to SIZE.
3413 BELOW nonzero means this padding comes at low addresses;
3414 otherwise, the padding comes at high addresses. */
3415
3416 rtx
3417 push_block (rtx size, int extra, int below)
3418 {
3419 rtx temp;
3420
3421 size = convert_modes (Pmode, ptr_mode, size, 1);
3422 if (CONSTANT_P (size))
3423 anti_adjust_stack (plus_constant (size, extra));
3424 else if (REG_P (size) && extra == 0)
3425 anti_adjust_stack (size);
3426 else
3427 {
3428 temp = copy_to_mode_reg (Pmode, size);
3429 if (extra != 0)
3430 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3431 temp, 0, OPTAB_LIB_WIDEN);
3432 anti_adjust_stack (temp);
3433 }
3434
3435 #ifndef STACK_GROWS_DOWNWARD
3436 if (0)
3437 #else
3438 if (1)
3439 #endif
3440 {
3441 temp = virtual_outgoing_args_rtx;
3442 if (extra != 0 && below)
3443 temp = plus_constant (temp, extra);
3444 }
3445 else
3446 {
3447 if (GET_CODE (size) == CONST_INT)
3448 temp = plus_constant (virtual_outgoing_args_rtx,
3449 -INTVAL (size) - (below ? 0 : extra));
3450 else if (extra != 0 && !below)
3451 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3452 negate_rtx (Pmode, plus_constant (size, extra)));
3453 else
3454 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3455 negate_rtx (Pmode, size));
3456 }
3457
3458 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3459 }
3460
3461 #ifdef PUSH_ROUNDING
3462
3463 /* Emit single push insn. */
3464
3465 static void
3466 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3467 {
3468 rtx dest_addr;
3469 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3470 rtx dest;
3471 enum insn_code icode;
3472 insn_operand_predicate_fn pred;
3473
3474 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3475 /* If there is push pattern, use it. Otherwise try old way of throwing
3476 MEM representing push operation to move expander. */
3477 icode = push_optab->handlers[(int) mode].insn_code;
3478 if (icode != CODE_FOR_nothing)
3479 {
3480 if (((pred = insn_data[(int) icode].operand[0].predicate)
3481 && !((*pred) (x, mode))))
3482 x = force_reg (mode, x);
3483 emit_insn (GEN_FCN (icode) (x));
3484 return;
3485 }
3486 if (GET_MODE_SIZE (mode) == rounded_size)
3487 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3488 /* If we are to pad downward, adjust the stack pointer first and
3489 then store X into the stack location using an offset. This is
3490 because emit_move_insn does not know how to pad; it does not have
3491 access to type. */
3492 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3493 {
3494 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3495 HOST_WIDE_INT offset;
3496
3497 emit_move_insn (stack_pointer_rtx,
3498 expand_binop (Pmode,
3499 #ifdef STACK_GROWS_DOWNWARD
3500 sub_optab,
3501 #else
3502 add_optab,
3503 #endif
3504 stack_pointer_rtx,
3505 GEN_INT (rounded_size),
3506 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3507
3508 offset = (HOST_WIDE_INT) padding_size;
3509 #ifdef STACK_GROWS_DOWNWARD
3510 if (STACK_PUSH_CODE == POST_DEC)
3511 /* We have already decremented the stack pointer, so get the
3512 previous value. */
3513 offset += (HOST_WIDE_INT) rounded_size;
3514 #else
3515 if (STACK_PUSH_CODE == POST_INC)
3516 /* We have already incremented the stack pointer, so get the
3517 previous value. */
3518 offset -= (HOST_WIDE_INT) rounded_size;
3519 #endif
3520 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3521 }
3522 else
3523 {
3524 #ifdef STACK_GROWS_DOWNWARD
3525 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3526 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3527 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3528 #else
3529 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3530 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3531 GEN_INT (rounded_size));
3532 #endif
3533 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3534 }
3535
3536 dest = gen_rtx_MEM (mode, dest_addr);
3537
3538 if (type != 0)
3539 {
3540 set_mem_attributes (dest, type, 1);
3541
3542 if (flag_optimize_sibling_calls)
3543 /* Function incoming arguments may overlap with sibling call
3544 outgoing arguments and we cannot allow reordering of reads
3545 from function arguments with stores to outgoing arguments
3546 of sibling calls. */
3547 set_mem_alias_set (dest, 0);
3548 }
3549 emit_move_insn (dest, x);
3550 }
3551 #endif
3552
3553 /* Generate code to push X onto the stack, assuming it has mode MODE and
3554 type TYPE.
3555 MODE is redundant except when X is a CONST_INT (since they don't
3556 carry mode info).
3557 SIZE is an rtx for the size of data to be copied (in bytes),
3558 needed only if X is BLKmode.
3559
3560 ALIGN (in bits) is maximum alignment we can assume.
3561
3562 If PARTIAL and REG are both nonzero, then copy that many of the first
3563 bytes of X into registers starting with REG, and push the rest of X.
3564 The amount of space pushed is decreased by PARTIAL bytes.
3565 REG must be a hard register in this case.
3566 If REG is zero but PARTIAL is not, take all other actions for an
3567 argument partially in registers, but do not actually load any
3568 registers.
3569
3570 EXTRA is the amount in bytes of extra space to leave next to this arg.
3571 This is ignored if an argument block has already been allocated.
3572
3573 On a machine that lacks real push insns, ARGS_ADDR is the address of
3574 the bottom of the argument block for this call. We use indexing off there
3575 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3576 argument block has not been preallocated.
3577
3578 ARGS_SO_FAR is the size of args previously pushed for this call.
3579
3580 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3581 for arguments passed in registers. If nonzero, it will be the number
3582 of bytes required. */
3583
3584 void
3585 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3586 unsigned int align, int partial, rtx reg, int extra,
3587 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3588 rtx alignment_pad)
3589 {
3590 rtx xinner;
3591 enum direction stack_direction
3592 #ifdef STACK_GROWS_DOWNWARD
3593 = downward;
3594 #else
3595 = upward;
3596 #endif
3597
3598 /* Decide where to pad the argument: `downward' for below,
3599 `upward' for above, or `none' for no padding.
3600 Default is below for small data on big-endian machines; else above. */
3601 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3602
3603 /* Invert direction if stack is post-decrement.
3604 FIXME: why? */
3605 if (STACK_PUSH_CODE == POST_DEC)
3606 if (where_pad != none)
3607 where_pad = (where_pad == downward ? upward : downward);
3608
3609 xinner = x;
3610
3611 if (mode == BLKmode)
3612 {
3613 /* Copy a block into the stack, entirely or partially. */
3614
3615 rtx temp;
3616 int used;
3617 int offset;
3618 int skip;
3619
3620 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3621 used = partial - offset;
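/* Illustrative arithmetic: with PARTIAL = 10 bytes and
   PARM_BOUNDARY / BITS_PER_UNIT = 8, OFFSET is 10 % 8 = 2 and USED is
   10 - 2 = 8. */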
3622
3623 gcc_assert (size);
3624
3625 /* USED is now the # of bytes we need not copy to the stack
3626 because registers will take care of them. */
3627
3628 if (partial != 0)
3629 xinner = adjust_address (xinner, BLKmode, used);
3630
3631 /* If the partial register-part of the arg counts in its stack size,
3632 skip the part of stack space corresponding to the registers.
3633 Otherwise, start copying to the beginning of the stack space,
3634 by setting SKIP to 0. */
3635 skip = (reg_parm_stack_space == 0) ? 0 : used;
3636
3637 #ifdef PUSH_ROUNDING
3638 /* Do it with several push insns if that doesn't take lots of insns
3639 and if there is no difficulty with push insns that skip bytes
3640 on the stack for alignment purposes. */
3641 if (args_addr == 0
3642 && PUSH_ARGS
3643 && GET_CODE (size) == CONST_INT
3644 && skip == 0
3645 && MEM_ALIGN (xinner) >= align
3646 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3647 /* Here we avoid the case of a structure whose weak alignment
3648 forces many pushes of a small amount of data,
3649 and such small pushes do rounding that causes trouble. */
3650 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3651 || align >= BIGGEST_ALIGNMENT
3652 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3653 == (align / BITS_PER_UNIT)))
3654 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3655 {
3656 /* Push padding now if padding above and stack grows down,
3657 or if padding below and stack grows up.
3658 But if space already allocated, this has already been done. */
3659 if (extra && args_addr == 0
3660 && where_pad != none && where_pad != stack_direction)
3661 anti_adjust_stack (GEN_INT (extra));
3662
3663 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3664 }
3665 else
3666 #endif /* PUSH_ROUNDING */
3667 {
3668 rtx target;
3669
3670 /* Otherwise make space on the stack and copy the data
3671 to the address of that space. */
3672
3673 /* Deduct words put into registers from the size we must copy. */
3674 if (partial != 0)
3675 {
3676 if (GET_CODE (size) == CONST_INT)
3677 size = GEN_INT (INTVAL (size) - used);
3678 else
3679 size = expand_binop (GET_MODE (size), sub_optab, size,
3680 GEN_INT (used), NULL_RTX, 0,
3681 OPTAB_LIB_WIDEN);
3682 }
3683
3684 /* Get the address of the stack space.
3685 In this case, we do not deal with EXTRA separately.
3686 A single stack adjust will do. */
3687 if (! args_addr)
3688 {
3689 temp = push_block (size, extra, where_pad == downward);
3690 extra = 0;
3691 }
3692 else if (GET_CODE (args_so_far) == CONST_INT)
3693 temp = memory_address (BLKmode,
3694 plus_constant (args_addr,
3695 skip + INTVAL (args_so_far)));
3696 else
3697 temp = memory_address (BLKmode,
3698 plus_constant (gen_rtx_PLUS (Pmode,
3699 args_addr,
3700 args_so_far),
3701 skip));
3702
3703 if (!ACCUMULATE_OUTGOING_ARGS)
3704 {
3705 /* If the source is referenced relative to the stack pointer,
3706 copy it to another register to stabilize it. We do not need
3707 to do this if we know that we won't be changing sp. */
3708
3709 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3710 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3711 temp = copy_to_reg (temp);
3712 }
3713
3714 target = gen_rtx_MEM (BLKmode, temp);
3715
3716 /* We do *not* set_mem_attributes here, because incoming arguments
3717 may overlap with sibling call outgoing arguments and we cannot
3718 allow reordering of reads from function arguments with stores
3719 to outgoing arguments of sibling calls. We do, however, want
3720 to record the alignment of the stack slot. */
3721 /* ALIGN may well be stricter than TYPE's alignment, e.g. due to
3722 PARM_BOUNDARY. Assume the caller isn't lying. */
3723 set_mem_align (target, align);
3724
3725 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3726 }
3727 }
3728 else if (partial > 0)
3729 {
3730 /* Scalar partly in registers. */
3731
3732 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3733 int i;
3734 int not_stack;
3735 /* # bytes of start of argument
3736 that we must make space for but need not store. */
3737 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3738 int args_offset = INTVAL (args_so_far);
3739 int skip;
3740
3741 /* Push padding now if padding above and stack grows down,
3742 or if padding below and stack grows up.
3743 But if space already allocated, this has already been done. */
3744 if (extra && args_addr == 0
3745 && where_pad != none && where_pad != stack_direction)
3746 anti_adjust_stack (GEN_INT (extra));
3747
3748 /* If we make space by pushing it, we might as well push
3749 the real data. Otherwise, we can leave OFFSET nonzero
3750 and leave the space uninitialized. */
3751 if (args_addr == 0)
3752 offset = 0;
3753
3754 /* Now NOT_STACK gets the number of words that we don't need to
3755 allocate on the stack. Convert OFFSET to words too. */
3756 not_stack = (partial - offset) / UNITS_PER_WORD;
3757 offset /= UNITS_PER_WORD;
3758
3759 /* If the partial register-part of the arg counts in its stack size,
3760 skip the part of stack space corresponding to the registers.
3761 Otherwise, start copying to the beginning of the stack space,
3762 by setting SKIP to 0. */
3763 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3764
3765 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3766 x = validize_mem (force_const_mem (mode, x));
3767
3768 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3769 SUBREGs of such registers are not allowed. */
3770 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3771 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3772 x = copy_to_reg (x);
3773
3774 /* Loop over all the words allocated on the stack for this arg. */
3775 /* We can do it by words, because any scalar bigger than a word
3776 has a size that is a multiple of a word. */
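/* E.g. (illustrative): for a 4-word scalar with NOT_STACK = 1 and
   OFFSET = 0, only words 1 through 3 are pushed; word 0 is left for
   the registers. */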
3777 #ifndef PUSH_ARGS_REVERSED
3778 for (i = not_stack; i < size; i++)
3779 #else
3780 for (i = size - 1; i >= not_stack; i--)
3781 #endif
3782 if (i >= not_stack + offset)
3783 emit_push_insn (operand_subword_force (x, i, mode),
3784 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3785 0, args_addr,
3786 GEN_INT (args_offset + ((i - not_stack + skip)
3787 * UNITS_PER_WORD)),
3788 reg_parm_stack_space, alignment_pad);
3789 }
3790 else
3791 {
3792 rtx addr;
3793 rtx dest;
3794
3795 /* Push padding now if padding above and stack grows down,
3796 or if padding below and stack grows up.
3797 But if space already allocated, this has already been done. */
3798 if (extra && args_addr == 0
3799 && where_pad != none && where_pad != stack_direction)
3800 anti_adjust_stack (GEN_INT (extra));
3801
3802 #ifdef PUSH_ROUNDING
3803 if (args_addr == 0 && PUSH_ARGS)
3804 emit_single_push_insn (mode, x, type);
3805 else
3806 #endif
3807 {
3808 if (GET_CODE (args_so_far) == CONST_INT)
3809 addr
3810 = memory_address (mode,
3811 plus_constant (args_addr,
3812 INTVAL (args_so_far)));
3813 else
3814 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3815 args_so_far));
3816 dest = gen_rtx_MEM (mode, addr);
3817
3818 /* We do *not* set_mem_attributes here, because incoming arguments
3819 may overlap with sibling call outgoing arguments and we cannot
3820 allow reordering of reads from function arguments with stores
3821 to outgoing arguments of sibling calls. We do, however, want
3822 to record the alignment of the stack slot. */
3823 /* ALIGN may well be stricter than TYPE's alignment, e.g. due to
3824 PARM_BOUNDARY. Assume the caller isn't lying. */
3825 set_mem_align (dest, align);
3826
3827 emit_move_insn (dest, x);
3828 }
3829 }
3830
3831 /* If part should go in registers, copy that part
3832 into the appropriate registers. Do this now, at the end,
3833 since mem-to-mem copies above may do function calls. */
3834 if (partial > 0 && reg != 0)
3835 {
3836 /* Handle calls that pass values in multiple non-contiguous locations.
3837 The Irix 6 ABI has examples of this. */
3838 if (GET_CODE (reg) == PARALLEL)
3839 emit_group_load (reg, x, type, -1);
3840 else
3841 {
3842 gcc_assert (partial % UNITS_PER_WORD == 0);
3843 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3844 }
3845 }
3846
3847 if (extra && args_addr == 0 && where_pad == stack_direction)
3848 anti_adjust_stack (GEN_INT (extra));
3849
3850 if (alignment_pad && args_addr == 0)
3851 anti_adjust_stack (alignment_pad);
3852 }
3853 \f
3854 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3855 operations. */
3856
3857 static rtx
3858 get_subtarget (rtx x)
3859 {
3860 return (optimize
3861 || x == 0
3862 /* Only registers can be subtargets. */
3863 || !REG_P (x)
3864 /* Don't use hard regs to avoid extending their life. */
3865 || REGNO (x) < FIRST_PSEUDO_REGISTER
3866 ? 0 : x);
3867 }
3868
3869 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3870 FIELD is a bitfield. Returns true if the optimization was successful,
3871 and there's nothing else to do. */
3872
3873 static bool
3874 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3875 unsigned HOST_WIDE_INT bitpos,
3876 enum machine_mode mode1, rtx str_rtx,
3877 tree to, tree src)
3878 {
3879 enum machine_mode str_mode = GET_MODE (str_rtx);
3880 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3881 tree op0, op1;
3882 rtx value, result;
3883 optab binop;
3884
3885 if (mode1 != VOIDmode
3886 || bitsize >= BITS_PER_WORD
3887 || str_bitsize > BITS_PER_WORD
3888 || TREE_SIDE_EFFECTS (to)
3889 || TREE_THIS_VOLATILE (to))
3890 return false;
3891
3892 STRIP_NOPS (src);
3893 if (!BINARY_CLASS_P (src)
3894 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3895 return false;
3896
3897 op0 = TREE_OPERAND (src, 0);
3898 op1 = TREE_OPERAND (src, 1);
3899 STRIP_NOPS (op0);
3900
3901 if (!operand_equal_p (to, op0, 0))
3902 return false;
3903
3904 if (MEM_P (str_rtx))
3905 {
3906 unsigned HOST_WIDE_INT offset1;
3907
3908 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3909 str_mode = word_mode;
3910 str_mode = get_best_mode (bitsize, bitpos,
3911 MEM_ALIGN (str_rtx), str_mode, 0);
3912 if (str_mode == VOIDmode)
3913 return false;
3914 str_bitsize = GET_MODE_BITSIZE (str_mode);
3915
3916 offset1 = bitpos;
3917 bitpos %= str_bitsize;
3918 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3919 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3920 }
3921 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3922 return false;
3923
3924 /* If the bit field covers the whole REG/MEM, store_field
3925 will likely generate better code. */
3926 if (bitsize >= str_bitsize)
3927 return false;
3928
3929 /* We can't handle fields split across multiple entities. */
3930 if (bitpos + bitsize > str_bitsize)
3931 return false;
3932
3933 if (BYTES_BIG_ENDIAN)
3934 bitpos = str_bitsize - bitpos - bitsize;
3935
3936 switch (TREE_CODE (src))
3937 {
3938 case PLUS_EXPR:
3939 case MINUS_EXPR:
3940 /* For now, just optimize the case of the topmost bitfield,
3941 where we don't need to do any masking, and of 1-bit
3942 bitfields, where xor can be used.
3943 We might win by one instruction for the other bitfields
3944 too if insv/extv instructions aren't used, so that
3945 can be added later. */
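/* For example (illustrative): for a constant increment of a 1-bit
   field B that is not the topmost bit, `B += 1' simply toggles the
   bit, so below we mask the value down to one bit and turn the add
   into an xor at the field's position. */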
3946 if (bitpos + bitsize != str_bitsize
3947 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3948 break;
3949
3950 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3951 value = convert_modes (str_mode,
3952 TYPE_MODE (TREE_TYPE (op1)), value,
3953 TYPE_UNSIGNED (TREE_TYPE (op1)));
3954
3955 /* We may be accessing data outside the field, which means
3956 we can alias adjacent data. */
3957 if (MEM_P (str_rtx))
3958 {
3959 str_rtx = shallow_copy_rtx (str_rtx);
3960 set_mem_alias_set (str_rtx, 0);
3961 set_mem_expr (str_rtx, 0);
3962 }
3963
3964 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3965 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3966 {
3967 value = expand_and (str_mode, value, const1_rtx, NULL);
3968 binop = xor_optab;
3969 }
3970 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3971 build_int_cst (NULL_TREE, bitpos),
3972 NULL_RTX, 1);
3973 result = expand_binop (str_mode, binop, str_rtx,
3974 value, str_rtx, 1, OPTAB_WIDEN);
3975 if (result != str_rtx)
3976 emit_move_insn (str_rtx, result);
3977 return true;
3978
3979 case BIT_IOR_EXPR:
3980 case BIT_XOR_EXPR:
3981 if (TREE_CODE (op1) != INTEGER_CST)
3982 break;
3983 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3984 value = convert_modes (GET_MODE (str_rtx),
3985 TYPE_MODE (TREE_TYPE (op1)), value,
3986 TYPE_UNSIGNED (TREE_TYPE (op1)));
3987
3988 /* We may be accessing data outside the field, which means
3989 we can alias adjacent data. */
3990 if (MEM_P (str_rtx))
3991 {
3992 str_rtx = shallow_copy_rtx (str_rtx);
3993 set_mem_alias_set (str_rtx, 0);
3994 set_mem_expr (str_rtx, 0);
3995 }
3996
3997 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3998 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3999 {
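/* Mask the constant down to the field's width so that the IOR/XOR
   cannot disturb the neighboring bits. */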
4000 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4001 - 1);
4002 value = expand_and (GET_MODE (str_rtx), value, mask,
4003 NULL_RTX);
4004 }
4005 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4006 build_int_cst (NULL_TREE, bitpos),
4007 NULL_RTX, 1);
4008 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4009 value, str_rtx, 1, OPTAB_WIDEN);
4010 if (result != str_rtx)
4011 emit_move_insn (str_rtx, result);
4012 return true;
4013
4014 default:
4015 break;
4016 }
4017
4018 return false;
4019 }
4020
4021
4022 /* Expand an assignment that stores the value of FROM into TO. */
4023
4024 void
4025 expand_assignment (tree to, tree from)
4026 {
4027 rtx to_rtx = 0;
4028 rtx result;
4029
4030 /* Don't crash if the lhs of the assignment was erroneous. */
4031 if (TREE_CODE (to) == ERROR_MARK)
4032 {
4033 result = expand_normal (from);
4034 return;
4035 }
4036
4037 /* Optimize away no-op moves without side-effects. */
4038 if (operand_equal_p (to, from, 0))
4039 return;
4040
4041 /* Assignment of a structure component needs special treatment
4042 if the structure component's rtx is not simply a MEM.
4043 Assignment of an array element at a constant index, and assignment of
4044 an array element in an unaligned packed structure field, have the same
4045 problem. */
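/* For instance (illustrative), `s.f = v' where F is a bit-field and
   `a[3] = v' where A is a packed array both take the path below. */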
4046 if (handled_component_p (to)
4047 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4048 {
4049 enum machine_mode mode1;
4050 HOST_WIDE_INT bitsize, bitpos;
4051 tree offset;
4052 int unsignedp;
4053 int volatilep = 0;
4054 tree tem;
4055
4056 push_temp_slots ();
4057 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4058 &unsignedp, &volatilep, true);
4059
4060 /* If we are going to use store_bit_field and extract_bit_field,
4061 make sure to_rtx will be safe for multiple use. */
4062
4063 to_rtx = expand_normal (tem);
4064
4065 if (offset != 0)
4066 {
4067 rtx offset_rtx;
4068
4069 if (!MEM_P (to_rtx))
4070 {
4071 /* We can get constant negative offsets into arrays with broken
4072 user code. Translate this to a trap instead of ICEing. */
4073 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4074 expand_builtin_trap ();
4075 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4076 }
4077
4078 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4079 #ifdef POINTERS_EXTEND_UNSIGNED
4080 if (GET_MODE (offset_rtx) != Pmode)
4081 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4082 #else
4083 if (GET_MODE (offset_rtx) != ptr_mode)
4084 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4085 #endif
4086
4087 /* A constant address in TO_RTX can have VOIDmode; we must not try
4088 to call force_reg in that case, so avoid it here. */
4089 if (MEM_P (to_rtx)
4090 && GET_MODE (to_rtx) == BLKmode
4091 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4092 && bitsize > 0
4093 && (bitpos % bitsize) == 0
4094 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4095 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4096 {
4097 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4098 bitpos = 0;
4099 }
4100
4101 to_rtx = offset_address (to_rtx, offset_rtx,
4102 highest_pow2_factor_for_target (to,
4103 offset));
4104 }
4105
4106 /* Handle expand_expr of a complex value returning a CONCAT. */
4107 if (GET_CODE (to_rtx) == CONCAT)
4108 {
4109 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4110 {
4111 gcc_assert (bitpos == 0);
4112 result = store_expr (from, to_rtx, false);
4113 }
4114 else
4115 {
4116 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4117 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4118 }
4119 }
4120 else
4121 {
4122 if (MEM_P (to_rtx))
4123 {
4124 /* If the field is at offset zero, we could have been given the
4125 DECL_RTL of the parent struct. Don't munge it. */
4126 to_rtx = shallow_copy_rtx (to_rtx);
4127
4128 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4129
4130 /* Deal with volatile and readonly fields. The former is only
4131 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4132 if (volatilep)
4133 MEM_VOLATILE_P (to_rtx) = 1;
4134 if (component_uses_parent_alias_set (to))
4135 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4136 }
4137
4138 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4139 to_rtx, to, from))
4140 result = NULL;
4141 else
4142 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4143 TREE_TYPE (tem), get_alias_set (to));
4144 }
4145
4146 if (result)
4147 preserve_temp_slots (result);
4148 free_temp_slots ();
4149 pop_temp_slots ();
4150 return;
4151 }
4152
4153 /* If the rhs is a function call and its value is not an aggregate,
4154 call the function before we start to compute the lhs.
4155 This is needed for correct code for cases such as
4156 val = setjmp (buf) on machines where reference to val
4157 requires loading up part of an address in a separate insn.
4158
4159 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4160 since it might be a promoted variable where the zero- or sign- extension
4161 needs to be done. Handling this in the normal way is safe because no
4162 computation is done before the call. */
4163 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4164 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4165 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4166 && REG_P (DECL_RTL (to))))
4167 {
4168 rtx value;
4169
4170 push_temp_slots ();
4171 value = expand_normal (from);
4172 if (to_rtx == 0)
4173 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4174
4175 /* Handle calls that return values in multiple non-contiguous locations.
4176 The Irix 6 ABI has examples of this. */
4177 if (GET_CODE (to_rtx) == PARALLEL)
4178 emit_group_load (to_rtx, value, TREE_TYPE (from),
4179 int_size_in_bytes (TREE_TYPE (from)));
4180 else if (GET_MODE (to_rtx) == BLKmode)
4181 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4182 else
4183 {
4184 if (POINTER_TYPE_P (TREE_TYPE (to)))
4185 value = convert_memory_address (GET_MODE (to_rtx), value);
4186 emit_move_insn (to_rtx, value);
4187 }
4188 preserve_temp_slots (to_rtx);
4189 free_temp_slots ();
4190 pop_temp_slots ();
4191 return;
4192 }
4193
4194 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4195 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4196
4197 if (to_rtx == 0)
4198 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4199
4200 /* Don't move directly into a return register. */
4201 if (TREE_CODE (to) == RESULT_DECL
4202 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4203 {
4204 rtx temp;
4205
4206 push_temp_slots ();
4207 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4208
4209 if (GET_CODE (to_rtx) == PARALLEL)
4210 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4211 int_size_in_bytes (TREE_TYPE (from)));
4212 else
4213 emit_move_insn (to_rtx, temp);
4214
4215 preserve_temp_slots (to_rtx);
4216 free_temp_slots ();
4217 pop_temp_slots ();
4218 return;
4219 }
4220
4221 /* In case we are returning the contents of an object which overlaps
4222 the place the value is being stored, use a safe function when copying
4223 a value through a pointer into a structure value return block. */
4224 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4225 && current_function_returns_struct
4226 && !current_function_returns_pcc_struct)
4227 {
4228 rtx from_rtx, size;
4229
4230 push_temp_slots ();
4231 size = expr_size (from);
4232 from_rtx = expand_normal (from);
4233
4234 emit_library_call (memmove_libfunc, LCT_NORMAL,
4235 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4236 XEXP (from_rtx, 0), Pmode,
4237 convert_to_mode (TYPE_MODE (sizetype),
4238 size, TYPE_UNSIGNED (sizetype)),
4239 TYPE_MODE (sizetype));
4240
4241 preserve_temp_slots (to_rtx);
4242 free_temp_slots ();
4243 pop_temp_slots ();
4244 return;
4245 }
4246
4247 /* Compute FROM and store the value in the rtx we got. */
4248
4249 push_temp_slots ();
4250 result = store_expr (from, to_rtx, 0);
4251 preserve_temp_slots (result);
4252 free_temp_slots ();
4253 pop_temp_slots ();
4254 return;
4255 }
4256
4257 /* Generate code for computing expression EXP,
4258 and storing the value into TARGET.
4259
4260 If the mode is BLKmode then we may return TARGET itself.
4261 It turns out that in BLKmode it doesn't cause a problem,
4262 because C has no operators that could combine two different
4263 assignments into the same BLKmode object with different values
4264 with no sequence point. Will other languages need this to
4265 be more thorough?
4266
4267 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4268 stack, and block moves may need to be treated specially. */
4269
4270 rtx
4271 store_expr (tree exp, rtx target, int call_param_p)
4272 {
4273 rtx temp;
4274 rtx alt_rtl = NULL_RTX;
4275 int dont_return_target = 0;
4276
4277 if (VOID_TYPE_P (TREE_TYPE (exp)))
4278 {
4279 /* C++ can generate ?: expressions with a throw expression in one
4280 branch and an rvalue in the other. Here, we resolve attempts to
4281 store the throw expression's nonexistent result. */
4282 gcc_assert (!call_param_p);
4283 expand_expr (exp, const0_rtx, VOIDmode, 0);
4284 return NULL_RTX;
4285 }
4286 if (TREE_CODE (exp) == COMPOUND_EXPR)
4287 {
4288 /* Perform first part of compound expression, then assign from second
4289 part. */
4290 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4291 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4292 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4293 }
4294 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4295 {
4296 /* For a conditional expression, get a safe form of the target. Then
4297 test the condition, doing the appropriate assignment on either
4298 side. This avoids the creation of unnecessary temporaries.
4299 For non-BLKmode, it is more efficient not to do this. */
4300
4301 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4302
4303 do_pending_stack_adjust ();
4304 NO_DEFER_POP;
4305 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4306 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4307 emit_jump_insn (gen_jump (lab2));
4308 emit_barrier ();
4309 emit_label (lab1);
4310 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4311 emit_label (lab2);
4312 OK_DEFER_POP;
4313
4314 return NULL_RTX;
4315 }
4316 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4317 /* If this is a scalar in a register that is stored in a wider mode
4318 than the declared mode, compute the result into its declared mode
4319 and then convert to the wider mode. Our value is the computed
4320 expression. */
4321 {
4322 rtx inner_target = 0;
4323
4324 /* We can do the conversion inside EXP, which will often result
4325 in some optimizations. Do the conversion in two steps: first
4326 change the signedness, if needed, then the extend. But don't
4327 do this if the type of EXP is a subtype of something else
4328 since then the conversion might involve more than just
4329 converting modes. */
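/* E.g. (illustrative): on a target that promotes `short' variables to
   full SImode registers, the widening conversion is folded into EXP
   and the result is computed directly into SUBREG_REG (target). */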
4330 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4331 && TREE_TYPE (TREE_TYPE (exp)) == 0
4332 && (!lang_hooks.reduce_bit_field_operations
4333 || (GET_MODE_PRECISION (GET_MODE (target))
4334 == TYPE_PRECISION (TREE_TYPE (exp)))))
4335 {
4336 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4337 != SUBREG_PROMOTED_UNSIGNED_P (target))
4338 exp = fold_convert
4339 (lang_hooks.types.signed_or_unsigned_type
4340 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4341
4342 exp = fold_convert (lang_hooks.types.type_for_mode
4343 (GET_MODE (SUBREG_REG (target)),
4344 SUBREG_PROMOTED_UNSIGNED_P (target)),
4345 exp);
4346
4347 inner_target = SUBREG_REG (target);
4348 }
4349
4350 temp = expand_expr (exp, inner_target, VOIDmode,
4351 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4352
4353 /* If TEMP is a VOIDmode constant, use convert_modes to make
4354 sure that we properly convert it. */
4355 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4356 {
4357 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4358 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4359 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4360 GET_MODE (target), temp,
4361 SUBREG_PROMOTED_UNSIGNED_P (target));
4362 }
4363
4364 convert_move (SUBREG_REG (target), temp,
4365 SUBREG_PROMOTED_UNSIGNED_P (target));
4366
4367 return NULL_RTX;
4368 }
4369 else
4370 {
4371 temp = expand_expr_real (exp, target, GET_MODE (target),
4372 (call_param_p
4373 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4374 &alt_rtl);
4375 /* Return TARGET if it's a specified hardware register.
4376 If TARGET is a volatile mem ref, either return TARGET
4377 or return a reg copied *from* TARGET; ANSI requires this.
4378
4379 Otherwise, if TEMP is not TARGET, return TEMP
4380 if it is constant (for efficiency),
4381 or if we really want the correct value. */
4382 if (!(target && REG_P (target)
4383 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4384 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4385 && ! rtx_equal_p (temp, target)
4386 && CONSTANT_P (temp))
4387 dont_return_target = 1;
4388 }
4389
4390 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4391 the same as that of TARGET, adjust the constant. This is needed, for
4392 example, in case it is a CONST_DOUBLE and we want only a word-sized
4393 value. */
4394 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4395 && TREE_CODE (exp) != ERROR_MARK
4396 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4397 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4398 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4399
4400 /* If value was not generated in the target, store it there.
4401 Convert the value to TARGET's type first if necessary and emit the
4402 pending increments that were queued when expanding EXP.
4403 Note that we cannot emit the whole queue blindly because this will
4404 effectively disable the POST_INC optimization later.
4405
4406 If TEMP and TARGET compare equal according to rtx_equal_p, but
4407 one or both of them are volatile memory refs, we have to distinguish
4408 two cases:
4409 - expand_expr has used TARGET. In this case, we must not generate
4410 another copy. This can be detected by TARGET being equal according
4411 to == .
4412 - expand_expr has not used TARGET - that means that the source just
4413 happens to have the same RTX form. Since temp will have been created
4414 by expand_expr, it will compare unequal according to == .
4415 We must generate a copy in this case, to reach the correct number
4416 of volatile memory references. */
4417
4418 if ((! rtx_equal_p (temp, target)
4419 || (temp != target && (side_effects_p (temp)
4420 || side_effects_p (target))))
4421 && TREE_CODE (exp) != ERROR_MARK
4422 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4423 but TARGET is not a valid memory reference, TEMP will differ
4424 from TARGET although it is really the same location. */
4425 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4426 /* If there's nothing to copy, don't bother. Don't call
4427 expr_size unless necessary, because some front ends' (e.g. C++)
4428 expr_size hook must not be given objects that are not
4429 supposed to be bit-copied or bit-initialized. */
4430 && expr_size (exp) != const0_rtx)
4431 {
4432 if (GET_MODE (temp) != GET_MODE (target)
4433 && GET_MODE (temp) != VOIDmode)
4434 {
4435 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4436 if (dont_return_target)
4437 {
4438 /* In this case, we will return TEMP,
4439 so make sure it has the proper mode.
4440 But don't forget to store the value into TARGET. */
4441 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4442 emit_move_insn (target, temp);
4443 }
4444 else
4445 convert_move (target, temp, unsignedp);
4446 }
4447
4448 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4449 {
4450 /* Handle copying a string constant into an array. The string
4451 constant may be shorter than the array. So copy just the string's
4452 actual length, and clear the rest. First get the size of the data
4453 type of the string, which is actually the size of the target. */
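/* Illustrative: for `char buf[8] = "hi"' the string's
   TREE_STRING_LENGTH of 3 (including the terminating NUL) is copied
   and the remaining 5 bytes of BUF are cleared. */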
4454 rtx size = expr_size (exp);
4455
4456 if (GET_CODE (size) == CONST_INT
4457 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4458 emit_block_move (target, temp, size,
4459 (call_param_p
4460 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4461 else
4462 {
4463 /* Compute the size of the data to copy from the string. */
4464 tree copy_size
4465 = size_binop (MIN_EXPR,
4466 make_tree (sizetype, size),
4467 size_int (TREE_STRING_LENGTH (exp)));
4468 rtx copy_size_rtx
4469 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4470 (call_param_p
4471 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4472 rtx label = 0;
4473
4474 /* Copy that much. */
4475 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4476 TYPE_UNSIGNED (sizetype));
4477 emit_block_move (target, temp, copy_size_rtx,
4478 (call_param_p
4479 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4480
4481 /* Figure out how much is left in TARGET that we have to clear.
4482 Do all calculations in ptr_mode. */
4483 if (GET_CODE (copy_size_rtx) == CONST_INT)
4484 {
4485 size = plus_constant (size, -INTVAL (copy_size_rtx));
4486 target = adjust_address (target, BLKmode,
4487 INTVAL (copy_size_rtx));
4488 }
4489 else
4490 {
4491 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4492 copy_size_rtx, NULL_RTX, 0,
4493 OPTAB_LIB_WIDEN);
4494
4495 #ifdef POINTERS_EXTEND_UNSIGNED
4496 if (GET_MODE (copy_size_rtx) != Pmode)
4497 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4498 TYPE_UNSIGNED (sizetype));
4499 #endif
4500
4501 target = offset_address (target, copy_size_rtx,
4502 highest_pow2_factor (copy_size));
4503 label = gen_label_rtx ();
4504 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4505 GET_MODE (size), 0, label);
4506 }
4507
4508 if (size != const0_rtx)
4509 clear_storage (target, size, BLOCK_OP_NORMAL);
4510
4511 if (label)
4512 emit_label (label);
4513 }
4514 }
4515 /* Handle calls that return values in multiple non-contiguous locations.
4516 The Irix 6 ABI has examples of this. */
4517 else if (GET_CODE (target) == PARALLEL)
4518 emit_group_load (target, temp, TREE_TYPE (exp),
4519 int_size_in_bytes (TREE_TYPE (exp)));
4520 else if (GET_MODE (temp) == BLKmode)
4521 emit_block_move (target, temp, expr_size (exp),
4522 (call_param_p
4523 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4524 else
4525 {
4526 temp = force_operand (temp, target);
4527 if (temp != target)
4528 emit_move_insn (target, temp);
4529 }
4530 }
4531
4532 return NULL_RTX;
4533 }
4534 \f
4535 /* Helper for categorize_ctor_elements. Identical interface. */
4536
4537 static bool
4538 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4539 HOST_WIDE_INT *p_elt_count,
4540 bool *p_must_clear)
4541 {
4542 unsigned HOST_WIDE_INT idx;
4543 HOST_WIDE_INT nz_elts, elt_count;
4544 tree value, purpose;
4545
4546 /* Whether CTOR is a valid constant initializer, in accordance with what
4547 initializer_constant_valid_p does. If inferred from the constructor
4548 elements, true until proven otherwise. */
4549 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4550 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4551
4552 nz_elts = 0;
4553 elt_count = 0;
4554
4555 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4556 {
4557 HOST_WIDE_INT mult;
4558
4559 mult = 1;
4560 if (TREE_CODE (purpose) == RANGE_EXPR)
4561 {
4562 tree lo_index = TREE_OPERAND (purpose, 0);
4563 tree hi_index = TREE_OPERAND (purpose, 1);
4564
4565 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4566 mult = (tree_low_cst (hi_index, 1)
4567 - tree_low_cst (lo_index, 1) + 1);
4568 }
4569
4570 switch (TREE_CODE (value))
4571 {
4572 case CONSTRUCTOR:
4573 {
4574 HOST_WIDE_INT nz = 0, ic = 0;
4575
4576 bool const_elt_p
4577 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4578
4579 nz_elts += mult * nz;
4580 elt_count += mult * ic;
4581
4582 if (const_from_elts_p && const_p)
4583 const_p = const_elt_p;
4584 }
4585 break;
4586
4587 case INTEGER_CST:
4588 case REAL_CST:
4589 if (!initializer_zerop (value))
4590 nz_elts += mult;
4591 elt_count += mult;
4592 break;
4593
4594 case STRING_CST:
4595 nz_elts += mult * TREE_STRING_LENGTH (value);
4596 elt_count += mult * TREE_STRING_LENGTH (value);
4597 break;
4598
4599 case COMPLEX_CST:
4600 if (!initializer_zerop (TREE_REALPART (value)))
4601 nz_elts += mult;
4602 if (!initializer_zerop (TREE_IMAGPART (value)))
4603 nz_elts += mult;
4604 elt_count += mult;
4605 break;
4606
4607 case VECTOR_CST:
4608 {
4609 tree v;
4610 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4611 {
4612 if (!initializer_zerop (TREE_VALUE (v)))
4613 nz_elts += mult;
4614 elt_count += mult;
4615 }
4616 }
4617 break;
4618
4619 default:
4620 nz_elts += mult;
4621 elt_count += mult;
4622
4623 if (const_from_elts_p && const_p)
4624 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4625 != NULL_TREE;
4626 break;
4627 }
4628 }
4629
4630 if (!*p_must_clear
4631 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4632 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4633 {
4634 tree init_sub_type;
4635 bool clear_this = true;
4636
4637 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4638 {
4639 /* We don't expect more than one element of the union to be
4640 initialized. Not sure what we should do otherwise... */
4641 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4642 == 1);
4643
4644 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4645 CONSTRUCTOR_ELTS (ctor),
4646 0)->value);
4647
4648 /* ??? We could look at each element of the union, and find the
4649 largest element, which would avoid comparing the size of the
4650 initialized element against any tail padding in the union.
4651 Doesn't seem worth the effort... */
4652 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4653 TYPE_SIZE (init_sub_type)) == 1)
4654 {
4655 /* And now we have to find out if the element itself is fully
4656 constructed. E.g. for union { struct { int a, b; } s; } u
4657 = { .s = { .a = 1 } }. */
4658 if (elt_count == count_type_elements (init_sub_type, false))
4659 clear_this = false;
4660 }
4661 }
4662
4663 *p_must_clear = clear_this;
4664 }
4665
4666 *p_nz_elts += nz_elts;
4667 *p_elt_count += elt_count;
4668
4669 return const_p;
4670 }
4671
4672 /* Examine CTOR to discover:
4673 * how many scalar fields are set to nonzero values,
4674 and place it in *P_NZ_ELTS;
4675 * how many scalar fields in total are in CTOR,
4676 and place it in *P_ELT_COUNT.
4677 * if the type is a union, and the initializer from the constructor
4678 is not the largest element in the union, then set *P_MUST_CLEAR.
4679
4680 Return whether or not CTOR is a valid static constant initializer, the same
4681 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4682
4683 bool
4684 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4685 HOST_WIDE_INT *p_elt_count,
4686 bool *p_must_clear)
4687 {
4688 *p_nz_elts = 0;
4689 *p_elt_count = 0;
4690 *p_must_clear = false;
4691
4692 return
4693 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4694 }
4695
4696 /* Count the number of scalars in TYPE. Return -1 on overflow or if
4697 TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a flexible
4698 array member at the end of the structure. */
4699
4700 HOST_WIDE_INT
4701 count_type_elements (tree type, bool allow_flexarr)
4702 {
4703 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4704 switch (TREE_CODE (type))
4705 {
4706 case ARRAY_TYPE:
4707 {
4708 tree telts = array_type_nelts (type);
4709 if (telts && host_integerp (telts, 1))
4710 {
4711 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4712 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4713 if (n == 0)
4714 return 0;
4715 else if (max / n > m)
4716 return n * m;
4717 }
4718 return -1;
4719 }
4720
4721 case RECORD_TYPE:
4722 {
4723 HOST_WIDE_INT n = 0, t;
4724 tree f;
4725
4726 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4727 if (TREE_CODE (f) == FIELD_DECL)
4728 {
4729 t = count_type_elements (TREE_TYPE (f), false);
4730 if (t < 0)
4731 {
4732 /* Check for structures with a flexible array member. */
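/* E.g. (illustrative) `struct s { int n; char tail[]; };'. */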
4733 tree tf = TREE_TYPE (f);
4734 if (allow_flexarr
4735 && TREE_CHAIN (f) == NULL
4736 && TREE_CODE (tf) == ARRAY_TYPE
4737 && TYPE_DOMAIN (tf)
4738 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4739 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4740 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4741 && int_size_in_bytes (type) >= 0)
4742 break;
4743
4744 return -1;
4745 }
4746 n += t;
4747 }
4748
4749 return n;
4750 }
4751
4752 case UNION_TYPE:
4753 case QUAL_UNION_TYPE:
4754 {
4755 /* Ho hum. How in the world do we guess here? Clearly it isn't
4756 right to count the fields. Guess based on the number of words. */
4757 HOST_WIDE_INT n = int_size_in_bytes (type);
4758 if (n < 0)
4759 return -1;
4760 return n / UNITS_PER_WORD;
4761 }
4762
4763 case COMPLEX_TYPE:
4764 return 2;
4765
4766 case VECTOR_TYPE:
4767 return TYPE_VECTOR_SUBPARTS (type);
4768
4769 case INTEGER_TYPE:
4770 case REAL_TYPE:
4771 case ENUMERAL_TYPE:
4772 case BOOLEAN_TYPE:
4773 case POINTER_TYPE:
4774 case OFFSET_TYPE:
4775 case REFERENCE_TYPE:
4776 return 1;
4777
4778 case VOID_TYPE:
4779 case METHOD_TYPE:
4780 case FUNCTION_TYPE:
4781 case LANG_TYPE:
4782 default:
4783 gcc_unreachable ();
4784 }
4785 }
4786
4787 /* Return 1 if EXP contains mostly (3/4) zeros. */
4788
4789 static int
4790 mostly_zeros_p (tree exp)
4791 {
4792 if (TREE_CODE (exp) == CONSTRUCTOR)
4793
4794 {
4795 HOST_WIDE_INT nz_elts, count, elts;
4796 bool must_clear;
4797
4798 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4799 if (must_clear)
4800 return 1;
4801
4802 elts = count_type_elements (TREE_TYPE (exp), false);
4803
4804 return nz_elts < elts / 4;
4805 }
4806
4807 return initializer_zerop (exp);
4808 }
4809
4810 /* Return 1 if EXP contains all zeros. */
4811
4812 static int
4813 all_zeros_p (tree exp)
4814 {
4815 if (TREE_CODE (exp) == CONSTRUCTOR)
4816
4817 {
4818 HOST_WIDE_INT nz_elts, count;
4819 bool must_clear;
4820
4821 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4822 return nz_elts == 0;
4823 }
4824
4825 return initializer_zerop (exp);
4826 }
4827 \f
4828 /* Helper function for store_constructor.
4829 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4830 TYPE is the type of the CONSTRUCTOR, not the element type.
4831 CLEARED is as for store_constructor.
4832 ALIAS_SET is the alias set to use for any stores.
4833
4834 This provides a recursive shortcut back to store_constructor when it isn't
4835 necessary to go through store_field. This is so that we can pass through
4836 the cleared field to let store_constructor know that we may not have to
4837 clear a substructure if the outer structure has already been cleared. */
4838
4839 static void
4840 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4841 HOST_WIDE_INT bitpos, enum machine_mode mode,
4842 tree exp, tree type, int cleared, int alias_set)
4843 {
4844 if (TREE_CODE (exp) == CONSTRUCTOR
4845 /* We can only call store_constructor recursively if the size and
4846 bit position are on a byte boundary. */
4847 && bitpos % BITS_PER_UNIT == 0
4848 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4849 /* If we have a nonzero bitpos for a register target, then we just
4850 let store_field do the bitfield handling. This is unlikely to
4851 generate unnecessary clear instructions anyways. */
4852 && (bitpos == 0 || MEM_P (target)))
4853 {
4854 if (MEM_P (target))
4855 target
4856 = adjust_address (target,
4857 GET_MODE (target) == BLKmode
4858 || 0 != (bitpos
4859 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4860 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4861
4862
4863 /* Update the alias set, if required. */
4864 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4865 && MEM_ALIAS_SET (target) != 0)
4866 {
4867 target = copy_rtx (target);
4868 set_mem_alias_set (target, alias_set);
4869 }
4870
4871 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4872 }
4873 else
4874 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4875 }
4876
4877 /* Store the value of constructor EXP into the rtx TARGET.
4878 TARGET is either a REG or a MEM; we know it cannot conflict, since
4879 safe_from_p has been called.
4880 CLEARED is true if TARGET is known to have been zero'd.
4881 SIZE is the number of bytes of TARGET we are allowed to modify: this
4882 may not be the same as the size of EXP if we are assigning to a field
4883 which has been packed to exclude padding bits. */
4884
4885 static void
4886 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4887 {
4888 tree type = TREE_TYPE (exp);
4889 #ifdef WORD_REGISTER_OPERATIONS
4890 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4891 #endif
4892
4893 switch (TREE_CODE (type))
4894 {
4895 case RECORD_TYPE:
4896 case UNION_TYPE:
4897 case QUAL_UNION_TYPE:
4898 {
4899 unsigned HOST_WIDE_INT idx;
4900 tree field, value;
4901
4902 /* If size is zero or the target is already cleared, just record that it is cleared. */
4903 if (size == 0 || cleared)
4904 cleared = 1;
4905 /* We either clear the aggregate or indicate the value is dead. */
4906 else if ((TREE_CODE (type) == UNION_TYPE
4907 || TREE_CODE (type) == QUAL_UNION_TYPE)
4908 && ! CONSTRUCTOR_ELTS (exp))
4909 /* If the constructor is empty, clear the union. */
4910 {
4911 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4912 cleared = 1;
4913 }
4914
4915 /* If we are building a static constructor into a register,
4916 set the initial value as zero so we can fold the value into
4917 a constant. But if more than one register is involved,
4918 this probably loses. */
4919 else if (REG_P (target) && TREE_STATIC (exp)
4920 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4921 {
4922 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4923 cleared = 1;
4924 }
4925
4926 /* If the constructor has fewer fields than the structure or
4927 if we are initializing the structure to mostly zeros, clear
4928 the whole structure first. Don't do this if TARGET is a
4929 register whose mode size isn't equal to SIZE since
4930 clear_storage can't handle this case. */
4931 else if (size > 0
4932 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4933 != fields_length (type))
4934 || mostly_zeros_p (exp))
4935 && (!REG_P (target)
4936 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4937 == size)))
4938 {
4939 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4940 cleared = 1;
4941 }
4942
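/* If we could not clear the whole aggregate, at least tell later
   passes that the old value of TARGET is dead (cf. the CLOBBER in
   the ARRAY_TYPE case below). */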
4943 if (! cleared)
4944 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4945
4946 /* Store each element of the constructor into the
4947 corresponding field of TARGET. */
4948 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4949 {
4950 enum machine_mode mode;
4951 HOST_WIDE_INT bitsize;
4952 HOST_WIDE_INT bitpos = 0;
4953 tree offset;
4954 rtx to_rtx = target;
4955
4956 /* Just ignore missing fields. We cleared the whole
4957 structure, above, if any fields are missing. */
4958 if (field == 0)
4959 continue;
4960
4961 if (cleared && initializer_zerop (value))
4962 continue;
4963
4964 if (host_integerp (DECL_SIZE (field), 1))
4965 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4966 else
4967 bitsize = -1;
4968
4969 mode = DECL_MODE (field);
4970 if (DECL_BIT_FIELD (field))
4971 mode = VOIDmode;
4972
4973 offset = DECL_FIELD_OFFSET (field);
4974 if (host_integerp (offset, 0)
4975 && host_integerp (bit_position (field), 0))
4976 {
4977 bitpos = int_bit_position (field);
4978 offset = 0;
4979 }
4980 else
4981 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4982
4983 if (offset)
4984 {
4985 rtx offset_rtx;
4986
4987 offset
4988 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4989 make_tree (TREE_TYPE (exp),
4990 target));
4991
4992 offset_rtx = expand_normal (offset);
4993 gcc_assert (MEM_P (to_rtx));
4994
4995 #ifdef POINTERS_EXTEND_UNSIGNED
4996 if (GET_MODE (offset_rtx) != Pmode)
4997 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4998 #else
4999 if (GET_MODE (offset_rtx) != ptr_mode)
5000 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5001 #endif
5002
5003 to_rtx = offset_address (to_rtx, offset_rtx,
5004 highest_pow2_factor (offset));
5005 }
5006
5007 #ifdef WORD_REGISTER_OPERATIONS
5008 /* If this initializes a field that is smaller than a
5009 word, at the start of a word, try to widen it to a full
5010 word. This special case allows us to output C++ member
5011 function initializations in a form that the optimizers
5012 can understand. */
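/* Illustrative: with 32-bit words and BYTES_BIG_ENDIAN, a 16-bit
   INTEGER_CST destined for bits 0..15 of a word is widened to a
   32-bit constant, shifted left by 32 - 16 = 16 bits, and stored as
   a full word_mode value. */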
5013 if (REG_P (target)
5014 && bitsize < BITS_PER_WORD
5015 && bitpos % BITS_PER_WORD == 0
5016 && GET_MODE_CLASS (mode) == MODE_INT
5017 && TREE_CODE (value) == INTEGER_CST
5018 && exp_size >= 0
5019 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5020 {
5021 tree type = TREE_TYPE (value);
5022
5023 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5024 {
5025 type = lang_hooks.types.type_for_size
5026 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5027 value = fold_convert (type, value);
5028 }
5029
5030 if (BYTES_BIG_ENDIAN)
5031 value
5032 = fold_build2 (LSHIFT_EXPR, type, value,
5033 build_int_cst (type,
5034 BITS_PER_WORD - bitsize));
5035 bitsize = BITS_PER_WORD;
5036 mode = word_mode;
5037 }
5038 #endif
5039
5040 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5041 && DECL_NONADDRESSABLE_P (field))
5042 {
5043 to_rtx = copy_rtx (to_rtx);
5044 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5045 }
5046
5047 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5048 value, type, cleared,
5049 get_alias_set (TREE_TYPE (field)));
5050 }
5051 break;
5052 }
5053 case ARRAY_TYPE:
5054 {
5055 tree value, index;
5056 unsigned HOST_WIDE_INT i;
5057 int need_to_clear;
5058 tree domain;
5059 tree elttype = TREE_TYPE (type);
5060 int const_bounds_p;
5061 HOST_WIDE_INT minelt = 0;
5062 HOST_WIDE_INT maxelt = 0;
5063
5064 domain = TYPE_DOMAIN (type);
5065 const_bounds_p = (TYPE_MIN_VALUE (domain)
5066 && TYPE_MAX_VALUE (domain)
5067 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5068 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5069
5070 /* If we have constant bounds for the range of the type, get them. */
5071 if (const_bounds_p)
5072 {
5073 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5074 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5075 }
5076
5077 /* If the constructor has fewer elements than the array, clear
5078 the whole array first. Similarly if this is a static
5079 constructor of a non-BLKmode object. */
5080 if (cleared)
5081 need_to_clear = 0;
5082 else if (REG_P (target) && TREE_STATIC (exp))
5083 need_to_clear = 1;
5084 else
5085 {
5086 unsigned HOST_WIDE_INT idx;
5087 tree index, value;
5088 HOST_WIDE_INT count = 0, zero_count = 0;
5089 need_to_clear = ! const_bounds_p;
5090
5091 /* This loop is a more accurate version of the loop in
5092 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5093 is also needed to check for missing elements. */
5094 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5095 {
5096 HOST_WIDE_INT this_node_count;
5097
5098 if (need_to_clear)
5099 break;
5100
5101 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5102 {
5103 tree lo_index = TREE_OPERAND (index, 0);
5104 tree hi_index = TREE_OPERAND (index, 1);
5105
5106 if (! host_integerp (lo_index, 1)
5107 || ! host_integerp (hi_index, 1))
5108 {
5109 need_to_clear = 1;
5110 break;
5111 }
5112
5113 this_node_count = (tree_low_cst (hi_index, 1)
5114 - tree_low_cst (lo_index, 1) + 1);
5115 }
5116 else
5117 this_node_count = 1;
5118
5119 count += this_node_count;
5120 if (mostly_zeros_p (value))
5121 zero_count += this_node_count;
5122 }
5123
5124 /* Clear the entire array first if there are any missing
5125 elements, or if the incidence of zero elements is >=
5126 75%. */
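/* Illustrative: with COUNT == 10 and ZERO_COUNT == 8, 4 * 8 >= 3 * 10
   holds, so the array is cleared first. */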
5127 if (! need_to_clear
5128 && (count < maxelt - minelt + 1
5129 || 4 * zero_count >= 3 * count))
5130 need_to_clear = 1;
5131 }
5132
5133 if (need_to_clear && size > 0)
5134 {
5135 if (REG_P (target))
5136 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5137 else
5138 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5139 cleared = 1;
5140 }
5141
5142 if (!cleared && REG_P (target))
5143 /* Inform later passes that the old value is dead. */
5144 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5145
5146 /* Store each element of the constructor into the
5147 corresponding element of TARGET, determined by counting the
5148 elements. */
5149 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5150 {
5151 enum machine_mode mode;
5152 HOST_WIDE_INT bitsize;
5153 HOST_WIDE_INT bitpos;
5154 int unsignedp;
5155 rtx xtarget = target;
5156
5157 if (cleared && initializer_zerop (value))
5158 continue;
5159
5160 unsignedp = TYPE_UNSIGNED (elttype);
5161 mode = TYPE_MODE (elttype);
5162 if (mode == BLKmode)
5163 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5164 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5165 : -1);
5166 else
5167 bitsize = GET_MODE_BITSIZE (mode);
5168
5169 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5170 {
5171 tree lo_index = TREE_OPERAND (index, 0);
5172 tree hi_index = TREE_OPERAND (index, 1);
5173 rtx index_r, pos_rtx;
5174 HOST_WIDE_INT lo, hi, count;
5175 tree position;
5176
5177 /* If the range is constant and "small", unroll the loop. */
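/* "Small" means: the target is not a MEM, or there are at most two
   iterations, or the total data fits in 40 bytes (TYPE_SIZE is in
   bits, hence the 40 * 8 below). */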
5178 if (const_bounds_p
5179 && host_integerp (lo_index, 0)
5180 && host_integerp (hi_index, 0)
5181 && (lo = tree_low_cst (lo_index, 0),
5182 hi = tree_low_cst (hi_index, 0),
5183 count = hi - lo + 1,
5184 (!MEM_P (target)
5185 || count <= 2
5186 || (host_integerp (TYPE_SIZE (elttype), 1)
5187 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5188 <= 40 * 8)))))
5189 {
5190 lo -= minelt; hi -= minelt;
5191 for (; lo <= hi; lo++)
5192 {
5193 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5194
5195 if (MEM_P (target)
5196 && !MEM_KEEP_ALIAS_SET_P (target)
5197 && TREE_CODE (type) == ARRAY_TYPE
5198 && TYPE_NONALIASED_COMPONENT (type))
5199 {
5200 target = copy_rtx (target);
5201 MEM_KEEP_ALIAS_SET_P (target) = 1;
5202 }
5203
5204 store_constructor_field
5205 (target, bitsize, bitpos, mode, value, type, cleared,
5206 get_alias_set (elttype));
5207 }
5208 }
5209 else
5210 {
5211 rtx loop_start = gen_label_rtx ();
5212 rtx loop_end = gen_label_rtx ();
5213 tree exit_cond;
5214
5215 expand_normal (hi_index);
5216 unsignedp = TYPE_UNSIGNED (domain);
5217
5218 index = build_decl (VAR_DECL, NULL_TREE, domain);
5219
5220 index_r
5221 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5222 &unsignedp, 0));
5223 SET_DECL_RTL (index, index_r);
5224 store_expr (lo_index, index_r, 0);
5225
5226 /* Build the head of the loop. */
5227 do_pending_stack_adjust ();
5228 emit_label (loop_start);
5229
5230 /* Assign value to element index. */
5231 position =
5232 fold_convert (ssizetype,
5233 fold_build2 (MINUS_EXPR,
5234 TREE_TYPE (index),
5235 index,
5236 TYPE_MIN_VALUE (domain)));
5237
5238 position =
5239 size_binop (MULT_EXPR, position,
5240 fold_convert (ssizetype,
5241 TYPE_SIZE_UNIT (elttype)));
5242
5243 pos_rtx = expand_normal (position);
5244 xtarget = offset_address (target, pos_rtx,
5245 highest_pow2_factor (position));
5246 xtarget = adjust_address (xtarget, mode, 0);
5247 if (TREE_CODE (value) == CONSTRUCTOR)
5248 store_constructor (value, xtarget, cleared,
5249 bitsize / BITS_PER_UNIT);
5250 else
5251 store_expr (value, xtarget, 0);
5252
5253 /* Generate a conditional jump to exit the loop. */
5254 exit_cond = build2 (LT_EXPR, integer_type_node,
5255 index, hi_index);
5256 jumpif (exit_cond, loop_end);
5257
5258 /* Update the loop counter, and jump to the head of
5259 the loop. */
5260 expand_assignment (index,
5261 build2 (PLUS_EXPR, TREE_TYPE (index),
5262 index, integer_one_node));
5263
5264 emit_jump (loop_start);
5265
5266 /* Build the end of the loop. */
5267 emit_label (loop_end);
5268 }
5269 }
5270 else if ((index != 0 && ! host_integerp (index, 0))
5271 || ! host_integerp (TYPE_SIZE (elttype), 1))
5272 {
5273 tree position;
5274
5275 if (index == 0)
5276 index = ssize_int (1);
5277
5278 if (minelt)
5279 index = fold_convert (ssizetype,
5280 fold_build2 (MINUS_EXPR,
5281 TREE_TYPE (index),
5282 index,
5283 TYPE_MIN_VALUE (domain)));
5284
5285 position =
5286 size_binop (MULT_EXPR, index,
5287 fold_convert (ssizetype,
5288 TYPE_SIZE_UNIT (elttype)));
5289 xtarget = offset_address (target,
5290 expand_normal (position),
5291 highest_pow2_factor (position));
5292 xtarget = adjust_address (xtarget, mode, 0);
5293 store_expr (value, xtarget, 0);
5294 }
5295 else
5296 {
5297 if (index != 0)
5298 bitpos = ((tree_low_cst (index, 0) - minelt)
5299 * tree_low_cst (TYPE_SIZE (elttype), 1));
5300 else
5301 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5302
5303 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5304 && TREE_CODE (type) == ARRAY_TYPE
5305 && TYPE_NONALIASED_COMPONENT (type))
5306 {
5307 target = copy_rtx (target);
5308 MEM_KEEP_ALIAS_SET_P (target) = 1;
5309 }
5310 store_constructor_field (target, bitsize, bitpos, mode, value,
5311 type, cleared, get_alias_set (elttype));
5312 }
5313 }
5314 break;
5315 }
5316
5317 case VECTOR_TYPE:
5318 {
5319 unsigned HOST_WIDE_INT idx;
5320 constructor_elt *ce;
5321 int i;
5322 int need_to_clear;
5323 int icode = 0;
5324 tree elttype = TREE_TYPE (type);
5325 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5326 enum machine_mode eltmode = TYPE_MODE (elttype);
5327 HOST_WIDE_INT bitsize;
5328 HOST_WIDE_INT bitpos;
5329 rtvec vector = NULL;
5330 unsigned n_elts;
5331
5332 gcc_assert (eltmode != BLKmode);
5333
5334 n_elts = TYPE_VECTOR_SUBPARTS (type);
5335 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5336 {
5337 enum machine_mode mode = GET_MODE (target);
5338
5339 icode = (int) vec_init_optab->handlers[mode].insn_code;
5340 if (icode != CODE_FOR_nothing)
5341 {
5342 unsigned int i;
5343
5344 vector = rtvec_alloc (n_elts);
5345 for (i = 0; i < n_elts; i++)
5346 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5347 }
5348 }
5349
5350 /* If the constructor has fewer elements than the vector,
5351 	 clear the whole vector first.  Similarly if this is a static
5352 	 constructor of a non-BLKmode object.  */
5353 if (cleared)
5354 need_to_clear = 0;
5355 else if (REG_P (target) && TREE_STATIC (exp))
5356 need_to_clear = 1;
5357 else
5358 {
5359 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5360 tree value;
5361
5362 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5363 {
5364 int n_elts_here = tree_low_cst
5365 (int_const_binop (TRUNC_DIV_EXPR,
5366 TYPE_SIZE (TREE_TYPE (value)),
5367 TYPE_SIZE (elttype), 0), 1);
5368
5369 count += n_elts_here;
5370 if (mostly_zeros_p (value))
5371 zero_count += n_elts_here;
5372 }
5373
5374 /* Clear the entire vector first if there are any missing elements,
5375 or if the incidence of zero elements is >= 75%. */
5376 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5377 }
5378
5379 if (need_to_clear && size > 0 && !vector)
5380 {
5381 if (REG_P (target))
5382 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5383 else
5384 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5385 cleared = 1;
5386 }
5387
5388 /* Inform later passes that the old value is dead. */
5389 if (!cleared && !vector && REG_P (target))
5390 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5391
5392 /* Store each element of the constructor into the corresponding
5393 element of TARGET, determined by counting the elements. */
5394 for (idx = 0, i = 0;
5395 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5396 idx++, i += bitsize / elt_size)
5397 {
5398 HOST_WIDE_INT eltpos;
5399 tree value = ce->value;
5400
5401 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5402 if (cleared && initializer_zerop (value))
5403 continue;
5404
5405 if (ce->index)
5406 eltpos = tree_low_cst (ce->index, 1);
5407 else
5408 eltpos = i;
5409
5410 if (vector)
5411 {
5412 /* Vector CONSTRUCTORs should only be built from smaller
5413 vectors in the case of BLKmode vectors. */
5414 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5415 RTVEC_ELT (vector, eltpos)
5416 = expand_normal (value);
5417 }
5418 else
5419 {
5420 enum machine_mode value_mode =
5421 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5422 ? TYPE_MODE (TREE_TYPE (value))
5423 : eltmode;
5424 bitpos = eltpos * elt_size;
5425 store_constructor_field (target, bitsize, bitpos,
5426 value_mode, value, type,
5427 cleared, get_alias_set (elttype));
5428 }
5429 }
5430
5431 if (vector)
5432 emit_insn (GEN_FCN (icode)
5433 (target,
5434 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5435 break;
5436 }
5437
5438 default:
5439 gcc_unreachable ();
5440 }
5441 }
5442
5443 /* Store the value of EXP (an expression tree)
5444 into a subfield of TARGET which has mode MODE and occupies
5445 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5446 If MODE is VOIDmode, it means that we are storing into a bit-field.
5447
5448 Always return const0_rtx unless we have something particular to
5449 return.
5450
5451 TYPE is the type of the underlying object,
5452
5453 ALIAS_SET is the alias set for the destination. This value will
5454 (in general) be different from that for TARGET, since TARGET is a
5455 reference to the containing structure. */
5456
5457 static rtx
5458 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5459 enum machine_mode mode, tree exp, tree type, int alias_set)
5460 {
5461 HOST_WIDE_INT width_mask = 0;
5462
5463 if (TREE_CODE (exp) == ERROR_MARK)
5464 return const0_rtx;
5465
5466 /* If we have nothing to store, do nothing unless the expression has
5467 side-effects. */
5468 if (bitsize == 0)
5469 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5470 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5471 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5472
5473 /* If we are storing into an unaligned field of an aligned union that is
5474 in a register, we may have the mode of TARGET being an integer mode but
5475 MODE == BLKmode. In that case, get an aligned object whose size and
5476 alignment are the same as TARGET and store TARGET into it (we can avoid
5477 the store if the field being stored is the entire width of TARGET). Then
5478 call ourselves recursively to store the field into a BLKmode version of
5479 that object. Finally, load from the object into TARGET. This is not
5480 very efficient in general, but should only be slightly more expensive
5481 than the otherwise-required unaligned accesses. Perhaps this can be
5482 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5483 twice, once with emit_move_insn and once via store_field. */
5484
5485 if (mode == BLKmode
5486 && (REG_P (target) || GET_CODE (target) == SUBREG))
5487 {
5488 rtx object = assign_temp (type, 0, 1, 1);
5489 rtx blk_object = adjust_address (object, BLKmode, 0);
5490
5491 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5492 emit_move_insn (object, target);
5493
5494 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5495
5496 emit_move_insn (target, object);
5497
5498 /* We want to return the BLKmode version of the data. */
5499 return blk_object;
5500 }
5501
5502 if (GET_CODE (target) == CONCAT)
5503 {
5504 /* We're storing into a struct containing a single __complex. */
5505
5506 gcc_assert (!bitpos);
5507 return store_expr (exp, target, 0);
5508 }
5509
5510 /* If the structure is in a register or if the component
5511 is a bit field, we cannot use addressing to access it.
5512 Use bit-field techniques or SUBREG to store in it. */
5513
5514 if (mode == VOIDmode
5515 || (mode != BLKmode && ! direct_store[(int) mode]
5516 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5517 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5518 || REG_P (target)
5519 || GET_CODE (target) == SUBREG
5520 /* If the field isn't aligned enough to store as an ordinary memref,
5521 store it as a bit field. */
5522 || (mode != BLKmode
5523 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5524 || bitpos % GET_MODE_ALIGNMENT (mode))
5525 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5526 || (bitpos % BITS_PER_UNIT != 0)))
5527 /* If the RHS and field are a constant size and the size of the
5528 RHS isn't the same size as the bitfield, we must use bitfield
5529 operations. */
5530 || (bitsize >= 0
5531 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5532 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5533 {
5534 rtx temp;
5535
5536 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5537 implies a mask operation. If the precision is the same size as
5538 the field we're storing into, that mask is redundant. This is
5539 particularly common with bit field assignments generated by the
5540 C front end. */
5541 if (TREE_CODE (exp) == NOP_EXPR)
5542 {
5543 tree type = TREE_TYPE (exp);
5544 if (INTEGRAL_TYPE_P (type)
5545 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5546 && bitsize == TYPE_PRECISION (type))
5547 {
5548 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5549 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5550 exp = TREE_OPERAND (exp, 0);
5551 }
5552 }
5553
5554 temp = expand_normal (exp);
5555
5556 /* If BITSIZE is narrower than the size of the type of EXP
5557 we will be narrowing TEMP. Normally, what's wanted are the
5558 	 low-order bits.  However, if EXP's type is a record and this is a
5559 	 big-endian machine, we want the upper BITSIZE bits.  */
5560 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5561 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5562 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5563 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5564 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5565 - bitsize),
5566 NULL_RTX, 1);
5567
5568 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5569 MODE. */
5570 if (mode != VOIDmode && mode != BLKmode
5571 && mode != TYPE_MODE (TREE_TYPE (exp)))
5572 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5573
5574 /* If the modes of TARGET and TEMP are both BLKmode, both
5575 must be in memory and BITPOS must be aligned on a byte
5576 boundary. If so, we simply do a block copy. */
5577 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5578 {
5579 gcc_assert (MEM_P (target) && MEM_P (temp)
5580 && !(bitpos % BITS_PER_UNIT));
5581
5582 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5583 emit_block_move (target, temp,
5584 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5585 / BITS_PER_UNIT),
5586 BLOCK_OP_NORMAL);
5587
5588 return const0_rtx;
5589 }
5590
5591 /* Store the value in the bitfield. */
5592 store_bit_field (target, bitsize, bitpos, mode, temp);
5593
5594 return const0_rtx;
5595 }
5596 else
5597 {
5598 /* Now build a reference to just the desired component. */
5599 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5600
5601 if (to_rtx == target)
5602 to_rtx = copy_rtx (to_rtx);
5603
5604 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5605 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5606 set_mem_alias_set (to_rtx, alias_set);
5607
5608 return store_expr (exp, to_rtx, 0);
5609 }
5610 }
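/* A hedged usage sketch for store_field: OBJ_RTX and VAL are
   hypothetical, standing for an already-expanded containing object
   and the value tree to be stored.  To store VAL into a 3-bit field
   that starts 2 bits into OBJ_RTX, a caller in this file could write

	store_field (obj_rtx, 3, 2, VOIDmode, val,
		     TREE_TYPE (val), get_alias_set (TREE_TYPE (val)));

   Passing VOIDmode for MODE requests the bit-field path, so the store
   is done with store_bit_field rather than an ordinary memref.  */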
5611 \f
5612 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5613 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5614 codes and find the ultimate containing object, which we return.
5615
5616 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5617 bit position, and *PUNSIGNEDP to the signedness of the field.
5618 If the position of the field is variable, we store a tree
5619 giving the variable offset (in units) in *POFFSET.
5620 This offset is in addition to the bit position.
5621 If the position is not variable, we store 0 in *POFFSET.
5622
5623 If any of the extraction expressions is volatile,
5624 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5625
5626 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5627 is a mode that can be used to access the field. In that case, *PBITSIZE
5628 is redundant.
5629
5630 If the field describes a variable-sized object, *PMODE is set to
5631 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5632 this case, but the address of the object can be found.
5633
5634 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5635 look through nodes that serve as markers of a greater alignment than
5636 the one that can be deduced from the expression. These nodes make it
5637 possible for front-ends to prevent temporaries from being created by
5638 the middle-end on alignment considerations. For that purpose, the
5639    normal operating mode at the tree level is to always pass FALSE so that
5640 the ultimate containing object is really returned; moreover, the
5641 associated predicate handled_component_p will always return TRUE
5642 on these nodes, thus indicating that they are essentially handled
5643 by get_inner_reference. TRUE should only be passed when the caller
5644 is scanning the expression in order to build another representation
5645 and specifically knows how to handle these nodes; as such, this is
5646 the normal operating mode in the RTL expanders. */
5647
5648 tree
5649 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5650 HOST_WIDE_INT *pbitpos, tree *poffset,
5651 enum machine_mode *pmode, int *punsignedp,
5652 int *pvolatilep, bool keep_aligning)
5653 {
5654 tree size_tree = 0;
5655 enum machine_mode mode = VOIDmode;
5656 tree offset = size_zero_node;
5657 tree bit_offset = bitsize_zero_node;
5658 tree tem;
5659
5660 /* First get the mode, signedness, and size. We do this from just the
5661 outermost expression. */
5662 if (TREE_CODE (exp) == COMPONENT_REF)
5663 {
5664 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5665 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5666 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5667
5668 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5669 }
5670 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5671 {
5672 size_tree = TREE_OPERAND (exp, 1);
5673 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5674
5675       /* For vector types, if the access has the correct size, use the mode of
5676 	 the inner type.  */
5677 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5678 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5679 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5680 mode = TYPE_MODE (TREE_TYPE (exp));
5681 }
5682 else
5683 {
5684 mode = TYPE_MODE (TREE_TYPE (exp));
5685 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5686
5687 if (mode == BLKmode)
5688 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5689 else
5690 *pbitsize = GET_MODE_BITSIZE (mode);
5691 }
5692
5693 if (size_tree != 0)
5694 {
5695 if (! host_integerp (size_tree, 1))
5696 mode = BLKmode, *pbitsize = -1;
5697 else
5698 *pbitsize = tree_low_cst (size_tree, 1);
5699 }
5700
5701 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5702 and find the ultimate containing object. */
5703 while (1)
5704 {
5705 switch (TREE_CODE (exp))
5706 {
5707 case BIT_FIELD_REF:
5708 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5709 TREE_OPERAND (exp, 2));
5710 break;
5711
5712 case COMPONENT_REF:
5713 {
5714 tree field = TREE_OPERAND (exp, 1);
5715 tree this_offset = component_ref_field_offset (exp);
5716
5717 /* If this field hasn't been filled in yet, don't go past it.
5718 This should only happen when folding expressions made during
5719 type construction. */
5720 if (this_offset == 0)
5721 break;
5722
5723 offset = size_binop (PLUS_EXPR, offset, this_offset);
5724 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5725 DECL_FIELD_BIT_OFFSET (field));
5726
5727 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5728 }
5729 break;
5730
5731 case ARRAY_REF:
5732 case ARRAY_RANGE_REF:
5733 {
5734 tree index = TREE_OPERAND (exp, 1);
5735 tree low_bound = array_ref_low_bound (exp);
5736 tree unit_size = array_ref_element_size (exp);
5737
5738 /* We assume all arrays have sizes that are a multiple of a byte.
5739 First subtract the lower bound, if any, in the type of the
5740 index, then convert to sizetype and multiply by the size of
5741 the array element. */
5742 if (! integer_zerop (low_bound))
5743 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5744 index, low_bound);
5745
5746 offset = size_binop (PLUS_EXPR, offset,
5747 size_binop (MULT_EXPR,
5748 fold_convert (sizetype, index),
5749 unit_size));
5750 }
5751 break;
5752
5753 case REALPART_EXPR:
5754 break;
5755
5756 case IMAGPART_EXPR:
5757 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5758 bitsize_int (*pbitsize));
5759 break;
5760
5761 case VIEW_CONVERT_EXPR:
5762 if (keep_aligning && STRICT_ALIGNMENT
5763 && (TYPE_ALIGN (TREE_TYPE (exp))
5764 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5765 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5766 < BIGGEST_ALIGNMENT)
5767 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5768 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5769 goto done;
5770 break;
5771
5772 default:
5773 goto done;
5774 }
5775
5776 /* If any reference in the chain is volatile, the effect is volatile. */
5777 if (TREE_THIS_VOLATILE (exp))
5778 *pvolatilep = 1;
5779
5780 exp = TREE_OPERAND (exp, 0);
5781 }
5782 done:
5783
5784 /* If OFFSET is constant, see if we can return the whole thing as a
5785 constant bit position. Otherwise, split it up. */
5786 if (host_integerp (offset, 0)
5787 && 0 != (tem = size_binop (MULT_EXPR,
5788 fold_convert (bitsizetype, offset),
5789 bitsize_unit_node))
5790 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5791 && host_integerp (tem, 0))
5792 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5793 else
5794 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5795
5796 *pmode = mode;
5797 return exp;
5798 }
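/* A hedged usage sketch: REF is a hypothetical COMPONENT_REF tree.

	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	enum machine_mode mode1;
	int unsignedp, volatilep = 0;
	tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
					 &mode1, &unsignedp, &volatilep,
					 false);

   For s.f with F a 3-bit bit-field at bit offset 2, BASE is the decl
   for S, BITSIZE is 3, BITPOS is 2, OFFSET is 0 and MODE1 is VOIDmode.
   For a[i] with variable I, OFFSET instead holds the byte-offset tree
   i * sizeof (element) and BITPOS covers only the constant part.
   Note that VOLATILEP must be initialized by the caller; it is only
   ever set, never cleared, by this function.  */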
5799
5800 /* Return a tree of sizetype representing the size, in bytes, of the element
5801 of EXP, an ARRAY_REF. */
5802
5803 tree
5804 array_ref_element_size (tree exp)
5805 {
5806 tree aligned_size = TREE_OPERAND (exp, 3);
5807 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5808
5809 /* If a size was specified in the ARRAY_REF, it's the size measured
5810 in alignment units of the element type. So multiply by that value. */
5811 if (aligned_size)
5812 {
5813 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5814 sizetype from another type of the same width and signedness. */
5815 if (TREE_TYPE (aligned_size) != sizetype)
5816 aligned_size = fold_convert (sizetype, aligned_size);
5817 return size_binop (MULT_EXPR, aligned_size,
5818 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5819 }
5820
5821 /* Otherwise, take the size from that of the element type. Substitute
5822 any PLACEHOLDER_EXPR that we have. */
5823 else
5824 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5825 }
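/* Worked example (hedged): if operand 3 of the ARRAY_REF is the
   constant 2 and the element type is aligned to 4 bytes, the size
   returned is 2 * 4 = 8 bytes; with no operand 3 it is simply the
   TYPE_SIZE_UNIT of the element type.  */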
5826
5827 /* Return a tree representing the lower bound of the array mentioned in
5828 EXP, an ARRAY_REF. */
5829
5830 tree
5831 array_ref_low_bound (tree exp)
5832 {
5833 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5834
5835 /* If a lower bound is specified in EXP, use it. */
5836 if (TREE_OPERAND (exp, 2))
5837 return TREE_OPERAND (exp, 2);
5838
5839 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5840 substituting for a PLACEHOLDER_EXPR as needed. */
5841 if (domain_type && TYPE_MIN_VALUE (domain_type))
5842 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5843
5844 /* Otherwise, return a zero of the appropriate type. */
5845 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5846 }
5847
5848 /* Return a tree representing the upper bound of the array mentioned in
5849 EXP, an ARRAY_REF. */
5850
5851 tree
5852 array_ref_up_bound (tree exp)
5853 {
5854 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5855
5856 /* If there is a domain type and it has an upper bound, use it, substituting
5857 for a PLACEHOLDER_EXPR as needed. */
5858 if (domain_type && TYPE_MAX_VALUE (domain_type))
5859 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5860
5861 /* Otherwise fail. */
5862 return NULL_TREE;
5863 }
5864
5865 /* Return a tree representing the offset, in bytes, of the field referenced
5866 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5867
5868 tree
5869 component_ref_field_offset (tree exp)
5870 {
5871 tree aligned_offset = TREE_OPERAND (exp, 2);
5872 tree field = TREE_OPERAND (exp, 1);
5873
5874 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5875 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5876 value. */
5877 if (aligned_offset)
5878 {
5879 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5880 sizetype from another type of the same width and signedness. */
5881 if (TREE_TYPE (aligned_offset) != sizetype)
5882 aligned_offset = fold_convert (sizetype, aligned_offset);
5883 return size_binop (MULT_EXPR, aligned_offset,
5884 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5885 }
5886
5887 /* Otherwise, take the offset from that of the field. Substitute
5888 any PLACEHOLDER_EXPR that we have. */
5889 else
5890 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5891 }
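/* Worked example (hedged): if operand 2 of the COMPONENT_REF is the
   constant 3 and DECL_OFFSET_ALIGN of the field is 64 bits, the byte
   offset returned is 3 * (64 / 8) = 24; DECL_FIELD_BIT_OFFSET still
   has to be added separately by the caller.  */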
5892
5893 /* Return 1 if T is an expression that get_inner_reference handles. */
5894
5895 int
5896 handled_component_p (tree t)
5897 {
5898 switch (TREE_CODE (t))
5899 {
5900 case BIT_FIELD_REF:
5901 case COMPONENT_REF:
5902 case ARRAY_REF:
5903 case ARRAY_RANGE_REF:
5904 case VIEW_CONVERT_EXPR:
5905 case REALPART_EXPR:
5906 case IMAGPART_EXPR:
5907 return 1;
5908
5909 default:
5910 return 0;
5911 }
5912 }
5913 \f
5914 /* Given an rtx VALUE that may contain additions and multiplications, return
5915 an equivalent value that just refers to a register, memory, or constant.
5916 This is done by generating instructions to perform the arithmetic and
5917 returning a pseudo-register containing the value.
5918
5919 The returned value may be a REG, SUBREG, MEM or constant. */
5920
5921 rtx
5922 force_operand (rtx value, rtx target)
5923 {
5924 rtx op1, op2;
5925 /* Use subtarget as the target for operand 0 of a binary operation. */
5926 rtx subtarget = get_subtarget (target);
5927 enum rtx_code code = GET_CODE (value);
5928
5929   /* Check for a SUBREG applied to an expression produced by the loop optimizer.  */
5930 if (code == SUBREG
5931 && !REG_P (SUBREG_REG (value))
5932 && !MEM_P (SUBREG_REG (value)))
5933 {
5934 value = simplify_gen_subreg (GET_MODE (value),
5935 force_reg (GET_MODE (SUBREG_REG (value)),
5936 force_operand (SUBREG_REG (value),
5937 NULL_RTX)),
5938 GET_MODE (SUBREG_REG (value)),
5939 SUBREG_BYTE (value));
5940 code = GET_CODE (value);
5941 }
5942
5943 /* Check for a PIC address load. */
5944 if ((code == PLUS || code == MINUS)
5945 && XEXP (value, 0) == pic_offset_table_rtx
5946 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5947 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5948 || GET_CODE (XEXP (value, 1)) == CONST))
5949 {
5950 if (!subtarget)
5951 subtarget = gen_reg_rtx (GET_MODE (value));
5952 emit_move_insn (subtarget, value);
5953 return subtarget;
5954 }
5955
5956 if (ARITHMETIC_P (value))
5957 {
5958 op2 = XEXP (value, 1);
5959 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5960 subtarget = 0;
5961 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5962 {
5963 code = PLUS;
5964 op2 = negate_rtx (GET_MODE (value), op2);
5965 }
5966
5967 /* Check for an addition with OP2 a constant integer and our first
5968 operand a PLUS of a virtual register and something else. In that
5969 case, we want to emit the sum of the virtual register and the
5970 constant first and then add the other value. This allows virtual
5971 register instantiation to simply modify the constant rather than
5972 creating another one around this addition. */
5973 if (code == PLUS && GET_CODE (op2) == CONST_INT
5974 && GET_CODE (XEXP (value, 0)) == PLUS
5975 && REG_P (XEXP (XEXP (value, 0), 0))
5976 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5977 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5978 {
5979 rtx temp = expand_simple_binop (GET_MODE (value), code,
5980 XEXP (XEXP (value, 0), 0), op2,
5981 subtarget, 0, OPTAB_LIB_WIDEN);
5982 return expand_simple_binop (GET_MODE (value), code, temp,
5983 force_operand (XEXP (XEXP (value,
5984 0), 1), 0),
5985 target, 0, OPTAB_LIB_WIDEN);
5986 }
5987
5988 op1 = force_operand (XEXP (value, 0), subtarget);
5989 op2 = force_operand (op2, NULL_RTX);
5990 switch (code)
5991 {
5992 case MULT:
5993 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5994 case DIV:
5995 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5996 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5997 target, 1, OPTAB_LIB_WIDEN);
5998 else
5999 return expand_divmod (0,
6000 FLOAT_MODE_P (GET_MODE (value))
6001 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6002 GET_MODE (value), op1, op2, target, 0);
6003 break;
6004 case MOD:
6005 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6006 target, 0);
6007 break;
6008 case UDIV:
6009 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6010 target, 1);
6011 break;
6012 case UMOD:
6013 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6014 target, 1);
6015 break;
6016 case ASHIFTRT:
6017 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6018 target, 0, OPTAB_LIB_WIDEN);
6019 break;
6020 default:
6021 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6022 target, 1, OPTAB_LIB_WIDEN);
6023 }
6024 }
6025 if (UNARY_P (value))
6026 {
6027 if (!target)
6028 target = gen_reg_rtx (GET_MODE (value));
6029 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6030 switch (code)
6031 {
6032 case ZERO_EXTEND:
6033 case SIGN_EXTEND:
6034 case TRUNCATE:
6035 case FLOAT_EXTEND:
6036 case FLOAT_TRUNCATE:
6037 convert_move (target, op1, code == ZERO_EXTEND);
6038 return target;
6039
6040 case FIX:
6041 case UNSIGNED_FIX:
6042 expand_fix (target, op1, code == UNSIGNED_FIX);
6043 return target;
6044
6045 case FLOAT:
6046 case UNSIGNED_FLOAT:
6047 expand_float (target, op1, code == UNSIGNED_FLOAT);
6048 return target;
6049
6050 default:
6051 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6052 }
6053 }
6054
6055 #ifdef INSN_SCHEDULING
6056   /* On machines that have insn scheduling, we want all memory references to be
6057 explicit, so we need to deal with such paradoxical SUBREGs. */
6058 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6059 && (GET_MODE_SIZE (GET_MODE (value))
6060 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6061 value
6062 = simplify_gen_subreg (GET_MODE (value),
6063 force_reg (GET_MODE (SUBREG_REG (value)),
6064 force_operand (SUBREG_REG (value),
6065 NULL_RTX)),
6066 GET_MODE (SUBREG_REG (value)),
6067 SUBREG_BYTE (value));
6068 #endif
6069
6070 return value;
6071 }
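/* A hedged usage sketch: R1 and R2 are hypothetical pseudo registers.

	rtx addr = gen_rtx_PLUS (Pmode, r1,
				 gen_rtx_MULT (Pmode, r2, GEN_INT (4)));
	rtx val = force_operand (addr, NULL_RTX);

   VAL is then a pseudo (or constant) holding r1 + r2 * 4; the MULT
   and PLUS have been emitted as real insns, via expand_mult and
   expand_simple_binop above, instead of staying folded inside one
   address-like expression.  */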
6072 \f
6073 /* Subroutine of expand_expr: return nonzero iff there is no way that
6074 EXP can reference X, which is being modified. TOP_P is nonzero if this
6075 call is going to be used to determine whether we need a temporary
6076 for EXP, as opposed to a recursive call to this function.
6077
6078 It is always safe for this routine to return zero since it merely
6079 searches for optimization opportunities. */
6080
6081 int
6082 safe_from_p (rtx x, tree exp, int top_p)
6083 {
6084 rtx exp_rtl = 0;
6085 int i, nops;
6086
6087 if (x == 0
6088 /* If EXP has varying size, we MUST use a target since we currently
6089 have no way of allocating temporaries of variable size
6090 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6091 So we assume here that something at a higher level has prevented a
6092 clash. This is somewhat bogus, but the best we can do. Only
6093 do this when X is BLKmode and when we are at the top level. */
6094 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6095 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6096 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6097 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6098 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6099 != INTEGER_CST)
6100 && GET_MODE (x) == BLKmode)
6101 /* If X is in the outgoing argument area, it is always safe. */
6102 || (MEM_P (x)
6103 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6104 || (GET_CODE (XEXP (x, 0)) == PLUS
6105 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6106 return 1;
6107
6108 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6109 find the underlying pseudo. */
6110 if (GET_CODE (x) == SUBREG)
6111 {
6112 x = SUBREG_REG (x);
6113 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6114 return 0;
6115 }
6116
6117 /* Now look at our tree code and possibly recurse. */
6118 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6119 {
6120 case tcc_declaration:
6121 exp_rtl = DECL_RTL_IF_SET (exp);
6122 break;
6123
6124 case tcc_constant:
6125 return 1;
6126
6127 case tcc_exceptional:
6128 if (TREE_CODE (exp) == TREE_LIST)
6129 {
6130 while (1)
6131 {
6132 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6133 return 0;
6134 exp = TREE_CHAIN (exp);
6135 if (!exp)
6136 return 1;
6137 if (TREE_CODE (exp) != TREE_LIST)
6138 return safe_from_p (x, exp, 0);
6139 }
6140 }
6141 else if (TREE_CODE (exp) == CONSTRUCTOR)
6142 {
6143 constructor_elt *ce;
6144 unsigned HOST_WIDE_INT idx;
6145
6146 for (idx = 0;
6147 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6148 idx++)
6149 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6150 || !safe_from_p (x, ce->value, 0))
6151 return 0;
6152 return 1;
6153 }
6154 else if (TREE_CODE (exp) == ERROR_MARK)
6155 return 1; /* An already-visited SAVE_EXPR? */
6156 else
6157 return 0;
6158
6159 case tcc_statement:
6160 /* The only case we look at here is the DECL_INITIAL inside a
6161 DECL_EXPR. */
6162 return (TREE_CODE (exp) != DECL_EXPR
6163 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6164 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6165 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6166
6167 case tcc_binary:
6168 case tcc_comparison:
6169 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6170 return 0;
6171 /* Fall through. */
6172
6173 case tcc_unary:
6174 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6175
6176 case tcc_expression:
6177 case tcc_reference:
6178 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6179 the expression. If it is set, we conflict iff we are that rtx or
6180 both are in memory. Otherwise, we check all operands of the
6181 expression recursively. */
6182
6183 switch (TREE_CODE (exp))
6184 {
6185 case ADDR_EXPR:
6186 /* If the operand is static or we are static, we can't conflict.
6187 Likewise if we don't conflict with the operand at all. */
6188 if (staticp (TREE_OPERAND (exp, 0))
6189 || TREE_STATIC (exp)
6190 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6191 return 1;
6192
6193 	  /* Otherwise, the only way this can conflict is if we are taking
6194 	     the address of a DECL whose address is part of X, which is
6195 	     very rare.  */
6196 exp = TREE_OPERAND (exp, 0);
6197 if (DECL_P (exp))
6198 {
6199 if (!DECL_RTL_SET_P (exp)
6200 || !MEM_P (DECL_RTL (exp)))
6201 return 0;
6202 else
6203 exp_rtl = XEXP (DECL_RTL (exp), 0);
6204 }
6205 break;
6206
6207 case MISALIGNED_INDIRECT_REF:
6208 case ALIGN_INDIRECT_REF:
6209 case INDIRECT_REF:
6210 if (MEM_P (x)
6211 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6212 get_alias_set (exp)))
6213 return 0;
6214 break;
6215
6216 case CALL_EXPR:
6217 /* Assume that the call will clobber all hard registers and
6218 all of memory. */
6219 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6220 || MEM_P (x))
6221 return 0;
6222 break;
6223
6224 case WITH_CLEANUP_EXPR:
6225 case CLEANUP_POINT_EXPR:
6226 /* Lowered by gimplify.c. */
6227 gcc_unreachable ();
6228
6229 case SAVE_EXPR:
6230 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6231
6232 default:
6233 break;
6234 }
6235
6236 /* If we have an rtx, we do not need to scan our operands. */
6237 if (exp_rtl)
6238 break;
6239
6240 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6241 for (i = 0; i < nops; i++)
6242 if (TREE_OPERAND (exp, i) != 0
6243 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6244 return 0;
6245
6246 /* If this is a language-specific tree code, it may require
6247 special handling. */
6248 if ((unsigned int) TREE_CODE (exp)
6249 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6250 && !lang_hooks.safe_from_p (x, exp))
6251 return 0;
6252 break;
6253
6254 case tcc_type:
6255 /* Should never get a type here. */
6256 gcc_unreachable ();
6257
6258 case tcc_gimple_stmt:
6259 gcc_unreachable ();
6260 }
6261
6262 /* If we have an rtl, find any enclosed object. Then see if we conflict
6263 with it. */
6264 if (exp_rtl)
6265 {
6266 if (GET_CODE (exp_rtl) == SUBREG)
6267 {
6268 exp_rtl = SUBREG_REG (exp_rtl);
6269 if (REG_P (exp_rtl)
6270 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6271 return 0;
6272 }
6273
6274 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6275 are memory and they conflict. */
6276 return ! (rtx_equal_p (x, exp_rtl)
6277 || (MEM_P (x) && MEM_P (exp_rtl)
6278 && true_dependence (exp_rtl, VOIDmode, x,
6279 rtx_addr_varies_p)));
6280 }
6281
6282 /* If we reach here, it is safe. */
6283 return 1;
6284 }
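/* Sketch of the intended use: before reusing a suggested TARGET while
   the other operand has yet to be evaluated, callers do

	if (! safe_from_p (target, exp1, 1))
	  target = 0;

   i.e. if EXP1 might reference TARGET, drop the suggestion rather
   than risk clobbering an input (see expand_operands below).  */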
6285
6286 \f
6287 /* Return the highest power of two that EXP is known to be a multiple of.
6288 This is used in updating alignment of MEMs in array references. */
6289
6290 unsigned HOST_WIDE_INT
6291 highest_pow2_factor (tree exp)
6292 {
6293 unsigned HOST_WIDE_INT c0, c1;
6294
6295 switch (TREE_CODE (exp))
6296 {
6297 case INTEGER_CST:
6298 /* We can find the lowest bit that's a one. If the low
6299 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6300 We need to handle this case since we can find it in a COND_EXPR,
6301 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6302 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6303 later ICE. */
6304 if (TREE_OVERFLOW (exp))
6305 return BIGGEST_ALIGNMENT;
6306 else
6307 {
6308 /* Note: tree_low_cst is intentionally not used here,
6309 we don't care about the upper bits. */
6310 c0 = TREE_INT_CST_LOW (exp);
6311 c0 &= -c0;
6312 return c0 ? c0 : BIGGEST_ALIGNMENT;
6313 }
6314 break;
6315
6316 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6317 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6318 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6319 return MIN (c0, c1);
6320
6321 case MULT_EXPR:
6322 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6323 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6324 return c0 * c1;
6325
6326 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6327 case CEIL_DIV_EXPR:
6328 if (integer_pow2p (TREE_OPERAND (exp, 1))
6329 && host_integerp (TREE_OPERAND (exp, 1), 1))
6330 {
6331 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6332 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6333 return MAX (1, c0 / c1);
6334 }
6335 break;
6336
6337 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6338 case SAVE_EXPR:
6339 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6340
6341 case COMPOUND_EXPR:
6342 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6343
6344 case COND_EXPR:
6345 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6346 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6347 return MIN (c0, c1);
6348
6349 default:
6350 break;
6351 }
6352
6353 return 1;
6354 }
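/* Worked example: for the tree i * 12 + 8, the MULT_EXPR case gives
   highest_pow2_factor (i) * highest_pow2_factor (12) = 1 * 4 = 4,
   and the PLUS_EXPR case then takes MIN (4, 8) = 4, so an address of
   that form is known to be at least 4-byte aligned.  */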
6355
6356 /* Similar, except that the alignment requirements of TARGET are
6357 taken into account. Assume it is at least as aligned as its
6358 type, unless it is a COMPONENT_REF in which case the layout of
6359 the structure gives the alignment. */
6360
6361 static unsigned HOST_WIDE_INT
6362 highest_pow2_factor_for_target (tree target, tree exp)
6363 {
6364 unsigned HOST_WIDE_INT target_align, factor;
6365
6366 factor = highest_pow2_factor (exp);
6367 if (TREE_CODE (target) == COMPONENT_REF)
6368 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6369 else
6370 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6371 return MAX (factor, target_align);
6372 }
6373 \f
6374 /* Expands variable VAR. */
6375
6376 void
6377 expand_var (tree var)
6378 {
6379 if (DECL_EXTERNAL (var))
6380 return;
6381
6382 if (TREE_STATIC (var))
6383 /* If this is an inlined copy of a static local variable,
6384 look up the original decl. */
6385 var = DECL_ORIGIN (var);
6386
6387 if (TREE_STATIC (var)
6388 ? !TREE_ASM_WRITTEN (var)
6389 : !DECL_RTL_SET_P (var))
6390 {
6391 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6392 /* Should be ignored. */;
6393 else if (lang_hooks.expand_decl (var))
6394 /* OK. */;
6395 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6396 expand_decl (var);
6397 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6398 rest_of_decl_compilation (var, 0, 0);
6399 else
6400 /* No expansion needed. */
6401 gcc_assert (TREE_CODE (var) == TYPE_DECL
6402 || TREE_CODE (var) == CONST_DECL
6403 || TREE_CODE (var) == FUNCTION_DECL
6404 || TREE_CODE (var) == LABEL_DECL);
6405 }
6406 }
6407
6408 /* Subroutine of expand_expr. Expand the two operands of a binary
6409 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6410 The value may be stored in TARGET if TARGET is nonzero. The
6411 MODIFIER argument is as documented by expand_expr. */
6412
6413 static void
6414 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6415 enum expand_modifier modifier)
6416 {
6417 if (! safe_from_p (target, exp1, 1))
6418 target = 0;
6419 if (operand_equal_p (exp0, exp1, 0))
6420 {
6421 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6422 *op1 = copy_rtx (*op0);
6423 }
6424 else
6425 {
6426 /* If we need to preserve evaluation order, copy exp0 into its own
6427 temporary variable so that it can't be clobbered by exp1. */
6428 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6429 exp0 = save_expr (exp0);
6430 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6431 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6432 }
6433 }
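/* A hedged usage sketch: a typical binary-operator expansion in this
   file does

	expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			 subtarget, &op0, &op1, EXPAND_NORMAL);

   and then hands OP0 and OP1 to expand_simple_binop or an optab.  The
   safe_from_p check drops TARGET whenever EXP1 might reference it, so
   evaluating operand 1 cannot clobber operand 0's result.  */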
6434
6435 \f
6436 /* Return a MEM that contains constant EXP. DEFER is as for
6437 output_constant_def and MODIFIER is as for expand_expr. */
6438
6439 static rtx
6440 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6441 {
6442 rtx mem;
6443
6444 mem = output_constant_def (exp, defer);
6445 if (modifier != EXPAND_INITIALIZER)
6446 mem = use_anchored_address (mem);
6447 return mem;
6448 }
6449
6450 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6451 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6452
6453 static rtx
6454 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6455 enum expand_modifier modifier)
6456 {
6457 rtx result, subtarget;
6458 tree inner, offset;
6459 HOST_WIDE_INT bitsize, bitpos;
6460 int volatilep, unsignedp;
6461 enum machine_mode mode1;
6462
6463 /* If we are taking the address of a constant and are at the top level,
6464 we have to use output_constant_def since we can't call force_const_mem
6465 at top level. */
6466 /* ??? This should be considered a front-end bug. We should not be
6467 generating ADDR_EXPR of something that isn't an LVALUE. The only
6468 exception here is STRING_CST. */
6469 if (TREE_CODE (exp) == CONSTRUCTOR
6470 || CONSTANT_CLASS_P (exp))
6471 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6472
6473 /* Everything must be something allowed by is_gimple_addressable. */
6474 switch (TREE_CODE (exp))
6475 {
6476 case INDIRECT_REF:
6477 /* This case will happen via recursion for &a->b. */
6478 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6479
6480 case CONST_DECL:
6481 /* Recurse and make the output_constant_def clause above handle this. */
6482 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6483 tmode, modifier);
6484
6485 case REALPART_EXPR:
6486 /* The real part of the complex number is always first, therefore
6487 the address is the same as the address of the parent object. */
6488 offset = 0;
6489 bitpos = 0;
6490 inner = TREE_OPERAND (exp, 0);
6491 break;
6492
6493 case IMAGPART_EXPR:
6494 /* The imaginary part of the complex number is always second.
6495 The expression is therefore always offset by the size of the
6496 scalar type. */
6497 offset = 0;
6498 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6499 inner = TREE_OPERAND (exp, 0);
6500 break;
6501
6502 default:
6503 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6504 	 expand_expr, as that can have various side effects; LABEL_DECLs, for
6505 	 example, may not have their DECL_RTL set yet.  Assume that language-
6506 	 specific tree nodes can be expanded in some interesting way.  */
6507 if (DECL_P (exp)
6508 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6509 {
6510 result = expand_expr (exp, target, tmode,
6511 modifier == EXPAND_INITIALIZER
6512 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6513
6514 /* If the DECL isn't in memory, then the DECL wasn't properly
6515 marked TREE_ADDRESSABLE, which will be either a front-end
6516 or a tree optimizer bug. */
6517 gcc_assert (MEM_P (result));
6518 result = XEXP (result, 0);
6519
6520 /* ??? Is this needed anymore? */
6521 	  if (DECL_P (exp) && !TREE_USED (exp))
6522 {
6523 assemble_external (exp);
6524 TREE_USED (exp) = 1;
6525 }
6526
6527 if (modifier != EXPAND_INITIALIZER
6528 && modifier != EXPAND_CONST_ADDRESS)
6529 result = force_operand (result, target);
6530 return result;
6531 }
6532
6533 /* Pass FALSE as the last argument to get_inner_reference although
6534 we are expanding to RTL. The rationale is that we know how to
6535 handle "aligning nodes" here: we can just bypass them because
6536 they won't change the final object whose address will be returned
6537 (they actually exist only for that purpose). */
6538 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6539 &mode1, &unsignedp, &volatilep, false);
6540 break;
6541 }
6542
6543 /* We must have made progress. */
6544 gcc_assert (inner != exp);
6545
6546 subtarget = offset || bitpos ? NULL_RTX : target;
6547 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6548
6549 if (offset)
6550 {
6551 rtx tmp;
6552
6553 if (modifier != EXPAND_NORMAL)
6554 result = force_operand (result, NULL);
6555 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6556
6557 result = convert_memory_address (tmode, result);
6558 tmp = convert_memory_address (tmode, tmp);
6559
6560 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6561 result = gen_rtx_PLUS (tmode, result, tmp);
6562 else
6563 {
6564 subtarget = bitpos ? NULL_RTX : target;
6565 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6566 1, OPTAB_LIB_WIDEN);
6567 }
6568 }
6569
6570 if (bitpos)
6571 {
6572 /* Someone beforehand should have rejected taking the address
6573 of such an object. */
6574 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6575
6576 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6577 if (modifier < EXPAND_SUM)
6578 result = force_operand (result, target);
6579 }
6580
6581 return result;
6582 }
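/* Illustration: for &p->f, the gimplifier produces
   ADDR_EXPR <COMPONENT_REF <INDIRECT_REF <p>, f>>.  The function above
   peels the COMPONENT_REF via get_inner_reference, accumulating F's
   position into BITPOS/OFFSET, and then recurses; the INDIRECT_REF
   case simply expands P itself, so the result is P plus a constant.  */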
6583
6584 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6585 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6586
6587 static rtx
6588 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6589 enum expand_modifier modifier)
6590 {
6591 enum machine_mode rmode;
6592 rtx result;
6593
6594 /* Target mode of VOIDmode says "whatever's natural". */
6595 if (tmode == VOIDmode)
6596 tmode = TYPE_MODE (TREE_TYPE (exp));
6597
6598 /* We can get called with some Weird Things if the user does silliness
6599 like "(short) &a". In that case, convert_memory_address won't do
6600 the right thing, so ignore the given target mode. */
6601 if (tmode != Pmode && tmode != ptr_mode)
6602 tmode = Pmode;
6603
6604 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6605 tmode, modifier);
6606
6607   /* Despite expand_expr's claims about ignoring TMODE when not
6608      strictly convenient, things break if we don't honor it.  Note
6609 that combined with the above, we only do this for pointer modes. */
6610 rmode = GET_MODE (result);
6611 if (rmode == VOIDmode)
6612 rmode = tmode;
6613 if (rmode != tmode)
6614 result = convert_memory_address (tmode, result);
6615
6616 return result;
6617 }
6618
6619
6620 /* expand_expr: generate code for computing expression EXP.
6621 An rtx for the computed value is returned. The value is never null.
6622 In the case of a void EXP, const0_rtx is returned.
6623
6624 The value may be stored in TARGET if TARGET is nonzero.
6625 TARGET is just a suggestion; callers must assume that
6626 the rtx returned may not be the same as TARGET.
6627
6628 If TARGET is CONST0_RTX, it means that the value will be ignored.
6629
6630 If TMODE is not VOIDmode, it suggests generating the
6631 result in mode TMODE. But this is done only when convenient.
6632    Otherwise, TMODE is ignored and the value is generated in its natural mode.
6633 TMODE is just a suggestion; callers must assume that
6634 the rtx returned may not have mode TMODE.
6635
6636 Note that TARGET may have neither TMODE nor MODE. In that case, it
6637 probably will not be used.
6638
6639 If MODIFIER is EXPAND_SUM then when EXP is an addition
6640 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6641 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6642 products as above, or REG or MEM, or constant.
6643 Ordinarily in such cases we would output mul or add instructions
6644 and then return a pseudo reg containing the sum.
6645
6646 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6647 it also marks a label as absolutely required (it can't be dead).
6648 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6649 This is used for outputting expressions used in initializers.
6650
6651 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6652 with a constant address even if that address is not normally legitimate.
6653 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6654
6655 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6656 a call parameter. Such targets require special care as we haven't yet
6657 marked TARGET so that it's safe from being trashed by libcalls. We
6658 don't want to use TARGET for anything but the final result;
6659    intermediate values must go elsewhere.  Additionally, calls to
6660 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6661
6662 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6663 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6664 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6665 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6666 recursively. */
6667
6668 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6669 enum expand_modifier, rtx *);
6670
6671 rtx
6672 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6673 enum expand_modifier modifier, rtx *alt_rtl)
6674 {
6675 int rn = -1;
6676 rtx ret, last = NULL;
6677
6678 /* Handle ERROR_MARK before anybody tries to access its type. */
6679 if (TREE_CODE (exp) == ERROR_MARK
6680 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
6681 {
6682 ret = CONST0_RTX (tmode);
6683 return ret ? ret : const0_rtx;
6684 }
6685
6686 if (flag_non_call_exceptions)
6687 {
6688 rn = lookup_stmt_eh_region (exp);
6689 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6690 if (rn >= 0)
6691 last = get_last_insn ();
6692 }
6693
6694 /* If this is an expression of some kind and it has an associated line
6695 number, then emit the line number before expanding the expression.
6696
6697 We need to save and restore the file and line information so that
6698 errors discovered during expansion are emitted with the right
6699      information.  It would be better if the diagnostic routines
6700 used the file/line information embedded in the tree nodes rather
6701 than globals. */
6702 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6703 {
6704 location_t saved_location = input_location;
6705 input_location = EXPR_LOCATION (exp);
6706 emit_line_note (input_location);
6707
6708 /* Record where the insns produced belong. */
6709 record_block_change (TREE_BLOCK (exp));
6710
6711 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6712
6713 input_location = saved_location;
6714 }
6715 else
6716 {
6717 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6718 }
6719
6720 /* If using non-call exceptions, mark all insns that may trap.
6721 expand_call() will mark CALL_INSNs before we get to this code,
6722 but it doesn't handle libcalls, and these may trap. */
6723 if (rn >= 0)
6724 {
6725 rtx insn;
6726 for (insn = next_real_insn (last); insn;
6727 insn = next_real_insn (insn))
6728 {
6729 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6730 /* If we want exceptions for non-call insns, any
6731 may_trap_p instruction may throw. */
6732 && GET_CODE (PATTERN (insn)) != CLOBBER
6733 && GET_CODE (PATTERN (insn)) != USE
6734 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6735 {
6736 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6737 REG_NOTES (insn));
6738 }
6739 }
6740 }
6741
6742 return ret;
6743 }
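/* Example of the EXPAND_SUM convention documented above (a hedged
   sketch; the register numbers are made up and a 32-bit target is
   assumed): for p + i * 4,

	expand_expr (exp, NULL_RTX, Pmode, EXPAND_SUM)

   may legitimately return

	(plus:SI (reg:SI 58) (mult:SI (reg:SI 59) (const_int 4)))

   without emitting an add insn, leaving the caller to fold the sum
   into a machine addressing mode where possible.  */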
6744
6745 static rtx
6746 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6747 enum expand_modifier modifier, rtx *alt_rtl)
6748 {
6749 rtx op0, op1, temp, decl_rtl;
6750 tree type;
6751 int unsignedp;
6752 enum machine_mode mode;
6753 enum tree_code code = TREE_CODE (exp);
6754 optab this_optab;
6755 rtx subtarget, original_target;
6756 int ignore;
6757 tree context, subexp0, subexp1;
6758 bool reduce_bit_field = false;
6759 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6760 ? reduce_to_bit_field_precision ((expr), \
6761 target, \
6762 type) \
6763 : (expr))
6764
6765 if (GIMPLE_STMT_P (exp))
6766 {
6767 type = void_type_node;
6768 mode = VOIDmode;
6769 unsignedp = 0;
6770 }
6771 else
6772 {
6773 type = TREE_TYPE (exp);
6774 mode = TYPE_MODE (type);
6775 unsignedp = TYPE_UNSIGNED (type);
6776 }
6777 if (lang_hooks.reduce_bit_field_operations
6778 && TREE_CODE (type) == INTEGER_TYPE
6779 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6780 {
6781 /* An operation in what may be a bit-field type needs the
6782 result to be reduced to the precision of the bit-field type,
6783 which is narrower than that of the type's mode. */
6784 reduce_bit_field = true;
6785 if (modifier == EXPAND_STACK_PARM)
6786 target = 0;
6787 }
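  /* For instance, a C bit-field type of precision 3 laid out in QImode
     has GET_MODE_PRECISION (QImode) = 8 > 3, so every arithmetic
     result below must be masked (or sign-extended) back to 3 bits;
     REDUCE_BIT_FIELD applies reduce_to_bit_field_precision to do
     exactly that.  */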
6788
6789 /* Use subtarget as the target for operand 0 of a binary operation. */
6790 subtarget = get_subtarget (target);
6791 original_target = target;
6792 ignore = (target == const0_rtx
6793 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6794 || code == CONVERT_EXPR || code == COND_EXPR
6795 || code == VIEW_CONVERT_EXPR)
6796 && TREE_CODE (type) == VOID_TYPE));
6797
6798 /* If we are going to ignore this result, we need only do something
6799 if there is a side-effect somewhere in the expression. If there
6800 is, short-circuit the most common cases here. Note that we must
6801 not call expand_expr with anything but const0_rtx in case this
6802 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6803
6804 if (ignore)
6805 {
6806 if (! TREE_SIDE_EFFECTS (exp))
6807 return const0_rtx;
6808
6809 /* Ensure we reference a volatile object even if value is ignored, but
6810 don't do this if all we are doing is taking its address. */
6811 if (TREE_THIS_VOLATILE (exp)
6812 && TREE_CODE (exp) != FUNCTION_DECL
6813 && mode != VOIDmode && mode != BLKmode
6814 && modifier != EXPAND_CONST_ADDRESS)
6815 {
6816 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6817 if (MEM_P (temp))
6818 temp = copy_to_reg (temp);
6819 return const0_rtx;
6820 }
6821
6822 if (TREE_CODE_CLASS (code) == tcc_unary
6823 || code == COMPONENT_REF || code == INDIRECT_REF)
6824 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6825 modifier);
6826
6827 else if (TREE_CODE_CLASS (code) == tcc_binary
6828 || TREE_CODE_CLASS (code) == tcc_comparison
6829 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6830 {
6831 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6832 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6833 return const0_rtx;
6834 }
6835 else if (code == BIT_FIELD_REF)
6836 {
6837 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6838 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6839 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6840 return const0_rtx;
6841 }
6842
6843 target = 0;
6844 }
6845
6846
6847 switch (code)
6848 {
6849 case LABEL_DECL:
6850 {
6851 tree function = decl_function_context (exp);
6852
6853 temp = label_rtx (exp);
6854 temp = gen_rtx_LABEL_REF (Pmode, temp);
6855
6856 if (function != current_function_decl
6857 && function != 0)
6858 LABEL_REF_NONLOCAL_P (temp) = 1;
6859
6860 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6861 return temp;
6862 }
6863
6864 case SSA_NAME:
6865 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6866 NULL);
6867
6868 case PARM_DECL:
6869 case VAR_DECL:
6870 /* If a static var's type was incomplete when the decl was written,
6871 but the type is complete now, lay out the decl now. */
6872 if (DECL_SIZE (exp) == 0
6873 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6874 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6875 layout_decl (exp, 0);
6876
6877 /* ... fall through ... */
6878
6879 case FUNCTION_DECL:
6880 case RESULT_DECL:
6881 decl_rtl = DECL_RTL (exp);
6882 gcc_assert (decl_rtl);
6883
6884       /* Ensure the variable is marked as used even if it doesn't go
6885 	 through a parser.  If it hasn't been used yet, write out an
6886 	 external definition.  */
6887 if (! TREE_USED (exp))
6888 {
6889 assemble_external (exp);
6890 TREE_USED (exp) = 1;
6891 }
6892
6893 /* Show we haven't gotten RTL for this yet. */
6894 temp = 0;
6895
6896 /* Variables inherited from containing functions should have
6897 been lowered by this point. */
6898 context = decl_function_context (exp);
6899 gcc_assert (!context
6900 || context == current_function_decl
6901 || TREE_STATIC (exp)
6902 /* ??? C++ creates functions that are not TREE_STATIC. */
6903 || TREE_CODE (exp) == FUNCTION_DECL);
6904
6905 /* This is the case of an array whose size is to be determined
6906 from its initializer, while the initializer is still being parsed.
6907 See expand_decl. */
6908
6909 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6910 temp = validize_mem (decl_rtl);
6911
6912       /* If DECL_RTL is memory, we are in the normal case.  If either
6913 	 the address is not valid, or it is not a register and -fforce-addr
6914 	 is specified, get the address into a register.  */
6915
6916 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
6917 {
6918 if (alt_rtl)
6919 *alt_rtl = decl_rtl;
6920 decl_rtl = use_anchored_address (decl_rtl);
6921 if (modifier != EXPAND_CONST_ADDRESS
6922 && modifier != EXPAND_SUM
6923 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
6924 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
6925 temp = replace_equiv_address (decl_rtl,
6926 copy_rtx (XEXP (decl_rtl, 0)));
6927 }
6928
6929 /* If we got something, return it. But first, set the alignment
6930 if the address is a register. */
6931 if (temp != 0)
6932 {
6933 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6934 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6935
6936 return temp;
6937 }
6938
6939 /* If the mode of DECL_RTL does not match that of the decl, it
6940 must be a promoted value. We return a SUBREG of the wanted mode,
6941 but mark it so that we know that it was already extended. */
6942
6943 if (REG_P (decl_rtl)
6944 && GET_MODE (decl_rtl) != DECL_MODE (exp))
6945 {
6946 enum machine_mode pmode;
6947
6948 /* Get the signedness used for this variable. Ensure we get the
6949 same mode we got when the variable was declared. */
6950 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6951 (TREE_CODE (exp) == RESULT_DECL
6952 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
6953 gcc_assert (GET_MODE (decl_rtl) == pmode);
6954
6955 temp = gen_lowpart_SUBREG (mode, decl_rtl);
6956 SUBREG_PROMOTED_VAR_P (temp) = 1;
6957 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6958 return temp;
6959 }
6960
6961 return decl_rtl;
6962
6963 case INTEGER_CST:
6964 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6965 TREE_INT_CST_HIGH (exp), mode);
6966
6967 /* ??? If overflow is set, fold will have done an incomplete job,
6968 which can result in (plus xx (const_int 0)), which can get
6969 simplified by validate_replace_rtx during virtual register
6970 instantiation, which can result in unrecognizable insns.
6971 Avoid this by forcing all overflows into registers. */
6972 if (TREE_OVERFLOW (exp)
6973 && modifier != EXPAND_INITIALIZER)
6974 temp = force_reg (mode, temp);
6975
6976 return temp;
6977
6978 case VECTOR_CST:
6979 {
6980 tree tmp = NULL_TREE;
6981 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
6982 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
6983 return const_vector_from_tree (exp);
6984 if (GET_MODE_CLASS (mode) == MODE_INT)
6985 {
6986 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
6987 if (type_for_mode)
6988 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
6989 }
6990 if (!tmp)
6991 tmp = build_constructor_from_list (type,
6992 TREE_VECTOR_CST_ELTS (exp));
6993 return expand_expr (tmp, ignore ? const0_rtx : target,
6994 tmode, modifier);
6995 }
6996
6997 case CONST_DECL:
6998 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6999
7000 case REAL_CST:
7001 /* If optimized, generate immediate CONST_DOUBLE
7002 which will be turned into memory by reload if necessary.
7003
7004 We used to force a register so that loop.c could see it. But
7005 this does not allow gen_* patterns to perform optimizations with
7006 the constants. It also produces two insns in cases like "x = 1.0;".
7007 On most machines, floating-point constants are not permitted in
7008 many insns, so we'd end up copying it to a register in any case.
7009
7010 Now, we do the copying in expand_binop, if appropriate. */
7011 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7012 TYPE_MODE (TREE_TYPE (exp)));
7013
7014 case COMPLEX_CST:
7015 /* Handle evaluating a complex constant in a CONCAT target. */
7016 if (original_target && GET_CODE (original_target) == CONCAT)
7017 {
7018 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7019 rtx rtarg, itarg;
7020
7021 rtarg = XEXP (original_target, 0);
7022 itarg = XEXP (original_target, 1);
7023
7024 /* Move the real and imaginary parts separately. */
7025 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
7026 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
7027
7028 if (op0 != rtarg)
7029 emit_move_insn (rtarg, op0);
7030 if (op1 != itarg)
7031 emit_move_insn (itarg, op1);
7032
7033 return original_target;
7034 }
7035
7036 /* ... fall through ... */
7037
7038 case STRING_CST:
7039 temp = expand_expr_constant (exp, 1, modifier);
7040
7041 /* temp contains a constant address.
7042 On RISC machines where a constant address isn't valid,
7043 make some insns to get that address into a register. */
7044 if (modifier != EXPAND_CONST_ADDRESS
7045 && modifier != EXPAND_INITIALIZER
7046 && modifier != EXPAND_SUM
7047 && (! memory_address_p (mode, XEXP (temp, 0))
7048 || flag_force_addr))
7049 return replace_equiv_address (temp,
7050 copy_rtx (XEXP (temp, 0)));
7051 return temp;
7052
7053 case SAVE_EXPR:
7054 {
7055 tree val = TREE_OPERAND (exp, 0);
7056 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7057
7058 if (!SAVE_EXPR_RESOLVED_P (exp))
7059 {
7060 /* We can indeed still hit this case, typically via builtin
7061 expanders calling save_expr immediately before expanding
7062 something. Assume this means that we only have to deal
7063 with non-BLKmode values. */
7064 gcc_assert (GET_MODE (ret) != BLKmode);
7065
7066 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7067 DECL_ARTIFICIAL (val) = 1;
7068 DECL_IGNORED_P (val) = 1;
7069 TREE_OPERAND (exp, 0) = val;
7070 SAVE_EXPR_RESOLVED_P (exp) = 1;
7071
7072 if (!CONSTANT_P (ret))
7073 ret = copy_to_reg (ret);
7074 SET_DECL_RTL (val, ret);
7075 }
7076
7077 return ret;
7078 }
7079
7080 case GOTO_EXPR:
7081 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7082 expand_goto (TREE_OPERAND (exp, 0));
7083 else
7084 expand_computed_goto (TREE_OPERAND (exp, 0));
7085 return const0_rtx;
7086
7087 case CONSTRUCTOR:
7088 /* If we don't need the result, just ensure we evaluate any
7089 subexpressions. */
7090 if (ignore)
7091 {
7092 unsigned HOST_WIDE_INT idx;
7093 tree value;
7094
7095 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7096 expand_expr (value, const0_rtx, VOIDmode, 0);
7097
7098 return const0_rtx;
7099 }
7100
7101 /* Try to avoid creating a temporary at all. This is possible
7102 if all of the initializer is zero.
7103 FIXME: try to handle all [0..255] initializers that memset
7104 can handle.  */
7105 else if (TREE_STATIC (exp)
7106 && !TREE_ADDRESSABLE (exp)
7107 && target != 0 && mode == BLKmode
7108 && all_zeros_p (exp))
7109 {
7110 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7111 return target;
7112 }
7113
7114 /* All elts simple constants => refer to a constant in memory. But
7115 if this is a non-BLKmode mode, let it store a field at a time
7116 since that should make a CONST_INT or CONST_DOUBLE when we
7117 fold. Likewise, if we have a target we can use, it is best to
7118 store directly into the target unless the type is large enough
7119 that memcpy will be used. If we are making an initializer and
7120 all operands are constant, put it in memory as well.
7121
7122 FIXME: Avoid trying to fill vector constructors piecemeal.
7123 Output them with output_constant_def below unless we're sure
7124 they're zeros. This should go away when vector initializers
7125 are treated like VECTOR_CST instead of arrays.
7126 */
7127 else if ((TREE_STATIC (exp)
7128 && ((mode == BLKmode
7129 && ! (target != 0 && safe_from_p (target, exp, 1)))
7130 || TREE_ADDRESSABLE (exp)
7131 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7132 && (! MOVE_BY_PIECES_P
7133 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7134 TYPE_ALIGN (type)))
7135 && ! mostly_zeros_p (exp))))
7136 || ((modifier == EXPAND_INITIALIZER
7137 || modifier == EXPAND_CONST_ADDRESS)
7138 && TREE_CONSTANT (exp)))
7139 {
7140 rtx constructor = expand_expr_constant (exp, 1, modifier);
7141
7142 if (modifier != EXPAND_CONST_ADDRESS
7143 && modifier != EXPAND_INITIALIZER
7144 && modifier != EXPAND_SUM)
7145 constructor = validize_mem (constructor);
7146
7147 return constructor;
7148 }
7149 else
7150 {
7151 /* Handle calls that pass values in multiple non-contiguous
7152 locations. The Irix 6 ABI has examples of this. */
7153 if (target == 0 || ! safe_from_p (target, exp, 1)
7154 || GET_CODE (target) == PARALLEL
7155 || modifier == EXPAND_STACK_PARM)
7156 target
7157 = assign_temp (build_qualified_type (type,
7158 (TYPE_QUALS (type)
7159 | (TREE_READONLY (exp)
7160 * TYPE_QUAL_CONST))),
7161 0, TREE_ADDRESSABLE (exp), 1);
7162
7163 store_constructor (exp, target, 0, int_expr_size (exp));
7164 return target;
7165 }
7166
7167 case MISALIGNED_INDIRECT_REF:
7168 case ALIGN_INDIRECT_REF:
7169 case INDIRECT_REF:
7170 {
7171 tree exp1 = TREE_OPERAND (exp, 0);
7172
7173 if (modifier != EXPAND_WRITE)
7174 {
7175 tree t;
7176
7177 t = fold_read_from_constant_string (exp);
7178 if (t)
7179 return expand_expr (t, target, tmode, modifier);
7180 }
7181
7182 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7183 op0 = memory_address (mode, op0);
7184
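/* For an ALIGN_INDIRECT_REF, force the address down to an aligned
   boundary: ANDing with -align (e.g. -16 for a 16-byte alignment)
   clears the low address bits.  */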
7185 if (code == ALIGN_INDIRECT_REF)
7186 {
7187 int align = TYPE_ALIGN_UNIT (type);
7188 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7189 op0 = memory_address (mode, op0);
7190 }
7191
7192 temp = gen_rtx_MEM (mode, op0);
7193
7194 set_mem_attributes (temp, exp, 0);
7195
7196 /* Resolve the misalignment now, so that we don't have to remember
7197 to resolve it later. Of course, this only works for reads. */
7198 /* ??? When we get around to supporting writes, we'll have to handle
7199 this in store_expr directly. The vectorizer isn't generating
7200 those yet, however. */
7201 if (code == MISALIGNED_INDIRECT_REF)
7202 {
7203 int icode;
7204 rtx reg, insn;
7205
7206 gcc_assert (modifier == EXPAND_NORMAL
7207 || modifier == EXPAND_STACK_PARM);
7208
7209 /* The vectorizer should have already checked the mode. */
7210 icode = movmisalign_optab->handlers[mode].insn_code;
7211 gcc_assert (icode != CODE_FOR_nothing);
7212
7213 /* We've already validated the memory, and we're creating a
7214 new pseudo destination. The predicates really can't fail. */
7215 reg = gen_reg_rtx (mode);
7216
7217 /* Nor can the insn generator. */
7218 insn = GEN_FCN (icode) (reg, temp);
7219 emit_insn (insn);
7220
7221 return reg;
7222 }
7223
7224 return temp;
7225 }
7226
7227 case TARGET_MEM_REF:
7228 {
7229 struct mem_address addr;
7230
7231 get_address_description (exp, &addr);
7232 op0 = addr_for_mem_ref (&addr, true);
7233 op0 = memory_address (mode, op0);
7234 temp = gen_rtx_MEM (mode, op0);
7235 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7236 }
7237 return temp;
7238
7239 case ARRAY_REF:
7240
7241 {
7242 tree array = TREE_OPERAND (exp, 0);
7243 tree index = TREE_OPERAND (exp, 1);
7244
7245 /* Fold an expression like: "foo"[2].
7246 This is not done in fold so it won't happen inside &.
7247 Don't fold if this is for wide characters since it's too
7248 difficult to do correctly and this is a very rare case. */
7249
7250 if (modifier != EXPAND_CONST_ADDRESS
7251 && modifier != EXPAND_INITIALIZER
7252 && modifier != EXPAND_MEMORY)
7253 {
7254 tree t = fold_read_from_constant_string (exp);
7255
7256 if (t)
7257 return expand_expr (t, target, tmode, modifier);
7258 }
7259
7260 /* If this is a constant index into a constant array,
7261 just get the value from the array. Handle both the cases when
7262 we have an explicit constructor and when our operand is a variable
7263 that was declared const. */
7264
7265 if (modifier != EXPAND_CONST_ADDRESS
7266 && modifier != EXPAND_INITIALIZER
7267 && modifier != EXPAND_MEMORY
7268 && TREE_CODE (array) == CONSTRUCTOR
7269 && ! TREE_SIDE_EFFECTS (array)
7270 && TREE_CODE (index) == INTEGER_CST)
7271 {
7272 unsigned HOST_WIDE_INT ix;
7273 tree field, value;
7274
7275 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7276 field, value)
7277 if (tree_int_cst_equal (field, index))
7278 {
7279 if (!TREE_SIDE_EFFECTS (value))
7280 return expand_expr (fold (value), target, tmode, modifier);
7281 break;
7282 }
7283 }
7284
7285 else if (optimize >= 1
7286 && modifier != EXPAND_CONST_ADDRESS
7287 && modifier != EXPAND_INITIALIZER
7288 && modifier != EXPAND_MEMORY
7289 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7290 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7291 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7292 && targetm.binds_local_p (array))
7293 {
7294 if (TREE_CODE (index) == INTEGER_CST)
7295 {
7296 tree init = DECL_INITIAL (array);
7297
7298 if (TREE_CODE (init) == CONSTRUCTOR)
7299 {
7300 unsigned HOST_WIDE_INT ix;
7301 tree field, value;
7302
7303 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7304 field, value)
7305 if (tree_int_cst_equal (field, index))
7306 {
7307 if (!TREE_SIDE_EFFECTS (value))
7308 return expand_expr (fold (value), target, tmode,
7309 modifier);
7310 break;
7311 }
7312 }
7313 else if (TREE_CODE (init) == STRING_CST)
7314 {
7315 tree index1 = index;
7316 tree low_bound = array_ref_low_bound (exp);
7317 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7318
7319 /* Optimize the special-case of a zero lower bound.
7320
7321 We convert the low_bound to sizetype to avoid some problems
7322 with constant folding. (E.g. suppose the lower bound is 1,
7323 and its mode is QI.  Without the conversion, (ARRAY
7324 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7325 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
7326
7327 if (! integer_zerop (low_bound))
7328 index1 = size_diffop (index1, fold_convert (sizetype,
7329 low_bound));
7330
7331 if (0 > compare_tree_int (index1,
7332 TREE_STRING_LENGTH (init)))
7333 {
7334 tree type = TREE_TYPE (TREE_TYPE (init));
7335 enum machine_mode mode = TYPE_MODE (type);
7336
7337 if (GET_MODE_CLASS (mode) == MODE_INT
7338 && GET_MODE_SIZE (mode) == 1)
7339 return gen_int_mode (TREE_STRING_POINTER (init)
7340 [TREE_INT_CST_LOW (index1)],
7341 mode);
7342 }
7343 }
7344 }
7345 }
7346 }
7347 goto normal_inner_ref;
7348
7349 case COMPONENT_REF:
7350 /* If the operand is a CONSTRUCTOR, we can just extract the
7351 appropriate field if it is present. */
7352 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7353 {
7354 unsigned HOST_WIDE_INT idx;
7355 tree field, value;
7356
7357 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7358 idx, field, value)
7359 if (field == TREE_OPERAND (exp, 1)
7360 /* We can normally use the value of the field in the
7361 CONSTRUCTOR. However, if this is a bitfield in
7362 an integral mode that we can fit in a HOST_WIDE_INT,
7363 we must mask only the number of bits in the bitfield,
7364 since this is done implicitly by the constructor. If
7365 the bitfield does not meet either of those conditions,
7366 we can't do this optimization. */
7367 && (! DECL_BIT_FIELD (field)
7368 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7369 && (GET_MODE_BITSIZE (DECL_MODE (field))
7370 <= HOST_BITS_PER_WIDE_INT))))
7371 {
7372 if (DECL_BIT_FIELD (field)
7373 && modifier == EXPAND_STACK_PARM)
7374 target = 0;
7375 op0 = expand_expr (value, target, tmode, modifier);
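/* Since we bypass the store that would normally truncate the
   value to the field's width, mask or sign-extend it to BITSIZE
   bits here.  E.g. an unsigned 3-bit field is ANDed with
   (1 << 3) - 1 == 7, while a signed 3-bit field in SImode is
   shifted left and back right by 29 bits.  */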
7376 if (DECL_BIT_FIELD (field))
7377 {
7378 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7379 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7380
7381 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7382 {
7383 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7384 op0 = expand_and (imode, op0, op1, target);
7385 }
7386 else
7387 {
7388 tree count
7389 = build_int_cst (NULL_TREE,
7390 GET_MODE_BITSIZE (imode) - bitsize);
7391
7392 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7393 target, 0);
7394 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7395 target, 0);
7396 }
7397 }
7398
7399 return op0;
7400 }
7401 }
7402 goto normal_inner_ref;
7403
7404 case BIT_FIELD_REF:
7405 case ARRAY_RANGE_REF:
7406 normal_inner_ref:
7407 {
7408 enum machine_mode mode1;
7409 HOST_WIDE_INT bitsize, bitpos;
7410 tree offset;
7411 int volatilep = 0;
7412 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7413 &mode1, &unsignedp, &volatilep, true);
7414 rtx orig_op0;
7415
7416 /* If we got back the original object, something is wrong. Perhaps
7417 we are evaluating an expression too early. In any event, don't
7418 infinitely recurse. */
7419 gcc_assert (tem != exp);
7420
7421 /* If TEM's type is a union of variable size, pass TARGET to the inner
7422 computation, since it will need a temporary and TARGET is known
7423 to suffice.  This occurs in unchecked conversion in Ada.  */
7424
7425 orig_op0 = op0
7426 = expand_expr (tem,
7427 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7428 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7429 != INTEGER_CST)
7430 && modifier != EXPAND_STACK_PARM
7431 ? target : NULL_RTX),
7432 VOIDmode,
7433 (modifier == EXPAND_INITIALIZER
7434 || modifier == EXPAND_CONST_ADDRESS
7435 || modifier == EXPAND_STACK_PARM)
7436 ? modifier : EXPAND_NORMAL);
7437
7438 /* If this is a constant, put it into a register if it is a legitimate
7439 constant, OFFSET is 0, and we won't try to extract outside the
7440 register (in case we were passed a partially uninitialized object
7441 or a view_conversion to a larger size). Force the constant to
7442 memory otherwise. */
7443 if (CONSTANT_P (op0))
7444 {
7445 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7446 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7447 && offset == 0
7448 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7449 op0 = force_reg (mode, op0);
7450 else
7451 op0 = validize_mem (force_const_mem (mode, op0));
7452 }
7453
7454 /* Otherwise, if this object is not in memory and we either have an
7455 offset, a BLKmode result, or a reference outside the object, put it
7456 there. Such cases can occur in Ada if we have unchecked conversion
7457 of an expression from a scalar type to an array or record type or
7458 for an ARRAY_RANGE_REF whose type is BLKmode. */
7459 else if (!MEM_P (op0)
7460 && (offset != 0
7461 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7462 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7463 {
7464 tree nt = build_qualified_type (TREE_TYPE (tem),
7465 (TYPE_QUALS (TREE_TYPE (tem))
7466 | TYPE_QUAL_CONST));
7467 rtx memloc = assign_temp (nt, 1, 1, 1);
7468
7469 emit_move_insn (memloc, op0);
7470 op0 = memloc;
7471 }
7472
7473 if (offset != 0)
7474 {
7475 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7476 EXPAND_SUM);
7477
7478 gcc_assert (MEM_P (op0));
7479
7480 #ifdef POINTERS_EXTEND_UNSIGNED
7481 if (GET_MODE (offset_rtx) != Pmode)
7482 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7483 #else
7484 if (GET_MODE (offset_rtx) != ptr_mode)
7485 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7486 #endif
7487
7488 if (GET_MODE (op0) == BLKmode
7489 /* A constant address in OP0 can have VOIDmode; we must
7490 not try to call force_reg in that case. */
7491 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7492 && bitsize != 0
7493 && (bitpos % bitsize) == 0
7494 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7495 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7496 {
7497 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7498 bitpos = 0;
7499 }
7500
7501 op0 = offset_address (op0, offset_rtx,
7502 highest_pow2_factor (offset));
7503 }
7504
7505 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7506 record its alignment as BIGGEST_ALIGNMENT. */
7507 if (MEM_P (op0) && bitpos == 0 && offset != 0
7508 && is_aligning_offset (offset, tem))
7509 set_mem_align (op0, BIGGEST_ALIGNMENT);
7510
7511 /* Don't forget about volatility even if this is a bitfield. */
7512 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7513 {
7514 if (op0 == orig_op0)
7515 op0 = copy_rtx (op0);
7516
7517 MEM_VOLATILE_P (op0) = 1;
7518 }
7519
7520 /* The following code doesn't handle CONCAT.
7521 Assume only bitpos == 0 can be used for CONCAT, due to
7522 one-element arrays having the same mode as their element.  */
7523 if (GET_CODE (op0) == CONCAT)
7524 {
7525 gcc_assert (bitpos == 0
7526 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7527 return op0;
7528 }
7529
7530 /* In cases where an aligned union has an unaligned object
7531 as a field, we might be extracting a BLKmode value from
7532 an integer-mode (e.g., SImode) object. Handle this case
7533 by doing the extract into an object as wide as the field
7534 (which we know to be the width of a basic mode), then
7535 storing into memory, and changing the mode to BLKmode. */
7536 if (mode1 == VOIDmode
7537 || REG_P (op0) || GET_CODE (op0) == SUBREG
7538 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7539 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7540 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7541 && modifier != EXPAND_CONST_ADDRESS
7542 && modifier != EXPAND_INITIALIZER)
7543 /* If the field isn't aligned enough to fetch as a memref,
7544 fetch it as a bit field. */
7545 || (mode1 != BLKmode
7546 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7547 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7548 || (MEM_P (op0)
7549 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7550 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7551 && ((modifier == EXPAND_CONST_ADDRESS
7552 || modifier == EXPAND_INITIALIZER)
7553 ? STRICT_ALIGNMENT
7554 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7555 || (bitpos % BITS_PER_UNIT != 0)))
7556 /* If the type and the field have constant size and the
7557 size of the type isn't the same as that of the bitfield,
7558 we must use bitfield operations. */
7559 || (bitsize >= 0
7560 && TYPE_SIZE (TREE_TYPE (exp))
7561 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7562 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7563 bitsize)))
7564 {
7565 enum machine_mode ext_mode = mode;
7566
7567 if (ext_mode == BLKmode
7568 && ! (target != 0 && MEM_P (op0)
7569 && MEM_P (target)
7570 && bitpos % BITS_PER_UNIT == 0))
7571 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7572
7573 if (ext_mode == BLKmode)
7574 {
7575 if (target == 0)
7576 target = assign_temp (type, 0, 1, 1);
7577
7578 if (bitsize == 0)
7579 return target;
7580
7581 /* In this case, BITPOS must start at a byte boundary and
7582 TARGET, if specified, must be a MEM. */
7583 gcc_assert (MEM_P (op0)
7584 && (!target || MEM_P (target))
7585 && !(bitpos % BITS_PER_UNIT));
7586
7587 emit_block_move (target,
7588 adjust_address (op0, VOIDmode,
7589 bitpos / BITS_PER_UNIT),
7590 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7591 / BITS_PER_UNIT),
7592 (modifier == EXPAND_STACK_PARM
7593 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7594
7595 return target;
7596 }
7597
7598 op0 = validize_mem (op0);
7599
7600 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7601 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7602
7603 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7604 (modifier == EXPAND_STACK_PARM
7605 ? NULL_RTX : target),
7606 ext_mode, ext_mode);
7607
7608 /* If the result is a record type and BITSIZE is narrower than
7609 the mode of OP0, an integral mode, and this is a big-endian
7610 machine, we must put the field into the high-order bits. */
7611 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7612 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7613 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7614 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7615 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7616 - bitsize),
7617 op0, 1);
7618
7619 /* If the result type is BLKmode, store the data into a temporary
7620 of the appropriate type, but with the mode corresponding to the
7621 mode for the data we have (op0's mode). It's tempting to make
7622 this a constant type, since we know it's only being stored once,
7623 but that can cause problems if we are taking the address of this
7624 COMPONENT_REF because the MEM of any reference via that address
7625 will have flags corresponding to the type, which will not
7626 necessarily be constant. */
7627 if (mode == BLKmode)
7628 {
7629 rtx new
7630 = assign_stack_temp_for_type
7631 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7632
7633 emit_move_insn (new, op0);
7634 op0 = copy_rtx (new);
7635 PUT_MODE (op0, BLKmode);
7636 set_mem_attributes (op0, exp, 1);
7637 }
7638
7639 return op0;
7640 }
7641
7642 /* If the result is BLKmode, use that to access the object
7643 now as well. */
7644 if (mode == BLKmode)
7645 mode1 = BLKmode;
7646
7647 /* Get a reference to just this component. */
7648 if (modifier == EXPAND_CONST_ADDRESS
7649 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7650 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7651 else
7652 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7653
7654 if (op0 == orig_op0)
7655 op0 = copy_rtx (op0);
7656
7657 set_mem_attributes (op0, exp, 0);
7658 if (REG_P (XEXP (op0, 0)))
7659 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7660
7661 MEM_VOLATILE_P (op0) |= volatilep;
7662 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7663 || modifier == EXPAND_CONST_ADDRESS
7664 || modifier == EXPAND_INITIALIZER)
7665 return op0;
7666 else if (target == 0)
7667 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7668
7669 convert_move (target, op0, unsignedp);
7670 return target;
7671 }
7672
7673 case OBJ_TYPE_REF:
7674 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7675
7676 case CALL_EXPR:
7677 /* Check for a built-in function. */
7678 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7679 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7680 == FUNCTION_DECL)
7681 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7682 {
7683 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7684 == BUILT_IN_FRONTEND)
7685 return lang_hooks.expand_expr (exp, original_target,
7686 tmode, modifier,
7687 alt_rtl);
7688 else
7689 return expand_builtin (exp, target, subtarget, tmode, ignore);
7690 }
7691
7692 return expand_call (exp, target, ignore);
7693
7694 case NON_LVALUE_EXPR:
7695 case NOP_EXPR:
7696 case CONVERT_EXPR:
7697 if (TREE_OPERAND (exp, 0) == error_mark_node)
7698 return const0_rtx;
7699
7700 if (TREE_CODE (type) == UNION_TYPE)
7701 {
7702 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7703
7704 /* If both input and output are BLKmode, this conversion isn't doing
7705 anything except possibly changing the memory attributes.  */
7706 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7707 {
7708 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7709 modifier);
7710
7711 result = copy_rtx (result);
7712 set_mem_attributes (result, exp, 0);
7713 return result;
7714 }
7715
7716 if (target == 0)
7717 {
7718 if (TYPE_MODE (type) != BLKmode)
7719 target = gen_reg_rtx (TYPE_MODE (type));
7720 else
7721 target = assign_temp (type, 0, 1, 1);
7722 }
7723
7724 if (MEM_P (target))
7725 /* Store data into beginning of memory target. */
7726 store_expr (TREE_OPERAND (exp, 0),
7727 adjust_address (target, TYPE_MODE (valtype), 0),
7728 modifier == EXPAND_STACK_PARM);
7729
7730 else
7731 {
7732 gcc_assert (REG_P (target));
7733
7734 /* Store this field into a union of the proper type. */
7735 store_field (target,
7736 MIN ((int_size_in_bytes (TREE_TYPE
7737 (TREE_OPERAND (exp, 0)))
7738 * BITS_PER_UNIT),
7739 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7740 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7741 type, 0);
7742 }
7743
7744 /* Return the entire union. */
7745 return target;
7746 }
7747
7748 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7749 {
7750 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7751 modifier);
7752
7753 /* If the signedness of the conversion differs and OP0 is
7754 a promoted SUBREG, clear that indication since we now
7755 have to do the proper extension. */
7756 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7757 && GET_CODE (op0) == SUBREG)
7758 SUBREG_PROMOTED_VAR_P (op0) = 0;
7759
7760 return REDUCE_BIT_FIELD (op0);
7761 }
7762
7763 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7764 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7765 if (GET_MODE (op0) == mode)
7766 ;
7767
7768 /* If OP0 is a constant, just convert it into the proper mode. */
7769 else if (CONSTANT_P (op0))
7770 {
7771 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7772 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7773
7774 if (modifier == EXPAND_INITIALIZER)
7775 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7776 subreg_lowpart_offset (mode,
7777 inner_mode));
7778 else
7779 op0 = convert_modes (mode, inner_mode, op0,
7780 TYPE_UNSIGNED (inner_type));
7781 }
7782
7783 else if (modifier == EXPAND_INITIALIZER)
7784 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7785
7786 else if (target == 0)
7787 op0 = convert_to_mode (mode, op0,
7788 TYPE_UNSIGNED (TREE_TYPE
7789 (TREE_OPERAND (exp, 0))));
7790 else
7791 {
7792 convert_move (target, op0,
7793 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7794 op0 = target;
7795 }
7796
7797 return REDUCE_BIT_FIELD (op0);
7798
7799 case VIEW_CONVERT_EXPR:
7800 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7801
7802 /* If the input and output modes are both the same, we are done. */
7803 if (TYPE_MODE (type) == GET_MODE (op0))
7804 ;
7805 /* If neither mode is BLKmode, and both modes are the same size
7806 then we can use gen_lowpart. */
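/* E.g. viewing a float as a 32-bit integer: SFmode and SImode have
   the same size, so gen_lowpart can reinterpret the bits in
   place.  */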
7807 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7808 && GET_MODE_SIZE (TYPE_MODE (type))
7809 == GET_MODE_SIZE (GET_MODE (op0)))
7810 {
7811 if (GET_CODE (op0) == SUBREG)
7812 op0 = force_reg (GET_MODE (op0), op0);
7813 op0 = gen_lowpart (TYPE_MODE (type), op0);
7814 }
7815 /* If both modes are integral, then we can convert from one to the
7816 other. */
7817 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7818 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7819 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7820 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7821 /* As a last resort, spill op0 to memory, and reload it in a
7822 different mode. */
7823 else if (!MEM_P (op0))
7824 {
7825 /* If the operand is not a MEM, force it into memory. Since we
7826 are going to be changing the mode of the MEM, don't call
7827 force_const_mem for constants because we don't allow pool
7828 constants to change mode. */
7829 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7830
7831 gcc_assert (!TREE_ADDRESSABLE (exp));
7832
7833 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7834 target
7835 = assign_stack_temp_for_type
7836 (TYPE_MODE (inner_type),
7837 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7838
7839 emit_move_insn (target, op0);
7840 op0 = target;
7841 }
7842
7843 /* At this point, OP0 is in the correct mode. If the output type is such
7844 that the operand is known to be aligned, indicate that it is.
7845 Otherwise, we need only be concerned about alignment for non-BLKmode
7846 results. */
7847 if (MEM_P (op0))
7848 {
7849 op0 = copy_rtx (op0);
7850
7851 if (TYPE_ALIGN_OK (type))
7852 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7853 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7854 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7855 {
7856 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7857 HOST_WIDE_INT temp_size
7858 = MAX (int_size_in_bytes (inner_type),
7859 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7860 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7861 temp_size, 0, type);
7862 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7863
7864 gcc_assert (!TREE_ADDRESSABLE (exp));
7865
7866 if (GET_MODE (op0) == BLKmode)
7867 emit_block_move (new_with_op0_mode, op0,
7868 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7869 (modifier == EXPAND_STACK_PARM
7870 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7871 else
7872 emit_move_insn (new_with_op0_mode, op0);
7873
7874 op0 = new;
7875 }
7876
7877 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7878 }
7879
7880 return op0;
7881
7882 case PLUS_EXPR:
7883 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7884 something else, make sure we add the register to the constant and
7885 then to the other thing. This case can occur during strength
7886 reduction and doing it this way will produce better code if the
7887 frame pointer or argument pointer is eliminated.
7888
7889 fold-const.c will ensure that the constant is always in the inner
7890 PLUS_EXPR, so the only case we need to do anything about is if
7891 sp, ap, or fp is our second argument, in which case we must swap
7892 the innermost first argument and our second argument. */
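/* E.g. (x + 4) + fp is rearranged to (fp + 4) + x, so that the
   register-plus-constant pair folds when the frame pointer is
   eliminated.  */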
7893
7894 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7895 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7896 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7897 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7898 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7899 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7900 {
7901 tree t = TREE_OPERAND (exp, 1);
7902
7903 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7904 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7905 }
7906
7907 /* If the result is to be ptr_mode and we are adding an integer to
7908 something, we might be forming a constant. So try to use
7909 plus_constant. If it produces a sum and we can't accept it,
7910 use force_operand. This allows P = &ARR[const] to generate
7911 efficient code on machines where a SYMBOL_REF is not a valid
7912 address.
7913
7914 If this is an EXPAND_SUM call, always return the sum. */
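/* E.g. for &arr[10] with 4-byte elements, plus_constant can fold
   the offset so that the result is (plus (symbol_ref arr)
   (const_int 40)), which an EXPAND_SUM caller can use directly as
   an address.  */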
7915 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7916 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7917 {
7918 if (modifier == EXPAND_STACK_PARM)
7919 target = 0;
7920 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7921 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7922 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7923 {
7924 rtx constant_part;
7925
7926 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7927 EXPAND_SUM);
7928 /* Use immed_double_const to ensure that the constant is
7929 truncated according to the mode of OP1, then sign extended
7930 to a HOST_WIDE_INT. Using the constant directly can result
7931 in non-canonical RTL in a 64x32 cross compile. */
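/* E.g. the SImode constant 0xffffffff must become the
   sign-extended CONST_INT -1 on a 64-bit host, not a
   zero-extended 0xffffffff.  */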
7932 constant_part
7933 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7934 (HOST_WIDE_INT) 0,
7935 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7936 op1 = plus_constant (op1, INTVAL (constant_part));
7937 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7938 op1 = force_operand (op1, target);
7939 return REDUCE_BIT_FIELD (op1);
7940 }
7941
7942 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7943 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7944 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7945 {
7946 rtx constant_part;
7947
7948 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7949 (modifier == EXPAND_INITIALIZER
7950 ? EXPAND_INITIALIZER : EXPAND_SUM));
7951 if (! CONSTANT_P (op0))
7952 {
7953 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7954 VOIDmode, modifier);
7955 /* Return a PLUS if modifier says it's OK. */
7956 if (modifier == EXPAND_SUM
7957 || modifier == EXPAND_INITIALIZER)
7958 return simplify_gen_binary (PLUS, mode, op0, op1);
7959 goto binop2;
7960 }
7961 /* Use immed_double_const to ensure that the constant is
7962 truncated according to the mode of OP1, then sign extended
7963 to a HOST_WIDE_INT. Using the constant directly can result
7964 in non-canonical RTL in a 64x32 cross compile. */
7965 constant_part
7966 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7967 (HOST_WIDE_INT) 0,
7968 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7969 op0 = plus_constant (op0, INTVAL (constant_part));
7970 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7971 op0 = force_operand (op0, target);
7972 return REDUCE_BIT_FIELD (op0);
7973 }
7974 }
7975
7976 /* No sense saving up arithmetic to be done
7977 if it's all in the wrong mode to form part of an address.
7978 And force_operand won't know whether to sign-extend or
7979 zero-extend. */
7980 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7981 || mode != ptr_mode)
7982 {
7983 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7984 subtarget, &op0, &op1, 0);
7985 if (op0 == const0_rtx)
7986 return op1;
7987 if (op1 == const0_rtx)
7988 return op0;
7989 goto binop2;
7990 }
7991
7992 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7993 subtarget, &op0, &op1, modifier);
7994 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7995
7996 case MINUS_EXPR:
7997 /* For initializers, we are allowed to return a MINUS of two
7998 symbolic constants.  Here we handle all cases when both operands
7999 are constant.  */
8002 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8003 && really_constant_p (TREE_OPERAND (exp, 0))
8004 && really_constant_p (TREE_OPERAND (exp, 1)))
8005 {
8006 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8007 NULL_RTX, &op0, &op1, modifier);
8008
8009 /* If the last operand is a CONST_INT, use plus_constant of
8010 the negated constant. Else make the MINUS. */
8011 if (GET_CODE (op1) == CONST_INT)
8012 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8013 else
8014 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8015 }
8016
8017 /* No sense saving up arithmetic to be done
8018 if it's all in the wrong mode to form part of an address.
8019 And force_operand won't know whether to sign-extend or
8020 zero-extend. */
8021 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8022 || mode != ptr_mode)
8023 goto binop;
8024
8025 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8026 subtarget, &op0, &op1, modifier);
8027
8028 /* Convert A - const to A + (-const). */
8029 if (GET_CODE (op1) == CONST_INT)
8030 {
8031 op1 = negate_rtx (mode, op1);
8032 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8033 }
8034
8035 goto binop2;
8036
8037 case MULT_EXPR:
8038 /* If first operand is constant, swap them.
8039 Thus the following special case checks need only
8040 check the second operand. */
8041 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8042 {
8043 tree t1 = TREE_OPERAND (exp, 0);
8044 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8045 TREE_OPERAND (exp, 1) = t1;
8046 }
8047
8048 /* Attempt to return something suitable for generating an
8049 indexed address, for machines that support that. */
8050
8051 if (modifier == EXPAND_SUM && mode == ptr_mode
8052 && host_integerp (TREE_OPERAND (exp, 1), 0))
8053 {
8054 tree exp1 = TREE_OPERAND (exp, 1);
8055
8056 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8057 EXPAND_SUM);
8058
8059 if (!REG_P (op0))
8060 op0 = force_operand (op0, NULL_RTX);
8061 if (!REG_P (op0))
8062 op0 = copy_to_mode_reg (mode, op0);
8063
8064 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8065 gen_int_mode (tree_low_cst (exp1, 0),
8066 TYPE_MODE (TREE_TYPE (exp1)))));
8067 }
8068
8069 if (modifier == EXPAND_STACK_PARM)
8070 target = 0;
8071
8072 /* Check for multiplying things that have been extended
8073 from a narrower type. If this machine supports multiplying
8074 in that narrower type with a result in the desired type,
8075 do it that way, and avoid the explicit type-conversion. */
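/* E.g. (int) (short) a * (int) (short) b can use a HImode-to-SImode
   widening multiply (a mulhisi3-style pattern) instead of extending
   both operands and doing a full SImode multiply.  */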
8076
8077 subexp0 = TREE_OPERAND (exp, 0);
8078 subexp1 = TREE_OPERAND (exp, 1);
8079 /* First, check if we have a multiplication of one signed and one
8080 unsigned operand. */
8081 if (TREE_CODE (subexp0) == NOP_EXPR
8082 && TREE_CODE (subexp1) == NOP_EXPR
8083 && TREE_CODE (type) == INTEGER_TYPE
8084 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8085 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8086 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8087 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8088 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8089 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8090 {
8091 enum machine_mode innermode
8092 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8093 this_optab = usmul_widen_optab;
8094 if (mode == GET_MODE_WIDER_MODE (innermode))
8095 {
8096 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8097 {
8098 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8099 expand_operands (TREE_OPERAND (subexp0, 0),
8100 TREE_OPERAND (subexp1, 0),
8101 NULL_RTX, &op0, &op1, 0);
8102 else
8103 expand_operands (TREE_OPERAND (subexp0, 0),
8104 TREE_OPERAND (subexp1, 0),
8105 NULL_RTX, &op1, &op0, 0);
8106
8107 goto binop3;
8108 }
8109 }
8110 }
8111 /* Check for a multiplication with matching signedness. */
8112 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8113 && TREE_CODE (type) == INTEGER_TYPE
8114 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8115 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8116 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8117 && int_fits_type_p (TREE_OPERAND (exp, 1),
8118 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8119 /* Don't use a widening multiply if a shift will do. */
8120 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8121 > HOST_BITS_PER_WIDE_INT)
8122 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8123 ||
8124 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8125 && (TYPE_PRECISION (TREE_TYPE
8126 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8127 == TYPE_PRECISION (TREE_TYPE
8128 (TREE_OPERAND
8129 (TREE_OPERAND (exp, 0), 0))))
8130 /* If both operands are extended, they must either both
8131 be zero-extended or both be sign-extended. */
8132 && (TYPE_UNSIGNED (TREE_TYPE
8133 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8134 == TYPE_UNSIGNED (TREE_TYPE
8135 (TREE_OPERAND
8136 (TREE_OPERAND (exp, 0), 0)))))))
8137 {
8138 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8139 enum machine_mode innermode = TYPE_MODE (op0type);
8140 bool zextend_p = TYPE_UNSIGNED (op0type);
8141 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8142 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8143
8144 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8145 {
8146 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8147 {
8148 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8149 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8150 TREE_OPERAND (exp, 1),
8151 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8152 else
8153 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8154 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8155 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8156 goto binop3;
8157 }
8158 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8159 && innermode == word_mode)
8160 {
8161 rtx htem, hipart;
8162 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8163 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8164 op1 = convert_modes (innermode, mode,
8165 expand_normal (TREE_OPERAND (exp, 1)),
8166 unsignedp);
8167 else
8168 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8169 temp = expand_binop (mode, other_optab, op0, op1, target,
8170 unsignedp, OPTAB_LIB_WIDEN);
8171 hipart = gen_highpart (innermode, temp);
8172 htem = expand_mult_highpart_adjust (innermode, hipart,
8173 op0, op1, hipart,
8174 zextend_p);
8175 if (htem != hipart)
8176 emit_move_insn (hipart, htem);
8177 return REDUCE_BIT_FIELD (temp);
8178 }
8179 }
8180 }
8181 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8182 subtarget, &op0, &op1, 0);
8183 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8184
8185 case TRUNC_DIV_EXPR:
8186 case FLOOR_DIV_EXPR:
8187 case CEIL_DIV_EXPR:
8188 case ROUND_DIV_EXPR:
8189 case EXACT_DIV_EXPR:
8190 if (modifier == EXPAND_STACK_PARM)
8191 target = 0;
8192 /* Possible optimization: compute the dividend with EXPAND_SUM
8193 then, if the divisor is constant, optimize the case
8194 where some terms of the dividend have coefficients divisible by it.  */
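/* E.g. (x*4 + y*8 + 12) / 4 could become x + y*2 + 3 without a
   division.  */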
8195 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8196 subtarget, &op0, &op1, 0);
8197 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8198
8199 case RDIV_EXPR:
8200 goto binop;
8201
8202 case TRUNC_MOD_EXPR:
8203 case FLOOR_MOD_EXPR:
8204 case CEIL_MOD_EXPR:
8205 case ROUND_MOD_EXPR:
8206 if (modifier == EXPAND_STACK_PARM)
8207 target = 0;
8208 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8209 subtarget, &op0, &op1, 0);
8210 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8211
8212 case FIX_TRUNC_EXPR:
8213 op0 = expand_normal (TREE_OPERAND (exp, 0));
8214 if (target == 0 || modifier == EXPAND_STACK_PARM)
8215 target = gen_reg_rtx (mode);
8216 expand_fix (target, op0, unsignedp);
8217 return target;
8218
8219 case FLOAT_EXPR:
8220 op0 = expand_normal (TREE_OPERAND (exp, 0));
8221 if (target == 0 || modifier == EXPAND_STACK_PARM)
8222 target = gen_reg_rtx (mode);
8223 /* expand_float can't figure out what to do if FROM has VOIDmode.
8224 So give it the correct mode. With -O, cse will optimize this. */
8225 if (GET_MODE (op0) == VOIDmode)
8226 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8227 op0);
8228 expand_float (target, op0,
8229 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8230 return target;
8231
8232 case NEGATE_EXPR:
8233 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8234 if (modifier == EXPAND_STACK_PARM)
8235 target = 0;
8236 temp = expand_unop (mode,
8237 optab_for_tree_code (NEGATE_EXPR, type),
8238 op0, target, 0);
8239 gcc_assert (temp);
8240 return REDUCE_BIT_FIELD (temp);
8241
8242 case ABS_EXPR:
8243 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8244 if (modifier == EXPAND_STACK_PARM)
8245 target = 0;
8246
8247 /* ABS_EXPR is not valid for complex arguments. */
8248 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8249 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8250
8251 /* Unsigned abs is simply the operand. Testing here means we don't
8252 risk generating incorrect code below. */
8253 if (TYPE_UNSIGNED (type))
8254 return op0;
8255
8256 return expand_abs (mode, op0, target, unsignedp,
8257 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8258
8259 case MAX_EXPR:
8260 case MIN_EXPR:
8261 target = original_target;
8262 if (target == 0
8263 || modifier == EXPAND_STACK_PARM
8264 || (MEM_P (target) && MEM_VOLATILE_P (target))
8265 || GET_MODE (target) != mode
8266 || (REG_P (target)
8267 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8268 target = gen_reg_rtx (mode);
8269 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8270 target, &op0, &op1, 0);
8271
8272 /* First try to do it with a special MIN or MAX instruction.
8273 If that does not win, use a conditional jump to select the proper
8274 value. */
8275 this_optab = optab_for_tree_code (code, type);
8276 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8277 OPTAB_WIDEN);
8278 if (temp != 0)
8279 return temp;
8280
8281 /* At this point, a MEM target is no longer useful; we will get better
8282 code without it. */
8283
8284 if (! REG_P (target))
8285 target = gen_reg_rtx (mode);
8286
8287 /* If op1 was placed in target, swap op0 and op1. */
8288 if (target != op0 && target == op1)
8289 {
8290 temp = op0;
8291 op0 = op1;
8292 op1 = temp;
8293 }
8294
8295 /* We generate better code and avoid problems with op1 mentioning
8296 target by forcing op1 into a pseudo if it isn't a constant. */
8297 if (! CONSTANT_P (op1))
8298 op1 = force_reg (mode, op1);
8299
8300 {
8301 enum rtx_code comparison_code;
8302 rtx cmpop1 = op1;
8303
8304 if (code == MAX_EXPR)
8305 comparison_code = unsignedp ? GEU : GE;
8306 else
8307 comparison_code = unsignedp ? LEU : LE;
8308
8309 /* Canonicalize to comparisons against 0. */
8310 if (op1 == const1_rtx)
8311 {
8312 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8313 or (a != 0 ? a : 1) for unsigned.
8314 For MIN we are safe converting (a <= 1 ? a : 1)
8315 into (a <= 0 ? a : 1) */
8316 cmpop1 = const0_rtx;
8317 if (code == MAX_EXPR)
8318 comparison_code = unsignedp ? NE : GT;
8319 }
8320 if (op1 == constm1_rtx && !unsignedp)
8321 {
8322 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8323 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8324 cmpop1 = const0_rtx;
8325 if (code == MIN_EXPR)
8326 comparison_code = LT;
8327 }
8328 #ifdef HAVE_conditional_move
8329 /* Use a conditional move if possible. */
8330 if (can_conditionally_move_p (mode))
8331 {
8332 rtx insn;
8333
8334 /* ??? Same problem as in expmed.c: emit_conditional_move
8335 forces a stack adjustment via compare_from_rtx, and we
8336 lose the stack adjustment if the sequence we are about
8337 to create is discarded. */
8338 do_pending_stack_adjust ();
8339
8340 start_sequence ();
8341
8342 /* Try to emit the conditional move. */
8343 insn = emit_conditional_move (target, comparison_code,
8344 op0, cmpop1, mode,
8345 op0, op1, mode,
8346 unsignedp);
8347
8348 /* If we could do the conditional move, emit the sequence,
8349 and return. */
8350 if (insn)
8351 {
8352 rtx seq = get_insns ();
8353 end_sequence ();
8354 emit_insn (seq);
8355 return target;
8356 }
8357
8358 /* Otherwise discard the sequence and fall back to code with
8359 branches. */
8360 end_sequence ();
8361 }
8362 #endif
8363 if (target != op0)
8364 emit_move_insn (target, op0);
8365
8366 temp = gen_label_rtx ();
8367 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8368 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8369 }
8370 emit_move_insn (target, op1);
8371 emit_label (temp);
8372 return target;
8373
8374 case BIT_NOT_EXPR:
8375 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8376 if (modifier == EXPAND_STACK_PARM)
8377 target = 0;
8378 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8379 gcc_assert (temp);
8380 return temp;
8381
8382 /* ??? Can optimize bitwise operations with one arg constant.
8383 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8384 and (a bitwise1 b) bitwise2 b (etc)
8385 but that is probably not worthwhile.  */
8386
8387 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8388 boolean values when we want in all cases to compute both of them. In
8389 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8390 as actual zero-or-1 values and then bitwise anding. In cases where
8391 there cannot be any side effects, better code would be made by
8392 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8393 how to recognize those cases. */
8394
8395 case TRUTH_AND_EXPR:
8396 code = BIT_AND_EXPR;
8397 case BIT_AND_EXPR:
8398 goto binop;
8399
8400 case TRUTH_OR_EXPR:
8401 code = BIT_IOR_EXPR;
8402 case BIT_IOR_EXPR:
8403 goto binop;
8404
8405 case TRUTH_XOR_EXPR:
8406 code = BIT_XOR_EXPR;
8407 case BIT_XOR_EXPR:
8408 goto binop;
8409
8410 case LSHIFT_EXPR:
8411 case RSHIFT_EXPR:
8412 case LROTATE_EXPR:
8413 case RROTATE_EXPR:
8414 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8415 subtarget = 0;
8416 if (modifier == EXPAND_STACK_PARM)
8417 target = 0;
8418 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8419 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8420 unsignedp);
8421
8422 /* Could determine the answer when only additive constants differ. Also,
8423 the addition of one can be handled by changing the condition. */
8424 case LT_EXPR:
8425 case LE_EXPR:
8426 case GT_EXPR:
8427 case GE_EXPR:
8428 case EQ_EXPR:
8429 case NE_EXPR:
8430 case UNORDERED_EXPR:
8431 case ORDERED_EXPR:
8432 case UNLT_EXPR:
8433 case UNLE_EXPR:
8434 case UNGT_EXPR:
8435 case UNGE_EXPR:
8436 case UNEQ_EXPR:
8437 case LTGT_EXPR:
8438 temp = do_store_flag (exp,
8439 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8440 tmode != VOIDmode ? tmode : mode, 0);
8441 if (temp != 0)
8442 return temp;
8443
8444 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8445 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8446 && original_target
8447 && REG_P (original_target)
8448 && (GET_MODE (original_target)
8449 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8450 {
8451 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8452 VOIDmode, 0);
8453
8454 /* If temp is constant, we can just compute the result. */
8455 if (GET_CODE (temp) == CONST_INT)
8456 {
8457 if (INTVAL (temp) != 0)
8458 emit_move_insn (target, const1_rtx);
8459 else
8460 emit_move_insn (target, const0_rtx);
8461
8462 return target;
8463 }
8464
8465 if (temp != original_target)
8466 {
8467 enum machine_mode mode1 = GET_MODE (temp);
8468 if (mode1 == VOIDmode)
8469 mode1 = tmode != VOIDmode ? tmode : mode;
8470
8471 temp = copy_to_mode_reg (mode1, temp);
8472 }
8473
8474 op1 = gen_label_rtx ();
8475 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8476 GET_MODE (temp), unsignedp, op1);
8477 emit_move_insn (temp, const1_rtx);
8478 emit_label (op1);
8479 return temp;
8480 }
8481
8482 /* If no set-flag instruction, must generate a conditional store
8483 into a temporary variable. Drop through and handle this
8484 like && and ||. */
8485
8486 if (! ignore
8487 && (target == 0
8488 || modifier == EXPAND_STACK_PARM
8489 || ! safe_from_p (target, exp, 1)
8490 /* Make sure we don't have a hard reg (such as function's return
8491 value) live across basic blocks, if not optimizing. */
8492 || (!optimize && REG_P (target)
8493 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8494 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8495
8496 if (target)
8497 emit_move_insn (target, const0_rtx);
8498
8499 op1 = gen_label_rtx ();
8500 jumpifnot (exp, op1);
8501
8502 if (target)
8503 emit_move_insn (target, const1_rtx);
8504
8505 emit_label (op1);
8506 return ignore ? const0_rtx : target;
8507
8508 case TRUTH_NOT_EXPR:
8509 if (modifier == EXPAND_STACK_PARM)
8510 target = 0;
8511 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8512 /* The parser is careful to generate TRUTH_NOT_EXPR
8513 only with operands that are always zero or one. */
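/* XORing a zero-or-one value with 1 flips it, which is exactly
   logical negation for such operands.  */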
8514 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8515 target, 1, OPTAB_LIB_WIDEN);
8516 gcc_assert (temp);
8517 return temp;
8518
8519 case STATEMENT_LIST:
8520 {
8521 tree_stmt_iterator iter;
8522
8523 gcc_assert (ignore);
8524
8525 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8526 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8527 }
8528 return const0_rtx;
8529
8530 case COND_EXPR:
8531 /* A COND_EXPR with its type being VOID_TYPE represents a
8532 conditional jump and is handled in
8533 expand_gimple_cond_expr. */
8534 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8535
8536 /* Note that COND_EXPRs whose type is a structure or union
8537 are required to be constructed to contain assignments of
8538 a temporary variable, so that we can evaluate them here
8539 for side effect only. If type is void, we must do likewise. */
8540
8541 gcc_assert (!TREE_ADDRESSABLE (type)
8542 && !ignore
8543 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8544 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8545
8546 /* If we are not to produce a result, we have no target. Otherwise,
8547 if a target was specified use it; it will not be used as an
8548 intermediate target unless it is safe. If no target, use a
8549 temporary. */
8550
8551 if (modifier != EXPAND_STACK_PARM
8552 && original_target
8553 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8554 && GET_MODE (original_target) == mode
8555 #ifdef HAVE_conditional_move
8556 && (! can_conditionally_move_p (mode)
8557 || REG_P (original_target))
8558 #endif
8559 && !MEM_P (original_target))
8560 temp = original_target;
8561 else
8562 temp = assign_temp (type, 0, 0, 1);
8563
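/* The expansion emitted below has the shape:
       if (!cond) goto L0;
       temp = <then-value>; goto L1;
     L0:
       temp = <else-value>;
     L1:  */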
8564 do_pending_stack_adjust ();
8565 NO_DEFER_POP;
8566 op0 = gen_label_rtx ();
8567 op1 = gen_label_rtx ();
8568 jumpifnot (TREE_OPERAND (exp, 0), op0);
8569 store_expr (TREE_OPERAND (exp, 1), temp,
8570 modifier == EXPAND_STACK_PARM);
8571
8572 emit_jump_insn (gen_jump (op1));
8573 emit_barrier ();
8574 emit_label (op0);
8575 store_expr (TREE_OPERAND (exp, 2), temp,
8576 modifier == EXPAND_STACK_PARM);
8577
8578 emit_label (op1);
8579 OK_DEFER_POP;
8580 return temp;
8581
8582 case VEC_COND_EXPR:
8583 target = expand_vec_cond_expr (exp, target);
8584 return target;
8585
8586 case GIMPLE_MODIFY_STMT:
8587 {
8588 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
8589 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
8590
8591 gcc_assert (ignore);
8592
8593 /* Check for |= or &= of a bitfield of size one into another bitfield
8594 of size one.  In this case, unless we need the result of the
8595 assignment, we can do this more efficiently with a
8596 test followed by an assignment, if necessary.
8597
8598 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8599 things change so we do, this code should be enhanced to
8600 support it. */
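/* E.g. for one-bit fields, "lhs |= rhs.bit" becomes
   "if (rhs.bit) lhs = 1;" and "lhs &= rhs.bit" becomes
   "if (!rhs.bit) lhs = 0;".  */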
8601 if (TREE_CODE (lhs) == COMPONENT_REF
8602 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8603 || TREE_CODE (rhs) == BIT_AND_EXPR)
8604 && TREE_OPERAND (rhs, 0) == lhs
8605 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8606 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8607 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8608 {
8609 rtx label = gen_label_rtx ();
8610 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8611 do_jump (TREE_OPERAND (rhs, 1),
8612 value ? label : 0,
8613 value ? 0 : label);
8614 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
8615 do_pending_stack_adjust ();
8616 emit_label (label);
8617 return const0_rtx;
8618 }
8619
8620 expand_assignment (lhs, rhs);
8621
8622 return const0_rtx;
8623 }
8624
8625 case RETURN_EXPR:
8626 if (!TREE_OPERAND (exp, 0))
8627 expand_null_return ();
8628 else
8629 expand_return (TREE_OPERAND (exp, 0));
8630 return const0_rtx;
8631
8632 case ADDR_EXPR:
8633 return expand_expr_addr_expr (exp, target, tmode, modifier);
8634
8635 case COMPLEX_EXPR:
8636 /* Get the rtx code of the operands. */
8637 op0 = expand_normal (TREE_OPERAND (exp, 0));
8638 op1 = expand_normal (TREE_OPERAND (exp, 1));
8639
8640 if (!target)
8641 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8642
8643 /* Move the real (op0) and imaginary (op1) parts to their location. */
8644 write_complex_part (target, op0, false);
8645 write_complex_part (target, op1, true);
8646
8647 return target;
8648
8649 case REALPART_EXPR:
8650 op0 = expand_normal (TREE_OPERAND (exp, 0));
8651 return read_complex_part (op0, false);
8652
8653 case IMAGPART_EXPR:
8654 op0 = expand_normal (TREE_OPERAND (exp, 0));
8655 return read_complex_part (op0, true);
8656
8657 case RESX_EXPR:
8658 expand_resx_expr (exp);
8659 return const0_rtx;
8660
8661 case TRY_CATCH_EXPR:
8662 case CATCH_EXPR:
8663 case EH_FILTER_EXPR:
8664 case TRY_FINALLY_EXPR:
8665 /* Lowered by tree-eh.c. */
8666 gcc_unreachable ();
8667
8668 case WITH_CLEANUP_EXPR:
8669 case CLEANUP_POINT_EXPR:
8670 case TARGET_EXPR:
8671 case CASE_LABEL_EXPR:
8672 case VA_ARG_EXPR:
8673 case BIND_EXPR:
8674 case INIT_EXPR:
8675 case CONJ_EXPR:
8676 case COMPOUND_EXPR:
8677 case PREINCREMENT_EXPR:
8678 case PREDECREMENT_EXPR:
8679 case POSTINCREMENT_EXPR:
8680 case POSTDECREMENT_EXPR:
8681 case LOOP_EXPR:
8682 case EXIT_EXPR:
8683 case TRUTH_ANDIF_EXPR:
8684 case TRUTH_ORIF_EXPR:
8685 /* Lowered by gimplify.c. */
8686 gcc_unreachable ();
8687
8688 case EXC_PTR_EXPR:
8689 return get_exception_pointer (cfun);
8690
8691 case FILTER_EXPR:
8692 return get_exception_filter (cfun);
8693
8694 case FDESC_EXPR:
8695 /* Function descriptors are not valid except as
8696 initialization constants, and should not be expanded. */
8697 gcc_unreachable ();
8698
8699 case SWITCH_EXPR:
8700 expand_case (exp);
8701 return const0_rtx;
8702
8703 case LABEL_EXPR:
8704 expand_label (TREE_OPERAND (exp, 0));
8705 return const0_rtx;
8706
8707 case ASM_EXPR:
8708 expand_asm_expr (exp);
8709 return const0_rtx;
8710
8711 case WITH_SIZE_EXPR:
8712 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8713 have pulled out the size to use in whatever context it needed. */
8714 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8715 modifier, alt_rtl);
8716
8717 case REALIGN_LOAD_EXPR:
8718 {
8719 tree oprnd0 = TREE_OPERAND (exp, 0);
8720 tree oprnd1 = TREE_OPERAND (exp, 1);
8721 tree oprnd2 = TREE_OPERAND (exp, 2);
8722 rtx op2;
8723
8724 this_optab = optab_for_tree_code (code, type);
8725 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8726 op2 = expand_normal (oprnd2);
8727 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8728 target, unsignedp);
8729 gcc_assert (temp);
8730 return temp;
8731 }
8732
8733 case DOT_PROD_EXPR:
8734 {
8735 tree oprnd0 = TREE_OPERAND (exp, 0);
8736 tree oprnd1 = TREE_OPERAND (exp, 1);
8737 tree oprnd2 = TREE_OPERAND (exp, 2);
8738 rtx op2;
8739
8740 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8741 op2 = expand_normal (oprnd2);
8742 target = expand_widen_pattern_expr (exp, op0, op1, op2,
8743 target, unsignedp);
8744 return target;
8745 }
8746
8747 case WIDEN_SUM_EXPR:
8748 {
8749 tree oprnd0 = TREE_OPERAND (exp, 0);
8750 tree oprnd1 = TREE_OPERAND (exp, 1);
8751
8752 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8753 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8754 target, unsignedp);
8755 return target;
8756 }
8757
8758 case REDUC_MAX_EXPR:
8759 case REDUC_MIN_EXPR:
8760 case REDUC_PLUS_EXPR:
8761 {
8762 op0 = expand_normal (TREE_OPERAND (exp, 0));
8763 this_optab = optab_for_tree_code (code, type);
8764 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8765 gcc_assert (temp);
8766 return temp;
8767 }
8768
8769 case VEC_EXTRACT_EVEN_EXPR:
8770 case VEC_EXTRACT_ODD_EXPR:
8771 {
8772 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8773 NULL_RTX, &op0, &op1, 0);
8774 this_optab = optab_for_tree_code (code, type);
8775 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8776 OPTAB_WIDEN);
8777 gcc_assert (temp);
8778 return temp;
8779 }
8780
8781 case VEC_INTERLEAVE_HIGH_EXPR:
8782 case VEC_INTERLEAVE_LOW_EXPR:
8783 {
8784 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8785 NULL_RTX, &op0, &op1, 0);
8786 this_optab = optab_for_tree_code (code, type);
8787 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8788 OPTAB_WIDEN);
8789 gcc_assert (temp);
8790 return temp;
8791 }
8792
8793 case VEC_LSHIFT_EXPR:
8794 case VEC_RSHIFT_EXPR:
8795 {
8796 target = expand_vec_shift_expr (exp, target);
8797 return target;
8798 }
8799
8800 case VEC_UNPACK_HI_EXPR:
8801 case VEC_UNPACK_LO_EXPR:
8802 {
8803 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8804 this_optab = optab_for_tree_code (code, type);
8805 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
8806 target, unsignedp);
8807 gcc_assert (temp);
8808 return temp;
8809 }
8810
8811 case VEC_WIDEN_MULT_HI_EXPR:
8812 case VEC_WIDEN_MULT_LO_EXPR:
8813 {
8814 tree oprnd0 = TREE_OPERAND (exp, 0);
8815 tree oprnd1 = TREE_OPERAND (exp, 1);
8816
8817 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8818 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
8819 target, unsignedp);
8820 gcc_assert (target);
8821 return target;
8822 }
8823
8824 case VEC_PACK_MOD_EXPR:
8825 case VEC_PACK_SAT_EXPR:
8826 {
8827 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8828 goto binop;
8829 }
8830
8831 default:
8832 return lang_hooks.expand_expr (exp, original_target, tmode,
8833 modifier, alt_rtl);
8834 }
8835
8836 /* Here to do an ordinary binary operator. */
8837 binop:
8838 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8839 subtarget, &op0, &op1, 0);
8840 binop2:
8841 this_optab = optab_for_tree_code (code, type);
8842 binop3:
8843 if (modifier == EXPAND_STACK_PARM)
8844 target = 0;
8845 temp = expand_binop (mode, this_optab, op0, op1, target,
8846 unsignedp, OPTAB_LIB_WIDEN);
8847 gcc_assert (temp);
8848 return REDUCE_BIT_FIELD (temp);
8849 }
8850 #undef REDUCE_BIT_FIELD
8851 \f
8852 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8853 signedness of TYPE), possibly returning the result in TARGET. */
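/* An illustrative note (not from the original sources): on a target
   where SImode is 32 bits wide, reducing an SImode value to an
   unsigned 3-bit type ANDs it with (1 << 3) - 1 == 7, while reducing
   it to a signed 3-bit type shifts left by 32 - 3 == 29 bits and then
   arithmetically right by 29 bits, replicating the sign bit.  */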
8854 static rtx
8855 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8856 {
8857 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8858 if (target && GET_MODE (target) != GET_MODE (exp))
8859 target = 0;
8860 if (TYPE_UNSIGNED (type))
8861 {
8862 rtx mask;
8863 if (prec < HOST_BITS_PER_WIDE_INT)
8864 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8865 GET_MODE (exp));
8866 else
8867 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8868 ((unsigned HOST_WIDE_INT) 1
8869 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8870 GET_MODE (exp));
8871 return expand_and (GET_MODE (exp), exp, mask, target);
8872 }
8873 else
8874 {
8875 tree count = build_int_cst (NULL_TREE,
8876 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8877 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8878 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8879 }
8880 }
8881 \f
8882 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
8883 when applied to the address of EXP, produces an address known to be
8884 aligned to more than BIGGEST_ALIGNMENT. */
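/* For example (an illustrative sketch, not from the original sources),
   the aligning offset computed for an over-aligned object typically
   has the form
       (- (intptr_t) &exp) & (ALIGN - 1)
   which, when added to &exp, rounds the address up to the next
   multiple of ALIGN; the BIT_AND_EXPR / NEGATE_EXPR / ADDR_EXPR shape
   checked below matches exactly this pattern.  */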
8885
8886 static int
8887 is_aligning_offset (tree offset, tree exp)
8888 {
8889 /* Strip off any conversions. */
8890 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8891 || TREE_CODE (offset) == NOP_EXPR
8892 || TREE_CODE (offset) == CONVERT_EXPR)
8893 offset = TREE_OPERAND (offset, 0);
8894
8895 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8896 a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT. */
8897 if (TREE_CODE (offset) != BIT_AND_EXPR
8898 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8899 || compare_tree_int (TREE_OPERAND (offset, 1),
8900 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8901 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8902 return 0;
8903
8904 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8905 It must be NEGATE_EXPR. Then strip any more conversions. */
8906 offset = TREE_OPERAND (offset, 0);
8907 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8908 || TREE_CODE (offset) == NOP_EXPR
8909 || TREE_CODE (offset) == CONVERT_EXPR)
8910 offset = TREE_OPERAND (offset, 0);
8911
8912 if (TREE_CODE (offset) != NEGATE_EXPR)
8913 return 0;
8914
8915 offset = TREE_OPERAND (offset, 0);
8916 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8917 || TREE_CODE (offset) == NOP_EXPR
8918 || TREE_CODE (offset) == CONVERT_EXPR)
8919 offset = TREE_OPERAND (offset, 0);
8920
8921 /* This must now be the address of EXP. */
8922 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8923 }
8924 \f
8925 /* Return the tree node if ARG corresponds to a string constant, or zero
8926 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8927 in bytes within the string that ARG is accessing. The type of the
8928 offset will be `sizetype'. */
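/* For instance (illustrative only): for ARG == &"hello"[2], and
   likewise for "hello" + 2, this returns the STRING_CST "hello" and
   sets *PTR_OFFSET to (sizetype) 2.  */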
8929
8930 tree
8931 string_constant (tree arg, tree *ptr_offset)
8932 {
8933 tree array, offset, lower_bound;
8934 STRIP_NOPS (arg);
8935
8936 if (TREE_CODE (arg) == ADDR_EXPR)
8937 {
8938 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8939 {
8940 *ptr_offset = size_zero_node;
8941 return TREE_OPERAND (arg, 0);
8942 }
8943 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8944 {
8945 array = TREE_OPERAND (arg, 0);
8946 offset = size_zero_node;
8947 }
8948 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8949 {
8950 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8951 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8952 if (TREE_CODE (array) != STRING_CST
8953 && TREE_CODE (array) != VAR_DECL)
8954 return 0;
8955
8956 /* Check if the array has a non-zero lower bound. */
8957 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
8958 if (!integer_zerop (lower_bound))
8959 {
8960 /* If the offset and lower bound aren't both constants, return 0. */
8961 if (TREE_CODE (lower_bound) != INTEGER_CST)
8962 return 0;
8963 if (TREE_CODE (offset) != INTEGER_CST)
8964 return 0;
8965 /* Adjust offset by the lower bound. */
8966 offset = size_diffop (fold_convert (sizetype, offset),
8967 fold_convert (sizetype, lower_bound));
8968 }
8969 }
8970 else
8971 return 0;
8972 }
8973 else if (TREE_CODE (arg) == PLUS_EXPR)
8974 {
8975 tree arg0 = TREE_OPERAND (arg, 0);
8976 tree arg1 = TREE_OPERAND (arg, 1);
8977
8978 STRIP_NOPS (arg0);
8979 STRIP_NOPS (arg1);
8980
8981 if (TREE_CODE (arg0) == ADDR_EXPR
8982 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8983 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8984 {
8985 array = TREE_OPERAND (arg0, 0);
8986 offset = arg1;
8987 }
8988 else if (TREE_CODE (arg1) == ADDR_EXPR
8989 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8990 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8991 {
8992 array = TREE_OPERAND (arg1, 0);
8993 offset = arg0;
8994 }
8995 else
8996 return 0;
8997 }
8998 else
8999 return 0;
9000
9001 if (TREE_CODE (array) == STRING_CST)
9002 {
9003 *ptr_offset = fold_convert (sizetype, offset);
9004 return array;
9005 }
9006 else if (TREE_CODE (array) == VAR_DECL)
9007 {
9008 int length;
9009
9010 /* Variables initialized to string literals can be handled too. */
9011 if (DECL_INITIAL (array) == NULL_TREE
9012 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9013 return 0;
9014
9015 /* The array must be read-only, free of side effects, and bind locally. */
9016 if (! TREE_READONLY (array)
9017 || TREE_SIDE_EFFECTS (array)
9018 || ! targetm.binds_local_p (array))
9019 return 0;
9020
9021 /* Avoid const char foo[4] = "abcde"; */
9022 if (DECL_SIZE_UNIT (array) == NULL_TREE
9023 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9024 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9025 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9026 return 0;
9027
9028 /* If the variable is bigger than the string literal, OFFSET must be
9029 constant and within the bounds of the string literal. */
9030 offset = fold_convert (sizetype, offset);
9031 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9032 && (! host_integerp (offset, 1)
9033 || compare_tree_int (offset, length) >= 0))
9034 return 0;
9035
9036 *ptr_offset = offset;
9037 return DECL_INITIAL (array);
9038 }
9039
9040 return 0;
9041 }
9042 \f
9043 /* Generate code to calculate EXP using a store-flag instruction
9044 and return an rtx for the result. EXP is either a comparison
9045 or a TRUTH_NOT_EXPR whose operand is a comparison.
9046
9047 If TARGET is nonzero, store the result there if convenient.
9048
9049 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9050 cheap.
9051
9052 Return zero if there is no suitable set-flag instruction
9053 available on this machine.
9054
9055 Once expand_expr has been called on the arguments of the comparison,
9056 we are committed to doing the store flag, since it is not safe to
9057 re-evaluate the expression. We emit the store-flag insn by calling
9058 emit_store_flag, but only expand the arguments if we have a reason
9059 to believe that emit_store_flag will be successful. If we think that
9060 it will be but it is not, we have to simulate the store-flag with a
9061 set/jump/set sequence. */
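/* An illustrative sketch (not from the original sources): for
       r = (a < b);
   a successful expansion emits a single store-flag (scc) insn that
   sets R to 1 or 0 directly, while the fallback expands roughly to
       r = 1; if (a < b) goto L; r = 0; L:;
   with the two constants swapped when the result must be inverted.  */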
9062
9063 static rtx
9064 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9065 {
9066 enum rtx_code code;
9067 tree arg0, arg1, type;
9068 tree tem;
9069 enum machine_mode operand_mode;
9070 int invert = 0;
9071 int unsignedp;
9072 rtx op0, op1;
9073 enum insn_code icode;
9074 rtx subtarget = target;
9075 rtx result, label;
9076
9077 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9078 result at the end. We can't simply invert the test since it would
9079 have already been inverted if it were valid. This case occurs for
9080 some floating-point comparisons. */
9081
9082 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9083 invert = 1, exp = TREE_OPERAND (exp, 0);
9084
9085 arg0 = TREE_OPERAND (exp, 0);
9086 arg1 = TREE_OPERAND (exp, 1);
9087
9088 /* Don't crash if the comparison was erroneous. */
9089 if (arg0 == error_mark_node || arg1 == error_mark_node)
9090 return const0_rtx;
9091
9092 type = TREE_TYPE (arg0);
9093 operand_mode = TYPE_MODE (type);
9094 unsignedp = TYPE_UNSIGNED (type);
9095
9096 /* We won't bother with BLKmode store-flag operations because it would mean
9097 passing a lot of information to emit_store_flag. */
9098 if (operand_mode == BLKmode)
9099 return 0;
9100
9101 /* We won't bother with store-flag operations involving function pointers
9102 when function pointers must be canonicalized before comparisons. */
9103 #ifdef HAVE_canonicalize_funcptr_for_compare
9104 if (HAVE_canonicalize_funcptr_for_compare
9105 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9106 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9107 == FUNCTION_TYPE))
9108 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9109 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9110 == FUNCTION_TYPE))))
9111 return 0;
9112 #endif
9113
9114 STRIP_NOPS (arg0);
9115 STRIP_NOPS (arg1);
9116
9117 /* Get the rtx comparison code to use. We know that EXP is a comparison
9118 operation of some type. Some comparisons against 1 and -1 can be
9119 converted to comparisons with zero. Do so here so that the tests
9120 below will be aware that we have a comparison with zero. These
9121 tests will not catch constants in the first operand, but constants
9122 are rarely passed as the first operand. */
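  /* For instance, the signed comparison A < 1 is handled below as
     A <= 0, and A > -1 as A >= 0, so the zero-comparison checks made
     later also catch these forms.  */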
9123
9124 switch (TREE_CODE (exp))
9125 {
9126 case EQ_EXPR:
9127 code = EQ;
9128 break;
9129 case NE_EXPR:
9130 code = NE;
9131 break;
9132 case LT_EXPR:
9133 if (integer_onep (arg1))
9134 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9135 else
9136 code = unsignedp ? LTU : LT;
9137 break;
9138 case LE_EXPR:
9139 if (! unsignedp && integer_all_onesp (arg1))
9140 arg1 = integer_zero_node, code = LT;
9141 else
9142 code = unsignedp ? LEU : LE;
9143 break;
9144 case GT_EXPR:
9145 if (! unsignedp && integer_all_onesp (arg1))
9146 arg1 = integer_zero_node, code = GE;
9147 else
9148 code = unsignedp ? GTU : GT;
9149 break;
9150 case GE_EXPR:
9151 if (integer_onep (arg1))
9152 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9153 else
9154 code = unsignedp ? GEU : GE;
9155 break;
9156
9157 case UNORDERED_EXPR:
9158 code = UNORDERED;
9159 break;
9160 case ORDERED_EXPR:
9161 code = ORDERED;
9162 break;
9163 case UNLT_EXPR:
9164 code = UNLT;
9165 break;
9166 case UNLE_EXPR:
9167 code = UNLE;
9168 break;
9169 case UNGT_EXPR:
9170 code = UNGT;
9171 break;
9172 case UNGE_EXPR:
9173 code = UNGE;
9174 break;
9175 case UNEQ_EXPR:
9176 code = UNEQ;
9177 break;
9178 case LTGT_EXPR:
9179 code = LTGT;
9180 break;
9181
9182 default:
9183 gcc_unreachable ();
9184 }
9185
9186 /* Put a constant second. */
9187 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9188 {
9189 tem = arg0; arg0 = arg1; arg1 = tem;
9190 code = swap_condition (code);
9191 }
9192
9193 /* If this is an equality or inequality test of a single bit, we can
9194 do this by shifting the bit being tested to the low-order bit and
9195 masking the result with the constant 1. If the condition was EQ,
9196 we xor it with 1. This does not require an scc insn and is faster
9197 than an scc insn even if one is available.
9198
9199 The code to make this transformation was moved into fold_single_bit_test,
9200 so we just call into the folder and expand its result. */
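  /* For instance (roughly, not a literal dump of the folder's output):
     (X & 8) != 0 is rewritten as (X >> 3) & 1, and (X & 8) == 0 as
     ((X >> 3) & 1) ^ 1.  */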
9201
9202 if ((code == NE || code == EQ)
9203 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9204 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9205 {
9206 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9207 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9208 arg0, arg1, type),
9209 target, VOIDmode, EXPAND_NORMAL);
9210 }
9211
9212 /* Now see if we are likely to be able to do this. Return if not. */
9213 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9214 return 0;
9215
9216 icode = setcc_gen_code[(int) code];
9217
9218 if (icode == CODE_FOR_nothing)
9219 {
9220 enum machine_mode wmode;
9221
9222 for (wmode = operand_mode;
9223 icode == CODE_FOR_nothing && wmode != VOIDmode;
9224 wmode = GET_MODE_WIDER_MODE (wmode))
9225 icode = cstore_optab->handlers[(int) wmode].insn_code;
9226 }
9227
9228 if (icode == CODE_FOR_nothing
9229 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9230 {
9231 /* We can only do this if it is one of the special cases that
9232 can be handled without an scc insn. */
9233 if ((code == LT && integer_zerop (arg1))
9234 || (! only_cheap && code == GE && integer_zerop (arg1)))
9235 ;
9236 else if (! only_cheap && (code == NE || code == EQ)
9237 && TREE_CODE (type) != REAL_TYPE
9238 && ((abs_optab->handlers[(int) operand_mode].insn_code
9239 != CODE_FOR_nothing)
9240 || (ffs_optab->handlers[(int) operand_mode].insn_code
9241 != CODE_FOR_nothing)))
9242 ;
9243 else
9244 return 0;
9245 }
9246
9247 if (! get_subtarget (target)
9248 || GET_MODE (subtarget) != operand_mode)
9249 subtarget = 0;
9250
9251 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9252
9253 if (target == 0)
9254 target = gen_reg_rtx (mode);
9255
9256 result = emit_store_flag (target, code, op0, op1,
9257 operand_mode, unsignedp, 1);
9258
9259 if (result)
9260 {
9261 if (invert)
9262 result = expand_binop (mode, xor_optab, result, const1_rtx,
9263 result, 0, OPTAB_LIB_WIDEN);
9264 return result;
9265 }
9266
9267 /* If this failed, we have to do this with set/compare/jump/set code. */
9268 if (!REG_P (target)
9269 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9270 target = gen_reg_rtx (GET_MODE (target));
9271
9272 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9273 label = gen_label_rtx ();
9274 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9275 NULL_RTX, label);
9276
9277 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9278 emit_label (label);
9279
9280 return target;
9281 }
9282 \f
9283
9284 /* Stubs in case we haven't got a casesi insn. */
9285 #ifndef HAVE_casesi
9286 # define HAVE_casesi 0
9287 # define gen_casesi(a, b, c, d, e) (0)
9288 # define CODE_FOR_casesi CODE_FOR_nothing
9289 #endif
9290
9291 /* If the machine does not have a case insn that compares the bounds,
9292 this means extra overhead for dispatch tables, which raises the
9293 threshold for using them. */
9294 #ifndef CASE_VALUES_THRESHOLD
9295 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9296 #endif /* CASE_VALUES_THRESHOLD */
9297
9298 unsigned int
9299 case_values_threshold (void)
9300 {
9301 return CASE_VALUES_THRESHOLD;
9302 }
9303
9304 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9305 0 otherwise (i.e. if there is no casesi instruction). */
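/* Where available, a casesi pattern performs the bounds check and the
   table dispatch in a single insn; schematically (an illustrative
   description, not a literal expansion):
       if ((unsigned) (index - minval) > range) goto default_label;
       goto *table_label[index - minval];  */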
9306 int
9307 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9308 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9309 {
9310 enum machine_mode index_mode = SImode;
9311 int index_bits = GET_MODE_BITSIZE (index_mode);
9312 rtx op1, op2, index;
9313 enum machine_mode op_mode;
9314
9315 if (! HAVE_casesi)
9316 return 0;
9317
9318 /* Convert the index to SImode. */
9319 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9320 {
9321 enum machine_mode omode = TYPE_MODE (index_type);
9322 rtx rangertx = expand_normal (range);
9323
9324 /* We must handle the endpoints in the original mode. */
9325 index_expr = build2 (MINUS_EXPR, index_type,
9326 index_expr, minval);
9327 minval = integer_zero_node;
9328 index = expand_normal (index_expr);
9329 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9330 omode, 1, default_label);
9331 /* Now we can safely truncate. */
9332 index = convert_to_mode (index_mode, index, 0);
9333 }
9334 else
9335 {
9336 if (TYPE_MODE (index_type) != index_mode)
9337 {
9338 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9339 index_expr = fold_convert (index_type, index_expr);
9340 }
9341
9342 index = expand_normal (index_expr);
9343 }
9344
9345 do_pending_stack_adjust ();
9346
9347 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9348 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9349 (index, op_mode))
9350 index = copy_to_mode_reg (op_mode, index);
9351
9352 op1 = expand_normal (minval);
9353
9354 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9355 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9356 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9357 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9358 (op1, op_mode))
9359 op1 = copy_to_mode_reg (op_mode, op1);
9360
9361 op2 = expand_normal (range);
9362
9363 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9364 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9365 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9366 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9367 (op2, op_mode))
9368 op2 = copy_to_mode_reg (op_mode, op2);
9369
9370 emit_jump_insn (gen_casesi (index, op1, op2,
9371 table_label, default_label));
9372 return 1;
9373 }
9374
9375 /* Attempt to generate a tablejump instruction; same concept. */
9376 #ifndef HAVE_tablejump
9377 #define HAVE_tablejump 0
9378 #define gen_tablejump(x, y) (0)
9379 #endif
9380
9381 /* Subroutine of the next function.
9382
9383 INDEX is the value being switched on, with the lowest value
9384 in the table already subtracted.
9385 MODE is its expected mode (needed if INDEX is constant).
9386 RANGE is the length of the jump table.
9387 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9388
9389 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9390 index value is out of range. */
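/* Illustrative note: the dispatch address computed below is
       table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)
   (wrapped by PIC_CASE_VECTOR_ADDRESS when generating PIC code), and
   the table element loaded from that address becomes the jump
   target.  */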
9391
9392 static void
9393 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9394 rtx default_label)
9395 {
9396 rtx temp, vector;
9397
9398 if (INTVAL (range) > cfun->max_jumptable_ents)
9399 cfun->max_jumptable_ents = INTVAL (range);
9400
9401 /* Do an unsigned comparison (in the proper mode) between the index
9402 expression and the value which represents the length of the range.
9403 Since we just finished subtracting the lower bound of the range
9404 from the index expression, this comparison allows us to simultaneously
9405 check that the original index expression value is both greater than
9406 or equal to the minimum value of the range and less than or equal to
9407 the maximum value of the range. */
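  /* E.g., for a case range of 5 ... 10 the caller has already formed
     INDEX = i - 5 and RANGE = 5; the single unsigned test
     (unsigned) (i - 5) > 5 is true exactly when i < 5 or i > 10,
     because an underflowed difference wraps around to a large
     unsigned value.  */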
9408
9409 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9410 default_label);
9411
9412 /* If index is in range, it must fit in Pmode.
9413 Convert to Pmode so we can index with it. */
9414 if (mode != Pmode)
9415 index = convert_to_mode (Pmode, index, 1);
9416
9417 /* Don't let a MEM slip through, because then INDEX that comes
9418 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9419 and break_out_memory_refs will go to work on it and mess it up. */
9420 #ifdef PIC_CASE_VECTOR_ADDRESS
9421 if (flag_pic && !REG_P (index))
9422 index = copy_to_mode_reg (Pmode, index);
9423 #endif
9424
9425 /* If flag_force_addr were to affect this address
9426 it could interfere with the tricky assumptions made
9427 about addresses that contain label-refs,
9428 which may be valid only very near the tablejump itself. */
9429 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9430 GET_MODE_SIZE, because this indicates how large insns are. The other
9431 uses should all be Pmode, because they are addresses. This code
9432 could fail if addresses and insns are not the same size. */
9433 index = gen_rtx_PLUS (Pmode,
9434 gen_rtx_MULT (Pmode, index,
9435 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9436 gen_rtx_LABEL_REF (Pmode, table_label));
9437 #ifdef PIC_CASE_VECTOR_ADDRESS
9438 if (flag_pic)
9439 index = PIC_CASE_VECTOR_ADDRESS (index);
9440 else
9441 #endif
9442 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9443 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9444 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9445 convert_move (temp, vector, 0);
9446
9447 emit_jump_insn (gen_tablejump (temp, table_label));
9448
9449 /* If we are generating PIC code or if the table is PC-relative, the
9450 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9451 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9452 emit_barrier ();
9453 }
9454
9455 int
9456 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9457 rtx table_label, rtx default_label)
9458 {
9459 rtx index;
9460
9461 if (! HAVE_tablejump)
9462 return 0;
9463
9464 index_expr = fold_build2 (MINUS_EXPR, index_type,
9465 fold_convert (index_type, index_expr),
9466 fold_convert (index_type, minval));
9467 index = expand_normal (index_expr);
9468 do_pending_stack_adjust ();
9469
9470 do_tablejump (index, TYPE_MODE (index_type),
9471 convert_modes (TYPE_MODE (index_type),
9472 TYPE_MODE (TREE_TYPE (range)),
9473 expand_normal (range),
9474 TYPE_UNSIGNED (TREE_TYPE (range))),
9475 table_label, default_label);
9476 return 1;
9477 }
9478
9479 /* Nonzero if the mode is a valid vector mode for this architecture.
9480 This returns nonzero even if there is no hardware support for the
9481 vector mode, but we can emulate with narrower modes. */
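/* For example, V2SImode counts as valid on a target with no vector
   unit at all, provided SImode itself is supported, since a V2SI
   operation can be emulated with a pair of SImode operations.  */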
9482
9483 int
9484 vector_mode_valid_p (enum machine_mode mode)
9485 {
9486 enum mode_class class = GET_MODE_CLASS (mode);
9487 enum machine_mode innermode;
9488
9489 /* Not a vector mode at all? Then it is certainly not valid. */
9490 if (class != MODE_VECTOR_INT
9491 && class != MODE_VECTOR_FLOAT)
9492 return 0;
9493
9494 /* The target supports this vector mode directly in hardware. */
9495 if (targetm.vector_mode_supported_p (mode))
9496 return 1;
9497
9498 innermode = GET_MODE_INNER (mode);
9499
9500 /* We should probably return 1 when requesting V4DI and we have no DI
9501 but do have V2DI; that case, however, is probably very unlikely. */
9502
9503 /* If we have support for the inner mode, we can safely emulate it.
9504 We may not have V2DI, but we can emulate it with a pair of DIs. */
9505 return targetm.scalar_mode_supported_p (innermode);
9506 }
9507
9508 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
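/* E.g. (illustrative, in schematic rather than literal RTL dump form),
   a V4SImode VECTOR_CST listing the elements 1, 2 and 3 becomes
   (const_vector:V4SI [1 2 3 0]); elements beyond those listed are
   zero-filled below.  */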
9509 static rtx
9510 const_vector_from_tree (tree exp)
9511 {
9512 rtvec v;
9513 int units, i;
9514 tree link, elt;
9515 enum machine_mode inner, mode;
9516
9517 mode = TYPE_MODE (TREE_TYPE (exp));
9518
9519 if (initializer_zerop (exp))
9520 return CONST0_RTX (mode);
9521
9522 units = GET_MODE_NUNITS (mode);
9523 inner = GET_MODE_INNER (mode);
9524
9525 v = rtvec_alloc (units);
9526
9527 link = TREE_VECTOR_CST_ELTS (exp);
9528 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9529 {
9530 elt = TREE_VALUE (link);
9531
9532 if (TREE_CODE (elt) == REAL_CST)
9533 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9534 inner);
9535 else
9536 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9537 TREE_INT_CST_HIGH (elt),
9538 inner);
9539 }
9540
9541 /* Initialize remaining elements to 0. */
9542 for (; i < units; ++i)
9543 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9544
9545 return gen_rtx_CONST_VECTOR (mode, v);
9546 }
9547 #include "gt-expr.h"