1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
4 Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "real.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "output.h"
45 #include "typeclass.h"
46 #include "toplev.h"
47 #include "ggc.h"
48 #include "langhooks.h"
49 #include "intl.h"
50 #include "tm_p.h"
51 #include "tree-iterator.h"
52 #include "tree-pass.h"
53 #include "tree-flow.h"
54 #include "target.h"
55 #include "timevar.h"
56
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
59
60 They should if the stack and args grow in opposite directions, but
61 only if we have push insns. */
62
63 #ifdef PUSH_ROUNDING
64
65 #ifndef PUSH_ARGS_REVERSED
66 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
67 #define PUSH_ARGS_REVERSED /* If it's last to first. */
68 #endif
69 #endif
70
71 #endif
72
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
76 #else
77 #define STACK_PUSH_CODE PRE_INC
78 #endif
79 #endif
80
81
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88 int cse_not_expected;
89
90 /* This structure is used by move_by_pieces to describe the move to
91 be performed. */
92 struct move_by_pieces
93 {
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
104 int reverse;
105 };
106
107 /* This structure is used by store_by_pieces to describe the store
108 (a clear or a constant fill/copy) to be performed. */
109
110 struct store_by_pieces
111 {
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119 void *constfundata;
120 int reverse;
121 };
122
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int,
125 unsigned int);
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
130 static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
131 static tree emit_block_move_libcall_fn (int);
132 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
133 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
134 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
135 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
136 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
137 struct store_by_pieces *);
138 static rtx clear_storage_via_libcall (rtx, rtx, bool);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, tree, int);
148
149 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
150
151 static int is_aligning_offset (tree, tree);
152 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 enum expand_modifier);
154 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn (enum machine_mode, rtx, tree);
158 #endif
159 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160 static rtx const_vector_from_tree (tree);
161 static void write_complex_part (rtx, rtx, bool);
162
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
166
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
169
170 /* Record for each mode whether we can float-extend from memory. */
171
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179 < (unsigned int) MOVE_RATIO)
180 #endif
181
182 /* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) CLEAR_RATIO)
188 #endif
189
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero, or
192 to "memcpy" storage when the source is a constant string. */
193 #ifndef STORE_BY_PIECES_P
194 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
195 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196 < (unsigned int) MOVE_RATIO)
197 #endif
198
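/* Illustrative sketch (not part of GCC): all three *_BY_PIECES_P macros
   reduce to the same test -- estimate how many piecewise move insns the
   operation would take and compare that count against a target ratio
   (MOVE_RATIO or CLEAR_RATIO).  Assuming a hypothetical target with
   MOVE_MAX_PIECES == 8 and MOVE_RATIO == 3, a 16-byte copy with 64-bit
   alignment costs two DImode moves (2 < 3) and stays inline, while a
   64-byte copy costs eight and is better left to a movmem pattern or a
   memcpy call:

     if (MOVE_BY_PIECES_P (16, 64))
       move_by_pieces (x, y, 16, 64, 0);
     else
       emit_block_move (x, y, GEN_INT (16), BLOCK_OP_NORMAL);

   The numbers are invented for the example; each target defines
   MOVE_RATIO and MOVE_MAX_PIECES for itself.  */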
199 /* This array records the insn_code of insns to perform block moves. */
200 enum insn_code movmem_optab[NUM_MACHINE_MODES];
201
202 /* This array records the insn_code of insns to perform block sets. */
203 enum insn_code setmem_optab[NUM_MACHINE_MODES];
204
205 /* These arrays record the insn_code of three different kinds of insns
206 to perform block compares. */
207 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
209 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
210
211 /* Synchronization primitives. */
212 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
230 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
231 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
232 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
233 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
234
235 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
236
237 #ifndef SLOW_UNALIGNED_ACCESS
238 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
239 #endif
240 \f
241 /* This is run once per compilation to set up which modes can be used
242 directly in memory and to initialize the block move optab. */
243
244 void
245 init_expr_once (void)
246 {
247 rtx insn, pat;
248 enum machine_mode mode;
249 int num_clobbers;
250 rtx mem, mem1;
251 rtx reg;
252
253 /* Try indexing by frame ptr and try by stack ptr.
254 It is known that on the Convex the stack ptr isn't a valid index.
255 With luck, one or the other is valid on any machine. */
256 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
257 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
258
259 /* A scratch register we can modify in-place below to avoid
260 useless RTL allocations. */
261 reg = gen_rtx_REG (VOIDmode, -1);
262
263 insn = rtx_alloc (INSN);
264 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
265 PATTERN (insn) = pat;
266
267 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
268 mode = (enum machine_mode) ((int) mode + 1))
269 {
270 int regno;
271
272 direct_load[(int) mode] = direct_store[(int) mode] = 0;
273 PUT_MODE (mem, mode);
274 PUT_MODE (mem1, mode);
275 PUT_MODE (reg, mode);
276
277 /* See if there is some register that can be used in this mode and
278 directly loaded or stored from memory. */
279
280 if (mode != VOIDmode && mode != BLKmode)
281 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
282 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
283 regno++)
284 {
285 if (! HARD_REGNO_MODE_OK (regno, mode))
286 continue;
287
288 REGNO (reg) = regno;
289
290 SET_SRC (pat) = mem;
291 SET_DEST (pat) = reg;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_load[(int) mode] = 1;
294
295 SET_SRC (pat) = mem1;
296 SET_DEST (pat) = reg;
297 if (recog (pat, insn, &num_clobbers) >= 0)
298 direct_load[(int) mode] = 1;
299
300 SET_SRC (pat) = reg;
301 SET_DEST (pat) = mem;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_store[(int) mode] = 1;
304
305 SET_SRC (pat) = reg;
306 SET_DEST (pat) = mem1;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_store[(int) mode] = 1;
309 }
310 }
311
312 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
313
314 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
315 mode = GET_MODE_WIDER_MODE (mode))
316 {
317 enum machine_mode srcmode;
318 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
319 srcmode = GET_MODE_WIDER_MODE (srcmode))
320 {
321 enum insn_code ic;
322
323 ic = can_extend_p (mode, srcmode, 0);
324 if (ic == CODE_FOR_nothing)
325 continue;
326
327 PUT_MODE (mem, srcmode);
328
329 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
330 float_extend_from_mem[mode][srcmode] = true;
331 }
332 }
333 }
334
335 /* This is run at the start of compiling a function. */
336
337 void
338 init_expr (void)
339 {
340 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
341 }
342 \f
343 /* Copy data from FROM to TO, where the machine modes are not the same.
344 Both modes may be integer, or both may be floating.
345 UNSIGNEDP should be nonzero if FROM is an unsigned type.
346 This causes zero-extension instead of sign-extension. */
347
348 void
349 convert_move (rtx to, rtx from, int unsignedp)
350 {
351 enum machine_mode to_mode = GET_MODE (to);
352 enum machine_mode from_mode = GET_MODE (from);
353 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
354 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
355 enum insn_code code;
356 rtx libcall;
357
358 /* rtx code for making an equivalent value. */
359 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
360 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
361
362
363 gcc_assert (to_real == from_real);
364
365 /* If the source and destination are already the same, then there's
366 nothing to do. */
367 if (to == from)
368 return;
369
370 /* If FROM is a SUBREG that indicates that we have already done at least
371 the required extension, strip it. We don't handle such SUBREGs as
372 TO here. */
373
374 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
375 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
376 >= GET_MODE_SIZE (to_mode))
377 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
378 from = gen_lowpart (to_mode, from), from_mode = to_mode;
379
380 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
381
382 if (to_mode == from_mode
383 || (from_mode == VOIDmode && CONSTANT_P (from)))
384 {
385 emit_move_insn (to, from);
386 return;
387 }
388
389 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
390 {
391 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
392
393 if (VECTOR_MODE_P (to_mode))
394 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
395 else
396 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
397
398 emit_move_insn (to, from);
399 return;
400 }
401
402 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
403 {
404 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
405 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
406 return;
407 }
408
409 if (to_real)
410 {
411 rtx value, insns;
412 convert_optab tab;
413
414 gcc_assert ((GET_MODE_PRECISION (from_mode)
415 != GET_MODE_PRECISION (to_mode))
416 || (DECIMAL_FLOAT_MODE_P (from_mode)
417 != DECIMAL_FLOAT_MODE_P (to_mode)));
418
419 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
420 /* Conversion between decimal float and binary float, same size. */
421 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
422 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
423 tab = sext_optab;
424 else
425 tab = trunc_optab;
426
427 /* Try converting directly if the insn is supported. */
428
429 code = tab->handlers[to_mode][from_mode].insn_code;
430 if (code != CODE_FOR_nothing)
431 {
432 emit_unop_insn (code, to, from,
433 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
434 return;
435 }
436
437 /* Otherwise use a libcall. */
438 libcall = tab->handlers[to_mode][from_mode].libfunc;
439
440 /* Is this conversion implemented yet? */
441 gcc_assert (libcall);
442
443 start_sequence ();
444 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
445 1, from, from_mode);
446 insns = get_insns ();
447 end_sequence ();
448 emit_libcall_block (insns, to, value,
449 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
450 from)
451 : gen_rtx_FLOAT_EXTEND (to_mode, from));
452 return;
453 }
454
455 /* Handle pointer conversion. */ /* SPEE 900220. */
456 /* Targets are expected to provide conversion insns between PxImode and
457 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
458 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
459 {
460 enum machine_mode full_mode
461 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
462
463 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
464 != CODE_FOR_nothing);
465
466 if (full_mode != from_mode)
467 from = convert_to_mode (full_mode, from, unsignedp);
468 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
469 to, from, UNKNOWN);
470 return;
471 }
472 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
473 {
474 rtx new_from;
475 enum machine_mode full_mode
476 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
477
478 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
479 != CODE_FOR_nothing);
480
481 if (to_mode == full_mode)
482 {
483 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
484 to, from, UNKNOWN);
485 return;
486 }
487
488 new_from = gen_reg_rtx (full_mode);
489 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
490 new_from, from, UNKNOWN);
491
492 /* else proceed to integer conversions below. */
493 from_mode = full_mode;
494 from = new_from;
495 }
496
497 /* Now both modes are integers. */
498
499 /* Handle expanding beyond a word. */
500 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
501 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
502 {
503 rtx insns;
504 rtx lowpart;
505 rtx fill_value;
506 rtx lowfrom;
507 int i;
508 enum machine_mode lowpart_mode;
509 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
510
511 /* Try converting directly if the insn is supported. */
512 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
513 != CODE_FOR_nothing)
514 {
515 /* If FROM is a SUBREG, put it into a register. Do this
516 so that we always generate the same set of insns for
517 better cse'ing; if an intermediate assignment occurred,
518 we won't be doing the operation directly on the SUBREG. */
519 if (optimize > 0 && GET_CODE (from) == SUBREG)
520 from = force_reg (from_mode, from);
521 emit_unop_insn (code, to, from, equiv_code);
522 return;
523 }
524 /* Next, try converting via full word. */
525 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
526 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
527 != CODE_FOR_nothing))
528 {
529 if (REG_P (to))
530 {
531 if (reg_overlap_mentioned_p (to, from))
532 from = force_reg (from_mode, from);
533 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
534 }
535 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
536 emit_unop_insn (code, to,
537 gen_lowpart (word_mode, to), equiv_code);
538 return;
539 }
540
541 /* No special multiword conversion insn; do it by hand. */
542 start_sequence ();
543
544 /* Since we will turn this into a no conflict block, we must ensure
545 that the source does not overlap the target. */
546
547 if (reg_overlap_mentioned_p (to, from))
548 from = force_reg (from_mode, from);
549
550 /* Get a copy of FROM widened to a word, if necessary. */
551 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
552 lowpart_mode = word_mode;
553 else
554 lowpart_mode = from_mode;
555
556 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
557
558 lowpart = gen_lowpart (lowpart_mode, to);
559 emit_move_insn (lowpart, lowfrom);
560
561 /* Compute the value to put in each remaining word. */
562 if (unsignedp)
563 fill_value = const0_rtx;
564 else
565 {
566 #ifdef HAVE_slt
567 if (HAVE_slt
568 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
569 && STORE_FLAG_VALUE == -1)
570 {
571 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
572 lowpart_mode, 0);
573 fill_value = gen_reg_rtx (word_mode);
574 emit_insn (gen_slt (fill_value));
575 }
576 else
577 #endif
578 {
579 fill_value
580 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
581 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
582 NULL_RTX, 0);
583 fill_value = convert_to_mode (word_mode, fill_value, 1);
584 }
585 }
586
587 /* Fill the remaining words. */
588 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
589 {
590 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
591 rtx subword = operand_subword (to, index, 1, to_mode);
592
593 gcc_assert (subword);
594
595 if (fill_value != subword)
596 emit_move_insn (subword, fill_value);
597 }
598
599 insns = get_insns ();
600 end_sequence ();
601
602 emit_no_conflict_block (insns, to, from, NULL_RTX,
603 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
604 return;
605 }
606
607 /* Truncating multi-word to a word or less. */
608 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
609 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
610 {
611 if (!((MEM_P (from)
612 && ! MEM_VOLATILE_P (from)
613 && direct_load[(int) to_mode]
614 && ! mode_dependent_address_p (XEXP (from, 0)))
615 || REG_P (from)
616 || GET_CODE (from) == SUBREG))
617 from = force_reg (from_mode, from);
618 convert_move (to, gen_lowpart (word_mode, from), 0);
619 return;
620 }
621
622 /* Now follow all the conversions between integers
623 no more than a word long. */
624
625 /* For truncation, usually we can just refer to FROM in a narrower mode. */
626 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
627 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
628 GET_MODE_BITSIZE (from_mode)))
629 {
630 if (!((MEM_P (from)
631 && ! MEM_VOLATILE_P (from)
632 && direct_load[(int) to_mode]
633 && ! mode_dependent_address_p (XEXP (from, 0)))
634 || REG_P (from)
635 || GET_CODE (from) == SUBREG))
636 from = force_reg (from_mode, from);
637 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
638 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
639 from = copy_to_reg (from);
640 emit_move_insn (to, gen_lowpart (to_mode, from));
641 return;
642 }
643
644 /* Handle extension. */
645 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
646 {
647 /* Convert directly if that works. */
648 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
649 != CODE_FOR_nothing)
650 {
651 emit_unop_insn (code, to, from, equiv_code);
652 return;
653 }
654 else
655 {
656 enum machine_mode intermediate;
657 rtx tmp;
658 tree shift_amount;
659
660 /* Search for a mode to convert via. */
661 for (intermediate = from_mode; intermediate != VOIDmode;
662 intermediate = GET_MODE_WIDER_MODE (intermediate))
663 if (((can_extend_p (to_mode, intermediate, unsignedp)
664 != CODE_FOR_nothing)
665 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
666 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
667 GET_MODE_BITSIZE (intermediate))))
668 && (can_extend_p (intermediate, from_mode, unsignedp)
669 != CODE_FOR_nothing))
670 {
671 convert_move (to, convert_to_mode (intermediate, from,
672 unsignedp), unsignedp);
673 return;
674 }
675
676 /* No suitable intermediate mode.
677 Generate what we need with shifts. */
678 shift_amount = build_int_cst (NULL_TREE,
679 GET_MODE_BITSIZE (to_mode)
680 - GET_MODE_BITSIZE (from_mode));
681 from = gen_lowpart (to_mode, force_reg (from_mode, from));
682 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
683 to, unsignedp);
684 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
685 to, unsignedp);
686 if (tmp != to)
687 emit_move_insn (to, tmp);
688 return;
689 }
690 }
691
692 /* Support special truncate insns for certain modes. */
693 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
694 {
695 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
696 to, from, UNKNOWN);
697 return;
698 }
699
700 /* Handle truncation of volatile memrefs, and so on;
701 the things that couldn't be truncated directly,
702 and for which there was no special instruction.
703
704 ??? Code above formerly short-circuited this, for most integer
705 mode pairs, with a force_reg in from_mode followed by a recursive
706 call to this routine. Appears always to have been wrong. */
707 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
708 {
709 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
710 emit_move_insn (to, temp);
711 return;
712 }
713
714 /* Mode combination is not recognized. */
715 gcc_unreachable ();
716 }
717
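/* Usage sketch (illustrative, not taken from the GCC sources): a
   typical caller widens or narrows a value by pairing gen_reg_rtx with
   convert_move and lets the code above choose an extend insn, an
   intermediate mode, or a shift sequence.  Assuming FROM already holds
   a QImode value:

     rtx widened = gen_reg_rtx (SImode);
     convert_move (widened, from, 1);

     rtx narrowed = gen_reg_rtx (QImode);
     convert_move (narrowed, widened, 0);

   The third argument is UNSIGNEDP, so the first call zero-extends; for
   the truncation the signedness does not matter.  convert_to_mode,
   defined just below, is the same operation with the temporary
   allocated for you.  */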
718 /* Return an rtx for a value that would result
719 from converting X to mode MODE.
720 Both X and MODE may be floating, or both integer.
721 UNSIGNEDP is nonzero if X is an unsigned value.
722 This can be done by referring to a part of X in place
723 or by copying to a new temporary with conversion. */
724
725 rtx
726 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
727 {
728 return convert_modes (mode, VOIDmode, x, unsignedp);
729 }
730
731 /* Return an rtx for a value that would result
732 from converting X from mode OLDMODE to mode MODE.
733 Both modes may be floating, or both integer.
734 UNSIGNEDP is nonzero if X is an unsigned value.
735
736 This can be done by referring to a part of X in place
737 or by copying to a new temporary with conversion.
738
739 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
740
741 rtx
742 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
743 {
744 rtx temp;
745
746 /* If FROM is a SUBREG that indicates that we have already done at least
747 the required extension, strip it. */
748
749 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
750 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
751 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
752 x = gen_lowpart (mode, x);
753
754 if (GET_MODE (x) != VOIDmode)
755 oldmode = GET_MODE (x);
756
757 if (mode == oldmode)
758 return x;
759
760 /* There is one case that we must handle specially: If we are converting
761 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
762 we are to interpret the constant as unsigned, gen_lowpart will do
763 the wrong thing if the constant appears negative. What we want to do is
764 make the high-order word of the constant zero, not all ones. */
765
766 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
767 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
768 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
769 {
770 HOST_WIDE_INT val = INTVAL (x);
771
772 if (oldmode != VOIDmode
773 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
774 {
775 int width = GET_MODE_BITSIZE (oldmode);
776
777 /* We need to zero extend VAL. */
778 val &= ((HOST_WIDE_INT) 1 << width) - 1;
779 }
780
781 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
782 }
783
784 /* We can do this with a gen_lowpart if both desired and current modes
785 are integer, and this is either a constant integer, a register, or a
786 non-volatile MEM. Except for the constant case where MODE is no
787 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
788
789 if ((GET_CODE (x) == CONST_INT
790 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
791 || (GET_MODE_CLASS (mode) == MODE_INT
792 && GET_MODE_CLASS (oldmode) == MODE_INT
793 && (GET_CODE (x) == CONST_DOUBLE
794 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
795 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
796 && direct_load[(int) mode])
797 || (REG_P (x)
798 && (! HARD_REGISTER_P (x)
799 || HARD_REGNO_MODE_OK (REGNO (x), mode))
800 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
801 GET_MODE_BITSIZE (GET_MODE (x)))))))))
802 {
803 /* ?? If we don't know OLDMODE, we have to assume here that
804 X does not need sign- or zero-extension. This may not be
805 the case, but it's the best we can do. */
806 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
807 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
808 {
809 HOST_WIDE_INT val = INTVAL (x);
810 int width = GET_MODE_BITSIZE (oldmode);
811
812 /* We must sign or zero-extend in this case. Start by
813 zero-extending, then sign extend if we need to. */
814 val &= ((HOST_WIDE_INT) 1 << width) - 1;
815 if (! unsignedp
816 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
817 val |= (HOST_WIDE_INT) (-1) << width;
818
819 return gen_int_mode (val, mode);
820 }
821
822 return gen_lowpart (mode, x);
823 }
824
825 /* Converting an integer constant into a vector mode is always
826 equivalent to a subreg operation. */
827 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
828 {
829 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
830 return simplify_gen_subreg (mode, x, oldmode, 0);
831 }
832
833 temp = gen_reg_rtx (mode);
834 convert_move (temp, x, unsignedp);
835 return temp;
836 }
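
/* Worked example (illustrative only): the CONST_INT handling above is
   plain two's-complement arithmetic on a HOST_WIDE_INT.  A
   self-contained model of the "zero-extend, then sign-extend if
   needed" step -- extend_to_width is a made-up name, not a GCC
   function -- looks like this:

     static HOST_WIDE_INT
     extend_to_width (HOST_WIDE_INT val, int width, int unsignedp)
     {
       val &= ((HOST_WIDE_INT) 1 << width) - 1;
       if (!unsignedp && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
         val |= (HOST_WIDE_INT) (-1) << width;
       return val;
     }

   For val = 0xff and width = 8 the unsigned result is 255 and the
   signed result is -1, which is exactly what gen_int_mode is handed in
   the code above.  WIDTH must be smaller than HOST_BITS_PER_WIDE_INT,
   just as the guards above require.  */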
837 \f
838 /* STORE_MAX_PIECES is the number of bytes at a time that we can
839 store efficiently. Due to internal GCC limitations, this is
840 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
841 for an immediate constant. */
842
843 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
844
845 /* Determine whether the LEN bytes can be moved by using several move
846 instructions. Return nonzero if a call to move_by_pieces should
847 succeed. */
848
849 int
850 can_move_by_pieces (unsigned HOST_WIDE_INT len,
851 unsigned int align ATTRIBUTE_UNUSED)
852 {
853 return MOVE_BY_PIECES_P (len, align);
854 }
855
856 /* Generate several move instructions to copy LEN bytes from block FROM to
857 block TO. (These are MEM rtx's with BLKmode).
858
859 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
860 used to push FROM to the stack.
861
862 ALIGN is maximum stack alignment we can assume.
863
864 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
865 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
866 stpcpy. */
867
868 rtx
869 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
870 unsigned int align, int endp)
871 {
872 struct move_by_pieces data;
873 rtx to_addr, from_addr = XEXP (from, 0);
874 unsigned int max_size = MOVE_MAX_PIECES + 1;
875 enum machine_mode mode = VOIDmode, tmode;
876 enum insn_code icode;
877
878 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
879
880 data.offset = 0;
881 data.from_addr = from_addr;
882 if (to)
883 {
884 to_addr = XEXP (to, 0);
885 data.to = to;
886 data.autinc_to
887 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
888 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
889 data.reverse
890 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
891 }
892 else
893 {
894 to_addr = NULL_RTX;
895 data.to = NULL_RTX;
896 data.autinc_to = 1;
897 #ifdef STACK_GROWS_DOWNWARD
898 data.reverse = 1;
899 #else
900 data.reverse = 0;
901 #endif
902 }
903 data.to_addr = to_addr;
904 data.from = from;
905 data.autinc_from
906 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
907 || GET_CODE (from_addr) == POST_INC
908 || GET_CODE (from_addr) == POST_DEC);
909
910 data.explicit_inc_from = 0;
911 data.explicit_inc_to = 0;
912 if (data.reverse) data.offset = len;
913 data.len = len;
914
915 /* If copying requires more than two move insns,
916 copy addresses to registers (to make displacements shorter)
917 and use post-increment if available. */
918 if (!(data.autinc_from && data.autinc_to)
919 && move_by_pieces_ninsns (len, align, max_size) > 2)
920 {
921 /* Find the mode of the largest move... */
922 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
923 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
924 if (GET_MODE_SIZE (tmode) < max_size)
925 mode = tmode;
926
927 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
928 {
929 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
930 data.autinc_from = 1;
931 data.explicit_inc_from = -1;
932 }
933 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
934 {
935 data.from_addr = copy_addr_to_reg (from_addr);
936 data.autinc_from = 1;
937 data.explicit_inc_from = 1;
938 }
939 if (!data.autinc_from && CONSTANT_P (from_addr))
940 data.from_addr = copy_addr_to_reg (from_addr);
941 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
942 {
943 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
944 data.autinc_to = 1;
945 data.explicit_inc_to = -1;
946 }
947 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
948 {
949 data.to_addr = copy_addr_to_reg (to_addr);
950 data.autinc_to = 1;
951 data.explicit_inc_to = 1;
952 }
953 if (!data.autinc_to && CONSTANT_P (to_addr))
954 data.to_addr = copy_addr_to_reg (to_addr);
955 }
956
957 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
958 if (align >= GET_MODE_ALIGNMENT (tmode))
959 align = GET_MODE_ALIGNMENT (tmode);
960 else
961 {
962 enum machine_mode xmode;
963
964 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
965 tmode != VOIDmode;
966 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
967 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
968 || SLOW_UNALIGNED_ACCESS (tmode, align))
969 break;
970
971 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
972 }
973
974 /* First move what we can in the largest integer mode, then go to
975 successively smaller modes. */
976
977 while (max_size > 1)
978 {
979 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
980 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
981 if (GET_MODE_SIZE (tmode) < max_size)
982 mode = tmode;
983
984 if (mode == VOIDmode)
985 break;
986
987 icode = mov_optab->handlers[(int) mode].insn_code;
988 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
989 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
990
991 max_size = GET_MODE_SIZE (mode);
992 }
993
994 /* The code above should have handled everything. */
995 gcc_assert (!data.len);
996
997 if (endp)
998 {
999 rtx to1;
1000
1001 gcc_assert (!data.reverse);
1002 if (data.autinc_to)
1003 {
1004 if (endp == 2)
1005 {
1006 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1007 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1008 else
1009 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1010 -1));
1011 }
1012 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1013 data.offset);
1014 }
1015 else
1016 {
1017 if (endp == 2)
1018 --data.offset;
1019 to1 = adjust_address (data.to, QImode, data.offset);
1020 }
1021 return to1;
1022 }
1023 else
1024 return data.to;
1025 }
1026
1027 /* Return number of insns required to move L bytes by pieces.
1028 ALIGN (in bits) is maximum alignment we can assume. */
1029
1030 static unsigned HOST_WIDE_INT
1031 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1032 unsigned int max_size)
1033 {
1034 unsigned HOST_WIDE_INT n_insns = 0;
1035 enum machine_mode tmode;
1036
1037 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1038 if (align >= GET_MODE_ALIGNMENT (tmode))
1039 align = GET_MODE_ALIGNMENT (tmode);
1040 else
1041 {
1042 enum machine_mode tmode, xmode;
1043
1044 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1045 tmode != VOIDmode;
1046 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1047 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1048 || SLOW_UNALIGNED_ACCESS (tmode, align))
1049 break;
1050
1051 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1052 }
1053
1054 while (max_size > 1)
1055 {
1056 enum machine_mode mode = VOIDmode;
1057 enum insn_code icode;
1058
1059 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1060 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1061 if (GET_MODE_SIZE (tmode) < max_size)
1062 mode = tmode;
1063
1064 if (mode == VOIDmode)
1065 break;
1066
1067 icode = mov_optab->handlers[(int) mode].insn_code;
1068 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1069 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1070
1071 max_size = GET_MODE_SIZE (mode);
1072 }
1073
1074 gcc_assert (!l);
1075 return n_insns;
1076 }
1077
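/* Illustrative model (not part of GCC): once alignment has been capped,
   the insn count above is a greedy decomposition of L into the largest
   available piece sizes, i.e. repeated division with remainder.  A
   simplified standalone version, assuming the piece sizes are the
   powers of two up to MAX_PIECE bytes and that every such mode has a
   usable, sufficiently aligned move pattern:

     static unsigned int
     ninsns_model (unsigned int l, unsigned int max_piece)
     {
       unsigned int n = 0, size;

       for (size = max_piece; size >= 1; size /= 2)
         {
           n += l / size;
           l %= size;
         }
       return n;
     }

   ninsns_model (11, 8) is 3 -- one 8-byte, one 2-byte and one 1-byte
   move -- which matches what the loop above computes when DImode,
   HImode and QImode moves are all available.  */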
1078 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1079 with move instructions for mode MODE. GENFUN is the gen_... function
1080 to make a move insn for that mode. DATA has all the other info. */
1081
1082 static void
1083 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1084 struct move_by_pieces *data)
1085 {
1086 unsigned int size = GET_MODE_SIZE (mode);
1087 rtx to1 = NULL_RTX, from1;
1088
1089 while (data->len >= size)
1090 {
1091 if (data->reverse)
1092 data->offset -= size;
1093
1094 if (data->to)
1095 {
1096 if (data->autinc_to)
1097 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1098 data->offset);
1099 else
1100 to1 = adjust_address (data->to, mode, data->offset);
1101 }
1102
1103 if (data->autinc_from)
1104 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1105 data->offset);
1106 else
1107 from1 = adjust_address (data->from, mode, data->offset);
1108
1109 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1110 emit_insn (gen_add2_insn (data->to_addr,
1111 GEN_INT (-(HOST_WIDE_INT)size)));
1112 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1113 emit_insn (gen_add2_insn (data->from_addr,
1114 GEN_INT (-(HOST_WIDE_INT)size)));
1115
1116 if (data->to)
1117 emit_insn ((*genfun) (to1, from1));
1118 else
1119 {
1120 #ifdef PUSH_ROUNDING
1121 emit_single_push_insn (mode, from1, NULL);
1122 #else
1123 gcc_unreachable ();
1124 #endif
1125 }
1126
1127 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1128 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1129 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1130 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1131
1132 if (! data->reverse)
1133 data->offset += size;
1134
1135 data->len -= size;
1136 }
1137 }
1138 \f
1139 /* Emit code to move a block Y to a block X. This may be done with
1140 string-move instructions, with multiple scalar move instructions,
1141 or with a library call.
1142
1143 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1144 SIZE is an rtx that says how long they are.
1145 ALIGN is the maximum alignment we can assume they have.
1146 METHOD describes what kind of copy this is, and what mechanisms may be used.
1147
1148 Return the address of the new block, if memcpy is called and returns it,
1149 0 otherwise. */
1150
1151 rtx
1152 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1153 {
1154 bool may_use_call;
1155 rtx retval = 0;
1156 unsigned int align;
1157
1158 switch (method)
1159 {
1160 case BLOCK_OP_NORMAL:
1161 case BLOCK_OP_TAILCALL:
1162 may_use_call = true;
1163 break;
1164
1165 case BLOCK_OP_CALL_PARM:
1166 may_use_call = block_move_libcall_safe_for_call_parm ();
1167
1168 /* Make inhibit_defer_pop nonzero around the library call
1169 to force it to pop the arguments right away. */
1170 NO_DEFER_POP;
1171 break;
1172
1173 case BLOCK_OP_NO_LIBCALL:
1174 may_use_call = false;
1175 break;
1176
1177 default:
1178 gcc_unreachable ();
1179 }
1180
1181 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1182
1183 gcc_assert (MEM_P (x));
1184 gcc_assert (MEM_P (y));
1185 gcc_assert (size);
1186
1187 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1188 block copy is more efficient for other large modes, e.g. DCmode. */
1189 x = adjust_address (x, BLKmode, 0);
1190 y = adjust_address (y, BLKmode, 0);
1191
1192 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1193 can be incorrect is coming from __builtin_memcpy. */
1194 if (GET_CODE (size) == CONST_INT)
1195 {
1196 if (INTVAL (size) == 0)
1197 return 0;
1198
1199 x = shallow_copy_rtx (x);
1200 y = shallow_copy_rtx (y);
1201 set_mem_size (x, size);
1202 set_mem_size (y, size);
1203 }
1204
1205 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1206 move_by_pieces (x, y, INTVAL (size), align, 0);
1207 else if (emit_block_move_via_movmem (x, y, size, align))
1208 ;
1209 else if (may_use_call)
1210 retval = emit_block_move_via_libcall (x, y, size,
1211 method == BLOCK_OP_TAILCALL);
1212 else
1213 emit_block_move_via_loop (x, y, size, align);
1214
1215 if (method == BLOCK_OP_CALL_PARM)
1216 OK_DEFER_POP;
1217
1218 return retval;
1219 }
1220
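/* Usage sketch (illustrative): a caller copying an aggregate supplies
   two BLKmode MEMs and a size rtx; the dispatch above then tries
   move_by_pieces, a movmem pattern, a memcpy libcall and finally the
   byte-copy loop, in that order.  Assuming DST and SRC are BLKmode
   MEMs for a 32-byte structure:

     rtx ret = emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);

   RET is the value returned by memcpy when the libcall path is taken
   and 0 otherwise, as documented above.  */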
1221 /* A subroutine of emit_block_move. Returns true if calling the
1222 block move libcall will not clobber any parameters which may have
1223 already been placed on the stack. */
1224
1225 static bool
1226 block_move_libcall_safe_for_call_parm (void)
1227 {
1228 /* If arguments are pushed on the stack, then they're safe. */
1229 if (PUSH_ARGS)
1230 return true;
1231
1232 /* If registers go on the stack anyway, any argument is sure to clobber
1233 an outgoing argument. */
1234 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1235 {
1236 tree fn = emit_block_move_libcall_fn (false);
1237 (void) fn;
1238 if (REG_PARM_STACK_SPACE (fn) != 0)
1239 return false;
1240 }
1241 #endif
1242
1243 /* If any argument goes in memory, then it might clobber an outgoing
1244 argument. */
1245 {
1246 CUMULATIVE_ARGS args_so_far;
1247 tree fn, arg;
1248
1249 fn = emit_block_move_libcall_fn (false);
1250 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1251
1252 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1253 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1254 {
1255 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1256 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1257 if (!tmp || !REG_P (tmp))
1258 return false;
1259 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1260 return false;
1261 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1262 }
1263 }
1264 return true;
1265 }
1266
1267 /* A subroutine of emit_block_move. Expand a movmem pattern;
1268 return true if successful. */
1269
1270 static bool
1271 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1272 {
1273 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1274 int save_volatile_ok = volatile_ok;
1275 enum machine_mode mode;
1276
1277 /* Since this is a move insn, we don't care about volatility. */
1278 volatile_ok = 1;
1279
1280 /* Try the most limited insn first, because there's no point
1281 including more than one in the machine description unless
1282 the more limited one has some advantage. */
1283
1284 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1285 mode = GET_MODE_WIDER_MODE (mode))
1286 {
1287 enum insn_code code = movmem_optab[(int) mode];
1288 insn_operand_predicate_fn pred;
1289
1290 if (code != CODE_FOR_nothing
1291 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1292 here because if SIZE is less than the mode mask, as it is
1293 returned by the macro, it will definitely be less than the
1294 actual mode mask. */
1295 && ((GET_CODE (size) == CONST_INT
1296 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1297 <= (GET_MODE_MASK (mode) >> 1)))
1298 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1299 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1300 || (*pred) (x, BLKmode))
1301 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1302 || (*pred) (y, BLKmode))
1303 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1304 || (*pred) (opalign, VOIDmode)))
1305 {
1306 rtx op2;
1307 rtx last = get_last_insn ();
1308 rtx pat;
1309
1310 op2 = convert_to_mode (mode, size, 1);
1311 pred = insn_data[(int) code].operand[2].predicate;
1312 if (pred != 0 && ! (*pred) (op2, mode))
1313 op2 = copy_to_mode_reg (mode, op2);
1314
1315 /* ??? When called via emit_block_move_for_call, it'd be
1316 nice if there were some way to inform the backend, so
1317 that it doesn't fail the expansion because it thinks
1318 emitting the libcall would be more efficient. */
1319
1320 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1321 if (pat)
1322 {
1323 emit_insn (pat);
1324 volatile_ok = save_volatile_ok;
1325 return true;
1326 }
1327 else
1328 delete_insns_since (last);
1329 }
1330 }
1331
1332 volatile_ok = save_volatile_ok;
1333 return false;
1334 }
1335
1336 /* A subroutine of emit_block_move. Expand a call to memcpy.
1337 Return the return value from memcpy, 0 otherwise. */
1338
1339 static rtx
1340 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1341 {
1342 rtx dst_addr, src_addr;
1343 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1344 enum machine_mode size_mode;
1345 rtx retval;
1346
1347 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1348 pseudos. We can then place those new pseudos into a VAR_DECL and
1349 use them later. */
1350
1351 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1352 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1353
1354 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1355 src_addr = convert_memory_address (ptr_mode, src_addr);
1356
1357 dst_tree = make_tree (ptr_type_node, dst_addr);
1358 src_tree = make_tree (ptr_type_node, src_addr);
1359
1360 size_mode = TYPE_MODE (sizetype);
1361
1362 size = convert_to_mode (size_mode, size, 1);
1363 size = copy_to_mode_reg (size_mode, size);
1364
1365 /* It is incorrect to use the libcall calling conventions to call
1366 memcpy in this context. This could be a user call to memcpy and
1367 the user may wish to examine the return value from memcpy. For
1368 targets where libcalls and normal calls have different conventions
1369 for returning pointers, we could end up generating incorrect code. */
1370
1371 size_tree = make_tree (sizetype, size);
1372
1373 fn = emit_block_move_libcall_fn (true);
1374 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1375 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1376 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1377
1378 /* Now we have to build up the CALL_EXPR itself. */
1379 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1380 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1381 call_expr, arg_list, NULL_TREE);
1382 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1383
1384 retval = expand_normal (call_expr);
1385
1386 return retval;
1387 }
1388
1389 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1390 for the function we use for block copies. The first time FOR_CALL
1391 is true, we call assemble_external. */
1392
1393 static GTY(()) tree block_move_fn;
1394
1395 void
1396 init_block_move_fn (const char *asmspec)
1397 {
1398 if (!block_move_fn)
1399 {
1400 tree args, fn;
1401
1402 fn = get_identifier ("memcpy");
1403 args = build_function_type_list (ptr_type_node, ptr_type_node,
1404 const_ptr_type_node, sizetype,
1405 NULL_TREE);
1406
1407 fn = build_decl (FUNCTION_DECL, fn, args);
1408 DECL_EXTERNAL (fn) = 1;
1409 TREE_PUBLIC (fn) = 1;
1410 DECL_ARTIFICIAL (fn) = 1;
1411 TREE_NOTHROW (fn) = 1;
1412 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1413 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1414
1415 block_move_fn = fn;
1416 }
1417
1418 if (asmspec)
1419 set_user_assembler_name (block_move_fn, asmspec);
1420 }
1421
1422 static tree
1423 emit_block_move_libcall_fn (int for_call)
1424 {
1425 static bool emitted_extern;
1426
1427 if (!block_move_fn)
1428 init_block_move_fn (NULL);
1429
1430 if (for_call && !emitted_extern)
1431 {
1432 emitted_extern = true;
1433 make_decl_rtl (block_move_fn);
1434 assemble_external (block_move_fn);
1435 }
1436
1437 return block_move_fn;
1438 }
1439
1440 /* A subroutine of emit_block_move. Copy the data via an explicit
1441 loop. This is used only when libcalls are forbidden. */
1442 /* ??? It'd be nice to copy in hunks larger than QImode. */
1443
1444 static void
1445 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1446 unsigned int align ATTRIBUTE_UNUSED)
1447 {
1448 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1449 enum machine_mode iter_mode;
1450
1451 iter_mode = GET_MODE (size);
1452 if (iter_mode == VOIDmode)
1453 iter_mode = word_mode;
1454
1455 top_label = gen_label_rtx ();
1456 cmp_label = gen_label_rtx ();
1457 iter = gen_reg_rtx (iter_mode);
1458
1459 emit_move_insn (iter, const0_rtx);
1460
1461 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1462 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1463 do_pending_stack_adjust ();
1464
1465 emit_jump (cmp_label);
1466 emit_label (top_label);
1467
1468 tmp = convert_modes (Pmode, iter_mode, iter, true);
1469 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1470 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1471 x = change_address (x, QImode, x_addr);
1472 y = change_address (y, QImode, y_addr);
1473
1474 emit_move_insn (x, y);
1475
1476 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1477 true, OPTAB_LIB_WIDEN);
1478 if (tmp != iter)
1479 emit_move_insn (iter, tmp);
1480
1481 emit_label (cmp_label);
1482
1483 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1484 true, top_label);
1485 }
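
/* For reference (illustrative): the RTL emitted above corresponds to
   this byte-at-a-time C loop, with the test placed at the bottom so
   that nothing is copied when SIZE is zero:

     unsigned long iter = 0;
     goto cmp;
   top:
     ((char *) x)[iter] = ((char *) y)[iter];
     iter++;
   cmp:
     if (iter < size)
       goto top;

   Here X, Y and SIZE stand for the byte addresses and the length; the
   real code keeps ITER in word_mode (or the mode of SIZE) rather than
   unsigned long.  */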
1486 \f
1487 /* Copy all or part of a value X into registers starting at REGNO.
1488 The number of registers to be filled is NREGS. */
1489
1490 void
1491 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1492 {
1493 int i;
1494 #ifdef HAVE_load_multiple
1495 rtx pat;
1496 rtx last;
1497 #endif
1498
1499 if (nregs == 0)
1500 return;
1501
1502 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1503 x = validize_mem (force_const_mem (mode, x));
1504
1505 /* See if the machine can do this with a load multiple insn. */
1506 #ifdef HAVE_load_multiple
1507 if (HAVE_load_multiple)
1508 {
1509 last = get_last_insn ();
1510 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1511 GEN_INT (nregs));
1512 if (pat)
1513 {
1514 emit_insn (pat);
1515 return;
1516 }
1517 else
1518 delete_insns_since (last);
1519 }
1520 #endif
1521
1522 for (i = 0; i < nregs; i++)
1523 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1524 operand_subword_force (x, i, mode));
1525 }
1526
1527 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1528 The number of registers to be filled is NREGS. */
1529
1530 void
1531 move_block_from_reg (int regno, rtx x, int nregs)
1532 {
1533 int i;
1534
1535 if (nregs == 0)
1536 return;
1537
1538 /* See if the machine can do this with a store multiple insn. */
1539 #ifdef HAVE_store_multiple
1540 if (HAVE_store_multiple)
1541 {
1542 rtx last = get_last_insn ();
1543 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1544 GEN_INT (nregs));
1545 if (pat)
1546 {
1547 emit_insn (pat);
1548 return;
1549 }
1550 else
1551 delete_insns_since (last);
1552 }
1553 #endif
1554
1555 for (i = 0; i < nregs; i++)
1556 {
1557 rtx tem = operand_subword (x, i, 1, BLKmode);
1558
1559 gcc_assert (tem);
1560
1561 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1562 }
1563 }
1564
1565 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1566 ORIG, where ORIG is a non-consecutive group of registers represented by
1567 a PARALLEL. The clone is identical to the original except in that the
1568 original set of registers is replaced by a new set of pseudo registers.
1569 The new set has the same modes as the original set. */
1570
1571 rtx
1572 gen_group_rtx (rtx orig)
1573 {
1574 int i, length;
1575 rtx *tmps;
1576
1577 gcc_assert (GET_CODE (orig) == PARALLEL);
1578
1579 length = XVECLEN (orig, 0);
1580 tmps = alloca (sizeof (rtx) * length);
1581
1582 /* Skip a NULL entry in first slot. */
1583 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1584
1585 if (i)
1586 tmps[0] = 0;
1587
1588 for (; i < length; i++)
1589 {
1590 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1591 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1592
1593 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1594 }
1595
1596 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1597 }
1598
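/* Concrete shape (illustrative): for a 16-byte value passed or
   returned in two 8-byte registers, the PARALLEL these routines work
   on looks roughly like the following, where each EXPR_LIST pairs a
   register with its byte offset into the block (the register numbers
   are invented for the example):

     (parallel [(expr_list (reg:DI 0) (const_int 0))
                (expr_list (reg:DI 1) (const_int 8))])

   gen_group_rtx above returns the same structure with fresh pseudos
   substituted for the original registers.  */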
1599 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1600 except that values are placed in TMPS[i], and must later be moved
1601 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1602
1603 static void
1604 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1605 {
1606 rtx src;
1607 int start, i;
1608 enum machine_mode m = GET_MODE (orig_src);
1609
1610 gcc_assert (GET_CODE (dst) == PARALLEL);
1611
1612 if (m != VOIDmode
1613 && !SCALAR_INT_MODE_P (m)
1614 && !MEM_P (orig_src)
1615 && GET_CODE (orig_src) != CONCAT)
1616 {
1617 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1618 if (imode == BLKmode)
1619 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1620 else
1621 src = gen_reg_rtx (imode);
1622 if (imode != BLKmode)
1623 src = gen_lowpart (GET_MODE (orig_src), src);
1624 emit_move_insn (src, orig_src);
1625 /* ...and back again. */
1626 if (imode != BLKmode)
1627 src = gen_lowpart (imode, src);
1628 emit_group_load_1 (tmps, dst, src, type, ssize);
1629 return;
1630 }
1631
1632 /* Check for a NULL entry, used to indicate that the parameter goes
1633 both on the stack and in registers. */
1634 if (XEXP (XVECEXP (dst, 0, 0), 0))
1635 start = 0;
1636 else
1637 start = 1;
1638
1639 /* Process the pieces. */
1640 for (i = start; i < XVECLEN (dst, 0); i++)
1641 {
1642 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1643 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1644 unsigned int bytelen = GET_MODE_SIZE (mode);
1645 int shift = 0;
1646
1647 /* Handle trailing fragments that run over the size of the struct. */
1648 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1649 {
1650 /* Arrange to shift the fragment to where it belongs.
1651 extract_bit_field loads to the lsb of the reg. */
1652 if (
1653 #ifdef BLOCK_REG_PADDING
1654 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1655 == (BYTES_BIG_ENDIAN ? upward : downward)
1656 #else
1657 BYTES_BIG_ENDIAN
1658 #endif
1659 )
1660 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1661 bytelen = ssize - bytepos;
1662 gcc_assert (bytelen > 0);
1663 }
1664
1665 /* If we won't be loading directly from memory, protect the real source
1666 from strange tricks we might play; but make sure that the source can
1667 be loaded directly into the destination. */
1668 src = orig_src;
1669 if (!MEM_P (orig_src)
1670 && (!CONSTANT_P (orig_src)
1671 || (GET_MODE (orig_src) != mode
1672 && GET_MODE (orig_src) != VOIDmode)))
1673 {
1674 if (GET_MODE (orig_src) == VOIDmode)
1675 src = gen_reg_rtx (mode);
1676 else
1677 src = gen_reg_rtx (GET_MODE (orig_src));
1678
1679 emit_move_insn (src, orig_src);
1680 }
1681
1682 /* Optimize the access just a bit. */
1683 if (MEM_P (src)
1684 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1685 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1686 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1687 && bytelen == GET_MODE_SIZE (mode))
1688 {
1689 tmps[i] = gen_reg_rtx (mode);
1690 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1691 }
1692 else if (COMPLEX_MODE_P (mode)
1693 && GET_MODE (src) == mode
1694 && bytelen == GET_MODE_SIZE (mode))
1695 /* Let emit_move_complex do the bulk of the work. */
1696 tmps[i] = src;
1697 else if (GET_CODE (src) == CONCAT)
1698 {
1699 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1700 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1701
1702 if ((bytepos == 0 && bytelen == slen0)
1703 || (bytepos != 0 && bytepos + bytelen <= slen))
1704 {
1705 /* The following assumes that the concatenated objects all
1706 have the same size. In this case, a simple calculation
1707 can be used to determine the object and the bit field
1708 to be extracted. */
1709 tmps[i] = XEXP (src, bytepos / slen0);
1710 if (! CONSTANT_P (tmps[i])
1711 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1712 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1713 (bytepos % slen0) * BITS_PER_UNIT,
1714 1, NULL_RTX, mode, mode);
1715 }
1716 else
1717 {
1718 rtx mem;
1719
1720 gcc_assert (!bytepos);
1721 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1722 emit_move_insn (mem, src);
1723 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1724 0, 1, NULL_RTX, mode, mode);
1725 }
1726 }
1727 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1728 SIMD register, which is currently broken. While we get GCC
1729 to emit proper RTL for these cases, let's dump to memory. */
1730 else if (VECTOR_MODE_P (GET_MODE (dst))
1731 && REG_P (src))
1732 {
1733 int slen = GET_MODE_SIZE (GET_MODE (src));
1734 rtx mem;
1735
1736 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1737 emit_move_insn (mem, src);
1738 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1739 }
1740 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1741 && XVECLEN (dst, 0) > 1)
1742 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1743 else if (CONSTANT_P (src)
1744 || (REG_P (src) && GET_MODE (src) == mode))
1745 tmps[i] = src;
1746 else
1747 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1748 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1749 mode, mode);
1750
1751 if (shift)
1752 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1753 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1754 }
1755 }
1756
1757 /* Emit code to move a block SRC of type TYPE to a block DST,
1758 where DST is non-consecutive registers represented by a PARALLEL.
1759 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1760 if not known. */
1761
1762 void
1763 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1764 {
1765 rtx *tmps;
1766 int i;
1767
1768 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1769 emit_group_load_1 (tmps, dst, src, type, ssize);
1770
1771 /* Copy the extracted pieces into the proper (probable) hard regs. */
1772 for (i = 0; i < XVECLEN (dst, 0); i++)
1773 {
1774 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1775 if (d == NULL)
1776 continue;
1777 emit_move_insn (d, tmps[i]);
1778 }
1779 }
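
/* An illustrative sketch (not taken from GCC itself) of how a caller
   might build the PARALLEL that emit_group_load expects. The register
   number REGNO_EXAMPLE and the two-word size are placeholders; real
   callers obtain this layout from the target's calling-convention
   macros.

     rtx reg0 = gen_rtx_REG (word_mode, REGNO_EXAMPLE);
     rtx reg1 = gen_rtx_REG (word_mode, REGNO_EXAMPLE + 1);
     rtx par
       = gen_rtx_PARALLEL (BLKmode,
                           gen_rtvec (2,
                                      gen_rtx_EXPR_LIST (VOIDmode, reg0,
                                                         GEN_INT (0)),
                                      gen_rtx_EXPR_LIST (VOIDmode, reg1,
                                                         GEN_INT (UNITS_PER_WORD))));
     (* SRC_MEM is a BLKmode MEM holding the aggregate, TYPE its tree type. *)
     emit_group_load (par, src_mem, type, 2 * UNITS_PER_WORD);

   Each EXPR_LIST pairs a destination register with the byte offset of
   the piece it receives, matching the (reg, offset) pairs that
   emit_group_load_1 above reads back out of the PARALLEL.  */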
1780
1781 /* Similar, but load SRC into new pseudos in a format that looks like
1782 PARALLEL. This can later be fed to emit_group_move to get things
1783 in the right place. */
1784
1785 rtx
1786 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1787 {
1788 rtvec vec;
1789 int i;
1790
1791 vec = rtvec_alloc (XVECLEN (parallel, 0));
1792 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1793
1794 /* Convert the vector to look just like the original PARALLEL, except
1795 with the computed values. */
1796 for (i = 0; i < XVECLEN (parallel, 0); i++)
1797 {
1798 rtx e = XVECEXP (parallel, 0, i);
1799 rtx d = XEXP (e, 0);
1800
1801 if (d)
1802 {
1803 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1804 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1805 }
1806 RTVEC_ELT (vec, i) = e;
1807 }
1808
1809 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1810 }
1811
1812 /* Emit code to move a block SRC to block DST, where SRC and DST are
1813 non-consecutive groups of registers, each represented by a PARALLEL. */
1814
1815 void
1816 emit_group_move (rtx dst, rtx src)
1817 {
1818 int i;
1819
1820 gcc_assert (GET_CODE (src) == PARALLEL
1821 && GET_CODE (dst) == PARALLEL
1822 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1823
1824 /* Skip first entry if NULL. */
1825 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1826 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1827 XEXP (XVECEXP (src, 0, i), 0));
1828 }
1829
1830 /* Move a group of registers represented by a PARALLEL into pseudos. */
1831
1832 rtx
1833 emit_group_move_into_temps (rtx src)
1834 {
1835 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1836 int i;
1837
1838 for (i = 0; i < XVECLEN (src, 0); i++)
1839 {
1840 rtx e = XVECEXP (src, 0, i);
1841 rtx d = XEXP (e, 0);
1842
1843 if (d)
1844 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1845 RTVEC_ELT (vec, i) = e;
1846 }
1847
1848 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1849 }
1850
1851 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1852 where SRC is non-consecutive registers represented by a PARALLEL.
1853 SSIZE represents the total size of block ORIG_DST, or -1 if not
1854 known. */
1855
1856 void
1857 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1858 {
1859 rtx *tmps, dst;
1860 int start, finish, i;
1861 enum machine_mode m = GET_MODE (orig_dst);
1862
1863 gcc_assert (GET_CODE (src) == PARALLEL);
1864
1865 if (!SCALAR_INT_MODE_P (m)
1866 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1867 {
1868 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1869 if (imode == BLKmode)
1870 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1871 else
1872 dst = gen_reg_rtx (imode);
1873 emit_group_store (dst, src, type, ssize);
1874 if (imode != BLKmode)
1875 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1876 emit_move_insn (orig_dst, dst);
1877 return;
1878 }
1879
1880 /* Check for a NULL entry, used to indicate that the parameter goes
1881 both on the stack and in registers. */
1882 if (XEXP (XVECEXP (src, 0, 0), 0))
1883 start = 0;
1884 else
1885 start = 1;
1886 finish = XVECLEN (src, 0);
1887
1888 tmps = alloca (sizeof (rtx) * finish);
1889
1890 /* Copy the (probable) hard regs into pseudos. */
1891 for (i = start; i < finish; i++)
1892 {
1893 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1894 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1895 {
1896 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1897 emit_move_insn (tmps[i], reg);
1898 }
1899 else
1900 tmps[i] = reg;
1901 }
1902
1903 /* If we won't be storing directly into memory, protect the real destination
1904 from strange tricks we might play. */
1905 dst = orig_dst;
1906 if (GET_CODE (dst) == PARALLEL)
1907 {
1908 rtx temp;
1909
1910 /* We can get a PARALLEL dst if there is a conditional expression in
1911 a return statement. In that case, the dst and src are the same,
1912 so no action is necessary. */
1913 if (rtx_equal_p (dst, src))
1914 return;
1915
1916 /* It is unclear if we can ever reach here, but we may as well handle
1917 it. Allocate a temporary, and split this into a store/load to/from
1918 the temporary. */
1919
1920 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1921 emit_group_store (temp, src, type, ssize);
1922 emit_group_load (dst, temp, type, ssize);
1923 return;
1924 }
1925 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1926 {
1927 enum machine_mode outer = GET_MODE (dst);
1928 enum machine_mode inner;
1929 HOST_WIDE_INT bytepos;
1930 bool done = false;
1931 rtx temp;
1932
1933 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1934 dst = gen_reg_rtx (outer);
1935
1936 /* Make life a bit easier for combine. */
1937 /* If the first element of the vector is the low part
1938 of the destination mode, use a paradoxical subreg to
1939 initialize the destination. */
1940 if (start < finish)
1941 {
1942 inner = GET_MODE (tmps[start]);
1943 bytepos = subreg_lowpart_offset (outer, inner);
1944 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1945 {
1946 temp = simplify_gen_subreg (outer, tmps[start],
1947 inner, bytepos);
1948 emit_move_insn (dst, temp);
1949 done = true;
1950 start++;
1951 }
1952 }
1953
1954 /* If the first element wasn't the low part, try the last. */
1955 if (!done
1956 && start < finish - 1)
1957 {
1958 inner = GET_MODE (tmps[finish - 1]);
1959 bytepos = subreg_lowpart_offset (outer, inner);
1960 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1961 {
1962 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1963 inner, bytepos);
1964 emit_move_insn (dst, temp);
1965 done = true;
1966 finish--;
1967 }
1968 }
1969
1970 /* Otherwise, simply initialize the result to zero. */
1971 if (!done)
1972 emit_move_insn (dst, CONST0_RTX (outer));
1973 }
1974
1975 /* Process the pieces. */
1976 for (i = start; i < finish; i++)
1977 {
1978 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1979 enum machine_mode mode = GET_MODE (tmps[i]);
1980 unsigned int bytelen = GET_MODE_SIZE (mode);
1981 rtx dest = dst;
1982
1983 /* Handle trailing fragments that run over the size of the struct. */
1984 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1985 {
1986 /* store_bit_field always takes its value from the lsb.
1987 Move the fragment to the lsb if it's not already there. */
1988 if (
1989 #ifdef BLOCK_REG_PADDING
1990 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1991 == (BYTES_BIG_ENDIAN ? upward : downward)
1992 #else
1993 BYTES_BIG_ENDIAN
1994 #endif
1995 )
1996 {
1997 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1998 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1999 build_int_cst (NULL_TREE, shift),
2000 tmps[i], 0);
2001 }
2002 bytelen = ssize - bytepos;
2003 }
2004
2005 if (GET_CODE (dst) == CONCAT)
2006 {
2007 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2008 dest = XEXP (dst, 0);
2009 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2010 {
2011 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2012 dest = XEXP (dst, 1);
2013 }
2014 else
2015 {
2016 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2017 dest = assign_stack_temp (GET_MODE (dest),
2018 GET_MODE_SIZE (GET_MODE (dest)), 0);
2019 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2020 tmps[i]);
2021 dst = dest;
2022 break;
2023 }
2024 }
2025
2026 /* Optimize the access just a bit. */
2027 if (MEM_P (dest)
2028 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2029 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2030 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2031 && bytelen == GET_MODE_SIZE (mode))
2032 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2033 else
2034 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2035 mode, tmps[i]);
2036 }
2037
2038 /* Copy from the pseudo into the (probable) hard reg. */
2039 if (orig_dst != dst)
2040 emit_move_insn (orig_dst, dst);
2041 }
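
/* A hedged sketch (not GCC code) of the store direction: scatter a
   PARALLEL value, like the one sketched above for emit_group_load,
   into a freshly allocated BLKmode stack slot. SIZE_EXAMPLE is a
   placeholder byte count.

     rtx slot = assign_stack_temp (BLKmode, SIZE_EXAMPLE, 0);
     emit_group_store (slot, par, type, SIZE_EXAMPLE);

   Because SLOT is a MEM, suitably aligned pieces take the fast path
   above using adjust_address and emit_move_insn; misaligned or partial
   pieces go through store_bit_field.  */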
2042
2043 /* Generate code to copy a BLKmode object of TYPE out of a
2044 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2045 is null, a stack temporary is created. TGTBLK is returned.
2046
2047 The purpose of this routine is to handle functions that return
2048 BLKmode structures in registers. Some machines (the PA for example)
2049 want to return all small structures in registers regardless of the
2050 structure's alignment. */
2051
2052 rtx
2053 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2054 {
2055 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2056 rtx src = NULL, dst = NULL;
2057 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2058 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2059
2060 if (tgtblk == 0)
2061 {
2062 tgtblk = assign_temp (build_qualified_type (type,
2063 (TYPE_QUALS (type)
2064 | TYPE_QUAL_CONST)),
2065 0, 1, 1);
2066 preserve_temp_slots (tgtblk);
2067 }
2068
2069 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2070 into a new pseudo which is a full word. */
2071
2072 if (GET_MODE (srcreg) != BLKmode
2073 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2074 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2075
2076 /* If the structure doesn't take up a whole number of words, see whether
2077 SRCREG is padded on the left or on the right. If it's on the left,
2078 set PADDING_CORRECTION to the number of bits to skip.
2079
2080 In most ABIs, the structure will be returned at the least significant
2081 end of the register, which translates to right padding on little-endian
2082 targets and left padding on big-endian targets. The opposite
2083 holds if the structure is returned at the most significant
2084 end of the register. */
2085 if (bytes % UNITS_PER_WORD != 0
2086 && (targetm.calls.return_in_msb (type)
2087 ? !BYTES_BIG_ENDIAN
2088 : BYTES_BIG_ENDIAN))
2089 padding_correction
2090 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2091
2092 /* Copy the structure BITSIZE bits at a time.
2093
2094 We could probably emit more efficient code for machines which do not use
2095 strict alignment, but it doesn't seem worth the effort at the current
2096 time. */
2097 for (bitpos = 0, xbitpos = padding_correction;
2098 bitpos < bytes * BITS_PER_UNIT;
2099 bitpos += bitsize, xbitpos += bitsize)
2100 {
2101 /* We need a new source operand each time xbitpos is on a
2102 word boundary and when xbitpos == padding_correction
2103 (the first time through). */
2104 if (xbitpos % BITS_PER_WORD == 0
2105 || xbitpos == padding_correction)
2106 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2107 GET_MODE (srcreg));
2108
2109 /* We need a new destination operand each time bitpos is on
2110 a word boundary. */
2111 if (bitpos % BITS_PER_WORD == 0)
2112 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2113
2114 /* Use xbitpos for the source extraction (right justified) and
2115 bitpos for the destination store (left justified). */
2116 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2117 extract_bit_field (src, bitsize,
2118 xbitpos % BITS_PER_WORD, 1,
2119 NULL_RTX, word_mode, word_mode));
2120 }
2121
2122 return tgtblk;
2123 }
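
/* An illustrative sketch of a typical use: expanding a call whose
   BLKmode return value arrives in a register. RET_REG and RET_TYPE are
   placeholders for the hard return register and the aggregate's tree
   type.

     rtx blk = copy_blkmode_from_reg (NULL_RTX, ret_reg, ret_type);

   Passing a null TGTBLK makes the routine allocate a stack temporary
   for RET_TYPE and return it; passing an existing BLKmode MEM instead
   fills that block in place.  */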
2124
2125 /* Add a USE expression for REG to the (possibly empty) list pointed
2126 to by CALL_FUSAGE. REG must denote a hard register. */
2127
2128 void
2129 use_reg (rtx *call_fusage, rtx reg)
2130 {
2131 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2132
2133 *call_fusage
2134 = gen_rtx_EXPR_LIST (VOIDmode,
2135 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2136 }
2137
2138 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2139 starting at REGNO. All of these registers must be hard registers. */
2140
2141 void
2142 use_regs (rtx *call_fusage, int regno, int nregs)
2143 {
2144 int i;
2145
2146 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2147
2148 for (i = 0; i < nregs; i++)
2149 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2150 }
2151
2152 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2153 PARALLEL REGS. This is for calls that pass values in multiple
2154 non-contiguous locations. The Irix 6 ABI has examples of this. */
2155
2156 void
2157 use_group_regs (rtx *call_fusage, rtx regs)
2158 {
2159 int i;
2160
2161 for (i = 0; i < XVECLEN (regs, 0); i++)
2162 {
2163 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2164
2165 /* A NULL entry means the parameter goes both on the stack and in
2166 registers. This can also be a MEM for targets that pass values
2167 partially on the stack and partially in registers. */
2168 if (reg != 0 && REG_P (reg))
2169 use_reg (call_fusage, reg);
2170 }
2171 }
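
/* An illustrative sketch (placeholders, not actual GCC call-expansion
   code) of how these helpers accumulate a function-usage list while a
   call is being emitted:

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, gen_rtx_REG (word_mode, REGNO_EXAMPLE));
     use_regs (&call_fusage, REGNO_EXAMPLE2, 2);

   The resulting EXPR_LIST of USEs is later attached to the CALL_INSN
   (its CALL_INSN_FUNCTION_USAGE) so that data-flow passes know which
   hard registers the call reads.  */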
2172 \f
2173
2174 /* Determine whether the LEN bytes generated by CONSTFUN can be
2175 stored to memory using several move instructions. CONSTFUNDATA is
2176 a pointer which will be passed as argument in every CONSTFUN call.
2177 ALIGN is maximum alignment we can assume. Return nonzero if a
2178 call to store_by_pieces should succeed. */
2179
2180 int
2181 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2182 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2183 void *constfundata, unsigned int align)
2184 {
2185 unsigned HOST_WIDE_INT l;
2186 unsigned int max_size;
2187 HOST_WIDE_INT offset = 0;
2188 enum machine_mode mode, tmode;
2189 enum insn_code icode;
2190 int reverse;
2191 rtx cst;
2192
2193 if (len == 0)
2194 return 1;
2195
2196 if (! STORE_BY_PIECES_P (len, align))
2197 return 0;
2198
2199 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2200 if (align >= GET_MODE_ALIGNMENT (tmode))
2201 align = GET_MODE_ALIGNMENT (tmode);
2202 else
2203 {
2204 enum machine_mode xmode;
2205
2206 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2207 tmode != VOIDmode;
2208 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2209 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2210 || SLOW_UNALIGNED_ACCESS (tmode, align))
2211 break;
2212
2213 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2214 }
2215
2216 /* We would first store what we can in the largest integer mode, then go to
2217 successively smaller modes. */
2218
2219 for (reverse = 0;
2220 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2221 reverse++)
2222 {
2223 l = len;
2224 mode = VOIDmode;
2225 max_size = STORE_MAX_PIECES + 1;
2226 while (max_size > 1)
2227 {
2228 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2229 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2230 if (GET_MODE_SIZE (tmode) < max_size)
2231 mode = tmode;
2232
2233 if (mode == VOIDmode)
2234 break;
2235
2236 icode = mov_optab->handlers[(int) mode].insn_code;
2237 if (icode != CODE_FOR_nothing
2238 && align >= GET_MODE_ALIGNMENT (mode))
2239 {
2240 unsigned int size = GET_MODE_SIZE (mode);
2241
2242 while (l >= size)
2243 {
2244 if (reverse)
2245 offset -= size;
2246
2247 cst = (*constfun) (constfundata, offset, mode);
2248 if (!LEGITIMATE_CONSTANT_P (cst))
2249 return 0;
2250
2251 if (!reverse)
2252 offset += size;
2253
2254 l -= size;
2255 }
2256 }
2257
2258 max_size = GET_MODE_SIZE (mode);
2259 }
2260
2261 /* The code above should have handled everything. */
2262 gcc_assert (!l);
2263 }
2264
2265 return 1;
2266 }
2267
2268 /* Generate several move instructions to store LEN bytes generated by
2269 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2270 pointer which will be passed as argument in every CONSTFUN call.
2271 ALIGN is maximum alignment we can assume.
2272 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2273 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2274 stpcpy. */
2275
2276 rtx
2277 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2278 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2279 void *constfundata, unsigned int align, int endp)
2280 {
2281 struct store_by_pieces data;
2282
2283 if (len == 0)
2284 {
2285 gcc_assert (endp != 2);
2286 return to;
2287 }
2288
2289 gcc_assert (STORE_BY_PIECES_P (len, align));
2290 data.constfun = constfun;
2291 data.constfundata = constfundata;
2292 data.len = len;
2293 data.to = to;
2294 store_by_pieces_1 (&data, align);
2295 if (endp)
2296 {
2297 rtx to1;
2298
2299 gcc_assert (!data.reverse);
2300 if (data.autinc_to)
2301 {
2302 if (endp == 2)
2303 {
2304 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2305 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2306 else
2307 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2308 -1));
2309 }
2310 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2311 data.offset);
2312 }
2313 else
2314 {
2315 if (endp == 2)
2316 --data.offset;
2317 to1 = adjust_address (data.to, QImode, data.offset);
2318 }
2319 return to1;
2320 }
2321 else
2322 return data.to;
2323 }
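
/* A minimal sketch of driving store_by_pieces with a user-supplied
   CONSTFUN. This callback simply produces zero for every piece, which
   mirrors clear_by_pieces_1 below; a real caller (e.g. the builtin
   string expanders) would instead synthesize a constant for the MODE
   bytes at OFFSET. Names prefixed with example_ are placeholders.

     static rtx
     example_const_zero (void *data ATTRIBUTE_UNUSED,
                         HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                         enum machine_mode mode)
     {
       return CONST0_RTX (mode);
     }

     ...
     if (can_store_by_pieces (len, example_const_zero, NULL,
                              MEM_ALIGN (dest_mem)))
       store_by_pieces (dest_mem, len, example_const_zero, NULL,
                        MEM_ALIGN (dest_mem), 0);

   The can_store_by_pieces check matters because CONSTFUN may yield
   constants that fail LEGITIMATE_CONSTANT_P on the target, in which
   case the caller must fall back to another strategy.  */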
2324
2325 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2326 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2327
2328 static void
2329 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2330 {
2331 struct store_by_pieces data;
2332
2333 if (len == 0)
2334 return;
2335
2336 data.constfun = clear_by_pieces_1;
2337 data.constfundata = NULL;
2338 data.len = len;
2339 data.to = to;
2340 store_by_pieces_1 (&data, align);
2341 }
2342
2343 /* Callback routine for clear_by_pieces.
2344 Return const0_rtx unconditionally. */
2345
2346 static rtx
2347 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2348 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2349 enum machine_mode mode ATTRIBUTE_UNUSED)
2350 {
2351 return const0_rtx;
2352 }
2353
2354 /* Subroutine of clear_by_pieces and store_by_pieces.
2355 Generate several move instructions to store LEN bytes of block TO. (A MEM
2356 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2357
2358 static void
2359 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2360 unsigned int align ATTRIBUTE_UNUSED)
2361 {
2362 rtx to_addr = XEXP (data->to, 0);
2363 unsigned int max_size = STORE_MAX_PIECES + 1;
2364 enum machine_mode mode = VOIDmode, tmode;
2365 enum insn_code icode;
2366
2367 data->offset = 0;
2368 data->to_addr = to_addr;
2369 data->autinc_to
2370 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2371 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2372
2373 data->explicit_inc_to = 0;
2374 data->reverse
2375 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2376 if (data->reverse)
2377 data->offset = data->len;
2378
2379 /* If storing requires more than two move insns,
2380 copy addresses to registers (to make displacements shorter)
2381 and use post-increment if available. */
2382 if (!data->autinc_to
2383 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2384 {
2385 /* Determine the main mode we'll be using. */
2386 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2387 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2388 if (GET_MODE_SIZE (tmode) < max_size)
2389 mode = tmode;
2390
2391 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2392 {
2393 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2394 data->autinc_to = 1;
2395 data->explicit_inc_to = -1;
2396 }
2397
2398 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2399 && ! data->autinc_to)
2400 {
2401 data->to_addr = copy_addr_to_reg (to_addr);
2402 data->autinc_to = 1;
2403 data->explicit_inc_to = 1;
2404 }
2405
2406 if ( !data->autinc_to && CONSTANT_P (to_addr))
2407 data->to_addr = copy_addr_to_reg (to_addr);
2408 }
2409
2410 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2411 if (align >= GET_MODE_ALIGNMENT (tmode))
2412 align = GET_MODE_ALIGNMENT (tmode);
2413 else
2414 {
2415 enum machine_mode xmode;
2416
2417 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2418 tmode != VOIDmode;
2419 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2420 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2421 || SLOW_UNALIGNED_ACCESS (tmode, align))
2422 break;
2423
2424 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2425 }
2426
2427 /* First store what we can in the largest integer mode, then go to
2428 successively smaller modes. */
2429
2430 while (max_size > 1)
2431 {
2432 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2433 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2434 if (GET_MODE_SIZE (tmode) < max_size)
2435 mode = tmode;
2436
2437 if (mode == VOIDmode)
2438 break;
2439
2440 icode = mov_optab->handlers[(int) mode].insn_code;
2441 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2442 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2443
2444 max_size = GET_MODE_SIZE (mode);
2445 }
2446
2447 /* The code above should have handled everything. */
2448 gcc_assert (!data->len);
2449 }
2450
2451 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2452 with move instructions for mode MODE. GENFUN is the gen_... function
2453 to make a move insn for that mode. DATA has all the other info. */
2454
2455 static void
2456 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2457 struct store_by_pieces *data)
2458 {
2459 unsigned int size = GET_MODE_SIZE (mode);
2460 rtx to1, cst;
2461
2462 while (data->len >= size)
2463 {
2464 if (data->reverse)
2465 data->offset -= size;
2466
2467 if (data->autinc_to)
2468 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2469 data->offset);
2470 else
2471 to1 = adjust_address (data->to, mode, data->offset);
2472
2473 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2474 emit_insn (gen_add2_insn (data->to_addr,
2475 GEN_INT (-(HOST_WIDE_INT) size)));
2476
2477 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2478 emit_insn ((*genfun) (to1, cst));
2479
2480 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2481 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2482
2483 if (! data->reverse)
2484 data->offset += size;
2485
2486 data->len -= size;
2487 }
2488 }
2489 \f
2490 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2491 its length in bytes. */
2492
2493 rtx
2494 clear_storage (rtx object, rtx size, enum block_op_methods method)
2495 {
2496 enum machine_mode mode = GET_MODE (object);
2497 unsigned int align;
2498
2499 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2500
2501 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2502 just move a zero. Otherwise, do this a piece at a time. */
2503 if (mode != BLKmode
2504 && GET_CODE (size) == CONST_INT
2505 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2506 {
2507 rtx zero = CONST0_RTX (mode);
2508 if (zero != NULL)
2509 {
2510 emit_move_insn (object, zero);
2511 return NULL;
2512 }
2513
2514 if (COMPLEX_MODE_P (mode))
2515 {
2516 zero = CONST0_RTX (GET_MODE_INNER (mode));
2517 if (zero != NULL)
2518 {
2519 write_complex_part (object, zero, 0);
2520 write_complex_part (object, zero, 1);
2521 return NULL;
2522 }
2523 }
2524 }
2525
2526 if (size == const0_rtx)
2527 return NULL;
2528
2529 align = MEM_ALIGN (object);
2530
2531 if (GET_CODE (size) == CONST_INT
2532 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2533 clear_by_pieces (object, INTVAL (size), align);
2534 else if (set_storage_via_setmem (object, size, const0_rtx, align))
2535 ;
2536 else
2537 return clear_storage_via_libcall (object, size,
2538 method == BLOCK_OP_TAILCALL);
2539
2540 return NULL;
2541 }
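
/* A short usage sketch: zeroing a 64-byte BLKmode stack slot (the size
   is arbitrary).

     rtx buf = assign_stack_temp (BLKmode, 64, 0);
     clear_storage (buf, GEN_INT (64), BLOCK_OP_NORMAL);

   Depending on size, alignment and target support this expands to
   clear_by_pieces, a setmem pattern, or the memset libcall below.  */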
2542
2543 /* A subroutine of clear_storage. Expand a call to memset.
2544 Return the return value of memset, 0 otherwise. */
2545
2546 static rtx
2547 clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2548 {
2549 tree call_expr, arg_list, fn, object_tree, size_tree;
2550 enum machine_mode size_mode;
2551 rtx retval;
2552
2553 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2554 place those new pseudos into a VAR_DECL and use them later. */
2555
2556 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2557
2558 size_mode = TYPE_MODE (sizetype);
2559 size = convert_to_mode (size_mode, size, 1);
2560 size = copy_to_mode_reg (size_mode, size);
2561
2562 /* It is incorrect to use the libcall calling conventions to call
2563 memset in this context. This could be a user call to memset and
2564 the user may wish to examine the return value from memset. For
2565 targets where libcalls and normal calls have different conventions
2566 for returning pointers, we could end up generating incorrect code. */
2567
2568 object_tree = make_tree (ptr_type_node, object);
2569 size_tree = make_tree (sizetype, size);
2570
2571 fn = clear_storage_libcall_fn (true);
2572 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2573 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2574 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2575
2576 /* Now we have to build up the CALL_EXPR itself. */
2577 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2578 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2579 call_expr, arg_list, NULL_TREE);
2580 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2581
2582 retval = expand_normal (call_expr);
2583
2584 return retval;
2585 }
2586
2587 /* A subroutine of clear_storage_via_libcall. Create the tree node
2588 for the function we use for block clears. The first time FOR_CALL
2589 is true, we call assemble_external. */
2590
2591 static GTY(()) tree block_clear_fn;
2592
2593 void
2594 init_block_clear_fn (const char *asmspec)
2595 {
2596 if (!block_clear_fn)
2597 {
2598 tree fn, args;
2599
2600 fn = get_identifier ("memset");
2601 args = build_function_type_list (ptr_type_node, ptr_type_node,
2602 integer_type_node, sizetype,
2603 NULL_TREE);
2604
2605 fn = build_decl (FUNCTION_DECL, fn, args);
2606 DECL_EXTERNAL (fn) = 1;
2607 TREE_PUBLIC (fn) = 1;
2608 DECL_ARTIFICIAL (fn) = 1;
2609 TREE_NOTHROW (fn) = 1;
2610 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2611 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2612
2613 block_clear_fn = fn;
2614 }
2615
2616 if (asmspec)
2617 set_user_assembler_name (block_clear_fn, asmspec);
2618 }
2619
2620 static tree
2621 clear_storage_libcall_fn (int for_call)
2622 {
2623 static bool emitted_extern;
2624
2625 if (!block_clear_fn)
2626 init_block_clear_fn (NULL);
2627
2628 if (for_call && !emitted_extern)
2629 {
2630 emitted_extern = true;
2631 make_decl_rtl (block_clear_fn);
2632 assemble_external (block_clear_fn);
2633 }
2634
2635 return block_clear_fn;
2636 }
2637 \f
2638 /* Expand a setmem pattern; return true if successful. */
2639
2640 bool
2641 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2642 {
2643 /* Try the most limited insn first, because there's no point
2644 including more than one in the machine description unless
2645 the more limited one has some advantage. */
2646
2647 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2648 enum machine_mode mode;
2649
2650 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2651 mode = GET_MODE_WIDER_MODE (mode))
2652 {
2653 enum insn_code code = setmem_optab[(int) mode];
2654 insn_operand_predicate_fn pred;
2655
2656 if (code != CODE_FOR_nothing
2657 /* We don't need MODE to be narrower than
2658 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2659 the mode mask, as it is returned by the macro, it will
2660 definitely be less than the actual mode mask. */
2661 && ((GET_CODE (size) == CONST_INT
2662 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2663 <= (GET_MODE_MASK (mode) >> 1)))
2664 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2665 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2666 || (*pred) (object, BLKmode))
2667 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2668 || (*pred) (opalign, VOIDmode)))
2669 {
2670 rtx opsize, opchar;
2671 enum machine_mode char_mode;
2672 rtx last = get_last_insn ();
2673 rtx pat;
2674
2675 opsize = convert_to_mode (mode, size, 1);
2676 pred = insn_data[(int) code].operand[1].predicate;
2677 if (pred != 0 && ! (*pred) (opsize, mode))
2678 opsize = copy_to_mode_reg (mode, opsize);
2679
2680 opchar = val;
2681 char_mode = insn_data[(int) code].operand[2].mode;
2682 if (char_mode != VOIDmode)
2683 {
2684 opchar = convert_to_mode (char_mode, opchar, 1);
2685 pred = insn_data[(int) code].operand[2].predicate;
2686 if (pred != 0 && ! (*pred) (opchar, char_mode))
2687 opchar = copy_to_mode_reg (char_mode, opchar);
2688 }
2689
2690 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2691 if (pat)
2692 {
2693 emit_insn (pat);
2694 return true;
2695 }
2696 else
2697 delete_insns_since (last);
2698 }
2699 }
2700
2701 return false;
2702 }
2703
2704 \f
2705 /* Write to one of the components of the complex value CPLX. Write VAL to
2706 the real part if IMAG_P is false, and the imaginary part if it's true. */
2707
2708 static void
2709 write_complex_part (rtx cplx, rtx val, bool imag_p)
2710 {
2711 enum machine_mode cmode;
2712 enum machine_mode imode;
2713 unsigned ibitsize;
2714
2715 if (GET_CODE (cplx) == CONCAT)
2716 {
2717 emit_move_insn (XEXP (cplx, imag_p), val);
2718 return;
2719 }
2720
2721 cmode = GET_MODE (cplx);
2722 imode = GET_MODE_INNER (cmode);
2723 ibitsize = GET_MODE_BITSIZE (imode);
2724
2725 /* For MEMs simplify_gen_subreg may generate an invalid new address
2726 because, e.g., the original address is considered mode-dependent
2727 by the target, which restricts simplify_subreg from invoking
2728 adjust_address_nv. Instead of preparing fallback support for an
2729 invalid address, we call adjust_address_nv directly. */
2730 if (MEM_P (cplx))
2731 {
2732 emit_move_insn (adjust_address_nv (cplx, imode,
2733 imag_p ? GET_MODE_SIZE (imode) : 0),
2734 val);
2735 return;
2736 }
2737
2738 /* If the sub-object is at least word sized, then we know that subregging
2739 will work. This special case is important, since store_bit_field
2740 wants to operate on integer modes, and there's rarely an OImode to
2741 correspond to TCmode. */
2742 if (ibitsize >= BITS_PER_WORD
2743 /* For hard regs we have exact predicates. Assume we can split
2744 the original object if it spans an even number of hard regs.
2745 This special case is important for SCmode on 64-bit platforms
2746 where the natural size of floating-point regs is 32-bit. */
2747 || (REG_P (cplx)
2748 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2749 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2750 {
2751 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2752 imag_p ? GET_MODE_SIZE (imode) : 0);
2753 if (part)
2754 {
2755 emit_move_insn (part, val);
2756 return;
2757 }
2758 else
2759 /* simplify_gen_subreg may fail for sub-word MEMs. */
2760 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2761 }
2762
2763 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2764 }
2765
2766 /* Extract one of the components of the complex value CPLX. Extract the
2767 real part if IMAG_P is false, and the imaginary part if it's true. */
2768
2769 static rtx
2770 read_complex_part (rtx cplx, bool imag_p)
2771 {
2772 enum machine_mode cmode, imode;
2773 unsigned ibitsize;
2774
2775 if (GET_CODE (cplx) == CONCAT)
2776 return XEXP (cplx, imag_p);
2777
2778 cmode = GET_MODE (cplx);
2779 imode = GET_MODE_INNER (cmode);
2780 ibitsize = GET_MODE_BITSIZE (imode);
2781
2782 /* Special case reads from complex constants that got spilled to memory. */
2783 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2784 {
2785 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2786 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2787 {
2788 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2789 if (CONSTANT_CLASS_P (part))
2790 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2791 }
2792 }
2793
2794 /* For MEMs simplify_gen_subreg may generate an invalid new address
2795 because, e.g., the original address is considered mode-dependent
2796 by the target, which restricts simplify_subreg from invoking
2797 adjust_address_nv. Instead of preparing fallback support for an
2798 invalid address, we call adjust_address_nv directly. */
2799 if (MEM_P (cplx))
2800 return adjust_address_nv (cplx, imode,
2801 imag_p ? GET_MODE_SIZE (imode) : 0);
2802
2803 /* If the sub-object is at least word sized, then we know that subregging
2804 will work. This special case is important, since extract_bit_field
2805 wants to operate on integer modes, and there's rarely an OImode to
2806 correspond to TCmode. */
2807 if (ibitsize >= BITS_PER_WORD
2808 /* For hard regs we have exact predicates. Assume we can split
2809 the original object if it spans an even number of hard regs.
2810 This special case is important for SCmode on 64-bit platforms
2811 where the natural size of floating-point regs is 32-bit. */
2812 || (REG_P (cplx)
2813 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2814 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2815 {
2816 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2817 imag_p ? GET_MODE_SIZE (imode) : 0);
2818 if (ret)
2819 return ret;
2820 else
2821 /* simplify_gen_subreg may fail for sub-word MEMs. */
2822 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2823 }
2824
2825 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2826 true, NULL_RTX, imode, imode);
2827 }
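
/* An illustrative sketch of composing and decomposing a complex value
   with the two helpers above, here for an SCmode pseudo:

     rtx c = gen_reg_rtx (SCmode);
     write_complex_part (c, CONST0_RTX (SFmode), false);
     write_complex_part (c, CONST0_RTX (SFmode), true);
     rtx re = read_complex_part (c, false);

   The first write stores 0.0 into the real part and the second into
   the imaginary part; the read extracts the real part again. For a
   pseudo this goes through the subreg path; for a MEM it would use
   adjust_address_nv as described above.  */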
2828 \f
2829 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2830 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2831 represented in NEW_MODE. If FORCE is true, this will never happen, as
2832 we'll force-create a SUBREG if needed. */
2833
2834 static rtx
2835 emit_move_change_mode (enum machine_mode new_mode,
2836 enum machine_mode old_mode, rtx x, bool force)
2837 {
2838 rtx ret;
2839
2840 if (MEM_P (x))
2841 {
2842 /* We don't have to worry about changing the address since the
2843 size in bytes is supposed to be the same. */
2844 if (reload_in_progress)
2845 {
2846 /* Copy the MEM to change the mode and move any
2847 substitutions from the old MEM to the new one. */
2848 ret = adjust_address_nv (x, new_mode, 0);
2849 copy_replacements (x, ret);
2850 }
2851 else
2852 ret = adjust_address (x, new_mode, 0);
2853 }
2854 else
2855 {
2856 /* Note that we do want simplify_subreg's behavior of validating
2857 that the new mode is ok for a hard register. If we were to use
2858 simplify_gen_subreg, we would create the subreg, but would
2859 probably run into the target not being able to implement it. */
2860 /* Except, of course, when FORCE is true, in which case this is exactly
2861 what we want; that is needed for CCmodes on some targets. */
2862 if (force)
2863 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2864 else
2865 ret = simplify_subreg (new_mode, x, old_mode, 0);
2866 }
2867
2868 return ret;
2869 }
2870
2871 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2872 an integer mode of the same size as MODE. Returns the instruction
2873 emitted, or NULL if such a move could not be generated. */
2874
2875 static rtx
2876 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2877 {
2878 enum machine_mode imode;
2879 enum insn_code code;
2880
2881 /* There must exist a mode of the exact size we require. */
2882 imode = int_mode_for_mode (mode);
2883 if (imode == BLKmode)
2884 return NULL_RTX;
2885
2886 /* The target must support moves in this mode. */
2887 code = mov_optab->handlers[imode].insn_code;
2888 if (code == CODE_FOR_nothing)
2889 return NULL_RTX;
2890
2891 x = emit_move_change_mode (imode, mode, x, force);
2892 if (x == NULL_RTX)
2893 return NULL_RTX;
2894 y = emit_move_change_mode (imode, mode, y, force);
2895 if (y == NULL_RTX)
2896 return NULL_RTX;
2897 return emit_insn (GEN_FCN (code) (x, y));
2898 }
2899
2900 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2901 Return an equivalent MEM that does not use an auto-increment. */
2902
2903 static rtx
2904 emit_move_resolve_push (enum machine_mode mode, rtx x)
2905 {
2906 enum rtx_code code = GET_CODE (XEXP (x, 0));
2907 HOST_WIDE_INT adjust;
2908 rtx temp;
2909
2910 adjust = GET_MODE_SIZE (mode);
2911 #ifdef PUSH_ROUNDING
2912 adjust = PUSH_ROUNDING (adjust);
2913 #endif
2914 if (code == PRE_DEC || code == POST_DEC)
2915 adjust = -adjust;
2916 else if (code == PRE_MODIFY || code == POST_MODIFY)
2917 {
2918 rtx expr = XEXP (XEXP (x, 0), 1);
2919 HOST_WIDE_INT val;
2920
2921 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2922 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2923 val = INTVAL (XEXP (expr, 1));
2924 if (GET_CODE (expr) == MINUS)
2925 val = -val;
2926 gcc_assert (adjust == val || adjust == -val);
2927 adjust = val;
2928 }
2929
2930 /* Do not use anti_adjust_stack, since we don't want to update
2931 stack_pointer_delta. */
2932 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2933 GEN_INT (adjust), stack_pointer_rtx,
2934 0, OPTAB_LIB_WIDEN);
2935 if (temp != stack_pointer_rtx)
2936 emit_move_insn (stack_pointer_rtx, temp);
2937
2938 switch (code)
2939 {
2940 case PRE_INC:
2941 case PRE_DEC:
2942 case PRE_MODIFY:
2943 temp = stack_pointer_rtx;
2944 break;
2945 case POST_INC:
2946 case POST_DEC:
2947 case POST_MODIFY:
2948 temp = plus_constant (stack_pointer_rtx, -adjust);
2949 break;
2950 default:
2951 gcc_unreachable ();
2952 }
2953
2954 return replace_equiv_address (x, temp);
2955 }
2956
2957 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2958 X is known to satisfy push_operand, and MODE is known to be complex.
2959 Returns the last instruction emitted. */
2960
2961 static rtx
2962 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2963 {
2964 enum machine_mode submode = GET_MODE_INNER (mode);
2965 bool imag_first;
2966
2967 #ifdef PUSH_ROUNDING
2968 unsigned int submodesize = GET_MODE_SIZE (submode);
2969
2970 /* If we push onto the stack but the size is smaller than what the
2971 machine can push exactly, we need to use move instructions. */
2972 if (PUSH_ROUNDING (submodesize) != submodesize)
2973 {
2974 x = emit_move_resolve_push (mode, x);
2975 return emit_move_insn (x, y);
2976 }
2977 #endif
2978
2979 /* Note that the real part always precedes the imag part in memory
2980 regardless of the machine's endianness. */
2981 switch (GET_CODE (XEXP (x, 0)))
2982 {
2983 case PRE_DEC:
2984 case POST_DEC:
2985 imag_first = true;
2986 break;
2987 case PRE_INC:
2988 case POST_INC:
2989 imag_first = false;
2990 break;
2991 default:
2992 gcc_unreachable ();
2993 }
2994
2995 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2996 read_complex_part (y, imag_first));
2997 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2998 read_complex_part (y, !imag_first));
2999 }
3000
3001 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3002 MODE is known to be complex. Returns the last instruction emitted. */
3003
3004 static rtx
3005 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3006 {
3007 bool try_int;
3008
3009 /* Need to take special care for pushes, to maintain proper ordering
3010 of the data, and possibly extra padding. */
3011 if (push_operand (x, mode))
3012 return emit_move_complex_push (mode, x, y);
3013
3014 /* See if we can coerce the target into moving both values at once. */
3015
3016 /* Move floating point as parts. */
3017 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3018 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3019 try_int = false;
3020 /* Not possible if the values are inherently not adjacent. */
3021 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3022 try_int = false;
3023 /* Is possible if both are registers (or subregs of registers). */
3024 else if (register_operand (x, mode) && register_operand (y, mode))
3025 try_int = true;
3026 /* If one of the operands is a memory, and alignment constraints
3027 are friendly enough, we may be able to do combined memory operations.
3028 We do not attempt this if Y is a constant because that combination is
3029 usually better with the by-parts thing below. */
3030 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3031 && (!STRICT_ALIGNMENT
3032 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3033 try_int = true;
3034 else
3035 try_int = false;
3036
3037 if (try_int)
3038 {
3039 rtx ret;
3040
3041 /* For memory to memory moves, optimal behavior can be had with the
3042 existing block move logic. */
3043 if (MEM_P (x) && MEM_P (y))
3044 {
3045 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3046 BLOCK_OP_NO_LIBCALL);
3047 return get_last_insn ();
3048 }
3049
3050 ret = emit_move_via_integer (mode, x, y, true);
3051 if (ret)
3052 return ret;
3053 }
3054
3055 /* Show the output dies here. This is necessary for SUBREGs
3056 of pseudos since we cannot track their lifetimes correctly;
3057 hard regs shouldn't appear here except as return values. */
3058 if (!reload_completed && !reload_in_progress
3059 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3060 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3061
3062 write_complex_part (x, read_complex_part (y, false), false);
3063 write_complex_part (x, read_complex_part (y, true), true);
3064 return get_last_insn ();
3065 }
3066
3067 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3068 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3069
3070 static rtx
3071 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3072 {
3073 rtx ret;
3074
3075 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3076 if (mode != CCmode)
3077 {
3078 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3079 if (code != CODE_FOR_nothing)
3080 {
3081 x = emit_move_change_mode (CCmode, mode, x, true);
3082 y = emit_move_change_mode (CCmode, mode, y, true);
3083 return emit_insn (GEN_FCN (code) (x, y));
3084 }
3085 }
3086
3087 /* Otherwise, find the MODE_INT mode of the same width. */
3088 ret = emit_move_via_integer (mode, x, y, false);
3089 gcc_assert (ret != NULL);
3090 return ret;
3091 }
3092
3093 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3094 MODE is any multi-word or full-word mode that lacks a move_insn
3095 pattern. Note that you will get better code if you define such
3096 patterns, even if they must turn into multiple assembler instructions. */
3097
3098 static rtx
3099 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3100 {
3101 rtx last_insn = 0;
3102 rtx seq, inner;
3103 bool need_clobber;
3104 int i;
3105
3106 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3107
3108 /* If X is a push on the stack, do the push now and replace
3109 X with a reference to the stack pointer. */
3110 if (push_operand (x, mode))
3111 x = emit_move_resolve_push (mode, x);
3112
3113 /* If we are in reload, see if either operand is a MEM whose address
3114 is scheduled for replacement. */
3115 if (reload_in_progress && MEM_P (x)
3116 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3117 x = replace_equiv_address_nv (x, inner);
3118 if (reload_in_progress && MEM_P (y)
3119 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3120 y = replace_equiv_address_nv (y, inner);
3121
3122 start_sequence ();
3123
3124 need_clobber = false;
3125 for (i = 0;
3126 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3127 i++)
3128 {
3129 rtx xpart = operand_subword (x, i, 1, mode);
3130 rtx ypart = operand_subword (y, i, 1, mode);
3131
3132 /* If we can't get a part of Y, put Y into memory if it is a
3133 constant. Otherwise, force it into a register. Then we must
3134 be able to get a part of Y. */
3135 if (ypart == 0 && CONSTANT_P (y))
3136 {
3137 y = use_anchored_address (force_const_mem (mode, y));
3138 ypart = operand_subword (y, i, 1, mode);
3139 }
3140 else if (ypart == 0)
3141 ypart = operand_subword_force (y, i, mode);
3142
3143 gcc_assert (xpart && ypart);
3144
3145 need_clobber |= (GET_CODE (xpart) == SUBREG);
3146
3147 last_insn = emit_move_insn (xpart, ypart);
3148 }
3149
3150 seq = get_insns ();
3151 end_sequence ();
3152
3153 /* Show the output dies here. This is necessary for SUBREGs
3154 of pseudos since we cannot track their lifetimes correctly;
3155 hard regs shouldn't appear here except as return values.
3156 We never want to emit such a clobber after reload. */
3157 if (x != y
3158 && ! (reload_in_progress || reload_completed)
3159 && need_clobber != 0)
3160 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3161
3162 emit_insn (seq);
3163
3164 return last_insn;
3165 }
3166
3167 /* Low level part of emit_move_insn.
3168 Called just like emit_move_insn, but assumes X and Y
3169 are basically valid. */
3170
3171 rtx
3172 emit_move_insn_1 (rtx x, rtx y)
3173 {
3174 enum machine_mode mode = GET_MODE (x);
3175 enum insn_code code;
3176
3177 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3178
3179 code = mov_optab->handlers[mode].insn_code;
3180 if (code != CODE_FOR_nothing)
3181 return emit_insn (GEN_FCN (code) (x, y));
3182
3183 /* Expand complex moves by moving real part and imag part. */
3184 if (COMPLEX_MODE_P (mode))
3185 return emit_move_complex (mode, x, y);
3186
3187 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3188 {
3189 rtx result = emit_move_via_integer (mode, x, y, true);
3190
3191 /* If we can't find an integer mode, fall back to a multi-word move. */
3192 if (result)
3193 return result;
3194 else
3195 return emit_move_multi_word (mode, x, y);
3196 }
3197
3198 if (GET_MODE_CLASS (mode) == MODE_CC)
3199 return emit_move_ccmode (mode, x, y);
3200
3201 /* Try using a move pattern for the corresponding integer mode. This is
3202 only safe when simplify_subreg can convert MODE constants into integer
3203 constants. At present, it can only do this reliably if the value
3204 fits within a HOST_WIDE_INT. */
3205 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3206 {
3207 rtx ret = emit_move_via_integer (mode, x, y, false);
3208 if (ret)
3209 return ret;
3210 }
3211
3212 return emit_move_multi_word (mode, x, y);
3213 }
3214
3215 /* Generate code to copy Y into X.
3216 Both Y and X must have the same mode, except that
3217 Y can be a constant with VOIDmode.
3218 This mode cannot be BLKmode; use emit_block_move for that.
3219
3220 Return the last instruction emitted. */
3221
3222 rtx
3223 emit_move_insn (rtx x, rtx y)
3224 {
3225 enum machine_mode mode = GET_MODE (x);
3226 rtx y_cst = NULL_RTX;
3227 rtx last_insn, set;
3228
3229 gcc_assert (mode != BLKmode
3230 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3231
3232 if (CONSTANT_P (y))
3233 {
3234 if (optimize
3235 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3236 && (last_insn = compress_float_constant (x, y)))
3237 return last_insn;
3238
3239 y_cst = y;
3240
3241 if (!LEGITIMATE_CONSTANT_P (y))
3242 {
3243 y = force_const_mem (mode, y);
3244
3245 /* If the target's cannot_force_const_mem prevented the spill,
3246 assume that the target's move expanders will also take care
3247 of the non-legitimate constant. */
3248 if (!y)
3249 y = y_cst;
3250 else
3251 y = use_anchored_address (y);
3252 }
3253 }
3254
3255 /* If X or Y are memory references, verify that their addresses are valid
3256 for the machine. */
3257 if (MEM_P (x)
3258 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3259 && ! push_operand (x, GET_MODE (x)))
3260 || (flag_force_addr
3261 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3262 x = validize_mem (x);
3263
3264 if (MEM_P (y)
3265 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3266 || (flag_force_addr
3267 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3268 y = validize_mem (y);
3269
3270 gcc_assert (mode != BLKmode);
3271
3272 last_insn = emit_move_insn_1 (x, y);
3273
3274 if (y_cst && REG_P (x)
3275 && (set = single_set (last_insn)) != NULL_RTX
3276 && SET_DEST (set) == x
3277 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3278 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3279
3280 return last_insn;
3281 }
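
/* The simplest possible usage, for illustration: load the constant 42
   into a fresh SImode pseudo.

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, GEN_INT (42));

   Constants that are not LEGITIMATE_CONSTANT_P are spilled to the
   constant pool first, and when the destination is a register a
   REG_EQUAL note recording the original constant is attached, as done
   at the end of the function above.  */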
3282
3283 /* If Y is representable exactly in a narrower mode, and the target can
3284 perform the extension directly from constant or memory, then emit the
3285 move as an extension. */
3286
3287 static rtx
3288 compress_float_constant (rtx x, rtx y)
3289 {
3290 enum machine_mode dstmode = GET_MODE (x);
3291 enum machine_mode orig_srcmode = GET_MODE (y);
3292 enum machine_mode srcmode;
3293 REAL_VALUE_TYPE r;
3294 int oldcost, newcost;
3295
3296 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3297
3298 if (LEGITIMATE_CONSTANT_P (y))
3299 oldcost = rtx_cost (y, SET);
3300 else
3301 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3302
3303 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3304 srcmode != orig_srcmode;
3305 srcmode = GET_MODE_WIDER_MODE (srcmode))
3306 {
3307 enum insn_code ic;
3308 rtx trunc_y, last_insn;
3309
3310 /* Skip if the target can't extend this way. */
3311 ic = can_extend_p (dstmode, srcmode, 0);
3312 if (ic == CODE_FOR_nothing)
3313 continue;
3314
3315 /* Skip if the narrowed value isn't exact. */
3316 if (! exact_real_truncate (srcmode, &r))
3317 continue;
3318
3319 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3320
3321 if (LEGITIMATE_CONSTANT_P (trunc_y))
3322 {
3323 /* Skip if the target needs extra instructions to perform
3324 the extension. */
3325 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3326 continue;
3327 /* This is valid, but may not be cheaper than the original. */
3328 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3329 if (oldcost < newcost)
3330 continue;
3331 }
3332 else if (float_extend_from_mem[dstmode][srcmode])
3333 {
3334 trunc_y = force_const_mem (srcmode, trunc_y);
3335 /* This is valid, but may not be cheaper than the original. */
3336 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3337 if (oldcost < newcost)
3338 continue;
3339 trunc_y = validize_mem (trunc_y);
3340 }
3341 else
3342 continue;
3343
3344 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3345 last_insn = get_last_insn ();
3346
3347 if (REG_P (x))
3348 set_unique_reg_note (last_insn, REG_EQUAL, y);
3349
3350 return last_insn;
3351 }
3352
3353 return NULL_RTX;
3354 }
3355 \f
3356 /* Pushing data onto the stack. */
3357
3358 /* Push a block of length SIZE (perhaps variable)
3359 and return an rtx to address the beginning of the block.
3360 The value may be virtual_outgoing_args_rtx.
3361
3362 EXTRA is the number of bytes of padding to push in addition to SIZE.
3363 BELOW nonzero means this padding comes at low addresses;
3364 otherwise, the padding comes at high addresses. */
3365
3366 rtx
3367 push_block (rtx size, int extra, int below)
3368 {
3369 rtx temp;
3370
3371 size = convert_modes (Pmode, ptr_mode, size, 1);
3372 if (CONSTANT_P (size))
3373 anti_adjust_stack (plus_constant (size, extra));
3374 else if (REG_P (size) && extra == 0)
3375 anti_adjust_stack (size);
3376 else
3377 {
3378 temp = copy_to_mode_reg (Pmode, size);
3379 if (extra != 0)
3380 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3381 temp, 0, OPTAB_LIB_WIDEN);
3382 anti_adjust_stack (temp);
3383 }
3384
3385 #ifndef STACK_GROWS_DOWNWARD
3386 if (0)
3387 #else
3388 if (1)
3389 #endif
3390 {
3391 temp = virtual_outgoing_args_rtx;
3392 if (extra != 0 && below)
3393 temp = plus_constant (temp, extra);
3394 }
3395 else
3396 {
3397 if (GET_CODE (size) == CONST_INT)
3398 temp = plus_constant (virtual_outgoing_args_rtx,
3399 -INTVAL (size) - (below ? 0 : extra));
3400 else if (extra != 0 && !below)
3401 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3402 negate_rtx (Pmode, plus_constant (size, extra)));
3403 else
3404 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3405 negate_rtx (Pmode, size));
3406 }
3407
3408 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3409 }
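
/* A hedged sketch of pushing a 32-byte block (the size is arbitrary)
   and obtaining a MEM through which to fill it:

     rtx addr = push_block (GEN_INT (32), 0, 0);
     rtx blk = gen_rtx_MEM (BLKmode, addr);

   On a downward-growing stack the returned address is simply
   virtual_outgoing_args_rtx (possibly offset by EXTRA); otherwise it is
   an offsetted form of it. Either way it addresses the start of the
   newly allocated block.  */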
3410
3411 #ifdef PUSH_ROUNDING
3412
3413 /* Emit single push insn. */
3414
3415 static void
3416 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3417 {
3418 rtx dest_addr;
3419 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3420 rtx dest;
3421 enum insn_code icode;
3422 insn_operand_predicate_fn pred;
3423
3424 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3425 /* If there is a push pattern, use it. Otherwise try the old way of
3426 throwing a MEM representing the push operation to the move expander. */
3427 icode = push_optab->handlers[(int) mode].insn_code;
3428 if (icode != CODE_FOR_nothing)
3429 {
3430 if (((pred = insn_data[(int) icode].operand[0].predicate)
3431 && !((*pred) (x, mode))))
3432 x = force_reg (mode, x);
3433 emit_insn (GEN_FCN (icode) (x));
3434 return;
3435 }
3436 if (GET_MODE_SIZE (mode) == rounded_size)
3437 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3438 /* If we are to pad downward, adjust the stack pointer first and
3439 then store X into the stack location using an offset. This is
3440 because emit_move_insn does not know how to pad; it does not have
3441 access to type. */
3442 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3443 {
3444 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3445 HOST_WIDE_INT offset;
3446
3447 emit_move_insn (stack_pointer_rtx,
3448 expand_binop (Pmode,
3449 #ifdef STACK_GROWS_DOWNWARD
3450 sub_optab,
3451 #else
3452 add_optab,
3453 #endif
3454 stack_pointer_rtx,
3455 GEN_INT (rounded_size),
3456 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3457
3458 offset = (HOST_WIDE_INT) padding_size;
3459 #ifdef STACK_GROWS_DOWNWARD
3460 if (STACK_PUSH_CODE == POST_DEC)
3461 /* We have already decremented the stack pointer, so get the
3462 previous value. */
3463 offset += (HOST_WIDE_INT) rounded_size;
3464 #else
3465 if (STACK_PUSH_CODE == POST_INC)
3466 /* We have already incremented the stack pointer, so get the
3467 previous value. */
3468 offset -= (HOST_WIDE_INT) rounded_size;
3469 #endif
3470 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3471 }
3472 else
3473 {
3474 #ifdef STACK_GROWS_DOWNWARD
3475 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3476 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3477 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3478 #else
3479 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3480 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3481 GEN_INT (rounded_size));
3482 #endif
3483 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3484 }
3485
3486 dest = gen_rtx_MEM (mode, dest_addr);
3487
3488 if (type != 0)
3489 {
3490 set_mem_attributes (dest, type, 1);
3491
3492 if (flag_optimize_sibling_calls)
3493 /* Function incoming arguments may overlap with sibling call
3494 outgoing arguments and we cannot allow reordering of reads
3495 from function arguments with stores to outgoing arguments
3496 of sibling calls. */
3497 set_mem_alias_set (dest, 0);
3498 }
3499 emit_move_insn (dest, x);
3500 }
3501 #endif
3502
3503 /* Generate code to push X onto the stack, assuming it has mode MODE and
3504 type TYPE.
3505 MODE is redundant except when X is a CONST_INT (since they don't
3506 carry mode info).
3507 SIZE is an rtx for the size of data to be copied (in bytes),
3508 needed only if X is BLKmode.
3509
3510 ALIGN (in bits) is maximum alignment we can assume.
3511
3512 If PARTIAL and REG are both nonzero, then copy that many of the first
3513 bytes of X into registers starting with REG, and push the rest of X.
3514 The amount of space pushed is decreased by PARTIAL bytes.
3515 REG must be a hard register in this case.
3516 If REG is zero but PARTIAL is not, take all other actions for an
3517 argument partially in registers, but do not actually load any
3518 registers.
3519
3520 EXTRA is the amount in bytes of extra space to leave next to this arg.
3521 This is ignored if an argument block has already been allocated.
3522
3523 On a machine that lacks real push insns, ARGS_ADDR is the address of
3524 the bottom of the argument block for this call. We use indexing off there
3525 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3526 argument block has not been preallocated.
3527
3528 ARGS_SO_FAR is the size of args previously pushed for this call.
3529
3530 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3531 for arguments passed in registers. If nonzero, it will be the number
3532 of bytes required. */
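
/* As an illustrative sketch (a hypothetical call, not taken from an actual
   caller): pushing a word-sized scalar with no partial-register part on a
   push-insn target might look like

     emit_push_insn (x, SImode, type, NULL_RTX, PARM_BOUNDARY,
		     0, NULL_RTX, 0, NULL_RTX, const0_rtx, 0, NULL_RTX);

   where SIZE is NULL_RTX because MODE is not BLKmode, and ARGS_ADDR is zero
   because no argument block has been preallocated.  */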
3533
3534 void
3535 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3536 unsigned int align, int partial, rtx reg, int extra,
3537 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3538 rtx alignment_pad)
3539 {
3540 rtx xinner;
3541 enum direction stack_direction
3542 #ifdef STACK_GROWS_DOWNWARD
3543 = downward;
3544 #else
3545 = upward;
3546 #endif
3547
3548 /* Decide where to pad the argument: `downward' for below,
3549 `upward' for above, or `none' for don't pad it.
3550 Default is below for small data on big-endian machines; else above. */
3551 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3552
3553 /* Invert direction if stack is post-decrement.
3554 FIXME: why? */
3555 if (STACK_PUSH_CODE == POST_DEC)
3556 if (where_pad != none)
3557 where_pad = (where_pad == downward ? upward : downward);
3558
3559 xinner = x;
3560
3561 if (mode == BLKmode)
3562 {
3563 /* Copy a block into the stack, entirely or partially. */
3564
3565 rtx temp;
3566 int used;
3567 int offset;
3568 int skip;
3569
3570 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3571 used = partial - offset;
3572
3573 gcc_assert (size);
3574
3575 /* USED is now the # of bytes we need not copy to the stack
3576 because registers will take care of them. */
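      /* Illustrative numbers only: with PARTIAL == 6 and a 32-bit
	 PARM_BOUNDARY, OFFSET is 6 % 4 == 2 and USED is 4, i.e. only the
	 bytes up to the last parameter boundary count as handled by the
	 registers here.  */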
3577
3578 if (partial != 0)
3579 xinner = adjust_address (xinner, BLKmode, used);
3580
3581 /* If the partial register-part of the arg counts in its stack size,
3582 skip the part of stack space corresponding to the registers.
3583 Otherwise, start copying to the beginning of the stack space,
3584 by setting SKIP to 0. */
3585 skip = (reg_parm_stack_space == 0) ? 0 : used;
3586
3587 #ifdef PUSH_ROUNDING
3588 /* Do it with several push insns if that doesn't take lots of insns
3589 and if there is no difficulty with push insns that skip bytes
3590 on the stack for alignment purposes. */
3591 if (args_addr == 0
3592 && PUSH_ARGS
3593 && GET_CODE (size) == CONST_INT
3594 && skip == 0
3595 && MEM_ALIGN (xinner) >= align
3596 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3597 /* Here we avoid the case of a structure whose weak alignment
3598 forces many pushes of a small amount of data,
3599 and such small pushes do rounding that causes trouble. */
3600 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3601 || align >= BIGGEST_ALIGNMENT
3602 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3603 == (align / BITS_PER_UNIT)))
3604 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3605 {
3606 /* Push padding now if padding above and stack grows down,
3607 or if padding below and stack grows up.
3608 But if space already allocated, this has already been done. */
3609 if (extra && args_addr == 0
3610 && where_pad != none && where_pad != stack_direction)
3611 anti_adjust_stack (GEN_INT (extra));
3612
3613 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3614 }
3615 else
3616 #endif /* PUSH_ROUNDING */
3617 {
3618 rtx target;
3619
3620 /* Otherwise make space on the stack and copy the data
3621 to the address of that space. */
3622
3623 /* Deduct words put into registers from the size we must copy. */
3624 if (partial != 0)
3625 {
3626 if (GET_CODE (size) == CONST_INT)
3627 size = GEN_INT (INTVAL (size) - used);
3628 else
3629 size = expand_binop (GET_MODE (size), sub_optab, size,
3630 GEN_INT (used), NULL_RTX, 0,
3631 OPTAB_LIB_WIDEN);
3632 }
3633
3634 /* Get the address of the stack space.
3635 In this case, we do not deal with EXTRA separately.
3636 A single stack adjust will do. */
3637 if (! args_addr)
3638 {
3639 temp = push_block (size, extra, where_pad == downward);
3640 extra = 0;
3641 }
3642 else if (GET_CODE (args_so_far) == CONST_INT)
3643 temp = memory_address (BLKmode,
3644 plus_constant (args_addr,
3645 skip + INTVAL (args_so_far)));
3646 else
3647 temp = memory_address (BLKmode,
3648 plus_constant (gen_rtx_PLUS (Pmode,
3649 args_addr,
3650 args_so_far),
3651 skip));
3652
3653 if (!ACCUMULATE_OUTGOING_ARGS)
3654 {
3655 /* If the source is referenced relative to the stack pointer,
3656 copy it to another register to stabilize it. We do not need
3657 to do this if we know that we won't be changing sp. */
3658
3659 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3660 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3661 temp = copy_to_reg (temp);
3662 }
3663
3664 target = gen_rtx_MEM (BLKmode, temp);
3665
3666 /* We do *not* set_mem_attributes here, because incoming arguments
3667 may overlap with sibling call outgoing arguments and we cannot
3668 allow reordering of reads from function arguments with stores
3669 to outgoing arguments of sibling calls. We do, however, want
3670 to record the alignment of the stack slot. */
3671 /* ALIGN may well be better aligned than TYPE, e.g. due to
3672 PARM_BOUNDARY. Assume the caller isn't lying. */
3673 set_mem_align (target, align);
3674
3675 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3676 }
3677 }
3678 else if (partial > 0)
3679 {
3680 /* Scalar partly in registers. */
3681
3682 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3683 int i;
3684 int not_stack;
3685 /* # bytes of start of argument
3686 that we must make space for but need not store. */
3687 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3688 int args_offset = INTVAL (args_so_far);
3689 int skip;
3690
3691 /* Push padding now if padding above and stack grows down,
3692 or if padding below and stack grows up.
3693 But if space already allocated, this has already been done. */
3694 if (extra && args_addr == 0
3695 && where_pad != none && where_pad != stack_direction)
3696 anti_adjust_stack (GEN_INT (extra));
3697
3698 /* If we make space by pushing it, we might as well push
3699 the real data. Otherwise, we can leave OFFSET nonzero
3700 and leave the space uninitialized. */
3701 if (args_addr == 0)
3702 offset = 0;
3703
3704 /* Now NOT_STACK gets the number of words that we don't need to
3705 allocate on the stack. Convert OFFSET to words too. */
3706 not_stack = (partial - offset) / UNITS_PER_WORD;
3707 offset /= UNITS_PER_WORD;
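      /* Again with illustrative numbers: PARTIAL == 12, an 8-byte
	 PARM_BOUNDARY (so OFFSET == 4) and 4-byte words give
	 NOT_STACK == (12 - 4) / 4 == 2 words and OFFSET == 1 word.  */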
3708
3709 /* If the partial register-part of the arg counts in its stack size,
3710 skip the part of stack space corresponding to the registers.
3711 Otherwise, start copying to the beginning of the stack space,
3712 by setting SKIP to 0. */
3713 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3714
3715 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3716 x = validize_mem (force_const_mem (mode, x));
3717
3718 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3719 SUBREGs of such registers are not allowed. */
3720 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3721 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3722 x = copy_to_reg (x);
3723
3724 /* Loop over all the words allocated on the stack for this arg. */
3725 /* We can do it by words, because any scalar bigger than a word
3726 has a size that is a multiple of a word. */
3727 #ifndef PUSH_ARGS_REVERSED
3728 for (i = not_stack; i < size; i++)
3729 #else
3730 for (i = size - 1; i >= not_stack; i--)
3731 #endif
3732 if (i >= not_stack + offset)
3733 emit_push_insn (operand_subword_force (x, i, mode),
3734 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3735 0, args_addr,
3736 GEN_INT (args_offset + ((i - not_stack + skip)
3737 * UNITS_PER_WORD)),
3738 reg_parm_stack_space, alignment_pad);
3739 }
3740 else
3741 {
3742 rtx addr;
3743 rtx dest;
3744
3745 /* Push padding now if padding above and stack grows down,
3746 or if padding below and stack grows up.
3747 But if space already allocated, this has already been done. */
3748 if (extra && args_addr == 0
3749 && where_pad != none && where_pad != stack_direction)
3750 anti_adjust_stack (GEN_INT (extra));
3751
3752 #ifdef PUSH_ROUNDING
3753 if (args_addr == 0 && PUSH_ARGS)
3754 emit_single_push_insn (mode, x, type);
3755 else
3756 #endif
3757 {
3758 if (GET_CODE (args_so_far) == CONST_INT)
3759 addr
3760 = memory_address (mode,
3761 plus_constant (args_addr,
3762 INTVAL (args_so_far)));
3763 else
3764 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3765 args_so_far));
3766 dest = gen_rtx_MEM (mode, addr);
3767
3768 /* We do *not* set_mem_attributes here, because incoming arguments
3769 may overlap with sibling call outgoing arguments and we cannot
3770 allow reordering of reads from function arguments with stores
3771 to outgoing arguments of sibling calls. We do, however, want
3772 to record the alignment of the stack slot. */
3773 /* ALIGN may well be better aligned than TYPE, e.g. due to
3774 PARM_BOUNDARY. Assume the caller isn't lying. */
3775 set_mem_align (dest, align);
3776
3777 emit_move_insn (dest, x);
3778 }
3779 }
3780
3781 /* If part should go in registers, copy that part
3782 into the appropriate registers. Do this now, at the end,
3783 since mem-to-mem copies above may do function calls. */
3784 if (partial > 0 && reg != 0)
3785 {
3786 /* Handle calls that pass values in multiple non-contiguous locations.
3787 The Irix 6 ABI has examples of this. */
3788 if (GET_CODE (reg) == PARALLEL)
3789 emit_group_load (reg, x, type, -1);
3790 else
3791 {
3792 gcc_assert (partial % UNITS_PER_WORD == 0);
3793 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3794 }
3795 }
3796
3797 if (extra && args_addr == 0 && where_pad == stack_direction)
3798 anti_adjust_stack (GEN_INT (extra));
3799
3800 if (alignment_pad && args_addr == 0)
3801 anti_adjust_stack (alignment_pad);
3802 }
3803 \f
3804 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3805 operations. */
3806
3807 static rtx
3808 get_subtarget (rtx x)
3809 {
3810 return (optimize
3811 || x == 0
3812 /* Only registers can be subtargets. */
3813 || !REG_P (x)
3814 /* Don't use hard regs to avoid extending their life. */
3815 || REGNO (x) < FIRST_PSEUDO_REGISTER
3816 ? 0 : x);
3817 }
3818
3819 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3820 FIELD is a bitfield. Returns true if the optimization was successful,
3821 and there's nothing else to do. */
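
/* For instance (a hypothetical source-level example, not from the
   testsuite): given

     struct s { unsigned int rest : 27, top : 5; } x;
     x.top += 3;

   the store to the topmost bitfield can be done with a single add on the
   containing word, without extracting and reinserting the field, because
   any carry out of the top bits is simply lost.  */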
3822
3823 static bool
3824 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3825 unsigned HOST_WIDE_INT bitpos,
3826 enum machine_mode mode1, rtx str_rtx,
3827 tree to, tree src)
3828 {
3829 enum machine_mode str_mode = GET_MODE (str_rtx);
3830 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3831 tree op0, op1;
3832 rtx value, result;
3833 optab binop;
3834
3835 if (mode1 != VOIDmode
3836 || bitsize >= BITS_PER_WORD
3837 || str_bitsize > BITS_PER_WORD
3838 || TREE_SIDE_EFFECTS (to)
3839 || TREE_THIS_VOLATILE (to))
3840 return false;
3841
3842 STRIP_NOPS (src);
3843 if (!BINARY_CLASS_P (src)
3844 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3845 return false;
3846
3847 op0 = TREE_OPERAND (src, 0);
3848 op1 = TREE_OPERAND (src, 1);
3849 STRIP_NOPS (op0);
3850
3851 if (!operand_equal_p (to, op0, 0))
3852 return false;
3853
3854 if (MEM_P (str_rtx))
3855 {
3856 unsigned HOST_WIDE_INT offset1;
3857
3858 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3859 str_mode = word_mode;
3860 str_mode = get_best_mode (bitsize, bitpos,
3861 MEM_ALIGN (str_rtx), str_mode, 0);
3862 if (str_mode == VOIDmode)
3863 return false;
3864 str_bitsize = GET_MODE_BITSIZE (str_mode);
3865
3866 offset1 = bitpos;
3867 bitpos %= str_bitsize;
3868 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3869 str_rtx = adjust_address (str_rtx, str_mode, offset1);
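      /* E.g. (illustrative numbers): with BITPOS == 37 and a 32-bit
	 STR_MODE, BITPOS becomes 5 and OFFSET1 becomes (37 - 5) / 8 == 4,
	 so the access is rebased onto the word that starts 4 bytes into
	 the original MEM.  */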
3870 }
3871 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3872 return false;
3873
3874 /* If the bit field covers the whole REG/MEM, store_field
3875 will likely generate better code. */
3876 if (bitsize >= str_bitsize)
3877 return false;
3878
3879 /* We can't handle fields split across multiple entities. */
3880 if (bitpos + bitsize > str_bitsize)
3881 return false;
3882
3883 if (BYTES_BIG_ENDIAN)
3884 bitpos = str_bitsize - bitpos - bitsize;
3885
3886 switch (TREE_CODE (src))
3887 {
3888 case PLUS_EXPR:
3889 case MINUS_EXPR:
3890 /* For now, just optimize the case of the topmost bitfield
3891 where we don't need to do any masking and also
3892 1-bit bitfields where xor can be used.
3893 We might win by one instruction for the other bitfields
3894 too if insv/extv instructions aren't used, so that
3895 can be added later. */
3896 if (bitpos + bitsize != str_bitsize
3897 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3898 break;
3899
3900 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3901 value = convert_modes (str_mode,
3902 TYPE_MODE (TREE_TYPE (op1)), value,
3903 TYPE_UNSIGNED (TREE_TYPE (op1)));
3904
3905 /* We may be accessing data outside the field, which means
3906 we can alias adjacent data. */
3907 if (MEM_P (str_rtx))
3908 {
3909 str_rtx = shallow_copy_rtx (str_rtx);
3910 set_mem_alias_set (str_rtx, 0);
3911 set_mem_expr (str_rtx, 0);
3912 }
3913
3914 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3915 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3916 {
3917 value = expand_and (str_mode, value, const1_rtx, NULL);
3918 binop = xor_optab;
3919 }
3920 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3921 build_int_cst (NULL_TREE, bitpos),
3922 NULL_RTX, 1);
3923 result = expand_binop (str_mode, binop, str_rtx,
3924 value, str_rtx, 1, OPTAB_WIDEN);
3925 if (result != str_rtx)
3926 emit_move_insn (str_rtx, result);
3927 return true;
3928
3929 case BIT_IOR_EXPR:
3930 case BIT_XOR_EXPR:
3931 if (TREE_CODE (op1) != INTEGER_CST)
3932 break;
3933 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3934 value = convert_modes (GET_MODE (str_rtx),
3935 TYPE_MODE (TREE_TYPE (op1)), value,
3936 TYPE_UNSIGNED (TREE_TYPE (op1)));
3937
3938 /* We may be accessing data outside the field, which means
3939 we can alias adjacent data. */
3940 if (MEM_P (str_rtx))
3941 {
3942 str_rtx = shallow_copy_rtx (str_rtx);
3943 set_mem_alias_set (str_rtx, 0);
3944 set_mem_expr (str_rtx, 0);
3945 }
3946
3947 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3948 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3949 {
3950 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
3951 - 1);
3952 value = expand_and (GET_MODE (str_rtx), value, mask,
3953 NULL_RTX);
3954 }
3955 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3956 build_int_cst (NULL_TREE, bitpos),
3957 NULL_RTX, 1);
3958 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3959 value, str_rtx, 1, OPTAB_WIDEN);
3960 if (result != str_rtx)
3961 emit_move_insn (str_rtx, result);
3962 return true;
3963
3964 default:
3965 break;
3966 }
3967
3968 return false;
3969 }
3970
3971
3972 /* Expand an assignment that stores the value of FROM into TO. */
3973
3974 void
3975 expand_assignment (tree to, tree from)
3976 {
3977 rtx to_rtx = 0;
3978 rtx result;
3979
3980 /* Don't crash if the lhs of the assignment was erroneous. */
3981
3982 if (TREE_CODE (to) == ERROR_MARK)
3983 {
3984 result = expand_normal (from);
3985 return;
3986 }
3987
3988 /* Assignment of a structure component needs special treatment
3989 if the structure component's rtx is not simply a MEM.
3990 Assignment of an array element at a constant index, and assignment of
3991 an array element in an unaligned packed structure field, has the same
3992 problem. */
3993 if (handled_component_p (to)
3994 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3995 {
3996 enum machine_mode mode1;
3997 HOST_WIDE_INT bitsize, bitpos;
3998 tree offset;
3999 int unsignedp;
4000 int volatilep = 0;
4001 tree tem;
4002
4003 push_temp_slots ();
4004 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4005 &unsignedp, &volatilep, true);
4006
4007 /* If we are going to use store_bit_field and extract_bit_field,
4008 make sure to_rtx will be safe for multiple use. */
4009
4010 to_rtx = expand_normal (tem);
4011
4012 if (offset != 0)
4013 {
4014 rtx offset_rtx;
4015
4016 if (!MEM_P (to_rtx))
4017 {
4018 /* We can get constant negative offsets into arrays with broken
4019 user code. Translate this to a trap instead of ICEing. */
4020 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4021 expand_builtin_trap ();
4022 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4023 }
4024
4025 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4026 #ifdef POINTERS_EXTEND_UNSIGNED
4027 if (GET_MODE (offset_rtx) != Pmode)
4028 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4029 #else
4030 if (GET_MODE (offset_rtx) != ptr_mode)
4031 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4032 #endif
4033
4034 /* A constant address in TO_RTX can have VOIDmode; we must not try
4035 to call force_reg in that case, so avoid it. */
4036 if (MEM_P (to_rtx)
4037 && GET_MODE (to_rtx) == BLKmode
4038 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4039 && bitsize > 0
4040 && (bitpos % bitsize) == 0
4041 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4042 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4043 {
4044 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4045 bitpos = 0;
4046 }
4047
4048 to_rtx = offset_address (to_rtx, offset_rtx,
4049 highest_pow2_factor_for_target (to,
4050 offset));
4051 }
4052
4053 /* Handle expand_expr of a complex value returning a CONCAT. */
4054 if (GET_CODE (to_rtx) == CONCAT)
4055 {
4056 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4057 {
4058 gcc_assert (bitpos == 0);
4059 result = store_expr (from, to_rtx, false);
4060 }
4061 else
4062 {
4063 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4064 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4065 }
4066 }
4067 else
4068 {
4069 if (MEM_P (to_rtx))
4070 {
4071 /* If the field is at offset zero, we could have been given the
4072 DECL_RTX of the parent struct. Don't munge it. */
4073 to_rtx = shallow_copy_rtx (to_rtx);
4074
4075 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4076
4077 /* Deal with volatile and readonly fields. The former is only
4078 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4079 if (volatilep)
4080 MEM_VOLATILE_P (to_rtx) = 1;
4081 if (component_uses_parent_alias_set (to))
4082 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4083 }
4084
4085 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4086 to_rtx, to, from))
4087 result = NULL;
4088 else
4089 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4090 TREE_TYPE (tem), get_alias_set (to));
4091 }
4092
4093 if (result)
4094 preserve_temp_slots (result);
4095 free_temp_slots ();
4096 pop_temp_slots ();
4097 return;
4098 }
4099
4100 /* If the rhs is a function call and its value is not an aggregate,
4101 call the function before we start to compute the lhs.
4102 This is needed for correct code for cases such as
4103 val = setjmp (buf) on machines where reference to val
4104 requires loading up part of an address in a separate insn.
4105
4106 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4107 since it might be a promoted variable where the zero- or sign- extension
4108 needs to be done. Handling this in the normal way is safe because no
4109 computation is done before the call. */
4110 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4111 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4112 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4113 && REG_P (DECL_RTL (to))))
4114 {
4115 rtx value;
4116
4117 push_temp_slots ();
4118 value = expand_normal (from);
4119 if (to_rtx == 0)
4120 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4121
4122 /* Handle calls that return values in multiple non-contiguous locations.
4123 The Irix 6 ABI has examples of this. */
4124 if (GET_CODE (to_rtx) == PARALLEL)
4125 emit_group_load (to_rtx, value, TREE_TYPE (from),
4126 int_size_in_bytes (TREE_TYPE (from)));
4127 else if (GET_MODE (to_rtx) == BLKmode)
4128 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4129 else
4130 {
4131 if (POINTER_TYPE_P (TREE_TYPE (to)))
4132 value = convert_memory_address (GET_MODE (to_rtx), value);
4133 emit_move_insn (to_rtx, value);
4134 }
4135 preserve_temp_slots (to_rtx);
4136 free_temp_slots ();
4137 pop_temp_slots ();
4138 return;
4139 }
4140
4141 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4142 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4143
4144 if (to_rtx == 0)
4145 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4146
4147 /* Don't move directly into a return register. */
4148 if (TREE_CODE (to) == RESULT_DECL
4149 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4150 {
4151 rtx temp;
4152
4153 push_temp_slots ();
4154 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4155
4156 if (GET_CODE (to_rtx) == PARALLEL)
4157 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4158 int_size_in_bytes (TREE_TYPE (from)));
4159 else
4160 emit_move_insn (to_rtx, temp);
4161
4162 preserve_temp_slots (to_rtx);
4163 free_temp_slots ();
4164 pop_temp_slots ();
4165 return;
4166 }
4167
4168 /* In case we are returning the contents of an object which overlaps
4169 the place the value is being stored, use a safe function when copying
4170 a value through a pointer into a structure value return block. */
4171 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4172 && current_function_returns_struct
4173 && !current_function_returns_pcc_struct)
4174 {
4175 rtx from_rtx, size;
4176
4177 push_temp_slots ();
4178 size = expr_size (from);
4179 from_rtx = expand_normal (from);
4180
4181 emit_library_call (memmove_libfunc, LCT_NORMAL,
4182 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4183 XEXP (from_rtx, 0), Pmode,
4184 convert_to_mode (TYPE_MODE (sizetype),
4185 size, TYPE_UNSIGNED (sizetype)),
4186 TYPE_MODE (sizetype));
4187
4188 preserve_temp_slots (to_rtx);
4189 free_temp_slots ();
4190 pop_temp_slots ();
4191 return;
4192 }
4193
4194 /* Compute FROM and store the value in the rtx we got. */
4195
4196 push_temp_slots ();
4197 result = store_expr (from, to_rtx, 0);
4198 preserve_temp_slots (result);
4199 free_temp_slots ();
4200 pop_temp_slots ();
4201 return;
4202 }
4203
4204 /* Generate code for computing expression EXP,
4205 and storing the value into TARGET.
4206
4207 If the mode is BLKmode then we may return TARGET itself.
4208 It turns out that in BLKmode it doesn't cause a problem,
4209 because C has no operators that could combine two different
4210 assignments into the same BLKmode object with different values
4211 with no sequence point. Will other languages need this to
4212 be more thorough?
4213
4214 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4215 stack, and block moves may need to be treated specially. */
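
/* A minimal illustrative use (hypothetical, not from an actual caller):
   expanding an ordinary assignment might do

     rtx target = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
     store_expr (from, target, 0);

   with CALL_PARAM_P zero because the destination is not an outgoing
   argument slot.  */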
4216
4217 rtx
4218 store_expr (tree exp, rtx target, int call_param_p)
4219 {
4220 rtx temp;
4221 rtx alt_rtl = NULL_RTX;
4222 int dont_return_target = 0;
4223
4224 if (VOID_TYPE_P (TREE_TYPE (exp)))
4225 {
4226 /* C++ can generate ?: expressions with a throw expression in one
4227 branch and an rvalue in the other. Here, we resolve attempts to
4228 store the throw expression's nonexistent result. */
4229 gcc_assert (!call_param_p);
4230 expand_expr (exp, const0_rtx, VOIDmode, 0);
4231 return NULL_RTX;
4232 }
4233 if (TREE_CODE (exp) == COMPOUND_EXPR)
4234 {
4235 /* Perform first part of compound expression, then assign from second
4236 part. */
4237 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4238 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4239 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4240 }
4241 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4242 {
4243 /* For conditional expression, get safe form of the target. Then
4244 test the condition, doing the appropriate assignment on either
4245 side. This avoids the creation of unnecessary temporaries.
4246 For non-BLKmode, it is more efficient not to do this. */
4247
4248 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4249
4250 do_pending_stack_adjust ();
4251 NO_DEFER_POP;
4252 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4253 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4254 emit_jump_insn (gen_jump (lab2));
4255 emit_barrier ();
4256 emit_label (lab1);
4257 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4258 emit_label (lab2);
4259 OK_DEFER_POP;
4260
4261 return NULL_RTX;
4262 }
4263 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4264 /* If this is a scalar in a register that is stored in a wider mode
4265 than the declared mode, compute the result into its declared mode
4266 and then convert to the wider mode. Our value is the computed
4267 expression. */
4268 {
4269 rtx inner_target = 0;
4270
4271 /* We can do the conversion inside EXP, which will often result
4272 in some optimizations. Do the conversion in two steps: first
4273 change the signedness, if needed, then the extend. But don't
4274 do this if the type of EXP is a subtype of something else
4275 since then the conversion might involve more than just
4276 converting modes. */
4277 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4278 && TREE_TYPE (TREE_TYPE (exp)) == 0
4279 && (!lang_hooks.reduce_bit_field_operations
4280 || (GET_MODE_PRECISION (GET_MODE (target))
4281 == TYPE_PRECISION (TREE_TYPE (exp)))))
4282 {
4283 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4284 != SUBREG_PROMOTED_UNSIGNED_P (target))
4285 exp = convert
4286 (lang_hooks.types.signed_or_unsigned_type
4287 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4288
4289 exp = convert (lang_hooks.types.type_for_mode
4290 (GET_MODE (SUBREG_REG (target)),
4291 SUBREG_PROMOTED_UNSIGNED_P (target)),
4292 exp);
4293
4294 inner_target = SUBREG_REG (target);
4295 }
4296
4297 temp = expand_expr (exp, inner_target, VOIDmode,
4298 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4299
4300 /* If TEMP is a VOIDmode constant, use convert_modes to make
4301 sure that we properly convert it. */
4302 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4303 {
4304 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4305 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4306 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4307 GET_MODE (target), temp,
4308 SUBREG_PROMOTED_UNSIGNED_P (target));
4309 }
4310
4311 convert_move (SUBREG_REG (target), temp,
4312 SUBREG_PROMOTED_UNSIGNED_P (target));
4313
4314 return NULL_RTX;
4315 }
4316 else
4317 {
4318 temp = expand_expr_real (exp, target, GET_MODE (target),
4319 (call_param_p
4320 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4321 &alt_rtl);
4322 /* Return TARGET if it's a specified hardware register.
4323 If TARGET is a volatile mem ref, either return TARGET
4324 or return a reg copied *from* TARGET; ANSI requires this.
4325
4326 Otherwise, if TEMP is not TARGET, return TEMP
4327 if it is constant (for efficiency),
4328 or if we really want the correct value. */
4329 if (!(target && REG_P (target)
4330 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4331 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4332 && ! rtx_equal_p (temp, target)
4333 && CONSTANT_P (temp))
4334 dont_return_target = 1;
4335 }
4336
4337 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4338 the same as that of TARGET, adjust the constant. This is needed, for
4339 example, in case it is a CONST_DOUBLE and we want only a word-sized
4340 value. */
4341 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4342 && TREE_CODE (exp) != ERROR_MARK
4343 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4344 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4345 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4346
4347 /* If value was not generated in the target, store it there.
4348 Convert the value to TARGET's type first if necessary and emit the
4349 pending incrementations that have been queued when expanding EXP.
4350 Note that we cannot emit the whole queue blindly because this will
4351 effectively disable the POST_INC optimization later.
4352
4353 If TEMP and TARGET compare equal according to rtx_equal_p, but
4354 one or both of them are volatile memory refs, we have to distinguish
4355 two cases:
4356 - expand_expr has used TARGET. In this case, we must not generate
4357 another copy. This can be detected by TARGET being equal according
4358 to == .
4359 - expand_expr has not used TARGET - that means that the source just
4360 happens to have the same RTX form. Since temp will have been created
4361 by expand_expr, it will compare unequal according to == .
4362 We must generate a copy in this case, to reach the correct number
4363 of volatile memory references. */
4364
4365 if ((! rtx_equal_p (temp, target)
4366 || (temp != target && (side_effects_p (temp)
4367 || side_effects_p (target))))
4368 && TREE_CODE (exp) != ERROR_MARK
4369 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4370 but TARGET is not valid memory reference, TEMP will differ
4371 from TARGET although it is really the same location. */
4372 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4373 /* If there's nothing to copy, don't bother. Don't call
4374 expr_size unless necessary, because the expr_size hook of some
4375 front-ends (e.g. C++) must not be given objects that are not
4376 supposed to be bit-copied or bit-initialized. */
4377 && expr_size (exp) != const0_rtx)
4378 {
4379 if (GET_MODE (temp) != GET_MODE (target)
4380 && GET_MODE (temp) != VOIDmode)
4381 {
4382 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4383 if (dont_return_target)
4384 {
4385 /* In this case, we will return TEMP,
4386 so make sure it has the proper mode.
4387 But don't forget to store the value into TARGET. */
4388 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4389 emit_move_insn (target, temp);
4390 }
4391 else
4392 convert_move (target, temp, unsignedp);
4393 }
4394
4395 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4396 {
4397 /* Handle copying a string constant into an array. The string
4398 constant may be shorter than the array. So copy just the string's
4399 actual length, and clear the rest. First get the size of the data
4400 type of the string, which is actually the size of the target. */
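	  /* E.g. (hypothetical C source): for

	       char buf[10] = "hi";

	     the string constant supplies 3 bytes (including the trailing
	     NUL) and the remaining 7 bytes of BUF are cleared below.  */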
4401 rtx size = expr_size (exp);
4402
4403 if (GET_CODE (size) == CONST_INT
4404 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4405 emit_block_move (target, temp, size,
4406 (call_param_p
4407 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4408 else
4409 {
4410 /* Compute the size of the data to copy from the string. */
4411 tree copy_size
4412 = size_binop (MIN_EXPR,
4413 make_tree (sizetype, size),
4414 size_int (TREE_STRING_LENGTH (exp)));
4415 rtx copy_size_rtx
4416 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4417 (call_param_p
4418 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4419 rtx label = 0;
4420
4421 /* Copy that much. */
4422 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4423 TYPE_UNSIGNED (sizetype));
4424 emit_block_move (target, temp, copy_size_rtx,
4425 (call_param_p
4426 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4427
4428 /* Figure out how much is left in TARGET that we have to clear.
4429 Do all calculations in ptr_mode. */
4430 if (GET_CODE (copy_size_rtx) == CONST_INT)
4431 {
4432 size = plus_constant (size, -INTVAL (copy_size_rtx));
4433 target = adjust_address (target, BLKmode,
4434 INTVAL (copy_size_rtx));
4435 }
4436 else
4437 {
4438 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4439 copy_size_rtx, NULL_RTX, 0,
4440 OPTAB_LIB_WIDEN);
4441
4442 #ifdef POINTERS_EXTEND_UNSIGNED
4443 if (GET_MODE (copy_size_rtx) != Pmode)
4444 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4445 TYPE_UNSIGNED (sizetype));
4446 #endif
4447
4448 target = offset_address (target, copy_size_rtx,
4449 highest_pow2_factor (copy_size));
4450 label = gen_label_rtx ();
4451 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4452 GET_MODE (size), 0, label);
4453 }
4454
4455 if (size != const0_rtx)
4456 clear_storage (target, size, BLOCK_OP_NORMAL);
4457
4458 if (label)
4459 emit_label (label);
4460 }
4461 }
4462 /* Handle calls that return values in multiple non-contiguous locations.
4463 The Irix 6 ABI has examples of this. */
4464 else if (GET_CODE (target) == PARALLEL)
4465 emit_group_load (target, temp, TREE_TYPE (exp),
4466 int_size_in_bytes (TREE_TYPE (exp)));
4467 else if (GET_MODE (temp) == BLKmode)
4468 emit_block_move (target, temp, expr_size (exp),
4469 (call_param_p
4470 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4471 else
4472 {
4473 temp = force_operand (temp, target);
4474 if (temp != target)
4475 emit_move_insn (target, temp);
4476 }
4477 }
4478
4479 return NULL_RTX;
4480 }
4481 \f
4482 /* Examine CTOR to discover:
4483 * how many scalar fields are set to nonzero values,
4484 and place it in *P_NZ_ELTS;
4485 * how many scalar fields are set to non-constant values,
4486 and place it in *P_NC_ELTS; and
4487 * how many scalar fields in total are in CTOR,
4488 and place it in *P_ELT_COUNT.
4489 * if a type is a union, and the initializer from the constructor
4490 is not the largest element in the union, then set *p_must_clear. */
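
/* For example (a hypothetical initializer): for

     struct { int a, b, c; } x = { 1, 0, 2 };

   the walk below reports *P_ELT_COUNT == 3 scalar fields, *P_NZ_ELTS == 2
   of them nonzero, and *P_NC_ELTS == 0 since every value is constant.  */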
4491
4492 static void
4493 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4494 HOST_WIDE_INT *p_nc_elts,
4495 HOST_WIDE_INT *p_elt_count,
4496 bool *p_must_clear)
4497 {
4498 unsigned HOST_WIDE_INT idx;
4499 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4500 tree value, purpose;
4501
4502 nz_elts = 0;
4503 nc_elts = 0;
4504 elt_count = 0;
4505
4506 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4507 {
4508 HOST_WIDE_INT mult;
4509
4510 mult = 1;
4511 if (TREE_CODE (purpose) == RANGE_EXPR)
4512 {
4513 tree lo_index = TREE_OPERAND (purpose, 0);
4514 tree hi_index = TREE_OPERAND (purpose, 1);
4515
4516 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4517 mult = (tree_low_cst (hi_index, 1)
4518 - tree_low_cst (lo_index, 1) + 1);
4519 }
4520
4521 switch (TREE_CODE (value))
4522 {
4523 case CONSTRUCTOR:
4524 {
4525 HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
4526 categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
4527 nz_elts += mult * nz;
4528 nc_elts += mult * nc;
4529 elt_count += mult * ic;
4530 }
4531 break;
4532
4533 case INTEGER_CST:
4534 case REAL_CST:
4535 if (!initializer_zerop (value))
4536 nz_elts += mult;
4537 elt_count += mult;
4538 break;
4539
4540 case STRING_CST:
4541 nz_elts += mult * TREE_STRING_LENGTH (value);
4542 elt_count += mult * TREE_STRING_LENGTH (value);
4543 break;
4544
4545 case COMPLEX_CST:
4546 if (!initializer_zerop (TREE_REALPART (value)))
4547 nz_elts += mult;
4548 if (!initializer_zerop (TREE_IMAGPART (value)))
4549 nz_elts += mult;
4550 elt_count += mult;
4551 break;
4552
4553 case VECTOR_CST:
4554 {
4555 tree v;
4556 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4557 {
4558 if (!initializer_zerop (TREE_VALUE (v)))
4559 nz_elts += mult;
4560 elt_count += mult;
4561 }
4562 }
4563 break;
4564
4565 default:
4566 nz_elts += mult;
4567 elt_count += mult;
4568 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4569 nc_elts += mult;
4570 break;
4571 }
4572 }
4573
4574 if (!*p_must_clear
4575 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4576 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4577 {
4578 tree init_sub_type;
4579 bool clear_this = true;
4580
4581 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4582 {
4583 /* We don't expect more than one element of the union to be
4584 initialized. Not sure what we should do otherwise... */
4585 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4586 == 1);
4587
4588 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4589 CONSTRUCTOR_ELTS (ctor),
4590 0)->value);
4591
4592 /* ??? We could look at each element of the union, and find the
4593 largest element. Which would avoid comparing the size of the
4594 initialized element against any tail padding in the union.
4595 Doesn't seem worth the effort... */
4596 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4597 TYPE_SIZE (init_sub_type)) == 1)
4598 {
4599 /* And now we have to find out if the element itself is fully
4600 constructed. E.g. for union { struct { int a, b; } s; } u
4601 = { .s = { .a = 1 } }. */
4602 if (elt_count == count_type_elements (init_sub_type, false))
4603 clear_this = false;
4604 }
4605 }
4606
4607 *p_must_clear = clear_this;
4608 }
4609
4610 *p_nz_elts += nz_elts;
4611 *p_nc_elts += nc_elts;
4612 *p_elt_count += elt_count;
4613 }
4614
4615 void
4616 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4617 HOST_WIDE_INT *p_nc_elts,
4618 HOST_WIDE_INT *p_elt_count,
4619 bool *p_must_clear)
4620 {
4621 *p_nz_elts = 0;
4622 *p_nc_elts = 0;
4623 *p_elt_count = 0;
4624 *p_must_clear = false;
4625 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
4626 p_must_clear);
4627 }
4628
4629 /* Count the number of scalars in TYPE. Return -1 on overflow or if
4630 TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a flexible
4631 array member at the end of the structure. */
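
/* Illustrative example (hypothetical type): for

     struct { int a[4]; double d; }

   the result is 4 + 1 == 5, whereas a variable-sized member would make
   the result -1.  */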
4632
4633 HOST_WIDE_INT
4634 count_type_elements (tree type, bool allow_flexarr)
4635 {
4636 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4637 switch (TREE_CODE (type))
4638 {
4639 case ARRAY_TYPE:
4640 {
4641 tree telts = array_type_nelts (type);
4642 if (telts && host_integerp (telts, 1))
4643 {
4644 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4645 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4646 if (n == 0)
4647 return 0;
4648 else if (max / n > m)
4649 return n * m;
4650 }
4651 return -1;
4652 }
4653
4654 case RECORD_TYPE:
4655 {
4656 HOST_WIDE_INT n = 0, t;
4657 tree f;
4658
4659 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4660 if (TREE_CODE (f) == FIELD_DECL)
4661 {
4662 t = count_type_elements (TREE_TYPE (f), false);
4663 if (t < 0)
4664 {
4665 /* Check for structures with flexible array member. */
4666 tree tf = TREE_TYPE (f);
4667 if (allow_flexarr
4668 && TREE_CHAIN (f) == NULL
4669 && TREE_CODE (tf) == ARRAY_TYPE
4670 && TYPE_DOMAIN (tf)
4671 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4672 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4673 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4674 && int_size_in_bytes (type) >= 0)
4675 break;
4676
4677 return -1;
4678 }
4679 n += t;
4680 }
4681
4682 return n;
4683 }
4684
4685 case UNION_TYPE:
4686 case QUAL_UNION_TYPE:
4687 {
4688 /* Ho hum. How in the world do we guess here? Clearly it isn't
4689 right to count the fields. Guess based on the number of words. */
4690 HOST_WIDE_INT n = int_size_in_bytes (type);
4691 if (n < 0)
4692 return -1;
4693 return n / UNITS_PER_WORD;
4694 }
4695
4696 case COMPLEX_TYPE:
4697 return 2;
4698
4699 case VECTOR_TYPE:
4700 return TYPE_VECTOR_SUBPARTS (type);
4701
4702 case INTEGER_TYPE:
4703 case REAL_TYPE:
4704 case ENUMERAL_TYPE:
4705 case BOOLEAN_TYPE:
4706 case POINTER_TYPE:
4707 case OFFSET_TYPE:
4708 case REFERENCE_TYPE:
4709 return 1;
4710
4711 case VOID_TYPE:
4712 case METHOD_TYPE:
4713 case FUNCTION_TYPE:
4714 case LANG_TYPE:
4715 default:
4716 gcc_unreachable ();
4717 }
4718 }
4719
4720 /* Return 1 if EXP contains mostly (3/4) zeros. */
4721
4722 static int
4723 mostly_zeros_p (tree exp)
4724 {
4725 if (TREE_CODE (exp) == CONSTRUCTOR)
4726
4727 {
4728 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
4729 bool must_clear;
4730
4731 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4732 if (must_clear)
4733 return 1;
4734
4735 elts = count_type_elements (TREE_TYPE (exp), false);
4736
4737 return nz_elts < elts / 4;
4738 }
4739
4740 return initializer_zerop (exp);
4741 }
4742
4743 /* Return 1 if EXP contains all zeros. */
4744
4745 static int
4746 all_zeros_p (tree exp)
4747 {
4748 if (TREE_CODE (exp) == CONSTRUCTOR)
4749
4750 {
4751 HOST_WIDE_INT nz_elts, nc_elts, count;
4752 bool must_clear;
4753
4754 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4755 return nz_elts == 0;
4756 }
4757
4758 return initializer_zerop (exp);
4759 }
4760 \f
4761 /* Helper function for store_constructor.
4762 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4763 TYPE is the type of the CONSTRUCTOR, not the element type.
4764 CLEARED is as for store_constructor.
4765 ALIAS_SET is the alias set to use for any stores.
4766
4767 This provides a recursive shortcut back to store_constructor when it isn't
4768 necessary to go through store_field. This is so that we can pass through
4769 the cleared field to let store_constructor know that we may not have to
4770 clear a substructure if the outer structure has already been cleared. */
4771
4772 static void
4773 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4774 HOST_WIDE_INT bitpos, enum machine_mode mode,
4775 tree exp, tree type, int cleared, int alias_set)
4776 {
4777 if (TREE_CODE (exp) == CONSTRUCTOR
4778 /* We can only call store_constructor recursively if the size and
4779 bit position are on a byte boundary. */
4780 && bitpos % BITS_PER_UNIT == 0
4781 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4782 /* If we have a nonzero bitpos for a register target, then we just
4783 let store_field do the bitfield handling. This is unlikely to
4784 generate unnecessary clear instructions anyways. */
4785 && (bitpos == 0 || MEM_P (target)))
4786 {
4787 if (MEM_P (target))
4788 target
4789 = adjust_address (target,
4790 GET_MODE (target) == BLKmode
4791 || 0 != (bitpos
4792 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4793 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4794
4795
4796 /* Update the alias set, if required. */
4797 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4798 && MEM_ALIAS_SET (target) != 0)
4799 {
4800 target = copy_rtx (target);
4801 set_mem_alias_set (target, alias_set);
4802 }
4803
4804 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4805 }
4806 else
4807 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4808 }
4809
4810 /* Store the value of constructor EXP into the rtx TARGET.
4811 TARGET is either a REG or a MEM; we know it cannot conflict, since
4812 safe_from_p has been called.
4813 CLEARED is true if TARGET is known to have been zero'd.
4814 SIZE is the number of bytes of TARGET we are allowed to modify: this
4815 may not be the same as the size of EXP if we are assigning to a field
4816 which has been packed to exclude padding bits. */
4817
4818 static void
4819 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4820 {
4821 tree type = TREE_TYPE (exp);
4822 #ifdef WORD_REGISTER_OPERATIONS
4823 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4824 #endif
4825
4826 switch (TREE_CODE (type))
4827 {
4828 case RECORD_TYPE:
4829 case UNION_TYPE:
4830 case QUAL_UNION_TYPE:
4831 {
4832 unsigned HOST_WIDE_INT idx;
4833 tree field, value;
4834
4835 /* If size is zero or the target is already cleared, do nothing. */
4836 if (size == 0 || cleared)
4837 cleared = 1;
4838 /* We either clear the aggregate or indicate the value is dead. */
4839 else if ((TREE_CODE (type) == UNION_TYPE
4840 || TREE_CODE (type) == QUAL_UNION_TYPE)
4841 && ! CONSTRUCTOR_ELTS (exp))
4842 /* If the constructor is empty, clear the union. */
4843 {
4844 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4845 cleared = 1;
4846 }
4847
4848 /* If we are building a static constructor into a register,
4849 set the initial value as zero so we can fold the value into
4850 a constant. But if more than one register is involved,
4851 this probably loses. */
4852 else if (REG_P (target) && TREE_STATIC (exp)
4853 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4854 {
4855 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4856 cleared = 1;
4857 }
4858
4859 /* If the constructor has fewer fields than the structure or
4860 if we are initializing the structure to mostly zeros, clear
4861 the whole structure first. Don't do this if TARGET is a
4862 register whose mode size isn't equal to SIZE since
4863 clear_storage can't handle this case. */
4864 else if (size > 0
4865 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4866 != fields_length (type))
4867 || mostly_zeros_p (exp))
4868 && (!REG_P (target)
4869 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4870 == size)))
4871 {
4872 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4873 cleared = 1;
4874 }
4875
4876 if (! cleared)
4877 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4878
4879 /* Store each element of the constructor into the
4880 corresponding field of TARGET. */
4881 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4882 {
4883 enum machine_mode mode;
4884 HOST_WIDE_INT bitsize;
4885 HOST_WIDE_INT bitpos = 0;
4886 tree offset;
4887 rtx to_rtx = target;
4888
4889 /* Just ignore missing fields. We cleared the whole
4890 structure, above, if any fields are missing. */
4891 if (field == 0)
4892 continue;
4893
4894 if (cleared && initializer_zerop (value))
4895 continue;
4896
4897 if (host_integerp (DECL_SIZE (field), 1))
4898 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4899 else
4900 bitsize = -1;
4901
4902 mode = DECL_MODE (field);
4903 if (DECL_BIT_FIELD (field))
4904 mode = VOIDmode;
4905
4906 offset = DECL_FIELD_OFFSET (field);
4907 if (host_integerp (offset, 0)
4908 && host_integerp (bit_position (field), 0))
4909 {
4910 bitpos = int_bit_position (field);
4911 offset = 0;
4912 }
4913 else
4914 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4915
4916 if (offset)
4917 {
4918 rtx offset_rtx;
4919
4920 offset
4921 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4922 make_tree (TREE_TYPE (exp),
4923 target));
4924
4925 offset_rtx = expand_normal (offset);
4926 gcc_assert (MEM_P (to_rtx));
4927
4928 #ifdef POINTERS_EXTEND_UNSIGNED
4929 if (GET_MODE (offset_rtx) != Pmode)
4930 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4931 #else
4932 if (GET_MODE (offset_rtx) != ptr_mode)
4933 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4934 #endif
4935
4936 to_rtx = offset_address (to_rtx, offset_rtx,
4937 highest_pow2_factor (offset));
4938 }
4939
4940 #ifdef WORD_REGISTER_OPERATIONS
4941 /* If this initializes a field that is smaller than a
4942 word, at the start of a word, try to widen it to a full
4943 word. This special case allows us to output C++ member
4944 function initializations in a form that the optimizers
4945 can understand. */
4946 if (REG_P (target)
4947 && bitsize < BITS_PER_WORD
4948 && bitpos % BITS_PER_WORD == 0
4949 && GET_MODE_CLASS (mode) == MODE_INT
4950 && TREE_CODE (value) == INTEGER_CST
4951 && exp_size >= 0
4952 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4953 {
4954 tree type = TREE_TYPE (value);
4955
4956 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4957 {
4958 type = lang_hooks.types.type_for_size
4959 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4960 value = convert (type, value);
4961 }
4962
4963 if (BYTES_BIG_ENDIAN)
4964 value
4965 = fold_build2 (LSHIFT_EXPR, type, value,
4966 build_int_cst (NULL_TREE,
4967 BITS_PER_WORD - bitsize));
4968 bitsize = BITS_PER_WORD;
4969 mode = word_mode;
4970 }
4971 #endif
4972
4973 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4974 && DECL_NONADDRESSABLE_P (field))
4975 {
4976 to_rtx = copy_rtx (to_rtx);
4977 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4978 }
4979
4980 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4981 value, type, cleared,
4982 get_alias_set (TREE_TYPE (field)));
4983 }
4984 break;
4985 }
4986 case ARRAY_TYPE:
4987 {
4988 tree value, index;
4989 unsigned HOST_WIDE_INT i;
4990 int need_to_clear;
4991 tree domain;
4992 tree elttype = TREE_TYPE (type);
4993 int const_bounds_p;
4994 HOST_WIDE_INT minelt = 0;
4995 HOST_WIDE_INT maxelt = 0;
4996
4997 domain = TYPE_DOMAIN (type);
4998 const_bounds_p = (TYPE_MIN_VALUE (domain)
4999 && TYPE_MAX_VALUE (domain)
5000 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5001 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5002
5003 /* If we have constant bounds for the range of the type, get them. */
5004 if (const_bounds_p)
5005 {
5006 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5007 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5008 }
5009
5010 /* If the constructor has fewer elements than the array, clear
5011 the whole array first. Similarly if this is a static
5012 constructor of a non-BLKmode object. */
5013 if (cleared)
5014 need_to_clear = 0;
5015 else if (REG_P (target) && TREE_STATIC (exp))
5016 need_to_clear = 1;
5017 else
5018 {
5019 unsigned HOST_WIDE_INT idx;
5020 tree index, value;
5021 HOST_WIDE_INT count = 0, zero_count = 0;
5022 need_to_clear = ! const_bounds_p;
5023
5024 /* This loop is a more accurate version of the loop in
5025 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5026 is also needed to check for missing elements. */
5027 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5028 {
5029 HOST_WIDE_INT this_node_count;
5030
5031 if (need_to_clear)
5032 break;
5033
5034 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5035 {
5036 tree lo_index = TREE_OPERAND (index, 0);
5037 tree hi_index = TREE_OPERAND (index, 1);
5038
5039 if (! host_integerp (lo_index, 1)
5040 || ! host_integerp (hi_index, 1))
5041 {
5042 need_to_clear = 1;
5043 break;
5044 }
5045
5046 this_node_count = (tree_low_cst (hi_index, 1)
5047 - tree_low_cst (lo_index, 1) + 1);
5048 }
5049 else
5050 this_node_count = 1;
5051
5052 count += this_node_count;
5053 if (mostly_zeros_p (value))
5054 zero_count += this_node_count;
5055 }
5056
5057 /* Clear the entire array first if there are any missing
5058 elements, or if the incidence of zero elements is >=
5059 75%. */
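	    /* Illustrative numbers: with COUNT == 8 initialized elements of
	       which ZERO_COUNT == 6 are (mostly) zero, 4 * 6 >= 3 * 8 holds,
	       so the whole array is cleared and only the interesting
	       elements are stored individually below.  */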
5060 if (! need_to_clear
5061 && (count < maxelt - minelt + 1
5062 || 4 * zero_count >= 3 * count))
5063 need_to_clear = 1;
5064 }
5065
5066 if (need_to_clear && size > 0)
5067 {
5068 if (REG_P (target))
5069 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5070 else
5071 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5072 cleared = 1;
5073 }
5074
5075 if (!cleared && REG_P (target))
5076 /* Inform later passes that the old value is dead. */
5077 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5078
5079 /* Store each element of the constructor into the
5080 corresponding element of TARGET, determined by counting the
5081 elements. */
5082 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5083 {
5084 enum machine_mode mode;
5085 HOST_WIDE_INT bitsize;
5086 HOST_WIDE_INT bitpos;
5087 int unsignedp;
5088 rtx xtarget = target;
5089
5090 if (cleared && initializer_zerop (value))
5091 continue;
5092
5093 unsignedp = TYPE_UNSIGNED (elttype);
5094 mode = TYPE_MODE (elttype);
5095 if (mode == BLKmode)
5096 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5097 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5098 : -1);
5099 else
5100 bitsize = GET_MODE_BITSIZE (mode);
5101
5102 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5103 {
5104 tree lo_index = TREE_OPERAND (index, 0);
5105 tree hi_index = TREE_OPERAND (index, 1);
5106 rtx index_r, pos_rtx;
5107 HOST_WIDE_INT lo, hi, count;
5108 tree position;
5109
5110 /* If the range is constant and "small", unroll the loop. */
5111 if (const_bounds_p
5112 && host_integerp (lo_index, 0)
5113 && host_integerp (hi_index, 0)
5114 && (lo = tree_low_cst (lo_index, 0),
5115 hi = tree_low_cst (hi_index, 0),
5116 count = hi - lo + 1,
5117 (!MEM_P (target)
5118 || count <= 2
5119 || (host_integerp (TYPE_SIZE (elttype), 1)
5120 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5121 <= 40 * 8)))))
5122 {
5123 lo -= minelt; hi -= minelt;
5124 for (; lo <= hi; lo++)
5125 {
5126 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5127
5128 if (MEM_P (target)
5129 && !MEM_KEEP_ALIAS_SET_P (target)
5130 && TREE_CODE (type) == ARRAY_TYPE
5131 && TYPE_NONALIASED_COMPONENT (type))
5132 {
5133 target = copy_rtx (target);
5134 MEM_KEEP_ALIAS_SET_P (target) = 1;
5135 }
5136
5137 store_constructor_field
5138 (target, bitsize, bitpos, mode, value, type, cleared,
5139 get_alias_set (elttype));
5140 }
5141 }
5142 else
5143 {
5144 rtx loop_start = gen_label_rtx ();
5145 rtx loop_end = gen_label_rtx ();
5146 tree exit_cond;
5147
5148 expand_normal (hi_index);
5149 unsignedp = TYPE_UNSIGNED (domain);
5150
5151 index = build_decl (VAR_DECL, NULL_TREE, domain);
5152
5153 index_r
5154 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5155 &unsignedp, 0));
5156 SET_DECL_RTL (index, index_r);
5157 store_expr (lo_index, index_r, 0);
5158
5159 /* Build the head of the loop. */
5160 do_pending_stack_adjust ();
5161 emit_label (loop_start);
5162
5163 /* Assign value to element index. */
5164 position
5165 = convert (ssizetype,
5166 fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5167 index, TYPE_MIN_VALUE (domain)));
5168 position = size_binop (MULT_EXPR, position,
5169 convert (ssizetype,
5170 TYPE_SIZE_UNIT (elttype)));
5171
5172 pos_rtx = expand_normal (position);
5173 xtarget = offset_address (target, pos_rtx,
5174 highest_pow2_factor (position));
5175 xtarget = adjust_address (xtarget, mode, 0);
5176 if (TREE_CODE (value) == CONSTRUCTOR)
5177 store_constructor (value, xtarget, cleared,
5178 bitsize / BITS_PER_UNIT);
5179 else
5180 store_expr (value, xtarget, 0);
5181
5182 /* Generate a conditional jump to exit the loop. */
5183 exit_cond = build2 (LT_EXPR, integer_type_node,
5184 index, hi_index);
5185 jumpif (exit_cond, loop_end);
5186
5187 /* Update the loop counter, and jump to the head of
5188 the loop. */
5189 expand_assignment (index,
5190 build2 (PLUS_EXPR, TREE_TYPE (index),
5191 index, integer_one_node));
5192
5193 emit_jump (loop_start);
5194
5195 /* Build the end of the loop. */
5196 emit_label (loop_end);
5197 }
5198 }
5199 else if ((index != 0 && ! host_integerp (index, 0))
5200 || ! host_integerp (TYPE_SIZE (elttype), 1))
5201 {
5202 tree position;
5203
5204 if (index == 0)
5205 index = ssize_int (1);
5206
5207 if (minelt)
5208 index = fold_convert (ssizetype,
5209 fold_build2 (MINUS_EXPR,
5210 TREE_TYPE (index),
5211 index,
5212 TYPE_MIN_VALUE (domain)));
5213
5214 position = size_binop (MULT_EXPR, index,
5215 convert (ssizetype,
5216 TYPE_SIZE_UNIT (elttype)));
5217 xtarget = offset_address (target,
5218 expand_normal (position),
5219 highest_pow2_factor (position));
5220 xtarget = adjust_address (xtarget, mode, 0);
5221 store_expr (value, xtarget, 0);
5222 }
5223 else
5224 {
5225 if (index != 0)
5226 bitpos = ((tree_low_cst (index, 0) - minelt)
5227 * tree_low_cst (TYPE_SIZE (elttype), 1));
5228 else
5229 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5230
5231 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5232 && TREE_CODE (type) == ARRAY_TYPE
5233 && TYPE_NONALIASED_COMPONENT (type))
5234 {
5235 target = copy_rtx (target);
5236 MEM_KEEP_ALIAS_SET_P (target) = 1;
5237 }
5238 store_constructor_field (target, bitsize, bitpos, mode, value,
5239 type, cleared, get_alias_set (elttype));
5240 }
5241 }
5242 break;
5243 }
5244
5245 case VECTOR_TYPE:
5246 {
5247 unsigned HOST_WIDE_INT idx;
5248 constructor_elt *ce;
5249 int i;
5250 int need_to_clear;
5251 int icode = 0;
5252 tree elttype = TREE_TYPE (type);
5253 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5254 enum machine_mode eltmode = TYPE_MODE (elttype);
5255 HOST_WIDE_INT bitsize;
5256 HOST_WIDE_INT bitpos;
5257 rtvec vector = NULL;
5258 unsigned n_elts;
5259
5260 gcc_assert (eltmode != BLKmode);
5261
5262 n_elts = TYPE_VECTOR_SUBPARTS (type);
5263 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5264 {
5265 enum machine_mode mode = GET_MODE (target);
5266
5267 icode = (int) vec_init_optab->handlers[mode].insn_code;
5268 if (icode != CODE_FOR_nothing)
5269 {
5270 unsigned int i;
5271
5272 vector = rtvec_alloc (n_elts);
5273 for (i = 0; i < n_elts; i++)
5274 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5275 }
5276 }
5277
5278 /* If the constructor has fewer elements than the vector,
5279 clear the whole vector first. Similarly if this is a static
5280 constructor of a non-BLKmode object. */
5281 if (cleared)
5282 need_to_clear = 0;
5283 else if (REG_P (target) && TREE_STATIC (exp))
5284 need_to_clear = 1;
5285 else
5286 {
5287 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5288 tree value;
5289
5290 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5291 {
5292 int n_elts_here = tree_low_cst
5293 (int_const_binop (TRUNC_DIV_EXPR,
5294 TYPE_SIZE (TREE_TYPE (value)),
5295 TYPE_SIZE (elttype), 0), 1);
5296
5297 count += n_elts_here;
5298 if (mostly_zeros_p (value))
5299 zero_count += n_elts_here;
5300 }
5301
5302 /* Clear the entire vector first if there are any missing elements,
5303 or if the incidence of zero elements is >= 75%. */
5304 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5305 }
5306
5307 if (need_to_clear && size > 0 && !vector)
5308 {
5309 if (REG_P (target))
5310 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5311 else
5312 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5313 cleared = 1;
5314 }
5315
5316 /* Inform later passes that the old value is dead. */
5317 if (!cleared && REG_P (target))
5318 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5319
5320 /* Store each element of the constructor into the corresponding
5321 element of TARGET, determined by counting the elements. */
5322 for (idx = 0, i = 0;
5323 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5324 idx++, i += bitsize / elt_size)
5325 {
5326 HOST_WIDE_INT eltpos;
5327 tree value = ce->value;
5328
5329 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5330 if (cleared && initializer_zerop (value))
5331 continue;
5332
5333 if (ce->index)
5334 eltpos = tree_low_cst (ce->index, 1);
5335 else
5336 eltpos = i;
5337
5338 if (vector)
5339 {
5340 /* Vector CONSTRUCTORs should only be built from smaller
5341 vectors in the case of BLKmode vectors. */
5342 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5343 RTVEC_ELT (vector, eltpos)
5344 = expand_normal (value);
5345 }
5346 else
5347 {
5348 enum machine_mode value_mode =
5349 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5350 ? TYPE_MODE (TREE_TYPE (value))
5351 : eltmode;
5352 bitpos = eltpos * elt_size;
5353 store_constructor_field (target, bitsize, bitpos,
5354 value_mode, value, type,
5355 cleared, get_alias_set (elttype));
5356 }
5357 }
5358
5359 if (vector)
5360 emit_insn (GEN_FCN (icode)
5361 (target,
5362 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5363 break;
5364 }
5365
5366 default:
5367 gcc_unreachable ();
5368 }
5369 }
5370
5371 /* Store the value of EXP (an expression tree)
5372 into a subfield of TARGET which has mode MODE and occupies
5373 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5374 If MODE is VOIDmode, it means that we are storing into a bit-field.
5375
5376 Always return const0_rtx unless we have something particular to
5377 return.
5378
5379 TYPE is the type of the underlying object,
5380
5381 ALIAS_SET is the alias set for the destination. This value will
5382 (in general) be different from that for TARGET, since TARGET is a
5383 reference to the containing structure. */
5384
5385 static rtx
5386 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5387 enum machine_mode mode, tree exp, tree type, int alias_set)
5388 {
5389 HOST_WIDE_INT width_mask = 0;
5390
5391 if (TREE_CODE (exp) == ERROR_MARK)
5392 return const0_rtx;
5393
5394 /* If we have nothing to store, do nothing unless the expression has
5395 side-effects. */
5396 if (bitsize == 0)
5397 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5398 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5399 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5400
5401 /* If we are storing into an unaligned field of an aligned union that is
5402 in a register, we may have the mode of TARGET being an integer mode but
5403 MODE == BLKmode. In that case, get an aligned object whose size and
5404 alignment are the same as TARGET and store TARGET into it (we can avoid
5405 the store if the field being stored is the entire width of TARGET). Then
5406 call ourselves recursively to store the field into a BLKmode version of
5407 that object. Finally, load from the object into TARGET. This is not
5408 very efficient in general, but should only be slightly more expensive
5409 than the otherwise-required unaligned accesses. Perhaps this can be
5410 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5411 twice, once with emit_move_insn and once via store_field. */
5412
5413 if (mode == BLKmode
5414 && (REG_P (target) || GET_CODE (target) == SUBREG))
5415 {
5416 rtx object = assign_temp (type, 0, 1, 1);
5417 rtx blk_object = adjust_address (object, BLKmode, 0);
5418
5419 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5420 emit_move_insn (object, target);
5421
5422 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5423
5424 emit_move_insn (target, object);
5425
5426 /* We want to return the BLKmode version of the data. */
5427 return blk_object;
5428 }
5429
5430 if (GET_CODE (target) == CONCAT)
5431 {
5432 /* We're storing into a struct containing a single __complex. */
5433
5434 gcc_assert (!bitpos);
5435 return store_expr (exp, target, 0);
5436 }
5437
5438 /* If the structure is in a register or if the component
5439 is a bit field, we cannot use addressing to access it.
5440 Use bit-field techniques or SUBREG to store in it. */
5441
5442 if (mode == VOIDmode
5443 || (mode != BLKmode && ! direct_store[(int) mode]
5444 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5445 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5446 || REG_P (target)
5447 || GET_CODE (target) == SUBREG
5448 /* If the field isn't aligned enough to store as an ordinary memref,
5449 store it as a bit field. */
5450 || (mode != BLKmode
5451 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5452 || bitpos % GET_MODE_ALIGNMENT (mode))
5453 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5454 || (bitpos % BITS_PER_UNIT != 0)))
5455 /* If the RHS and field are a constant size and the size of the
5456 RHS isn't the same size as the bitfield, we must use bitfield
5457 operations. */
5458 || (bitsize >= 0
5459 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5460 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5461 {
5462 rtx temp;
5463
5464 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5465 implies a mask operation. If the precision is the same size as
5466 the field we're storing into, that mask is redundant. This is
5467 particularly common with bit field assignments generated by the
5468 C front end. */
5469 if (TREE_CODE (exp) == NOP_EXPR)
5470 {
5471 tree type = TREE_TYPE (exp);
5472 if (INTEGRAL_TYPE_P (type)
5473 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5474 && bitsize == TYPE_PRECISION (type))
5475 {
5476 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5477 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5478 exp = TREE_OPERAND (exp, 0);
5479 }
5480 }
5481
5482 temp = expand_normal (exp);
5483
5484 /* If BITSIZE is narrower than the size of the type of EXP
5485 we will be narrowing TEMP. Normally, what's wanted are the
5486 low-order bits. However, if EXP's type is a record and this is
5487 a big-endian machine, we want the upper BITSIZE bits. */
5488 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5489 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5490 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5491 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5492 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5493 - bitsize),
5494 NULL_RTX, 1);
5495
5496 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5497 MODE. */
5498 if (mode != VOIDmode && mode != BLKmode
5499 && mode != TYPE_MODE (TREE_TYPE (exp)))
5500 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5501
5502 /* If the modes of TARGET and TEMP are both BLKmode, both
5503 must be in memory and BITPOS must be aligned on a byte
5504 boundary. If so, we simply do a block copy. */
5505 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5506 {
5507 gcc_assert (MEM_P (target) && MEM_P (temp)
5508 && !(bitpos % BITS_PER_UNIT));
5509
5510 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5511 emit_block_move (target, temp,
5512 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5513 / BITS_PER_UNIT),
5514 BLOCK_OP_NORMAL);
5515
5516 return const0_rtx;
5517 }
5518
5519 /* Store the value in the bitfield. */
5520 store_bit_field (target, bitsize, bitpos, mode, temp);
5521
5522 return const0_rtx;
5523 }
5524 else
5525 {
5526 /* Now build a reference to just the desired component. */
5527 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5528
5529 if (to_rtx == target)
5530 to_rtx = copy_rtx (to_rtx);
5531
5532 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5533 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5534 set_mem_alias_set (to_rtx, alias_set);
5535
5536 return store_expr (exp, to_rtx, 0);
5537 }
5538 }
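
/* Illustrative sketch, not part of the original source: a hedged example of
   how a caller might use store_field to write an 8-bit value at bit offset
   16 of an object already in memory.  DEST_MEM, VAL_EXP and VAL_TYPE are
   hypothetical names assumed to be set up by the caller.  */
#if 0
static void
store_field_usage_sketch (rtx dest_mem, tree val_exp, tree val_type)
{
  /* Store VAL_EXP into bits [16,24) of DEST_MEM as a QImode field,
     using the alias set of the value's type for the destination.  */
  store_field (dest_mem, /*bitsize=*/8, /*bitpos=*/16, QImode,
               val_exp, val_type, get_alias_set (val_type));
}
#endif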
5539 \f
5540 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5541 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5542 codes and find the ultimate containing object, which we return.
5543
5544 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5545 bit position, and *PUNSIGNEDP to the signedness of the field.
5546 If the position of the field is variable, we store a tree
5547 giving the variable offset (in units) in *POFFSET.
5548 This offset is in addition to the bit position.
5549 If the position is not variable, we store 0 in *POFFSET.
5550
5551 If any of the extraction expressions is volatile,
5552 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5553
5554 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5555 is a mode that can be used to access the field. In that case, *PBITSIZE
5556 is redundant.
5557
5558 If the field describes a variable-sized object, *PMODE is set to
5559 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5560 this case, but the address of the object can be found.
5561
5562 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5563 look through nodes that serve as markers of a greater alignment than
5564 the one that can be deduced from the expression. These nodes make it
5565 possible for front-ends to prevent temporaries from being created by
5566 the middle-end on alignment considerations. For that purpose, the
5567 normal operating mode at high-level is to always pass FALSE so that
5568 the ultimate containing object is really returned; moreover, the
5569 associated predicate handled_component_p will always return TRUE
5570 on these nodes, thus indicating that they are essentially handled
5571 by get_inner_reference. TRUE should only be passed when the caller
5572 is scanning the expression in order to build another representation
5573 and specifically knows how to handle these nodes; as such, this is
5574 the normal operating mode in the RTL expanders. */
5575
5576 tree
5577 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5578 HOST_WIDE_INT *pbitpos, tree *poffset,
5579 enum machine_mode *pmode, int *punsignedp,
5580 int *pvolatilep, bool keep_aligning)
5581 {
5582 tree size_tree = 0;
5583 enum machine_mode mode = VOIDmode;
5584 tree offset = size_zero_node;
5585 tree bit_offset = bitsize_zero_node;
5586 tree tem;
5587
5588 /* First get the mode, signedness, and size. We do this from just the
5589 outermost expression. */
5590 if (TREE_CODE (exp) == COMPONENT_REF)
5591 {
5592 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5593 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5594 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5595
5596 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5597 }
5598 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5599 {
5600 size_tree = TREE_OPERAND (exp, 1);
5601 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5602 }
5603 else
5604 {
5605 mode = TYPE_MODE (TREE_TYPE (exp));
5606 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5607
5608 if (mode == BLKmode)
5609 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5610 else
5611 *pbitsize = GET_MODE_BITSIZE (mode);
5612 }
5613
5614 if (size_tree != 0)
5615 {
5616 if (! host_integerp (size_tree, 1))
5617 mode = BLKmode, *pbitsize = -1;
5618 else
5619 *pbitsize = tree_low_cst (size_tree, 1);
5620 }
5621
5622 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5623 and find the ultimate containing object. */
5624 while (1)
5625 {
5626 switch (TREE_CODE (exp))
5627 {
5628 case BIT_FIELD_REF:
5629 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5630 TREE_OPERAND (exp, 2));
5631 break;
5632
5633 case COMPONENT_REF:
5634 {
5635 tree field = TREE_OPERAND (exp, 1);
5636 tree this_offset = component_ref_field_offset (exp);
5637
5638 /* If this field hasn't been filled in yet, don't go past it.
5639 This should only happen when folding expressions made during
5640 type construction. */
5641 if (this_offset == 0)
5642 break;
5643
5644 offset = size_binop (PLUS_EXPR, offset, this_offset);
5645 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5646 DECL_FIELD_BIT_OFFSET (field));
5647
5648 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5649 }
5650 break;
5651
5652 case ARRAY_REF:
5653 case ARRAY_RANGE_REF:
5654 {
5655 tree index = TREE_OPERAND (exp, 1);
5656 tree low_bound = array_ref_low_bound (exp);
5657 tree unit_size = array_ref_element_size (exp);
5658
5659 /* We assume all arrays have sizes that are a multiple of a byte.
5660 First subtract the lower bound, if any, in the type of the
5661 index, then convert to sizetype and multiply by the size of
5662 the array element. */
5663 if (! integer_zerop (low_bound))
5664 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5665 index, low_bound);
5666
5667 offset = size_binop (PLUS_EXPR, offset,
5668 size_binop (MULT_EXPR,
5669 convert (sizetype, index),
5670 unit_size));
5671 }
5672 break;
5673
5674 case REALPART_EXPR:
5675 break;
5676
5677 case IMAGPART_EXPR:
5678 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5679 bitsize_int (*pbitsize));
5680 break;
5681
5682 case VIEW_CONVERT_EXPR:
5683 if (keep_aligning && STRICT_ALIGNMENT
5684 && (TYPE_ALIGN (TREE_TYPE (exp))
5685 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5686 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5687 < BIGGEST_ALIGNMENT)
5688 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5689 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5690 goto done;
5691 break;
5692
5693 default:
5694 goto done;
5695 }
5696
5697 /* If any reference in the chain is volatile, the effect is volatile. */
5698 if (TREE_THIS_VOLATILE (exp))
5699 *pvolatilep = 1;
5700
5701 exp = TREE_OPERAND (exp, 0);
5702 }
5703 done:
5704
5705 /* If OFFSET is constant, see if we can return the whole thing as a
5706 constant bit position. Otherwise, split it up. */
5707 if (host_integerp (offset, 0)
5708 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5709 bitsize_unit_node))
5710 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5711 && host_integerp (tem, 0))
5712 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5713 else
5714 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5715
5716 *pmode = mode;
5717 return exp;
5718 }
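
/* Illustrative sketch, not part of the original source: decompose a
   reference expression (REF, a hypothetical COMPONENT_REF or ARRAY_REF
   built by the caller) into its base object and position, the way the
   RTL expanders use get_inner_reference.  */
#if 0
static tree
get_inner_reference_sketch (tree ref)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode1;
  int unsignedp, volatilep = 0;

  /* BASE is the ultimate containing object.  If OFFSET comes back zero,
     the field starts exactly BITPOS bits into BASE; otherwise OFFSET is
     an additional variable byte offset.  */
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
                                   &mode1, &unsignedp, &volatilep,
                                   /*keep_aligning=*/false);
  return base;
}
#endif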
5719
5720 /* Return a tree of sizetype representing the size, in bytes, of the element
5721 of EXP, an ARRAY_REF. */
5722
5723 tree
5724 array_ref_element_size (tree exp)
5725 {
5726 tree aligned_size = TREE_OPERAND (exp, 3);
5727 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5728
5729 /* If a size was specified in the ARRAY_REF, it's the size measured
5730 in alignment units of the element type. So multiply by that value. */
5731 if (aligned_size)
5732 {
5733 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5734 sizetype from another type of the same width and signedness. */
5735 if (TREE_TYPE (aligned_size) != sizetype)
5736 aligned_size = fold_convert (sizetype, aligned_size);
5737 return size_binop (MULT_EXPR, aligned_size,
5738 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5739 }
5740
5741 /* Otherwise, take the size from that of the element type. Substitute
5742 any PLACEHOLDER_EXPR that we have. */
5743 else
5744 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5745 }
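
/* Illustrative sketch, not part of the original source: compute the byte
   offset of the element denoted by an ARRAY_REF, mirroring the ARRAY_REF
   case of get_inner_reference above.  AREF is a hypothetical ARRAY_REF
   tree supplied by the caller.  */
#if 0
static tree
array_ref_byte_offset_sketch (tree aref)
{
  tree index = TREE_OPERAND (aref, 1);
  tree low_bound = array_ref_low_bound (aref);
  tree unit_size = array_ref_element_size (aref);

  /* offset = (index - low_bound) * element_size, computed in sizetype.  */
  if (! integer_zerop (low_bound))
    index = fold_build2 (MINUS_EXPR, TREE_TYPE (index), index, low_bound);
  return size_binop (MULT_EXPR, convert (sizetype, index), unit_size);
}
#endif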
5746
5747 /* Return a tree representing the lower bound of the array mentioned in
5748 EXP, an ARRAY_REF. */
5749
5750 tree
5751 array_ref_low_bound (tree exp)
5752 {
5753 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5754
5755 /* If a lower bound is specified in EXP, use it. */
5756 if (TREE_OPERAND (exp, 2))
5757 return TREE_OPERAND (exp, 2);
5758
5759 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5760 substituting for a PLACEHOLDER_EXPR as needed. */
5761 if (domain_type && TYPE_MIN_VALUE (domain_type))
5762 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5763
5764 /* Otherwise, return a zero of the appropriate type. */
5765 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5766 }
5767
5768 /* Return a tree representing the upper bound of the array mentioned in
5769 EXP, an ARRAY_REF. */
5770
5771 tree
5772 array_ref_up_bound (tree exp)
5773 {
5774 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5775
5776 /* If there is a domain type and it has an upper bound, use it, substituting
5777 for a PLACEHOLDER_EXPR as needed. */
5778 if (domain_type && TYPE_MAX_VALUE (domain_type))
5779 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5780
5781 /* Otherwise fail. */
5782 return NULL_TREE;
5783 }
5784
5785 /* Return a tree representing the offset, in bytes, of the field referenced
5786 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5787
5788 tree
5789 component_ref_field_offset (tree exp)
5790 {
5791 tree aligned_offset = TREE_OPERAND (exp, 2);
5792 tree field = TREE_OPERAND (exp, 1);
5793
5794 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5795 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5796 value. */
5797 if (aligned_offset)
5798 {
5799 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5800 sizetype from another type of the same width and signedness. */
5801 if (TREE_TYPE (aligned_offset) != sizetype)
5802 aligned_offset = fold_convert (sizetype, aligned_offset);
5803 return size_binop (MULT_EXPR, aligned_offset,
5804 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5805 }
5806
5807 /* Otherwise, take the offset from that of the field. Substitute
5808 any PLACEHOLDER_EXPR that we have. */
5809 else
5810 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5811 }
5812
5813 /* Return 1 if T is an expression that get_inner_reference handles. */
5814
5815 int
5816 handled_component_p (tree t)
5817 {
5818 switch (TREE_CODE (t))
5819 {
5820 case BIT_FIELD_REF:
5821 case COMPONENT_REF:
5822 case ARRAY_REF:
5823 case ARRAY_RANGE_REF:
5824 case VIEW_CONVERT_EXPR:
5825 case REALPART_EXPR:
5826 case IMAGPART_EXPR:
5827 return 1;
5828
5829 default:
5830 return 0;
5831 }
5832 }
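
/* Illustrative sketch, not part of the original source: strip every
   handled component to reach the base object of a reference, the usual
   idiom built on handled_component_p.  T is any tree expression.  */
#if 0
static tree
get_reference_base_sketch (tree t)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);
  return t;
}
#endif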
5833 \f
5834 /* Given an rtx VALUE that may contain additions and multiplications, return
5835 an equivalent value that just refers to a register, memory, or constant.
5836 This is done by generating instructions to perform the arithmetic and
5837 returning a pseudo-register containing the value.
5838
5839 The returned value may be a REG, SUBREG, MEM or constant. */
5840
5841 rtx
5842 force_operand (rtx value, rtx target)
5843 {
5844 rtx op1, op2;
5845 /* Use subtarget as the target for operand 0 of a binary operation. */
5846 rtx subtarget = get_subtarget (target);
5847 enum rtx_code code = GET_CODE (value);
5848
5849 /* Check for subreg applied to an expression produced by loop optimizer. */
5850 if (code == SUBREG
5851 && !REG_P (SUBREG_REG (value))
5852 && !MEM_P (SUBREG_REG (value)))
5853 {
5854 value = simplify_gen_subreg (GET_MODE (value),
5855 force_reg (GET_MODE (SUBREG_REG (value)),
5856 force_operand (SUBREG_REG (value),
5857 NULL_RTX)),
5858 GET_MODE (SUBREG_REG (value)),
5859 SUBREG_BYTE (value));
5860 code = GET_CODE (value);
5861 }
5862
5863 /* Check for a PIC address load. */
5864 if ((code == PLUS || code == MINUS)
5865 && XEXP (value, 0) == pic_offset_table_rtx
5866 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5867 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5868 || GET_CODE (XEXP (value, 1)) == CONST))
5869 {
5870 if (!subtarget)
5871 subtarget = gen_reg_rtx (GET_MODE (value));
5872 emit_move_insn (subtarget, value);
5873 return subtarget;
5874 }
5875
5876 if (ARITHMETIC_P (value))
5877 {
5878 op2 = XEXP (value, 1);
5879 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5880 subtarget = 0;
5881 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5882 {
5883 code = PLUS;
5884 op2 = negate_rtx (GET_MODE (value), op2);
5885 }
5886
5887 /* Check for an addition with OP2 a constant integer and our first
5888 operand a PLUS of a virtual register and something else. In that
5889 case, we want to emit the sum of the virtual register and the
5890 constant first and then add the other value. This allows virtual
5891 register instantiation to simply modify the constant rather than
5892 creating another one around this addition. */
5893 if (code == PLUS && GET_CODE (op2) == CONST_INT
5894 && GET_CODE (XEXP (value, 0)) == PLUS
5895 && REG_P (XEXP (XEXP (value, 0), 0))
5896 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5897 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5898 {
5899 rtx temp = expand_simple_binop (GET_MODE (value), code,
5900 XEXP (XEXP (value, 0), 0), op2,
5901 subtarget, 0, OPTAB_LIB_WIDEN);
5902 return expand_simple_binop (GET_MODE (value), code, temp,
5903 force_operand (XEXP (XEXP (value,
5904 0), 1), 0),
5905 target, 0, OPTAB_LIB_WIDEN);
5906 }
5907
5908 op1 = force_operand (XEXP (value, 0), subtarget);
5909 op2 = force_operand (op2, NULL_RTX);
5910 switch (code)
5911 {
5912 case MULT:
5913 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5914 case DIV:
5915 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5916 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5917 target, 1, OPTAB_LIB_WIDEN);
5918 else
5919 return expand_divmod (0,
5920 FLOAT_MODE_P (GET_MODE (value))
5921 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5922 GET_MODE (value), op1, op2, target, 0);
5923 break;
5924 case MOD:
5925 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5926 target, 0);
5927 break;
5928 case UDIV:
5929 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5930 target, 1);
5931 break;
5932 case UMOD:
5933 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5934 target, 1);
5935 break;
5936 case ASHIFTRT:
5937 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5938 target, 0, OPTAB_LIB_WIDEN);
5939 break;
5940 default:
5941 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5942 target, 1, OPTAB_LIB_WIDEN);
5943 }
5944 }
5945 if (UNARY_P (value))
5946 {
5947 if (!target)
5948 target = gen_reg_rtx (GET_MODE (value));
5949 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5950 switch (code)
5951 {
5952 case ZERO_EXTEND:
5953 case SIGN_EXTEND:
5954 case TRUNCATE:
5955 convert_move (target, op1, code == ZERO_EXTEND);
5956 return target;
5957
5958 case FIX:
5959 case UNSIGNED_FIX:
5960 expand_fix (target, op1, code == UNSIGNED_FIX);
5961 return target;
5962
5963 case FLOAT:
5964 case UNSIGNED_FLOAT:
5965 expand_float (target, op1, code == UNSIGNED_FLOAT);
5966 return target;
5967
5968 default:
5969 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5970 }
5971 }
5972
5973 #ifdef INSN_SCHEDULING
5974 /* On machines that have insn scheduling, we want all memory references to be
5975 explicit, so we need to deal with such paradoxical SUBREGs. */
5976 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5977 && (GET_MODE_SIZE (GET_MODE (value))
5978 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5979 value
5980 = simplify_gen_subreg (GET_MODE (value),
5981 force_reg (GET_MODE (SUBREG_REG (value)),
5982 force_operand (SUBREG_REG (value),
5983 NULL_RTX)),
5984 GET_MODE (SUBREG_REG (value)),
5985 SUBREG_BYTE (value));
5986 #endif
5987
5988 return value;
5989 }
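
/* Illustrative sketch, not part of the original source: flatten an
   address-like expression such as (plus (reg) (const_int 4)) into a
   single operand using force_operand.  BASE_REG is a hypothetical
   Pmode register supplied by the caller.  */
#if 0
static rtx
force_operand_sketch (rtx base_reg)
{
  rtx sum = gen_rtx_PLUS (Pmode, base_reg, GEN_INT (4));

  /* Emits an add insn if needed and returns a REG (or MEM or constant)
     holding the value.  */
  return force_operand (sum, NULL_RTX);
}
#endif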
5990 \f
5991 /* Subroutine of expand_expr: return nonzero iff there is no way that
5992 EXP can reference X, which is being modified. TOP_P is nonzero if this
5993 call is going to be used to determine whether we need a temporary
5994 for EXP, as opposed to a recursive call to this function.
5995
5996 It is always safe for this routine to return zero since it merely
5997 searches for optimization opportunities. */
5998
5999 int
6000 safe_from_p (rtx x, tree exp, int top_p)
6001 {
6002 rtx exp_rtl = 0;
6003 int i, nops;
6004
6005 if (x == 0
6006 /* If EXP has varying size, we MUST use a target since we currently
6007 have no way of allocating temporaries of variable size
6008 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6009 So we assume here that something at a higher level has prevented a
6010 clash. This is somewhat bogus, but it is the best we can do. Only
6011 do this when X is BLKmode and when we are at the top level. */
6012 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6013 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6014 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6015 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6016 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6017 != INTEGER_CST)
6018 && GET_MODE (x) == BLKmode)
6019 /* If X is in the outgoing argument area, it is always safe. */
6020 || (MEM_P (x)
6021 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6022 || (GET_CODE (XEXP (x, 0)) == PLUS
6023 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6024 return 1;
6025
6026 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6027 find the underlying pseudo. */
6028 if (GET_CODE (x) == SUBREG)
6029 {
6030 x = SUBREG_REG (x);
6031 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6032 return 0;
6033 }
6034
6035 /* Now look at our tree code and possibly recurse. */
6036 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6037 {
6038 case tcc_declaration:
6039 exp_rtl = DECL_RTL_IF_SET (exp);
6040 break;
6041
6042 case tcc_constant:
6043 return 1;
6044
6045 case tcc_exceptional:
6046 if (TREE_CODE (exp) == TREE_LIST)
6047 {
6048 while (1)
6049 {
6050 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6051 return 0;
6052 exp = TREE_CHAIN (exp);
6053 if (!exp)
6054 return 1;
6055 if (TREE_CODE (exp) != TREE_LIST)
6056 return safe_from_p (x, exp, 0);
6057 }
6058 }
6059 else if (TREE_CODE (exp) == ERROR_MARK)
6060 return 1; /* An already-visited SAVE_EXPR? */
6061 else
6062 return 0;
6063
6064 case tcc_statement:
6065 /* The only case we look at here is the DECL_INITIAL inside a
6066 DECL_EXPR. */
6067 return (TREE_CODE (exp) != DECL_EXPR
6068 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6069 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6070 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6071
6072 case tcc_binary:
6073 case tcc_comparison:
6074 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6075 return 0;
6076 /* Fall through. */
6077
6078 case tcc_unary:
6079 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6080
6081 case tcc_expression:
6082 case tcc_reference:
6083 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6084 the expression. If it is set, we conflict iff we are that rtx or
6085 both are in memory. Otherwise, we check all operands of the
6086 expression recursively. */
6087
6088 switch (TREE_CODE (exp))
6089 {
6090 case ADDR_EXPR:
6091 /* If the operand is static or we are static, we can't conflict.
6092 Likewise if we don't conflict with the operand at all. */
6093 if (staticp (TREE_OPERAND (exp, 0))
6094 || TREE_STATIC (exp)
6095 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6096 return 1;
6097
6098 /* Otherwise, the only way this can conflict is if we are taking
6099 the address of a DECL whose address is part of X, which is
6100 very rare. */
6101 exp = TREE_OPERAND (exp, 0);
6102 if (DECL_P (exp))
6103 {
6104 if (!DECL_RTL_SET_P (exp)
6105 || !MEM_P (DECL_RTL (exp)))
6106 return 0;
6107 else
6108 exp_rtl = XEXP (DECL_RTL (exp), 0);
6109 }
6110 break;
6111
6112 case MISALIGNED_INDIRECT_REF:
6113 case ALIGN_INDIRECT_REF:
6114 case INDIRECT_REF:
6115 if (MEM_P (x)
6116 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6117 get_alias_set (exp)))
6118 return 0;
6119 break;
6120
6121 case CALL_EXPR:
6122 /* Assume that the call will clobber all hard registers and
6123 all of memory. */
6124 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6125 || MEM_P (x))
6126 return 0;
6127 break;
6128
6129 case WITH_CLEANUP_EXPR:
6130 case CLEANUP_POINT_EXPR:
6131 /* Lowered by gimplify.c. */
6132 gcc_unreachable ();
6133
6134 case SAVE_EXPR:
6135 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6136
6137 default:
6138 break;
6139 }
6140
6141 /* If we have an rtx, we do not need to scan our operands. */
6142 if (exp_rtl)
6143 break;
6144
6145 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6146 for (i = 0; i < nops; i++)
6147 if (TREE_OPERAND (exp, i) != 0
6148 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6149 return 0;
6150
6151 /* If this is a language-specific tree code, it may require
6152 special handling. */
6153 if ((unsigned int) TREE_CODE (exp)
6154 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6155 && !lang_hooks.safe_from_p (x, exp))
6156 return 0;
6157 break;
6158
6159 case tcc_type:
6160 /* Should never get a type here. */
6161 gcc_unreachable ();
6162 }
6163
6164 /* If we have an rtl, find any enclosed object. Then see if we conflict
6165 with it. */
6166 if (exp_rtl)
6167 {
6168 if (GET_CODE (exp_rtl) == SUBREG)
6169 {
6170 exp_rtl = SUBREG_REG (exp_rtl);
6171 if (REG_P (exp_rtl)
6172 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6173 return 0;
6174 }
6175
6176 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6177 are memory and they conflict. */
6178 return ! (rtx_equal_p (x, exp_rtl)
6179 || (MEM_P (x) && MEM_P (exp_rtl)
6180 && true_dependence (exp_rtl, VOIDmode, x,
6181 rtx_addr_varies_p)));
6182 }
6183
6184 /* If we reach here, it is safe. */
6185 return 1;
6186 }
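
/* Illustrative sketch, not part of the original source: the typical use
   of safe_from_p, deciding whether TARGET may be reused while expanding
   the first operand of a binary expression EXP without clobbering a
   value still needed by the second operand (compare expand_operands
   below).  */
#if 0
static rtx
choose_subtarget_sketch (rtx target, tree exp)
{
  rtx subtarget = target;

  if (subtarget != 0 && ! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
  return subtarget;
}
#endif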
6187
6188 \f
6189 /* Return the highest power of two that EXP is known to be a multiple of.
6190 This is used in updating alignment of MEMs in array references. */
6191
6192 unsigned HOST_WIDE_INT
6193 highest_pow2_factor (tree exp)
6194 {
6195 unsigned HOST_WIDE_INT c0, c1;
6196
6197 switch (TREE_CODE (exp))
6198 {
6199 case INTEGER_CST:
6200 /* We can find the lowest bit that's a one. If the low
6201 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6202 We need to handle this case since we can find it in a COND_EXPR,
6203 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6204 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6205 later ICE. */
6206 if (TREE_CONSTANT_OVERFLOW (exp))
6207 return BIGGEST_ALIGNMENT;
6208 else
6209 {
6210 /* Note: tree_low_cst is intentionally not used here,
6211 we don't care about the upper bits. */
6212 c0 = TREE_INT_CST_LOW (exp);
6213 c0 &= -c0;
6214 return c0 ? c0 : BIGGEST_ALIGNMENT;
6215 }
6216 break;
6217
6218 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6219 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6220 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6221 return MIN (c0, c1);
6222
6223 case MULT_EXPR:
6224 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6225 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6226 return c0 * c1;
6227
6228 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6229 case CEIL_DIV_EXPR:
6230 if (integer_pow2p (TREE_OPERAND (exp, 1))
6231 && host_integerp (TREE_OPERAND (exp, 1), 1))
6232 {
6233 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6234 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6235 return MAX (1, c0 / c1);
6236 }
6237 break;
6238
6239 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6240 case SAVE_EXPR:
6241 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6242
6243 case COMPOUND_EXPR:
6244 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6245
6246 case COND_EXPR:
6247 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6248 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6249 return MIN (c0, c1);
6250
6251 default:
6252 break;
6253 }
6254
6255 return 1;
6256 }
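
/* Illustrative sketch, not part of the original source: for an index
   expression such as IDX * 16, highest_pow2_factor reports at least 16,
   which the expanders use to keep alignment information on array
   accesses (see the offset_address calls elsewhere in this file).  IDX
   is a hypothetical sizetype tree supplied by the caller.  */
#if 0
static unsigned HOST_WIDE_INT
highest_pow2_factor_sketch (tree idx)
{
  tree scaled = size_binop (MULT_EXPR, convert (sizetype, idx),
                            size_int (16));

  /* Returns 16 here, or more if IDX itself has a known larger
     power-of-two factor.  */
  return highest_pow2_factor (scaled);
}
#endif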
6257
6258 /* Similar, except that the alignment requirements of TARGET are
6259 taken into account. Assume it is at least as aligned as its
6260 type, unless it is a COMPONENT_REF in which case the layout of
6261 the structure gives the alignment. */
6262
6263 static unsigned HOST_WIDE_INT
6264 highest_pow2_factor_for_target (tree target, tree exp)
6265 {
6266 unsigned HOST_WIDE_INT target_align, factor;
6267
6268 factor = highest_pow2_factor (exp);
6269 if (TREE_CODE (target) == COMPONENT_REF)
6270 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6271 else
6272 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6273 return MAX (factor, target_align);
6274 }
6275 \f
6276 /* Expands variable VAR. */
6277
6278 void
6279 expand_var (tree var)
6280 {
6281 if (DECL_EXTERNAL (var))
6282 return;
6283
6284 if (TREE_STATIC (var))
6285 /* If this is an inlined copy of a static local variable,
6286 look up the original decl. */
6287 var = DECL_ORIGIN (var);
6288
6289 if (TREE_STATIC (var)
6290 ? !TREE_ASM_WRITTEN (var)
6291 : !DECL_RTL_SET_P (var))
6292 {
6293 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6294 /* Should be ignored. */;
6295 else if (lang_hooks.expand_decl (var))
6296 /* OK. */;
6297 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6298 expand_decl (var);
6299 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6300 rest_of_decl_compilation (var, 0, 0);
6301 else
6302 /* No expansion needed. */
6303 gcc_assert (TREE_CODE (var) == TYPE_DECL
6304 || TREE_CODE (var) == CONST_DECL
6305 || TREE_CODE (var) == FUNCTION_DECL
6306 || TREE_CODE (var) == LABEL_DECL);
6307 }
6308 }
6309
6310 /* Subroutine of expand_expr. Expand the two operands of a binary
6311 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6312 The value may be stored in TARGET if TARGET is nonzero. The
6313 MODIFIER argument is as documented by expand_expr. */
6314
6315 static void
6316 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6317 enum expand_modifier modifier)
6318 {
6319 if (! safe_from_p (target, exp1, 1))
6320 target = 0;
6321 if (operand_equal_p (exp0, exp1, 0))
6322 {
6323 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6324 *op1 = copy_rtx (*op0);
6325 }
6326 else
6327 {
6328 /* If we need to preserve evaluation order, copy exp0 into its own
6329 temporary variable so that it can't be clobbered by exp1. */
6330 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6331 exp0 = save_expr (exp0);
6332 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6333 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6334 }
6335 }
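
/* Illustrative sketch, not part of the original source: how a binary
   expression expander typically obtains both operands at once.  EXP is
   a hypothetical PLUS_EXPR and TARGET the usual suggestion for where to
   put the result.  */
#if 0
static rtx
expand_binary_operands_sketch (tree exp, rtx target)
{
  rtx op0, op1;

  /* expand_operands takes care of not reusing TARGET when doing so
     could clobber a value still needed by the second operand.  */
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                   target, &op0, &op1, EXPAND_NORMAL);
  return expand_simple_binop (TYPE_MODE (TREE_TYPE (exp)), PLUS, op0, op1,
                              target, TYPE_UNSIGNED (TREE_TYPE (exp)),
                              OPTAB_LIB_WIDEN);
}
#endif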
6336
6337 \f
6338 /* Return a MEM that contains constant EXP. DEFER is as for
6339 output_constant_def and MODIFIER is as for expand_expr. */
6340
6341 static rtx
6342 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6343 {
6344 rtx mem;
6345
6346 mem = output_constant_def (exp, defer);
6347 if (modifier != EXPAND_INITIALIZER)
6348 mem = use_anchored_address (mem);
6349 return mem;
6350 }
6351
6352 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6353 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6354
6355 static rtx
6356 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6357 enum expand_modifier modifier)
6358 {
6359 rtx result, subtarget;
6360 tree inner, offset;
6361 HOST_WIDE_INT bitsize, bitpos;
6362 int volatilep, unsignedp;
6363 enum machine_mode mode1;
6364
6365 /* If we are taking the address of a constant and are at the top level,
6366 we have to use output_constant_def since we can't call force_const_mem
6367 at top level. */
6368 /* ??? This should be considered a front-end bug. We should not be
6369 generating ADDR_EXPR of something that isn't an LVALUE. The only
6370 exception here is STRING_CST. */
6371 if (TREE_CODE (exp) == CONSTRUCTOR
6372 || CONSTANT_CLASS_P (exp))
6373 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6374
6375 /* Everything must be something allowed by is_gimple_addressable. */
6376 switch (TREE_CODE (exp))
6377 {
6378 case INDIRECT_REF:
6379 /* This case will happen via recursion for &a->b. */
6380 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6381
6382 case CONST_DECL:
6383 /* Recurse and make the output_constant_def clause above handle this. */
6384 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6385 tmode, modifier);
6386
6387 case REALPART_EXPR:
6388 /* The real part of the complex number is always first, therefore
6389 the address is the same as the address of the parent object. */
6390 offset = 0;
6391 bitpos = 0;
6392 inner = TREE_OPERAND (exp, 0);
6393 break;
6394
6395 case IMAGPART_EXPR:
6396 /* The imaginary part of the complex number is always second.
6397 The expression is therefore always offset by the size of the
6398 scalar type. */
6399 offset = 0;
6400 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6401 inner = TREE_OPERAND (exp, 0);
6402 break;
6403
6404 default:
6405 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6406 expand_expr, as that can have various side effects; LABEL_DECLs, for
6407 example, may not have their DECL_RTL set yet. Assume language
6408 specific tree nodes can be expanded in some interesting way. */
6409 if (DECL_P (exp)
6410 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6411 {
6412 result = expand_expr (exp, target, tmode,
6413 modifier == EXPAND_INITIALIZER
6414 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6415
6416 /* If the DECL isn't in memory, then the DECL wasn't properly
6417 marked TREE_ADDRESSABLE, which will be either a front-end
6418 or a tree optimizer bug. */
6419 gcc_assert (MEM_P (result));
6420 result = XEXP (result, 0);
6421
6422 /* ??? Is this needed anymore? */
6423 if (DECL_P (exp) && ! TREE_USED (exp))
6424 {
6425 assemble_external (exp);
6426 TREE_USED (exp) = 1;
6427 }
6428
6429 if (modifier != EXPAND_INITIALIZER
6430 && modifier != EXPAND_CONST_ADDRESS)
6431 result = force_operand (result, target);
6432 return result;
6433 }
6434
6435 /* Pass FALSE as the last argument to get_inner_reference although
6436 we are expanding to RTL. The rationale is that we know how to
6437 handle "aligning nodes" here: we can just bypass them because
6438 they won't change the final object whose address will be returned
6439 (they actually exist only for that purpose). */
6440 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6441 &mode1, &unsignedp, &volatilep, false);
6442 break;
6443 }
6444
6445 /* We must have made progress. */
6446 gcc_assert (inner != exp);
6447
6448 subtarget = offset || bitpos ? NULL_RTX : target;
6449 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6450
6451 if (offset)
6452 {
6453 rtx tmp;
6454
6455 if (modifier != EXPAND_NORMAL)
6456 result = force_operand (result, NULL);
6457 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6458
6459 result = convert_memory_address (tmode, result);
6460 tmp = convert_memory_address (tmode, tmp);
6461
6462 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6463 result = gen_rtx_PLUS (tmode, result, tmp);
6464 else
6465 {
6466 subtarget = bitpos ? NULL_RTX : target;
6467 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6468 1, OPTAB_LIB_WIDEN);
6469 }
6470 }
6471
6472 if (bitpos)
6473 {
6474 /* Someone beforehand should have rejected taking the address
6475 of such an object. */
6476 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6477
6478 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6479 if (modifier < EXPAND_SUM)
6480 result = force_operand (result, target);
6481 }
6482
6483 return result;
6484 }
6485
6486 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6487 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6488
6489 static rtx
6490 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6491 enum expand_modifier modifier)
6492 {
6493 enum machine_mode rmode;
6494 rtx result;
6495
6496 /* Target mode of VOIDmode says "whatever's natural". */
6497 if (tmode == VOIDmode)
6498 tmode = TYPE_MODE (TREE_TYPE (exp));
6499
6500 /* We can get called with some Weird Things if the user does silliness
6501 like "(short) &a". In that case, convert_memory_address won't do
6502 the right thing, so ignore the given target mode. */
6503 if (tmode != Pmode && tmode != ptr_mode)
6504 tmode = Pmode;
6505
6506 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6507 tmode, modifier);
6508
6509 /* Despite expand_expr's claims about ignoring TMODE when not
6510 strictly convenient, things break if we don't honor it. Note
6511 that combined with the above, we only do this for pointer modes. */
6512 rmode = GET_MODE (result);
6513 if (rmode == VOIDmode)
6514 rmode = tmode;
6515 if (rmode != tmode)
6516 result = convert_memory_address (tmode, result);
6517
6518 return result;
6519 }
6520
6521
6522 /* expand_expr: generate code for computing expression EXP.
6523 An rtx for the computed value is returned. The value is never null.
6524 In the case of a void EXP, const0_rtx is returned.
6525
6526 The value may be stored in TARGET if TARGET is nonzero.
6527 TARGET is just a suggestion; callers must assume that
6528 the rtx returned may not be the same as TARGET.
6529
6530 If TARGET is CONST0_RTX, it means that the value will be ignored.
6531
6532 If TMODE is not VOIDmode, it suggests generating the
6533 result in mode TMODE. But this is done only when convenient.
6534 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6535 TMODE is just a suggestion; callers must assume that
6536 the rtx returned may not have mode TMODE.
6537
6538 Note that TARGET may have neither TMODE nor MODE. In that case, it
6539 probably will not be used.
6540
6541 If MODIFIER is EXPAND_SUM then when EXP is an addition
6542 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6543 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6544 products as above, or REG or MEM, or constant.
6545 Ordinarily in such cases we would output mul or add instructions
6546 and then return a pseudo reg containing the sum.
6547
6548 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6549 it also marks a label as absolutely required (it can't be dead).
6550 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6551 This is used for outputting expressions used in initializers.
6552
6553 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6554 with a constant address even if that address is not normally legitimate.
6555 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6556
6557 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6558 a call parameter. Such targets require special care as we haven't yet
6559 marked TARGET so that it's safe from being trashed by libcalls. We
6560 don't want to use TARGET for anything but the final result;
6561 intermediate values must go elsewhere. Additionally, calls to
6562 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6563
6564 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6565 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6566 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6567 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6568 recursively. */
6569
6570 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6571 enum expand_modifier, rtx *);
6572
6573 rtx
6574 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6575 enum expand_modifier modifier, rtx *alt_rtl)
6576 {
6577 int rn = -1;
6578 rtx ret, last = NULL;
6579
6580 /* Handle ERROR_MARK before anybody tries to access its type. */
6581 if (TREE_CODE (exp) == ERROR_MARK
6582 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6583 {
6584 ret = CONST0_RTX (tmode);
6585 return ret ? ret : const0_rtx;
6586 }
6587
6588 if (flag_non_call_exceptions)
6589 {
6590 rn = lookup_stmt_eh_region (exp);
6591 /* If rn < 0, then either (1) tree-ssa is not used or (2) the expression doesn't throw. */
6592 if (rn >= 0)
6593 last = get_last_insn ();
6594 }
6595
6596 /* If this is an expression of some kind and it has an associated line
6597 number, then emit the line number before expanding the expression.
6598
6599 We need to save and restore the file and line information so that
6600 errors discovered during expansion are emitted with the right
6601 information. It would be better if the diagnostic routines
6602 used the file/line information embedded in the tree nodes rather
6603 than globals. */
6604 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6605 {
6606 location_t saved_location = input_location;
6607 input_location = EXPR_LOCATION (exp);
6608 emit_line_note (input_location);
6609
6610 /* Record where the insns produced belong. */
6611 record_block_change (TREE_BLOCK (exp));
6612
6613 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6614
6615 input_location = saved_location;
6616 }
6617 else
6618 {
6619 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6620 }
6621
6622 /* If using non-call exceptions, mark all insns that may trap.
6623 expand_call() will mark CALL_INSNs before we get to this code,
6624 but it doesn't handle libcalls, and these may trap. */
6625 if (rn >= 0)
6626 {
6627 rtx insn;
6628 for (insn = next_real_insn (last); insn;
6629 insn = next_real_insn (insn))
6630 {
6631 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6632 /* If we want exceptions for non-call insns, any
6633 may_trap_p instruction may throw. */
6634 && GET_CODE (PATTERN (insn)) != CLOBBER
6635 && GET_CODE (PATTERN (insn)) != USE
6636 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6637 {
6638 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6639 REG_NOTES (insn));
6640 }
6641 }
6642 }
6643
6644 return ret;
6645 }
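
/* Illustrative sketch, not part of the original source: a common way
   expand_expr is called, per the contract documented above.  EXP is a
   hypothetical tree expression and TARGET the usual suggestion.  */
#if 0
static rtx
expand_expr_modifier_sketch (tree exp, rtx target)
{
  /* Ordinary expansion: the value comes back in its natural mode and
     TARGET is only a suggestion; the returned rtx may differ from it.  */
  rtx val = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);

  /* Passing const0_rtx as the target instead would say that the value
     is ignored and only side effects matter.  */
  return val;
}
#endif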
6646
6647 static rtx
6648 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6649 enum expand_modifier modifier, rtx *alt_rtl)
6650 {
6651 rtx op0, op1, temp, decl_rtl;
6652 tree type = TREE_TYPE (exp);
6653 int unsignedp;
6654 enum machine_mode mode;
6655 enum tree_code code = TREE_CODE (exp);
6656 optab this_optab;
6657 rtx subtarget, original_target;
6658 int ignore;
6659 tree context, subexp0, subexp1;
6660 bool reduce_bit_field = false;
6661 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6662 ? reduce_to_bit_field_precision ((expr), \
6663 target, \
6664 type) \
6665 : (expr))
6666
6667 mode = TYPE_MODE (type);
6668 unsignedp = TYPE_UNSIGNED (type);
6669 if (lang_hooks.reduce_bit_field_operations
6670 && TREE_CODE (type) == INTEGER_TYPE
6671 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6672 {
6673 /* An operation in what may be a bit-field type needs the
6674 result to be reduced to the precision of the bit-field type,
6675 which is narrower than that of the type's mode. */
6676 reduce_bit_field = true;
6677 if (modifier == EXPAND_STACK_PARM)
6678 target = 0;
6679 }
6680
6681 /* Use subtarget as the target for operand 0 of a binary operation. */
6682 subtarget = get_subtarget (target);
6683 original_target = target;
6684 ignore = (target == const0_rtx
6685 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6686 || code == CONVERT_EXPR || code == COND_EXPR
6687 || code == VIEW_CONVERT_EXPR)
6688 && TREE_CODE (type) == VOID_TYPE));
6689
6690 /* If we are going to ignore this result, we need only do something
6691 if there is a side-effect somewhere in the expression. If there
6692 is, short-circuit the most common cases here. Note that we must
6693 not call expand_expr with anything but const0_rtx in case this
6694 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6695
6696 if (ignore)
6697 {
6698 if (! TREE_SIDE_EFFECTS (exp))
6699 return const0_rtx;
6700
6701 /* Ensure we reference a volatile object even if value is ignored, but
6702 don't do this if all we are doing is taking its address. */
6703 if (TREE_THIS_VOLATILE (exp)
6704 && TREE_CODE (exp) != FUNCTION_DECL
6705 && mode != VOIDmode && mode != BLKmode
6706 && modifier != EXPAND_CONST_ADDRESS)
6707 {
6708 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6709 if (MEM_P (temp))
6710 temp = copy_to_reg (temp);
6711 return const0_rtx;
6712 }
6713
6714 if (TREE_CODE_CLASS (code) == tcc_unary
6715 || code == COMPONENT_REF || code == INDIRECT_REF)
6716 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6717 modifier);
6718
6719 else if (TREE_CODE_CLASS (code) == tcc_binary
6720 || TREE_CODE_CLASS (code) == tcc_comparison
6721 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6722 {
6723 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6724 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6725 return const0_rtx;
6726 }
6727 else if (code == BIT_FIELD_REF)
6728 {
6729 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6730 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6731 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6732 return const0_rtx;
6733 }
6734
6735 target = 0;
6736 }
6737
6738
6739 switch (code)
6740 {
6741 case LABEL_DECL:
6742 {
6743 tree function = decl_function_context (exp);
6744
6745 temp = label_rtx (exp);
6746 temp = gen_rtx_LABEL_REF (Pmode, temp);
6747
6748 if (function != current_function_decl
6749 && function != 0)
6750 LABEL_REF_NONLOCAL_P (temp) = 1;
6751
6752 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6753 return temp;
6754 }
6755
6756 case SSA_NAME:
6757 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6758 NULL);
6759
6760 case PARM_DECL:
6761 case VAR_DECL:
6762 /* If a static var's type was incomplete when the decl was written,
6763 but the type is complete now, lay out the decl now. */
6764 if (DECL_SIZE (exp) == 0
6765 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6766 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6767 layout_decl (exp, 0);
6768
6769 /* ... fall through ... */
6770
6771 case FUNCTION_DECL:
6772 case RESULT_DECL:
6773 decl_rtl = DECL_RTL (exp);
6774 gcc_assert (decl_rtl);
6775
6776 /* Ensure the variable is marked as used even if it doesn't go through
6777 a parser. If it hasn't been used yet, write out an external
6778 definition. */
6779 if (! TREE_USED (exp))
6780 {
6781 assemble_external (exp);
6782 TREE_USED (exp) = 1;
6783 }
6784
6785 /* Show we haven't gotten RTL for this yet. */
6786 temp = 0;
6787
6788 /* Variables inherited from containing functions should have
6789 been lowered by this point. */
6790 context = decl_function_context (exp);
6791 gcc_assert (!context
6792 || context == current_function_decl
6793 || TREE_STATIC (exp)
6794 /* ??? C++ creates functions that are not TREE_STATIC. */
6795 || TREE_CODE (exp) == FUNCTION_DECL);
6796
6797 /* This is the case of an array whose size is to be determined
6798 from its initializer, while the initializer is still being parsed.
6799 See expand_decl. */
6800
6801 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6802 temp = validize_mem (decl_rtl);
6803
6804 /* If DECL_RTL is memory, we are in the normal case: if either
6805 the address is not valid, or it is not a register and -fforce-addr
6806 is specified, get the address into a register. */
6807
6808 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
6809 {
6810 if (alt_rtl)
6811 *alt_rtl = decl_rtl;
6812 decl_rtl = use_anchored_address (decl_rtl);
6813 if (modifier != EXPAND_CONST_ADDRESS
6814 && modifier != EXPAND_SUM
6815 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
6816 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
6817 temp = replace_equiv_address (decl_rtl,
6818 copy_rtx (XEXP (decl_rtl, 0)));
6819 }
6820
6821 /* If we got something, return it. But first, set the alignment
6822 if the address is a register. */
6823 if (temp != 0)
6824 {
6825 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6826 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6827
6828 return temp;
6829 }
6830
6831 /* If the mode of DECL_RTL does not match that of the decl, it
6832 must be a promoted value. We return a SUBREG of the wanted mode,
6833 but mark it so that we know that it was already extended. */
6834
6835 if (REG_P (decl_rtl)
6836 && GET_MODE (decl_rtl) != DECL_MODE (exp))
6837 {
6838 enum machine_mode pmode;
6839
6840 /* Get the signedness used for this variable. Ensure we get the
6841 same mode we got when the variable was declared. */
6842 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6843 (TREE_CODE (exp) == RESULT_DECL
6844 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
6845 gcc_assert (GET_MODE (decl_rtl) == pmode);
6846
6847 temp = gen_lowpart_SUBREG (mode, decl_rtl);
6848 SUBREG_PROMOTED_VAR_P (temp) = 1;
6849 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6850 return temp;
6851 }
6852
6853 return decl_rtl;
6854
6855 case INTEGER_CST:
6856 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6857 TREE_INT_CST_HIGH (exp), mode);
6858
6859 /* ??? If overflow is set, fold will have done an incomplete job,
6860 which can result in (plus xx (const_int 0)), which can get
6861 simplified by validate_replace_rtx during virtual register
6862 instantiation, which can result in unrecognizable insns.
6863 Avoid this by forcing all overflows into registers. */
6864 if (TREE_CONSTANT_OVERFLOW (exp)
6865 && modifier != EXPAND_INITIALIZER)
6866 temp = force_reg (mode, temp);
6867
6868 return temp;
6869
6870 case VECTOR_CST:
6871 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6872 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6873 return const_vector_from_tree (exp);
6874 else
6875 return expand_expr (build_constructor_from_list
6876 (TREE_TYPE (exp),
6877 TREE_VECTOR_CST_ELTS (exp)),
6878 ignore ? const0_rtx : target, tmode, modifier);
6879
6880 case CONST_DECL:
6881 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6882
6883 case REAL_CST:
6884 /* If optimized, generate immediate CONST_DOUBLE
6885 which will be turned into memory by reload if necessary.
6886
6887 We used to force a register so that loop.c could see it. But
6888 this does not allow gen_* patterns to perform optimizations with
6889 the constants. It also produces two insns in cases like "x = 1.0;".
6890 On most machines, floating-point constants are not permitted in
6891 many insns, so we'd end up copying it to a register in any case.
6892
6893 Now, we do the copying in expand_binop, if appropriate. */
6894 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6895 TYPE_MODE (TREE_TYPE (exp)));
6896
6897 case COMPLEX_CST:
6898 /* Handle evaluating a complex constant in a CONCAT target. */
6899 if (original_target && GET_CODE (original_target) == CONCAT)
6900 {
6901 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6902 rtx rtarg, itarg;
6903
6904 rtarg = XEXP (original_target, 0);
6905 itarg = XEXP (original_target, 1);
6906
6907 /* Move the real and imaginary parts separately. */
6908 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6909 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6910
6911 if (op0 != rtarg)
6912 emit_move_insn (rtarg, op0);
6913 if (op1 != itarg)
6914 emit_move_insn (itarg, op1);
6915
6916 return original_target;
6917 }
6918
6919 /* ... fall through ... */
6920
6921 case STRING_CST:
6922 temp = expand_expr_constant (exp, 1, modifier);
6923
6924 /* temp contains a constant address.
6925 On RISC machines where a constant address isn't valid,
6926 make some insns to get that address into a register. */
6927 if (modifier != EXPAND_CONST_ADDRESS
6928 && modifier != EXPAND_INITIALIZER
6929 && modifier != EXPAND_SUM
6930 && (! memory_address_p (mode, XEXP (temp, 0))
6931 || flag_force_addr))
6932 return replace_equiv_address (temp,
6933 copy_rtx (XEXP (temp, 0)));
6934 return temp;
6935
6936 case SAVE_EXPR:
6937 {
6938 tree val = TREE_OPERAND (exp, 0);
6939 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6940
6941 if (!SAVE_EXPR_RESOLVED_P (exp))
6942 {
6943 /* We can indeed still hit this case, typically via builtin
6944 expanders calling save_expr immediately before expanding
6945 something. Assume this means that we only have to deal
6946 with non-BLKmode values. */
6947 gcc_assert (GET_MODE (ret) != BLKmode);
6948
6949 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6950 DECL_ARTIFICIAL (val) = 1;
6951 DECL_IGNORED_P (val) = 1;
6952 TREE_OPERAND (exp, 0) = val;
6953 SAVE_EXPR_RESOLVED_P (exp) = 1;
6954
6955 if (!CONSTANT_P (ret))
6956 ret = copy_to_reg (ret);
6957 SET_DECL_RTL (val, ret);
6958 }
6959
6960 return ret;
6961 }
6962
6963 case GOTO_EXPR:
6964 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6965 expand_goto (TREE_OPERAND (exp, 0));
6966 else
6967 expand_computed_goto (TREE_OPERAND (exp, 0));
6968 return const0_rtx;
6969
6970 case CONSTRUCTOR:
6971 /* If we don't need the result, just ensure we evaluate any
6972 subexpressions. */
6973 if (ignore)
6974 {
6975 unsigned HOST_WIDE_INT idx;
6976 tree value;
6977
6978 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6979 expand_expr (value, const0_rtx, VOIDmode, 0);
6980
6981 return const0_rtx;
6982 }
6983
6984 /* Try to avoid creating a temporary at all. This is possible
6985 if all of the initializer is zero.
6986 FIXME: try to handle all [0..255] initializers we can handle
6987 with memset. */
6988 else if (TREE_STATIC (exp)
6989 && !TREE_ADDRESSABLE (exp)
6990 && target != 0 && mode == BLKmode
6991 && all_zeros_p (exp))
6992 {
6993 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6994 return target;
6995 }
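      /* Illustrative sketch, not part of the original source: for a
         local aggregate initialized from an all-zero constructor, e.g.

             struct S { int a[16]; } s = { 0 };

         the branch above clears the BLKmode target with clear_storage
         (typically a memset call or an inline clearing loop) instead of
         emitting a zero block in memory and copying from it.  */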
6996
6997 /* All elts simple constants => refer to a constant in memory. But
6998 if this is a non-BLKmode mode, let it store a field at a time
6999 since that should make a CONST_INT or CONST_DOUBLE when we
7000 fold. Likewise, if we have a target we can use, it is best to
7001 store directly into the target unless the type is large enough
7002 that memcpy will be used. If we are making an initializer and
7003 all operands are constant, put it in memory as well.
7004
7005 FIXME: Avoid trying to fill vector constructors piece-meal.
7006 Output them with output_constant_def below unless we're sure
7007 they're zeros. This should go away when vector initializers
7008 are treated like VECTOR_CST instead of arrays.
7009 */
7010 else if ((TREE_STATIC (exp)
7011 && ((mode == BLKmode
7012 && ! (target != 0 && safe_from_p (target, exp, 1)))
7013 || TREE_ADDRESSABLE (exp)
7014 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7015 && (! MOVE_BY_PIECES_P
7016 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7017 TYPE_ALIGN (type)))
7018 && ! mostly_zeros_p (exp))))
7019 || ((modifier == EXPAND_INITIALIZER
7020 || modifier == EXPAND_CONST_ADDRESS)
7021 && TREE_CONSTANT (exp)))
7022 {
7023 rtx constructor = expand_expr_constant (exp, 1, modifier);
7024
7025 if (modifier != EXPAND_CONST_ADDRESS
7026 && modifier != EXPAND_INITIALIZER
7027 && modifier != EXPAND_SUM)
7028 constructor = validize_mem (constructor);
7029
7030 return constructor;
7031 }
7032 else
7033 {
7034 /* Handle calls that pass values in multiple non-contiguous
7035 locations. The Irix 6 ABI has examples of this. */
7036 if (target == 0 || ! safe_from_p (target, exp, 1)
7037 || GET_CODE (target) == PARALLEL
7038 || modifier == EXPAND_STACK_PARM)
7039 target
7040 = assign_temp (build_qualified_type (type,
7041 (TYPE_QUALS (type)
7042 | (TREE_READONLY (exp)
7043 * TYPE_QUAL_CONST))),
7044 0, TREE_ADDRESSABLE (exp), 1);
7045
7046 store_constructor (exp, target, 0, int_expr_size (exp));
7047 return target;
7048 }
7049
7050 case MISALIGNED_INDIRECT_REF:
7051 case ALIGN_INDIRECT_REF:
7052 case INDIRECT_REF:
7053 {
7054 tree exp1 = TREE_OPERAND (exp, 0);
7055
7056 if (modifier != EXPAND_WRITE)
7057 {
7058 tree t;
7059
7060 t = fold_read_from_constant_string (exp);
7061 if (t)
7062 return expand_expr (t, target, tmode, modifier);
7063 }
7064
7065 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7066 op0 = memory_address (mode, op0);
7067
7068 if (code == ALIGN_INDIRECT_REF)
7069 {
7070 int align = TYPE_ALIGN_UNIT (type);
7071 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7072 op0 = memory_address (mode, op0);
7073 }
7074
7075 temp = gen_rtx_MEM (mode, op0);
7076
7077 set_mem_attributes (temp, exp, 0);
7078
7079 /* Resolve the misalignment now, so that we don't have to remember
7080 to resolve it later. Of course, this only works for reads. */
7081 /* ??? When we get around to supporting writes, we'll have to handle
7082 this in store_expr directly. The vectorizer isn't generating
7083 those yet, however. */
7084 if (code == MISALIGNED_INDIRECT_REF)
7085 {
7086 int icode;
7087 rtx reg, insn;
7088
7089 gcc_assert (modifier == EXPAND_NORMAL
7090 || modifier == EXPAND_STACK_PARM);
7091
7092 /* The vectorizer should have already checked the mode. */
7093 icode = movmisalign_optab->handlers[mode].insn_code;
7094 gcc_assert (icode != CODE_FOR_nothing);
7095
7096 /* We've already validated the memory, and we're creating a
7097 new pseudo destination. The predicates really can't fail. */
7098 reg = gen_reg_rtx (mode);
7099
7100 /* Nor can the insn generator. */
7101 insn = GEN_FCN (icode) (reg, temp);
7102 emit_insn (insn);
7103
7104 return reg;
7105 }
7106
7107 return temp;
7108 }
7109
7110 case TARGET_MEM_REF:
7111 {
7112 struct mem_address addr;
7113
7114 get_address_description (exp, &addr);
7115 op0 = addr_for_mem_ref (&addr, true);
7116 op0 = memory_address (mode, op0);
7117 temp = gen_rtx_MEM (mode, op0);
7118 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7119 }
7120 return temp;
7121
7122 case ARRAY_REF:
7123
7124 {
7125 tree array = TREE_OPERAND (exp, 0);
7126 tree index = TREE_OPERAND (exp, 1);
7127
7128 /* Fold an expression like: "foo"[2].
7129 This is not done in fold so it won't happen inside &.
7130 Don't fold if this is for wide characters since it's too
7131 difficult to do correctly and this is a very rare case. */
7132
7133 if (modifier != EXPAND_CONST_ADDRESS
7134 && modifier != EXPAND_INITIALIZER
7135 && modifier != EXPAND_MEMORY)
7136 {
7137 tree t = fold_read_from_constant_string (exp);
7138
7139 if (t)
7140 return expand_expr (t, target, tmode, modifier);
7141 }
7142
7143 /* If this is a constant index into a constant array,
7144 just get the value from the array. Handle both the cases when
7145 we have an explicit constructor and when our operand is a variable
7146 that was declared const. */
7147
7148 if (modifier != EXPAND_CONST_ADDRESS
7149 && modifier != EXPAND_INITIALIZER
7150 && modifier != EXPAND_MEMORY
7151 && TREE_CODE (array) == CONSTRUCTOR
7152 && ! TREE_SIDE_EFFECTS (array)
7153 && TREE_CODE (index) == INTEGER_CST)
7154 {
7155 unsigned HOST_WIDE_INT ix;
7156 tree field, value;
7157
7158 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7159 field, value)
7160 if (tree_int_cst_equal (field, index))
7161 {
7162 if (!TREE_SIDE_EFFECTS (value))
7163 return expand_expr (fold (value), target, tmode, modifier);
7164 break;
7165 }
7166 }
7167
7168 else if (optimize >= 1
7169 && modifier != EXPAND_CONST_ADDRESS
7170 && modifier != EXPAND_INITIALIZER
7171 && modifier != EXPAND_MEMORY
7172 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7173 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7174 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7175 && targetm.binds_local_p (array))
7176 {
7177 if (TREE_CODE (index) == INTEGER_CST)
7178 {
7179 tree init = DECL_INITIAL (array);
7180
7181 if (TREE_CODE (init) == CONSTRUCTOR)
7182 {
7183 unsigned HOST_WIDE_INT ix;
7184 tree field, value;
7185
7186 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7187 field, value)
7188 if (tree_int_cst_equal (field, index))
7189 {
7190 if (!TREE_SIDE_EFFECTS (value))
7191 return expand_expr (fold (value), target, tmode,
7192 modifier);
7193 break;
7194 }
7195 }
7196 else if (TREE_CODE (init) == STRING_CST)
7197 {
7198 tree index1 = index;
7199 tree low_bound = array_ref_low_bound (exp);
7200 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7201
7202 /* Optimize the special-case of a zero lower bound.
7203
7204 We convert the low_bound to sizetype to avoid some problems
7205 with constant folding. (E.g. suppose the lower bound is 1,
7206 and its mode is QI. Without the conversion, (ARRAY
7207 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7208 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7209
7210 if (! integer_zerop (low_bound))
7211 index1 = size_diffop (index1, fold_convert (sizetype,
7212 low_bound));
7213
7214 if (0 > compare_tree_int (index1,
7215 TREE_STRING_LENGTH (init)))
7216 {
7217 tree type = TREE_TYPE (TREE_TYPE (init));
7218 enum machine_mode mode = TYPE_MODE (type);
7219
7220 if (GET_MODE_CLASS (mode) == MODE_INT
7221 && GET_MODE_SIZE (mode) == 1)
7222 return gen_int_mode (TREE_STRING_POINTER (init)
7223 [TREE_INT_CST_LOW (index1)],
7224 mode);
7225 }
7226 }
7227 }
7228 }
7229 }
7230 goto normal_inner_ref;
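      /* Illustrative sketch, not part of the original source: in the
         ARRAY_REF case above, a read such as

             static const char msg[] = "abc";
             ... msg[1] ...

         hits the STRING_CST branch when optimizing, because the array
         is read-only, binds locally, and has a constant index smaller
         than the string length; the reference is replaced by the
         character constant 'b' and no memory load is emitted.  */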
7231
7232 case COMPONENT_REF:
7233 /* If the operand is a CONSTRUCTOR, we can just extract the
7234 appropriate field if it is present. */
7235 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7236 {
7237 unsigned HOST_WIDE_INT idx;
7238 tree field, value;
7239
7240 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7241 idx, field, value)
7242 if (field == TREE_OPERAND (exp, 1)
7243 /* We can normally use the value of the field in the
7244 CONSTRUCTOR. However, if this is a bitfield in
7245 an integral mode that we can fit in a HOST_WIDE_INT,
7246 we must mask only the number of bits in the bitfield,
7247 since this is done implicitly by the constructor. If
7248 the bitfield does not meet either of those conditions,
7249 we can't do this optimization. */
7250 && (! DECL_BIT_FIELD (field)
7251 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7252 && (GET_MODE_BITSIZE (DECL_MODE (field))
7253 <= HOST_BITS_PER_WIDE_INT))))
7254 {
7255 if (DECL_BIT_FIELD (field)
7256 && modifier == EXPAND_STACK_PARM)
7257 target = 0;
7258 op0 = expand_expr (value, target, tmode, modifier);
7259 if (DECL_BIT_FIELD (field))
7260 {
7261 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7262 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7263
7264 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7265 {
7266 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7267 op0 = expand_and (imode, op0, op1, target);
7268 }
7269 else
7270 {
7271 tree count
7272 = build_int_cst (NULL_TREE,
7273 GET_MODE_BITSIZE (imode) - bitsize);
7274
7275 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7276 target, 0);
7277 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7278 target, 0);
7279 }
7280 }
7281
7282 return op0;
7283 }
7284 }
7285 goto normal_inner_ref;
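      /* Illustrative sketch, not part of the original source: when the
         object of the COMPONENT_REF above is itself a CONSTRUCTOR and
         the selected member is a bit-field, say an unsigned 3-bit field
         holding the value 5, the stored value is re-truncated to the
         field width: it is ANDed with (1 << 3) - 1 for an unsigned
         field, or shifted left and then arithmetically right by
         GET_MODE_BITSIZE (imode) - 3 for a signed one.  */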
7286
7287 case BIT_FIELD_REF:
7288 case ARRAY_RANGE_REF:
7289 normal_inner_ref:
7290 {
7291 enum machine_mode mode1;
7292 HOST_WIDE_INT bitsize, bitpos;
7293 tree offset;
7294 int volatilep = 0;
7295 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7296 &mode1, &unsignedp, &volatilep, true);
7297 rtx orig_op0;
7298
7299 /* If we got back the original object, something is wrong. Perhaps
7300 we are evaluating an expression too early. In any event, don't
7301 infinitely recurse. */
7302 gcc_assert (tem != exp);
7303
7304 /* If TEM's type is a union of variable size, pass TARGET to the inner
7305 computation, since it will need a temporary and TARGET is known
7306 to suffice. This occurs in unchecked conversion in Ada. */
7307
7308 orig_op0 = op0
7309 = expand_expr (tem,
7310 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7311 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7312 != INTEGER_CST)
7313 && modifier != EXPAND_STACK_PARM
7314 ? target : NULL_RTX),
7315 VOIDmode,
7316 (modifier == EXPAND_INITIALIZER
7317 || modifier == EXPAND_CONST_ADDRESS
7318 || modifier == EXPAND_STACK_PARM)
7319 ? modifier : EXPAND_NORMAL);
7320
7321 /* If this is a constant, put it into a register if it is a legitimate
7322 constant, OFFSET is 0, and we won't try to extract outside the
7323 register (in case we were passed a partially uninitialized object
7324 or a view_conversion to a larger size). Force the constant to
7325 memory otherwise. */
7326 if (CONSTANT_P (op0))
7327 {
7328 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7329 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7330 && offset == 0
7331 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7332 op0 = force_reg (mode, op0);
7333 else
7334 op0 = validize_mem (force_const_mem (mode, op0));
7335 }
7336
7337 /* Otherwise, if this object is not in memory and we either have an
7338 offset, a BLKmode result, or a reference outside the object, put it
7339 there. Such cases can occur in Ada if we have unchecked conversion
7340 of an expression from a scalar type to an array or record type or
7341 for an ARRAY_RANGE_REF whose type is BLKmode. */
7342 else if (!MEM_P (op0)
7343 && (offset != 0
7344 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7345 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7346 {
7347 tree nt = build_qualified_type (TREE_TYPE (tem),
7348 (TYPE_QUALS (TREE_TYPE (tem))
7349 | TYPE_QUAL_CONST));
7350 rtx memloc = assign_temp (nt, 1, 1, 1);
7351
7352 emit_move_insn (memloc, op0);
7353 op0 = memloc;
7354 }
7355
7356 if (offset != 0)
7357 {
7358 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7359 EXPAND_SUM);
7360
7361 gcc_assert (MEM_P (op0));
7362
7363 #ifdef POINTERS_EXTEND_UNSIGNED
7364 if (GET_MODE (offset_rtx) != Pmode)
7365 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7366 #else
7367 if (GET_MODE (offset_rtx) != ptr_mode)
7368 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7369 #endif
7370
7371 if (GET_MODE (op0) == BLKmode
7372 /* A constant address in OP0 can have VOIDmode, we must
7373 not try to call force_reg in that case. */
7374 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7375 && bitsize != 0
7376 && (bitpos % bitsize) == 0
7377 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7378 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7379 {
7380 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7381 bitpos = 0;
7382 }
7383
7384 op0 = offset_address (op0, offset_rtx,
7385 highest_pow2_factor (offset));
7386 }
7387
7388 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7389 record its alignment as BIGGEST_ALIGNMENT. */
7390 if (MEM_P (op0) && bitpos == 0 && offset != 0
7391 && is_aligning_offset (offset, tem))
7392 set_mem_align (op0, BIGGEST_ALIGNMENT);
7393
7394 /* Don't forget about volatility even if this is a bitfield. */
7395 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7396 {
7397 if (op0 == orig_op0)
7398 op0 = copy_rtx (op0);
7399
7400 MEM_VOLATILE_P (op0) = 1;
7401 }
7402
7403 /* The following code doesn't handle CONCAT.
7404 Assume only bitpos == 0 can be used for CONCAT, due to
7405 one-element arrays having the same mode as their element. */
7406 if (GET_CODE (op0) == CONCAT)
7407 {
7408 gcc_assert (bitpos == 0
7409 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7410 return op0;
7411 }
7412
7413 /* In cases where an aligned union has an unaligned object
7414 as a field, we might be extracting a BLKmode value from
7415 an integer-mode (e.g., SImode) object. Handle this case
7416 by doing the extract into an object as wide as the field
7417 (which we know to be the width of a basic mode), then
7418 storing into memory, and changing the mode to BLKmode. */
7419 if (mode1 == VOIDmode
7420 || REG_P (op0) || GET_CODE (op0) == SUBREG
7421 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7422 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7423 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7424 && modifier != EXPAND_CONST_ADDRESS
7425 && modifier != EXPAND_INITIALIZER)
7426 /* If the field isn't aligned enough to fetch as a memref,
7427 fetch it as a bit field. */
7428 || (mode1 != BLKmode
7429 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7430 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7431 || (MEM_P (op0)
7432 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7433 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7434 && ((modifier == EXPAND_CONST_ADDRESS
7435 || modifier == EXPAND_INITIALIZER)
7436 ? STRICT_ALIGNMENT
7437 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7438 || (bitpos % BITS_PER_UNIT != 0)))
7439 /* If the type and the field are a constant size and the
7440 size of the type isn't the same size as the bitfield,
7441 we must use bitfield operations. */
7442 || (bitsize >= 0
7443 && TYPE_SIZE (TREE_TYPE (exp))
7444 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7445 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7446 bitsize)))
7447 {
7448 enum machine_mode ext_mode = mode;
7449
7450 if (ext_mode == BLKmode
7451 && ! (target != 0 && MEM_P (op0)
7452 && MEM_P (target)
7453 && bitpos % BITS_PER_UNIT == 0))
7454 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7455
7456 if (ext_mode == BLKmode)
7457 {
7458 if (target == 0)
7459 target = assign_temp (type, 0, 1, 1);
7460
7461 if (bitsize == 0)
7462 return target;
7463
7464 /* In this case, BITPOS must start at a byte boundary and
7465 TARGET, if specified, must be a MEM. */
7466 gcc_assert (MEM_P (op0)
7467 && (!target || MEM_P (target))
7468 && !(bitpos % BITS_PER_UNIT));
7469
7470 emit_block_move (target,
7471 adjust_address (op0, VOIDmode,
7472 bitpos / BITS_PER_UNIT),
7473 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7474 / BITS_PER_UNIT),
7475 (modifier == EXPAND_STACK_PARM
7476 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7477
7478 return target;
7479 }
7480
7481 op0 = validize_mem (op0);
7482
7483 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7484 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7485
7486 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7487 (modifier == EXPAND_STACK_PARM
7488 ? NULL_RTX : target),
7489 ext_mode, ext_mode);
7490
7491 /* If the result is a record type and BITSIZE is narrower than
7492 the mode of OP0, an integral mode, and this is a big endian
7493 machine, we must put the field into the high-order bits. */
7494 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7495 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7496 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7497 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7498 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7499 - bitsize),
7500 op0, 1);
7501
7502 /* If the result type is BLKmode, store the data into a temporary
7503 of the appropriate type, but with the mode corresponding to the
7504 mode for the data we have (op0's mode). It's tempting to make
7505 this a constant type, since we know it's only being stored once,
7506 but that can cause problems if we are taking the address of this
7507 COMPONENT_REF because the MEM of any reference via that address
7508 will have flags corresponding to the type, which will not
7509 necessarily be constant. */
7510 if (mode == BLKmode)
7511 {
7512 rtx new
7513 = assign_stack_temp_for_type
7514 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7515
7516 emit_move_insn (new, op0);
7517 op0 = copy_rtx (new);
7518 PUT_MODE (op0, BLKmode);
7519 set_mem_attributes (op0, exp, 1);
7520 }
7521
7522 return op0;
7523 }
7524
7525 /* If the result is BLKmode, use that to access the object
7526 now as well. */
7527 if (mode == BLKmode)
7528 mode1 = BLKmode;
7529
7530 /* Get a reference to just this component. */
7531 if (modifier == EXPAND_CONST_ADDRESS
7532 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7533 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7534 else
7535 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7536
7537 if (op0 == orig_op0)
7538 op0 = copy_rtx (op0);
7539
7540 set_mem_attributes (op0, exp, 0);
7541 if (REG_P (XEXP (op0, 0)))
7542 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7543
7544 MEM_VOLATILE_P (op0) |= volatilep;
7545 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7546 || modifier == EXPAND_CONST_ADDRESS
7547 || modifier == EXPAND_INITIALIZER)
7548 return op0;
7549 else if (target == 0)
7550 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7551
7552 convert_move (target, op0, unsignedp);
7553 return target;
7554 }
7555
7556 case OBJ_TYPE_REF:
7557 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7558
7559 case CALL_EXPR:
7560 /* Check for a built-in function. */
7561 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7562 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7563 == FUNCTION_DECL)
7564 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7565 {
7566 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7567 == BUILT_IN_FRONTEND)
7568 return lang_hooks.expand_expr (exp, original_target,
7569 tmode, modifier,
7570 alt_rtl);
7571 else
7572 return expand_builtin (exp, target, subtarget, tmode, ignore);
7573 }
7574
7575 return expand_call (exp, target, ignore);
7576
7577 case NON_LVALUE_EXPR:
7578 case NOP_EXPR:
7579 case CONVERT_EXPR:
7580 if (TREE_OPERAND (exp, 0) == error_mark_node)
7581 return const0_rtx;
7582
7583 if (TREE_CODE (type) == UNION_TYPE)
7584 {
7585 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7586
7587 /* If both input and output are BLKmode, this conversion isn't doing
7588 anything except possibly changing memory attributes. */
7589 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7590 {
7591 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7592 modifier);
7593
7594 result = copy_rtx (result);
7595 set_mem_attributes (result, exp, 0);
7596 return result;
7597 }
7598
7599 if (target == 0)
7600 {
7601 if (TYPE_MODE (type) != BLKmode)
7602 target = gen_reg_rtx (TYPE_MODE (type));
7603 else
7604 target = assign_temp (type, 0, 1, 1);
7605 }
7606
7607 if (MEM_P (target))
7608 /* Store data into beginning of memory target. */
7609 store_expr (TREE_OPERAND (exp, 0),
7610 adjust_address (target, TYPE_MODE (valtype), 0),
7611 modifier == EXPAND_STACK_PARM);
7612
7613 else
7614 {
7615 gcc_assert (REG_P (target));
7616
7617 /* Store this field into a union of the proper type. */
7618 store_field (target,
7619 MIN ((int_size_in_bytes (TREE_TYPE
7620 (TREE_OPERAND (exp, 0)))
7621 * BITS_PER_UNIT),
7622 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7623 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7624 type, 0);
7625 }
7626
7627 /* Return the entire union. */
7628 return target;
7629 }
7630
7631 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7632 {
7633 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7634 modifier);
7635
7636 /* If the signedness of the conversion differs and OP0 is
7637 a promoted SUBREG, clear that indication since we now
7638 have to do the proper extension. */
7639 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7640 && GET_CODE (op0) == SUBREG)
7641 SUBREG_PROMOTED_VAR_P (op0) = 0;
7642
7643 return REDUCE_BIT_FIELD (op0);
7644 }
7645
7646 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7647 if (GET_MODE (op0) == mode)
7648 ;
7649
7650 /* If OP0 is a constant, just convert it into the proper mode. */
7651 else if (CONSTANT_P (op0))
7652 {
7653 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7654 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7655
7656 if (modifier == EXPAND_INITIALIZER)
7657 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7658 subreg_lowpart_offset (mode,
7659 inner_mode));
7660 else
7661 op0 = convert_modes (mode, inner_mode, op0,
7662 TYPE_UNSIGNED (inner_type));
7663 }
7664
7665 else if (modifier == EXPAND_INITIALIZER)
7666 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7667
7668 else if (target == 0)
7669 op0 = convert_to_mode (mode, op0,
7670 TYPE_UNSIGNED (TREE_TYPE
7671 (TREE_OPERAND (exp, 0))));
7672 else
7673 {
7674 convert_move (target, op0,
7675 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7676 op0 = target;
7677 }
7678
7679 return REDUCE_BIT_FIELD (op0);
7680
7681 case VIEW_CONVERT_EXPR:
7682 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7683
7684 /* If the input and output modes are both the same, we are done. */
7685 if (TYPE_MODE (type) == GET_MODE (op0))
7686 ;
7687 /* If neither mode is BLKmode, and both modes are the same size
7688 then we can use gen_lowpart. */
7689 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7690 && GET_MODE_SIZE (TYPE_MODE (type))
7691 == GET_MODE_SIZE (GET_MODE (op0)))
7692 {
7693 if (GET_CODE (op0) == SUBREG)
7694 op0 = force_reg (GET_MODE (op0), op0);
7695 op0 = gen_lowpart (TYPE_MODE (type), op0);
7696 }
7697 /* If both modes are integral, then we can convert from one to the
7698 other. */
7699 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7700 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7701 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7702 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7703 /* As a last resort, spill op0 to memory, and reload it in a
7704 different mode. */
7705 else if (!MEM_P (op0))
7706 {
7707 /* If the operand is not a MEM, force it into memory. Since we
7708 are going to be changing the mode of the MEM, don't call
7709 force_const_mem for constants because we don't allow pool
7710 constants to change mode. */
7711 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7712
7713 gcc_assert (!TREE_ADDRESSABLE (exp));
7714
7715 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7716 target
7717 = assign_stack_temp_for_type
7718 (TYPE_MODE (inner_type),
7719 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7720
7721 emit_move_insn (target, op0);
7722 op0 = target;
7723 }
7724
7725 /* At this point, OP0 is in the correct mode. If the output type is such
7726 that the operand is known to be aligned, indicate that it is.
7727 Otherwise, we need only be concerned about alignment for non-BLKmode
7728 results. */
7729 if (MEM_P (op0))
7730 {
7731 op0 = copy_rtx (op0);
7732
7733 if (TYPE_ALIGN_OK (type))
7734 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7735 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7736 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7737 {
7738 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7739 HOST_WIDE_INT temp_size
7740 = MAX (int_size_in_bytes (inner_type),
7741 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7742 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7743 temp_size, 0, type);
7744 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7745
7746 gcc_assert (!TREE_ADDRESSABLE (exp));
7747
7748 if (GET_MODE (op0) == BLKmode)
7749 emit_block_move (new_with_op0_mode, op0,
7750 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7751 (modifier == EXPAND_STACK_PARM
7752 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7753 else
7754 emit_move_insn (new_with_op0_mode, op0);
7755
7756 op0 = new;
7757 }
7758
7759 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7760 }
7761
7762 return op0;
7763
7764 case PLUS_EXPR:
7765 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7766 something else, make sure we add the register to the constant and
7767 then to the other thing. This case can occur during strength
7768 reduction and doing it this way will produce better code if the
7769 frame pointer or argument pointer is eliminated.
7770
7771 fold-const.c will ensure that the constant is always in the inner
7772 PLUS_EXPR, so the only case we need to do anything about is if
7773 sp, ap, or fp is our second argument, in which case we must swap
7774 the innermost first argument and our second argument. */
7775
7776 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7777 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7778 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7779 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7780 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7781 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7782 {
7783 tree t = TREE_OPERAND (exp, 1);
7784
7785 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7786 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7787 }
7788
7789 /* If the result is to be ptr_mode and we are adding an integer to
7790 something, we might be forming a constant. So try to use
7791 plus_constant. If it produces a sum and we can't accept it,
7792 use force_operand. This allows P = &ARR[const] to generate
7793 efficient code on machines where a SYMBOL_REF is not a valid
7794 address.
7795
7796 If this is an EXPAND_SUM call, always return the sum. */
7797 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7798 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7799 {
7800 if (modifier == EXPAND_STACK_PARM)
7801 target = 0;
7802 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7803 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7804 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7805 {
7806 rtx constant_part;
7807
7808 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7809 EXPAND_SUM);
7810 /* Use immed_double_const to ensure that the constant is
7811 truncated according to the mode of OP1, then sign extended
7812 to a HOST_WIDE_INT. Using the constant directly can result
7813 in non-canonical RTL in a 64x32 cross compile. */
7814 constant_part
7815 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7816 (HOST_WIDE_INT) 0,
7817 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7818 op1 = plus_constant (op1, INTVAL (constant_part));
7819 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7820 op1 = force_operand (op1, target);
7821 return REDUCE_BIT_FIELD (op1);
7822 }
7823
7824 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7825 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7826 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7827 {
7828 rtx constant_part;
7829
7830 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7831 (modifier == EXPAND_INITIALIZER
7832 ? EXPAND_INITIALIZER : EXPAND_SUM));
7833 if (! CONSTANT_P (op0))
7834 {
7835 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7836 VOIDmode, modifier);
7837 /* Return a PLUS if modifier says it's OK. */
7838 if (modifier == EXPAND_SUM
7839 || modifier == EXPAND_INITIALIZER)
7840 return simplify_gen_binary (PLUS, mode, op0, op1);
7841 goto binop2;
7842 }
7843 /* Use immed_double_const to ensure that the constant is
7844 truncated according to the mode of OP1, then sign extended
7845 to a HOST_WIDE_INT. Using the constant directly can result
7846 in non-canonical RTL in a 64x32 cross compile. */
7847 constant_part
7848 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7849 (HOST_WIDE_INT) 0,
7850 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7851 op0 = plus_constant (op0, INTVAL (constant_part));
7852 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7853 op0 = force_operand (op0, target);
7854 return REDUCE_BIT_FIELD (op0);
7855 }
7856 }
7857
7858 /* No sense saving up arithmetic to be done
7859 if it's all in the wrong mode to form part of an address.
7860 And force_operand won't know whether to sign-extend or
7861 zero-extend. */
7862 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7863 || mode != ptr_mode)
7864 {
7865 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7866 subtarget, &op0, &op1, 0);
7867 if (op0 == const0_rtx)
7868 return op1;
7869 if (op1 == const0_rtx)
7870 return op0;
7871 goto binop2;
7872 }
7873
7874 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7875 subtarget, &op0, &op1, modifier);
7876 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7877
7878 case MINUS_EXPR:
7879 /* For initializers, we are allowed to return a MINUS of two
7880 symbolic constants. Here we handle all cases when both operands
7881 are constant. */
7882 /* Handle difference of two symbolic constants,
7883 for the sake of an initializer. */
7884 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7885 && really_constant_p (TREE_OPERAND (exp, 0))
7886 && really_constant_p (TREE_OPERAND (exp, 1)))
7887 {
7888 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7889 NULL_RTX, &op0, &op1, modifier);
7890
7891 /* If the last operand is a CONST_INT, use plus_constant of
7892 the negated constant. Else make the MINUS. */
7893 if (GET_CODE (op1) == CONST_INT)
7894 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7895 else
7896 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7897 }
7898
7899 /* No sense saving up arithmetic to be done
7900 if it's all in the wrong mode to form part of an address.
7901 And force_operand won't know whether to sign-extend or
7902 zero-extend. */
7903 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7904 || mode != ptr_mode)
7905 goto binop;
7906
7907 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7908 subtarget, &op0, &op1, modifier);
7909
7910 /* Convert A - const to A + (-const). */
7911 if (GET_CODE (op1) == CONST_INT)
7912 {
7913 op1 = negate_rtx (mode, op1);
7914 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7915 }
7916
7917 goto binop2;
7918
7919 case MULT_EXPR:
7920 /* If first operand is constant, swap them.
7921 Thus the following special case checks need only
7922 check the second operand. */
7923 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7924 {
7925 tree t1 = TREE_OPERAND (exp, 0);
7926 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7927 TREE_OPERAND (exp, 1) = t1;
7928 }
7929
7930 /* Attempt to return something suitable for generating an
7931 indexed address, for machines that support that. */
7932
7933 if (modifier == EXPAND_SUM && mode == ptr_mode
7934 && host_integerp (TREE_OPERAND (exp, 1), 0))
7935 {
7936 tree exp1 = TREE_OPERAND (exp, 1);
7937
7938 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7939 EXPAND_SUM);
7940
7941 if (!REG_P (op0))
7942 op0 = force_operand (op0, NULL_RTX);
7943 if (!REG_P (op0))
7944 op0 = copy_to_mode_reg (mode, op0);
7945
7946 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7947 gen_int_mode (tree_low_cst (exp1, 0),
7948 TYPE_MODE (TREE_TYPE (exp1)))));
7949 }
7950
7951 if (modifier == EXPAND_STACK_PARM)
7952 target = 0;
7953
7954 /* Check for multiplying things that have been extended
7955 from a narrower type. If this machine supports multiplying
7956 in that narrower type with a result in the desired type,
7957 do it that way, and avoid the explicit type-conversion. */
7958
7959 subexp0 = TREE_OPERAND (exp, 0);
7960 subexp1 = TREE_OPERAND (exp, 1);
7961 /* First, check if we have a multiplication of one signed and one
7962 unsigned operand. */
7963 if (TREE_CODE (subexp0) == NOP_EXPR
7964 && TREE_CODE (subexp1) == NOP_EXPR
7965 && TREE_CODE (type) == INTEGER_TYPE
7966 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7967 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7968 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7969 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
7970 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7971 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
7972 {
7973 enum machine_mode innermode
7974 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
7975 this_optab = usmul_widen_optab;
7976 if (mode == GET_MODE_WIDER_MODE (innermode))
7977 {
7978 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7979 {
7980 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
7981 expand_operands (TREE_OPERAND (subexp0, 0),
7982 TREE_OPERAND (subexp1, 0),
7983 NULL_RTX, &op0, &op1, 0);
7984 else
7985 expand_operands (TREE_OPERAND (subexp0, 0),
7986 TREE_OPERAND (subexp1, 0),
7987 NULL_RTX, &op1, &op0, 0);
7988
7989 goto binop3;
7990 }
7991 }
7992 }
7993 /* Check for a multiplication with matching signedness. */
7994 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7995 && TREE_CODE (type) == INTEGER_TYPE
7996 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7997 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7998 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7999 && int_fits_type_p (TREE_OPERAND (exp, 1),
8000 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8001 /* Don't use a widening multiply if a shift will do. */
8002 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8003 > HOST_BITS_PER_WIDE_INT)
8004 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8005 ||
8006 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8007 && (TYPE_PRECISION (TREE_TYPE
8008 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8009 == TYPE_PRECISION (TREE_TYPE
8010 (TREE_OPERAND
8011 (TREE_OPERAND (exp, 0), 0))))
8012 /* If both operands are extended, they must either both
8013 be zero-extended or both be sign-extended. */
8014 && (TYPE_UNSIGNED (TREE_TYPE
8015 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8016 == TYPE_UNSIGNED (TREE_TYPE
8017 (TREE_OPERAND
8018 (TREE_OPERAND (exp, 0), 0)))))))
8019 {
8020 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8021 enum machine_mode innermode = TYPE_MODE (op0type);
8022 bool zextend_p = TYPE_UNSIGNED (op0type);
8023 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8024 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8025
8026 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8027 {
8028 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8029 {
8030 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8031 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8032 TREE_OPERAND (exp, 1),
8033 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8034 else
8035 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8036 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8037 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8038 goto binop3;
8039 }
8040 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8041 && innermode == word_mode)
8042 {
8043 rtx htem, hipart;
8044 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8045 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8046 op1 = convert_modes (innermode, mode,
8047 expand_normal (TREE_OPERAND (exp, 1)),
8048 unsignedp);
8049 else
8050 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8051 temp = expand_binop (mode, other_optab, op0, op1, target,
8052 unsignedp, OPTAB_LIB_WIDEN);
8053 hipart = gen_highpart (innermode, temp);
8054 htem = expand_mult_highpart_adjust (innermode, hipart,
8055 op0, op1, hipart,
8056 zextend_p);
8057 if (htem != hipart)
8058 emit_move_insn (hipart, htem);
8059 return REDUCE_BIT_FIELD (temp);
8060 }
8061 }
8062 }
8063 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8064 subtarget, &op0, &op1, 0);
8065 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
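      /* Illustrative sketch, not part of the original source: the
         widening-multiply checks above let

             int f (short a, short b) { return a * b; }

         where both promoted operands are NOP_EXPRs from HImode, use a
         single HImode-to-SImode widening multiply (smul_widen_optab)
         when the target provides one, instead of extending both
         operands and doing a full SImode multiply.  */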
8066
8067 case TRUNC_DIV_EXPR:
8068 case FLOOR_DIV_EXPR:
8069 case CEIL_DIV_EXPR:
8070 case ROUND_DIV_EXPR:
8071 case EXACT_DIV_EXPR:
8072 if (modifier == EXPAND_STACK_PARM)
8073 target = 0;
8074 /* Possible optimization: compute the dividend with EXPAND_SUM
8075 then if the divisor is constant can optimize the case
8076 where some terms of the dividend have coeffs divisible by it. */
8077 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8078 subtarget, &op0, &op1, 0);
8079 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8080
8081 case RDIV_EXPR:
8082 goto binop;
8083
8084 case TRUNC_MOD_EXPR:
8085 case FLOOR_MOD_EXPR:
8086 case CEIL_MOD_EXPR:
8087 case ROUND_MOD_EXPR:
8088 if (modifier == EXPAND_STACK_PARM)
8089 target = 0;
8090 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8091 subtarget, &op0, &op1, 0);
8092 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8093
8094 case FIX_ROUND_EXPR:
8095 case FIX_FLOOR_EXPR:
8096 case FIX_CEIL_EXPR:
8097 gcc_unreachable (); /* Not used for C. */
8098
8099 case FIX_TRUNC_EXPR:
8100 op0 = expand_normal (TREE_OPERAND (exp, 0));
8101 if (target == 0 || modifier == EXPAND_STACK_PARM)
8102 target = gen_reg_rtx (mode);
8103 expand_fix (target, op0, unsignedp);
8104 return target;
8105
8106 case FLOAT_EXPR:
8107 op0 = expand_normal (TREE_OPERAND (exp, 0));
8108 if (target == 0 || modifier == EXPAND_STACK_PARM)
8109 target = gen_reg_rtx (mode);
8110 /* expand_float can't figure out what to do if FROM has VOIDmode.
8111 So give it the correct mode. With -O, cse will optimize this. */
8112 if (GET_MODE (op0) == VOIDmode)
8113 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8114 op0);
8115 expand_float (target, op0,
8116 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8117 return target;
8118
8119 case NEGATE_EXPR:
8120 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8121 if (modifier == EXPAND_STACK_PARM)
8122 target = 0;
8123 temp = expand_unop (mode,
8124 optab_for_tree_code (NEGATE_EXPR, type),
8125 op0, target, 0);
8126 gcc_assert (temp);
8127 return REDUCE_BIT_FIELD (temp);
8128
8129 case ABS_EXPR:
8130 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8131 if (modifier == EXPAND_STACK_PARM)
8132 target = 0;
8133
8134 /* ABS_EXPR is not valid for complex arguments. */
8135 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8136 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8137
8138 /* Unsigned abs is simply the operand. Testing here means we don't
8139 risk generating incorrect code below. */
8140 if (TYPE_UNSIGNED (type))
8141 return op0;
8142
8143 return expand_abs (mode, op0, target, unsignedp,
8144 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8145
8146 case MAX_EXPR:
8147 case MIN_EXPR:
8148 target = original_target;
8149 if (target == 0
8150 || modifier == EXPAND_STACK_PARM
8151 || (MEM_P (target) && MEM_VOLATILE_P (target))
8152 || GET_MODE (target) != mode
8153 || (REG_P (target)
8154 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8155 target = gen_reg_rtx (mode);
8156 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8157 target, &op0, &op1, 0);
8158
8159 /* First try to do it with a special MIN or MAX instruction.
8160 If that does not win, use a conditional jump to select the proper
8161 value. */
8162 this_optab = optab_for_tree_code (code, type);
8163 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8164 OPTAB_WIDEN);
8165 if (temp != 0)
8166 return temp;
8167
8168 /* At this point, a MEM target is no longer useful; we will get better
8169 code without it. */
8170
8171 if (! REG_P (target))
8172 target = gen_reg_rtx (mode);
8173
8174 /* If op1 was placed in target, swap op0 and op1. */
8175 if (target != op0 && target == op1)
8176 {
8177 temp = op0;
8178 op0 = op1;
8179 op1 = temp;
8180 }
8181
8182 /* We generate better code and avoid problems with op1 mentioning
8183 target by forcing op1 into a pseudo if it isn't a constant. */
8184 if (! CONSTANT_P (op1))
8185 op1 = force_reg (mode, op1);
8186
8187 {
8188 enum rtx_code comparison_code;
8189 rtx cmpop1 = op1;
8190
8191 if (code == MAX_EXPR)
8192 comparison_code = unsignedp ? GEU : GE;
8193 else
8194 comparison_code = unsignedp ? LEU : LE;
8195
8196 /* Canonicalize to comparisons against 0. */
8197 if (op1 == const1_rtx)
8198 {
8199 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8200 or (a != 0 ? a : 1) for unsigned.
8201 For MIN we are safe converting (a <= 1 ? a : 1)
8202 into (a <= 0 ? a : 1) */
8203 cmpop1 = const0_rtx;
8204 if (code == MAX_EXPR)
8205 comparison_code = unsignedp ? NE : GT;
8206 }
8207 if (op1 == constm1_rtx && !unsignedp)
8208 {
8209 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8210 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8211 cmpop1 = const0_rtx;
8212 if (code == MIN_EXPR)
8213 comparison_code = LT;
8214 }
8215 #ifdef HAVE_conditional_move
8216 /* Use a conditional move if possible. */
8217 if (can_conditionally_move_p (mode))
8218 {
8219 rtx insn;
8220
8221 /* ??? Same problem as in expmed.c: emit_conditional_move
8222 forces a stack adjustment via compare_from_rtx, and we
8223 lose the stack adjustment if the sequence we are about
8224 to create is discarded. */
8225 do_pending_stack_adjust ();
8226
8227 start_sequence ();
8228
8229 /* Try to emit the conditional move. */
8230 insn = emit_conditional_move (target, comparison_code,
8231 op0, cmpop1, mode,
8232 op0, op1, mode,
8233 unsignedp);
8234
8235 /* If we could do the conditional move, emit the sequence,
8236 and return. */
8237 if (insn)
8238 {
8239 rtx seq = get_insns ();
8240 end_sequence ();
8241 emit_insn (seq);
8242 return target;
8243 }
8244
8245 /* Otherwise discard the sequence and fall back to code with
8246 branches. */
8247 end_sequence ();
8248 }
8249 #endif
8250 if (target != op0)
8251 emit_move_insn (target, op0);
8252
8253 temp = gen_label_rtx ();
8254 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8255 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8256 }
8257 emit_move_insn (target, op1);
8258 emit_label (temp);
8259 return target;
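      /* Illustrative sketch, not part of the original source: for a
         MAX_EXPR on two ints, e.g. "x = a > b ? a : b;" folded to a
         MAX_EXPR, the code above tries a direct smax instruction first,
         then a conditional move if the target has one, and only as a
         last resort emits the compare-and-jump sequence ending at the
         label just above.  */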
8260
8261 case BIT_NOT_EXPR:
8262 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8263 if (modifier == EXPAND_STACK_PARM)
8264 target = 0;
8265 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8266 gcc_assert (temp);
8267 return temp;
8268
8269 /* ??? Can optimize bitwise operations with one arg constant.
8270 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8271 and (a bitwise1 b) bitwise2 b (etc)
8272 but that is probably not worth while. */
8273
8274 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8275 boolean values when we want in all cases to compute both of them. In
8276 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8277 as actual zero-or-1 values and then bitwise anding. In cases where
8278 there cannot be any side effects, better code would be made by
8279 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8280 how to recognize those cases. */
8281
8282 case TRUTH_AND_EXPR:
8283 code = BIT_AND_EXPR;
8284 case BIT_AND_EXPR:
8285 goto binop;
8286
8287 case TRUTH_OR_EXPR:
8288 code = BIT_IOR_EXPR;
8289 case BIT_IOR_EXPR:
8290 goto binop;
8291
8292 case TRUTH_XOR_EXPR:
8293 code = BIT_XOR_EXPR;
8294 case BIT_XOR_EXPR:
8295 goto binop;
8296
8297 case LSHIFT_EXPR:
8298 case RSHIFT_EXPR:
8299 case LROTATE_EXPR:
8300 case RROTATE_EXPR:
8301 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8302 subtarget = 0;
8303 if (modifier == EXPAND_STACK_PARM)
8304 target = 0;
8305 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8306 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8307 unsignedp);
8308
8309 /* Could determine the answer when only additive constants differ. Also,
8310 the addition of one can be handled by changing the condition. */
8311 case LT_EXPR:
8312 case LE_EXPR:
8313 case GT_EXPR:
8314 case GE_EXPR:
8315 case EQ_EXPR:
8316 case NE_EXPR:
8317 case UNORDERED_EXPR:
8318 case ORDERED_EXPR:
8319 case UNLT_EXPR:
8320 case UNLE_EXPR:
8321 case UNGT_EXPR:
8322 case UNGE_EXPR:
8323 case UNEQ_EXPR:
8324 case LTGT_EXPR:
8325 temp = do_store_flag (exp,
8326 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8327 tmode != VOIDmode ? tmode : mode, 0);
8328 if (temp != 0)
8329 return temp;
8330
8331 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8332 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8333 && original_target
8334 && REG_P (original_target)
8335 && (GET_MODE (original_target)
8336 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8337 {
8338 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8339 VOIDmode, 0);
8340
8341 /* If temp is constant, we can just compute the result. */
8342 if (GET_CODE (temp) == CONST_INT)
8343 {
8344 if (INTVAL (temp) != 0)
8345 emit_move_insn (target, const1_rtx);
8346 else
8347 emit_move_insn (target, const0_rtx);
8348
8349 return target;
8350 }
8351
8352 if (temp != original_target)
8353 {
8354 enum machine_mode mode1 = GET_MODE (temp);
8355 if (mode1 == VOIDmode)
8356 mode1 = tmode != VOIDmode ? tmode : mode;
8357
8358 temp = copy_to_mode_reg (mode1, temp);
8359 }
8360
8361 op1 = gen_label_rtx ();
8362 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8363 GET_MODE (temp), unsignedp, op1);
8364 emit_move_insn (temp, const1_rtx);
8365 emit_label (op1);
8366 return temp;
8367 }
8368
8369 /* If no set-flag instruction, must generate a conditional store
8370 into a temporary variable. Drop through and handle this
8371 like && and ||. */
8372
8373 if (! ignore
8374 && (target == 0
8375 || modifier == EXPAND_STACK_PARM
8376 || ! safe_from_p (target, exp, 1)
8377 /* Make sure we don't have a hard reg (such as function's return
8378 value) live across basic blocks, if not optimizing. */
8379 || (!optimize && REG_P (target)
8380 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8381 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8382
8383 if (target)
8384 emit_move_insn (target, const0_rtx);
8385
8386 op1 = gen_label_rtx ();
8387 jumpifnot (exp, op1);
8388
8389 if (target)
8390 emit_move_insn (target, const1_rtx);
8391
8392 emit_label (op1);
8393 return ignore ? const0_rtx : target;
8394
8395 case TRUTH_NOT_EXPR:
8396 if (modifier == EXPAND_STACK_PARM)
8397 target = 0;
8398 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8399 /* The parser is careful to generate TRUTH_NOT_EXPR
8400 only with operands that are always zero or one. */
8401 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8402 target, 1, OPTAB_LIB_WIDEN);
8403 gcc_assert (temp);
8404 return temp;
8405
8406 case STATEMENT_LIST:
8407 {
8408 tree_stmt_iterator iter;
8409
8410 gcc_assert (ignore);
8411
8412 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8413 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8414 }
8415 return const0_rtx;
8416
8417 case COND_EXPR:
8418 /* A COND_EXPR with its type being VOID_TYPE represents a
8419 conditional jump and is handled in
8420 expand_gimple_cond_expr. */
8421 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8422
8423 /* Note that COND_EXPRs whose type is a structure or union
8424 are required to be constructed to contain assignments of
8425 a temporary variable, so that we can evaluate them here
8426 for side effect only. If type is void, we must do likewise. */
8427
8428 gcc_assert (!TREE_ADDRESSABLE (type)
8429 && !ignore
8430 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8431 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8432
8433 /* If we are not to produce a result, we have no target. Otherwise,
8434 if a target was specified use it; it will not be used as an
8435 intermediate target unless it is safe. If no target, use a
8436 temporary. */
8437
8438 if (modifier != EXPAND_STACK_PARM
8439 && original_target
8440 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8441 && GET_MODE (original_target) == mode
8442 #ifdef HAVE_conditional_move
8443 && (! can_conditionally_move_p (mode)
8444 || REG_P (original_target))
8445 #endif
8446 && !MEM_P (original_target))
8447 temp = original_target;
8448 else
8449 temp = assign_temp (type, 0, 0, 1);
8450
8451 do_pending_stack_adjust ();
8452 NO_DEFER_POP;
8453 op0 = gen_label_rtx ();
8454 op1 = gen_label_rtx ();
8455 jumpifnot (TREE_OPERAND (exp, 0), op0);
8456 store_expr (TREE_OPERAND (exp, 1), temp,
8457 modifier == EXPAND_STACK_PARM);
8458
8459 emit_jump_insn (gen_jump (op1));
8460 emit_barrier ();
8461 emit_label (op0);
8462 store_expr (TREE_OPERAND (exp, 2), temp,
8463 modifier == EXPAND_STACK_PARM);
8464
8465 emit_label (op1);
8466 OK_DEFER_POP;
8467 return temp;
8468
8469 case VEC_COND_EXPR:
8470 target = expand_vec_cond_expr (exp, target);
8471 return target;
8472
8473 case MODIFY_EXPR:
8474 {
8475 tree lhs = TREE_OPERAND (exp, 0);
8476 tree rhs = TREE_OPERAND (exp, 1);
8477
8478 gcc_assert (ignore);
8479
8480 /* Check for |= or &= of a bitfield of size one into another bitfield
8481 of size 1. In this case, (unless we need the result of the
8482 assignment) we can do this more efficiently with a
8483 test followed by an assignment, if necessary.
8484
8485 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8486 things change so we do, this code should be enhanced to
8487 support it. */
8488 if (TREE_CODE (lhs) == COMPONENT_REF
8489 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8490 || TREE_CODE (rhs) == BIT_AND_EXPR)
8491 && TREE_OPERAND (rhs, 0) == lhs
8492 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8493 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8494 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8495 {
8496 rtx label = gen_label_rtx ();
8497
8498 do_jump (TREE_OPERAND (rhs, 1),
8499 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8500 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8501 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8502 (TREE_CODE (rhs) == BIT_IOR_EXPR
8503 ? integer_one_node
8504 : integer_zero_node)));
8505 do_pending_stack_adjust ();
8506 emit_label (label);
8507 return const0_rtx;
8508 }
8509
8510 expand_assignment (lhs, rhs);
8511
8512 return const0_rtx;
8513 }
8514
8515 case RETURN_EXPR:
8516 if (!TREE_OPERAND (exp, 0))
8517 expand_null_return ();
8518 else
8519 expand_return (TREE_OPERAND (exp, 0));
8520 return const0_rtx;
8521
8522 case ADDR_EXPR:
8523 return expand_expr_addr_expr (exp, target, tmode, modifier);
8524
8525 case COMPLEX_EXPR:
8526 /* Get the rtx code of the operands. */
8527 op0 = expand_normal (TREE_OPERAND (exp, 0));
8528 op1 = expand_normal (TREE_OPERAND (exp, 1));
8529
8530 if (!target)
8531 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8532
8533 /* Move the real (op0) and imaginary (op1) parts to their location. */
8534 write_complex_part (target, op0, false);
8535 write_complex_part (target, op1, true);
8536
8537 return target;
8538
8539 case REALPART_EXPR:
8540 op0 = expand_normal (TREE_OPERAND (exp, 0));
8541 return read_complex_part (op0, false);
8542
8543 case IMAGPART_EXPR:
8544 op0 = expand_normal (TREE_OPERAND (exp, 0));
8545 return read_complex_part (op0, true);
8546
8547 case RESX_EXPR:
8548 expand_resx_expr (exp);
8549 return const0_rtx;
8550
8551 case TRY_CATCH_EXPR:
8552 case CATCH_EXPR:
8553 case EH_FILTER_EXPR:
8554 case TRY_FINALLY_EXPR:
8555 /* Lowered by tree-eh.c. */
8556 gcc_unreachable ();
8557
8558 case WITH_CLEANUP_EXPR:
8559 case CLEANUP_POINT_EXPR:
8560 case TARGET_EXPR:
8561 case CASE_LABEL_EXPR:
8562 case VA_ARG_EXPR:
8563 case BIND_EXPR:
8564 case INIT_EXPR:
8565 case CONJ_EXPR:
8566 case COMPOUND_EXPR:
8567 case PREINCREMENT_EXPR:
8568 case PREDECREMENT_EXPR:
8569 case POSTINCREMENT_EXPR:
8570 case POSTDECREMENT_EXPR:
8571 case LOOP_EXPR:
8572 case EXIT_EXPR:
8573 case TRUTH_ANDIF_EXPR:
8574 case TRUTH_ORIF_EXPR:
8575 /* Lowered by gimplify.c. */
8576 gcc_unreachable ();
8577
8578 case EXC_PTR_EXPR:
8579 return get_exception_pointer (cfun);
8580
8581 case FILTER_EXPR:
8582 return get_exception_filter (cfun);
8583
8584 case FDESC_EXPR:
8585 /* Function descriptors are not valid except for as
8586 initialization constants, and should not be expanded. */
8587 gcc_unreachable ();
8588
8589 case SWITCH_EXPR:
8590 expand_case (exp);
8591 return const0_rtx;
8592
8593 case LABEL_EXPR:
8594 expand_label (TREE_OPERAND (exp, 0));
8595 return const0_rtx;
8596
8597 case ASM_EXPR:
8598 expand_asm_expr (exp);
8599 return const0_rtx;
8600
8601 case WITH_SIZE_EXPR:
8602 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8603 have pulled out the size to use in whatever context it needed. */
8604 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8605 modifier, alt_rtl);
8606
8607 case REALIGN_LOAD_EXPR:
8608 {
8609 tree oprnd0 = TREE_OPERAND (exp, 0);
8610 tree oprnd1 = TREE_OPERAND (exp, 1);
8611 tree oprnd2 = TREE_OPERAND (exp, 2);
8612 rtx op2;
8613
8614 this_optab = optab_for_tree_code (code, type);
8615 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8616 op2 = expand_normal (oprnd2);
8617 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8618 target, unsignedp);
8619 gcc_assert (temp);
8620 return temp;
8621 }
8622
8623 case DOT_PROD_EXPR:
8624 {
8625 tree oprnd0 = TREE_OPERAND (exp, 0);
8626 tree oprnd1 = TREE_OPERAND (exp, 1);
8627 tree oprnd2 = TREE_OPERAND (exp, 2);
8628 rtx op2;
8629
8630 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8631 op2 = expand_normal (oprnd2);
8632 target = expand_widen_pattern_expr (exp, op0, op1, op2,
8633 target, unsignedp);
8634 return target;
8635 }
8636
8637 case WIDEN_SUM_EXPR:
8638 {
8639 tree oprnd0 = TREE_OPERAND (exp, 0);
8640 tree oprnd1 = TREE_OPERAND (exp, 1);
8641
8642 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8643 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8644 target, unsignedp);
8645 return target;
8646 }
8647
8648 case REDUC_MAX_EXPR:
8649 case REDUC_MIN_EXPR:
8650 case REDUC_PLUS_EXPR:
8651 {
8652 op0 = expand_normal (TREE_OPERAND (exp, 0));
8653 this_optab = optab_for_tree_code (code, type);
8654 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8655 gcc_assert (temp);
8656 return temp;
8657 }
8658
8659 case VEC_LSHIFT_EXPR:
8660 case VEC_RSHIFT_EXPR:
8661 {
8662 target = expand_vec_shift_expr (exp, target);
8663 return target;
8664 }
8665
8666 default:
8667 return lang_hooks.expand_expr (exp, original_target, tmode,
8668 modifier, alt_rtl);
8669 }
8670
8671 /* Here to do an ordinary binary operator. */
8672 binop:
8673 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8674 subtarget, &op0, &op1, 0);
8675 binop2:
8676 this_optab = optab_for_tree_code (code, type);
8677 binop3:
8678 if (modifier == EXPAND_STACK_PARM)
8679 target = 0;
8680 temp = expand_binop (mode, this_optab, op0, op1, target,
8681 unsignedp, OPTAB_LIB_WIDEN);
8682 gcc_assert (temp);
8683 return REDUCE_BIT_FIELD (temp);
8684 }
8685 #undef REDUCE_BIT_FIELD
8686 \f
8687 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8688 signedness of TYPE), possibly returning the result in TARGET. */
8689 static rtx
8690 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8691 {
8692 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8693 if (target && GET_MODE (target) != GET_MODE (exp))
8694 target = 0;
8695 if (TYPE_UNSIGNED (type))
8696 {
8697 rtx mask;
8698 if (prec < HOST_BITS_PER_WIDE_INT)
8699 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8700 GET_MODE (exp));
8701 else
8702 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8703 ((unsigned HOST_WIDE_INT) 1
8704 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8705 GET_MODE (exp));
8706 return expand_and (GET_MODE (exp), exp, mask, target);
8707 }
8708 else
8709 {
8710 tree count = build_int_cst (NULL_TREE,
8711 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8712 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8713 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8714 }
8715 }
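
/* Illustration only (not part of the implementation above): assuming a
   32-bit SImode EXP, reducing to a 3-bit unsigned type masks with
   (1 << 3) - 1 == 7, so 42 (0b101010) becomes 2 (0b010); reducing to a
   3-bit signed type shifts left by 32 - 3 = 29 and arithmetic-shifts
   right by 29, so 5 (0b101) becomes -3, i.e. the value is sign-extended
   from bit 2.  */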
8716 \f
8717 /* Subroutine of above: returns 1 if OFFSET, when applied to the address
8718    of EXP, produces an address known to be aligned to more than
8719    BIGGEST_ALIGNMENT.  */
8720
8721 static int
8722 is_aligning_offset (tree offset, tree exp)
8723 {
8724 /* Strip off any conversions. */
8725 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8726 || TREE_CODE (offset) == NOP_EXPR
8727 || TREE_CODE (offset) == CONVERT_EXPR)
8728 offset = TREE_OPERAND (offset, 0);
8729
8730   /* We must now have a BIT_AND_EXPR with a constant that is one less than
8731      a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
8732 if (TREE_CODE (offset) != BIT_AND_EXPR
8733 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8734 || compare_tree_int (TREE_OPERAND (offset, 1),
8735 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8736       || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8737 return 0;
8738
8739 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8740 It must be NEGATE_EXPR. Then strip any more conversions. */
8741 offset = TREE_OPERAND (offset, 0);
8742 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8743 || TREE_CODE (offset) == NOP_EXPR
8744 || TREE_CODE (offset) == CONVERT_EXPR)
8745 offset = TREE_OPERAND (offset, 0);
8746
8747 if (TREE_CODE (offset) != NEGATE_EXPR)
8748 return 0;
8749
8750 offset = TREE_OPERAND (offset, 0);
8751 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8752 || TREE_CODE (offset) == NOP_EXPR
8753 || TREE_CODE (offset) == CONVERT_EXPR)
8754 offset = TREE_OPERAND (offset, 0);
8755
8756 /* This must now be the address of EXP. */
8757 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8758 }
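
/* Illustration only (not derived from this file beyond the checks above):
   the accepted OFFSET has the tree shape (-(T) &EXP) & (ALIGN - 1), where
   ALIGN is a power of 2 and ALIGN - 1 is larger than
   BIGGEST_ALIGNMENT / BITS_PER_UNIT.  Adding such an offset to &EXP rounds
   the address up to the next multiple of ALIGN, since
   x + ((-x) & (ALIGN - 1)) is x rounded up to a multiple of ALIGN.  */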
8759 \f
8760 /* Return the tree node if ARG corresponds to a string constant, or zero
8761    if it does not.  If we return nonzero, set *PTR_OFFSET to the offset
8762 in bytes within the string that ARG is accessing. The type of the
8763 offset will be `sizetype'. */
8764
8765 tree
8766 string_constant (tree arg, tree *ptr_offset)
8767 {
8768 tree array, offset;
8769 STRIP_NOPS (arg);
8770
8771 if (TREE_CODE (arg) == ADDR_EXPR)
8772 {
8773 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8774 {
8775 *ptr_offset = size_zero_node;
8776 return TREE_OPERAND (arg, 0);
8777 }
8778 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8779 {
8780 array = TREE_OPERAND (arg, 0);
8781 offset = size_zero_node;
8782 }
8783 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8784 {
8785 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8786 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8787 if (TREE_CODE (array) != STRING_CST
8788 && TREE_CODE (array) != VAR_DECL)
8789 return 0;
8790 }
8791 else
8792 return 0;
8793 }
8794 else if (TREE_CODE (arg) == PLUS_EXPR)
8795 {
8796 tree arg0 = TREE_OPERAND (arg, 0);
8797 tree arg1 = TREE_OPERAND (arg, 1);
8798
8799 STRIP_NOPS (arg0);
8800 STRIP_NOPS (arg1);
8801
8802 if (TREE_CODE (arg0) == ADDR_EXPR
8803 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8804 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8805 {
8806 array = TREE_OPERAND (arg0, 0);
8807 offset = arg1;
8808 }
8809 else if (TREE_CODE (arg1) == ADDR_EXPR
8810 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8811 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8812 {
8813 array = TREE_OPERAND (arg1, 0);
8814 offset = arg0;
8815 }
8816 else
8817 return 0;
8818 }
8819 else
8820 return 0;
8821
8822 if (TREE_CODE (array) == STRING_CST)
8823 {
8824 *ptr_offset = convert (sizetype, offset);
8825 return array;
8826 }
8827 else if (TREE_CODE (array) == VAR_DECL)
8828 {
8829 int length;
8830
8831 /* Variables initialized to string literals can be handled too. */
8832 if (DECL_INITIAL (array) == NULL_TREE
8833 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8834 return 0;
8835
8836       /* The variable must be read-only, non-volatile, and bind locally.  */
8837 if (! TREE_READONLY (array)
8838 || TREE_SIDE_EFFECTS (array)
8839 || ! targetm.binds_local_p (array))
8840 return 0;
8841
8842 /* Avoid const char foo[4] = "abcde"; */
8843 if (DECL_SIZE_UNIT (array) == NULL_TREE
8844 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8845 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8846 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8847 return 0;
8848
8849       /* If the variable is bigger than the string literal, OFFSET must be
8850 	 constant and within the bounds of the string literal.  */
8851 offset = convert (sizetype, offset);
8852 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8853 && (! host_integerp (offset, 1)
8854 || compare_tree_int (offset, length) >= 0))
8855 return 0;
8856
8857 *ptr_offset = offset;
8858 return DECL_INITIAL (array);
8859 }
8860
8861 return 0;
8862 }
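
/* Usage sketch (an assumption about callers, not stated in this file):
   the string built-in expanders pass the address argument of functions
   such as strlen here.  For an argument of the form &"hello"[2], or
   "hello" + 2 after folding, string_constant returns the STRING_CST
   "hello" and sets *PTR_OFFSET to a sizetype constant 2, so the caller
   can read characters starting at offset 2.  */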
8863 \f
8864 /* Generate code to calculate EXP using a store-flag instruction
8865 and return an rtx for the result. EXP is either a comparison
8866 or a TRUTH_NOT_EXPR whose operand is a comparison.
8867
8868 If TARGET is nonzero, store the result there if convenient.
8869
8870 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8871 cheap.
8872
8873 Return zero if there is no suitable set-flag instruction
8874 available on this machine.
8875
8876 Once expand_expr has been called on the arguments of the comparison,
8877 we are committed to doing the store flag, since it is not safe to
8878 re-evaluate the expression. We emit the store-flag insn by calling
8879 emit_store_flag, but only expand the arguments if we have a reason
8880 to believe that emit_store_flag will be successful. If we think that
8881 it will, but it isn't, we have to simulate the store-flag with a
8882 set/jump/set sequence. */
8883
8884 static rtx
8885 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8886 {
8887 enum rtx_code code;
8888 tree arg0, arg1, type;
8889 tree tem;
8890 enum machine_mode operand_mode;
8891 int invert = 0;
8892 int unsignedp;
8893 rtx op0, op1;
8894 enum insn_code icode;
8895 rtx subtarget = target;
8896 rtx result, label;
8897
8898 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8899 result at the end. We can't simply invert the test since it would
8900 have already been inverted if it were valid. This case occurs for
8901 some floating-point comparisons. */
8902
8903 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8904 invert = 1, exp = TREE_OPERAND (exp, 0);
8905
8906 arg0 = TREE_OPERAND (exp, 0);
8907 arg1 = TREE_OPERAND (exp, 1);
8908
8909 /* Don't crash if the comparison was erroneous. */
8910 if (arg0 == error_mark_node || arg1 == error_mark_node)
8911 return const0_rtx;
8912
8913 type = TREE_TYPE (arg0);
8914 operand_mode = TYPE_MODE (type);
8915 unsignedp = TYPE_UNSIGNED (type);
8916
8917 /* We won't bother with BLKmode store-flag operations because it would mean
8918 passing a lot of information to emit_store_flag. */
8919 if (operand_mode == BLKmode)
8920 return 0;
8921
8922 /* We won't bother with store-flag operations involving function pointers
8923 when function pointers must be canonicalized before comparisons. */
8924 #ifdef HAVE_canonicalize_funcptr_for_compare
8925 if (HAVE_canonicalize_funcptr_for_compare
8926 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8927 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8928 == FUNCTION_TYPE))
8929 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8930 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8931 == FUNCTION_TYPE))))
8932 return 0;
8933 #endif
8934
8935 STRIP_NOPS (arg0);
8936 STRIP_NOPS (arg1);
8937
8938 /* Get the rtx comparison code to use. We know that EXP is a comparison
8939 operation of some type. Some comparisons against 1 and -1 can be
8940 converted to comparisons with zero. Do so here so that the tests
8941 below will be aware that we have a comparison with zero. These
8942 tests will not catch constants in the first operand, but constants
8943 are rarely passed as the first operand. */
8944
8945 switch (TREE_CODE (exp))
8946 {
8947 case EQ_EXPR:
8948 code = EQ;
8949 break;
8950 case NE_EXPR:
8951 code = NE;
8952 break;
8953 case LT_EXPR:
8954 if (integer_onep (arg1))
8955 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8956 else
8957 code = unsignedp ? LTU : LT;
8958 break;
8959 case LE_EXPR:
8960 if (! unsignedp && integer_all_onesp (arg1))
8961 arg1 = integer_zero_node, code = LT;
8962 else
8963 code = unsignedp ? LEU : LE;
8964 break;
8965 case GT_EXPR:
8966 if (! unsignedp && integer_all_onesp (arg1))
8967 arg1 = integer_zero_node, code = GE;
8968 else
8969 code = unsignedp ? GTU : GT;
8970 break;
8971 case GE_EXPR:
8972 if (integer_onep (arg1))
8973 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8974 else
8975 code = unsignedp ? GEU : GE;
8976 break;
8977
8978 case UNORDERED_EXPR:
8979 code = UNORDERED;
8980 break;
8981 case ORDERED_EXPR:
8982 code = ORDERED;
8983 break;
8984 case UNLT_EXPR:
8985 code = UNLT;
8986 break;
8987 case UNLE_EXPR:
8988 code = UNLE;
8989 break;
8990 case UNGT_EXPR:
8991 code = UNGT;
8992 break;
8993 case UNGE_EXPR:
8994 code = UNGE;
8995 break;
8996 case UNEQ_EXPR:
8997 code = UNEQ;
8998 break;
8999 case LTGT_EXPR:
9000 code = LTGT;
9001 break;
9002
9003 default:
9004 gcc_unreachable ();
9005 }
9006
9007 /* Put a constant second. */
9008 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9009 {
9010 tem = arg0; arg0 = arg1; arg1 = tem;
9011 code = swap_condition (code);
9012 }
9013
9014 /* If this is an equality or inequality test of a single bit, we can
9015 do this by shifting the bit being tested to the low-order bit and
9016 masking the result with the constant 1. If the condition was EQ,
9017 we xor it with 1. This does not require an scc insn and is faster
9018 than an scc insn even if we have it.
9019
9020 The code to make this transformation was moved into fold_single_bit_test,
9021 so we just call into the folder and expand its result. */
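
  /* For example (illustrative values only): with (x & 8) != 0 the
     folder produces (x >> 3) & 1, and with (x & 8) == 0 it produces
     ((x >> 3) & 1) ^ 1; neither form needs an scc insn.  */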
9022
9023 if ((code == NE || code == EQ)
9024 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9025 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9026 {
9027 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9028 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9029 arg0, arg1, type),
9030 target, VOIDmode, EXPAND_NORMAL);
9031 }
9032
9033 /* Now see if we are likely to be able to do this. Return if not. */
9034 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9035 return 0;
9036
9037 icode = setcc_gen_code[(int) code];
9038 if (icode == CODE_FOR_nothing
9039 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9040 {
9041 /* We can only do this if it is one of the special cases that
9042 can be handled without an scc insn. */
9043 if ((code == LT && integer_zerop (arg1))
9044 || (! only_cheap && code == GE && integer_zerop (arg1)))
9045 ;
9046 else if (! only_cheap && (code == NE || code == EQ)
9047 && TREE_CODE (type) != REAL_TYPE
9048 && ((abs_optab->handlers[(int) operand_mode].insn_code
9049 != CODE_FOR_nothing)
9050 || (ffs_optab->handlers[(int) operand_mode].insn_code
9051 != CODE_FOR_nothing)))
9052 ;
9053 else
9054 return 0;
9055 }
9056
9057 if (! get_subtarget (target)
9058 || GET_MODE (subtarget) != operand_mode)
9059 subtarget = 0;
9060
9061 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9062
9063 if (target == 0)
9064 target = gen_reg_rtx (mode);
9065
9066 result = emit_store_flag (target, code, op0, op1,
9067 operand_mode, unsignedp, 1);
9068
9069 if (result)
9070 {
9071 if (invert)
9072 result = expand_binop (mode, xor_optab, result, const1_rtx,
9073 result, 0, OPTAB_LIB_WIDEN);
9074 return result;
9075 }
9076
9077 /* If this failed, we have to do this with set/compare/jump/set code. */
9078 if (!REG_P (target)
9079 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9080 target = gen_reg_rtx (GET_MODE (target));
9081
9082 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9083 result = compare_from_rtx (op0, op1, code, unsignedp,
9084 operand_mode, NULL_RTX);
9085 if (GET_CODE (result) == CONST_INT)
9086 return (((result == const0_rtx && ! invert)
9087 || (result != const0_rtx && invert))
9088 ? const0_rtx : const1_rtx);
9089
9090 /* The code of RESULT may not match CODE if compare_from_rtx
9091 decided to swap its operands and reverse the original code.
9092
9093 We know that compare_from_rtx returns either a CONST_INT or
9094 a new comparison code, so it is safe to just extract the
9095 code from RESULT. */
9096 code = GET_CODE (result);
9097
9098 label = gen_label_rtx ();
9099 gcc_assert (bcc_gen_fctn[(int) code]);
9100
9101 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9102 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9103 emit_label (label);
9104
9105 return target;
9106 }
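
/* A sketch (illustration only) of the set/jump/set fallback emitted above
   when emit_store_flag fails and no CONST_INT result is known, shown for
   INVERT == 0:

       target = 1;
       if (op0 CODE op1) goto done;
       target = 0;
     done:

   When INVERT is nonzero the two constants are swapped.  */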
9107 \f
9108
9109 /* Stubs in case we haven't got a casesi insn. */
9110 #ifndef HAVE_casesi
9111 # define HAVE_casesi 0
9112 # define gen_casesi(a, b, c, d, e) (0)
9113 # define CODE_FOR_casesi CODE_FOR_nothing
9114 #endif
9115
9116 /* If the machine does not have a case insn that compares the bounds,
9117 this means extra overhead for dispatch tables, which raises the
9118 threshold for using them. */
9119 #ifndef CASE_VALUES_THRESHOLD
9120 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9121 #endif /* CASE_VALUES_THRESHOLD */
9122
9123 unsigned int
9124 case_values_threshold (void)
9125 {
9126 return CASE_VALUES_THRESHOLD;
9127 }
9128
9129 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9130 0 otherwise (i.e. if there is no casesi instruction). */
9131 int
9132 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9133 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9134 {
9135 enum machine_mode index_mode = SImode;
9136 int index_bits = GET_MODE_BITSIZE (index_mode);
9137 rtx op1, op2, index;
9138 enum machine_mode op_mode;
9139
9140 if (! HAVE_casesi)
9141 return 0;
9142
9143 /* Convert the index to SImode. */
9144 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9145 {
9146 enum machine_mode omode = TYPE_MODE (index_type);
9147 rtx rangertx = expand_normal (range);
9148
9149 /* We must handle the endpoints in the original mode. */
9150 index_expr = build2 (MINUS_EXPR, index_type,
9151 index_expr, minval);
9152 minval = integer_zero_node;
9153 index = expand_normal (index_expr);
9154 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9155 omode, 1, default_label);
9156 /* Now we can safely truncate. */
9157 index = convert_to_mode (index_mode, index, 0);
9158 }
9159 else
9160 {
9161 if (TYPE_MODE (index_type) != index_mode)
9162 {
9163 index_expr = convert (lang_hooks.types.type_for_size
9164 (index_bits, 0), index_expr);
9165 index_type = TREE_TYPE (index_expr);
9166 }
9167
9168 index = expand_normal (index_expr);
9169 }
9170
9171 do_pending_stack_adjust ();
9172
9173 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9174 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9175 (index, op_mode))
9176 index = copy_to_mode_reg (op_mode, index);
9177
9178 op1 = expand_normal (minval);
9179
9180 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9181 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9182 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9183 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9184 (op1, op_mode))
9185 op1 = copy_to_mode_reg (op_mode, op1);
9186
9187 op2 = expand_normal (range);
9188
9189 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9190 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9191 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9192 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9193 (op2, op_mode))
9194 op2 = copy_to_mode_reg (op_mode, op2);
9195
9196 emit_jump_insn (gen_casesi (index, op1, op2,
9197 table_label, default_label));
9198 return 1;
9199 }
9200
9201 /* Attempt to generate a tablejump instruction; same concept. */
9202 #ifndef HAVE_tablejump
9203 #define HAVE_tablejump 0
9204 #define gen_tablejump(x, y) (0)
9205 #endif
9206
9207 /* Subroutine of the next function.
9208
9209 INDEX is the value being switched on, with the lowest value
9210 in the table already subtracted.
9211 MODE is its expected mode (needed if INDEX is constant).
9212 RANGE is the length of the jump table.
9213 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9214
9215 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9216 index value is out of range. */
9217
9218 static void
9219 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9220 rtx default_label)
9221 {
9222 rtx temp, vector;
9223
9224 if (INTVAL (range) > cfun->max_jumptable_ents)
9225 cfun->max_jumptable_ents = INTVAL (range);
9226
9227 /* Do an unsigned comparison (in the proper mode) between the index
9228 expression and the value which represents the length of the range.
9229 Since we just finished subtracting the lower bound of the range
9230 from the index expression, this comparison allows us to simultaneously
9231 check that the original index expression value is both greater than
9232 or equal to the minimum value of the range and less than or equal to
9233 the maximum value of the range. */
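
  /* A worked example (illustrative numbers, not from this file): for case
     values 5 .. 10 the caller passes INDEX minus 5 and a RANGE of 5.  An
     original index of 12 yields 7, and an original index of 3 wraps around
     to a very large unsigned value; both compare GTU against 5 and branch
     to DEFAULT_LABEL, so a single unsigned comparison checks both bounds.  */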
9234
9235 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9236 default_label);
9237
9238 /* If index is in range, it must fit in Pmode.
9239 Convert to Pmode so we can index with it. */
9240 if (mode != Pmode)
9241 index = convert_to_mode (Pmode, index, 1);
9242
9243 /* Don't let a MEM slip through, because then INDEX that comes
9244 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9245 and break_out_memory_refs will go to work on it and mess it up. */
9246 #ifdef PIC_CASE_VECTOR_ADDRESS
9247 if (flag_pic && !REG_P (index))
9248 index = copy_to_mode_reg (Pmode, index);
9249 #endif
9250
9251 /* If flag_force_addr were to affect this address
9252 it could interfere with the tricky assumptions made
9253 about addresses that contain label-refs,
9254 which may be valid only very near the tablejump itself. */
9255 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9256 GET_MODE_SIZE, because this indicates how large insns are. The other
9257 uses should all be Pmode, because they are addresses. This code
9258 could fail if addresses and insns are not the same size. */
9259 index = gen_rtx_PLUS (Pmode,
9260 gen_rtx_MULT (Pmode, index,
9261 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9262 gen_rtx_LABEL_REF (Pmode, table_label));
9263 #ifdef PIC_CASE_VECTOR_ADDRESS
9264 if (flag_pic)
9265 index = PIC_CASE_VECTOR_ADDRESS (index);
9266 else
9267 #endif
9268 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9269 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9270 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9271 convert_move (temp, vector, 0);
9272
9273 emit_jump_insn (gen_tablejump (temp, table_label));
9274
9275 /* If we are generating PIC code or if the table is PC-relative, the
9276 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9277 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9278 emit_barrier ();
9279 }
9280
9281 int
9282 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9283 rtx table_label, rtx default_label)
9284 {
9285 rtx index;
9286
9287 if (! HAVE_tablejump)
9288 return 0;
9289
9290 index_expr = fold_build2 (MINUS_EXPR, index_type,
9291 convert (index_type, index_expr),
9292 convert (index_type, minval));
9293 index = expand_normal (index_expr);
9294 do_pending_stack_adjust ();
9295
9296 do_tablejump (index, TYPE_MODE (index_type),
9297 convert_modes (TYPE_MODE (index_type),
9298 TYPE_MODE (TREE_TYPE (range)),
9299 expand_normal (range),
9300 TYPE_UNSIGNED (TREE_TYPE (range))),
9301 table_label, default_label);
9302 return 1;
9303 }
9304
9305 /* Nonzero if the mode is a valid vector mode for this architecture.
9306 This returns nonzero even if there is no hardware support for the
9307 vector mode, but we can emulate with narrower modes. */
9308
9309 int
9310 vector_mode_valid_p (enum machine_mode mode)
9311 {
9312 enum mode_class class = GET_MODE_CLASS (mode);
9313 enum machine_mode innermode;
9314
9315 /* Doh! What's going on? */
9316 if (class != MODE_VECTOR_INT
9317 && class != MODE_VECTOR_FLOAT)
9318 return 0;
9319
9320 /* Hardware support. Woo hoo! */
9321 if (targetm.vector_mode_supported_p (mode))
9322 return 1;
9323
9324 innermode = GET_MODE_INNER (mode);
9325
9326   /* We should probably return 1 if requesting V4DI and we have no DI
9327      but do have V2DI; however, this case is probably very unlikely.  */
9328
9329   /* If we have support for the inner mode, we can safely emulate it.
9330      We may not have V2DI, but we can emulate it with a pair of DIs.  */
9331 return targetm.scalar_mode_supported_p (innermode);
9332 }
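
/* For example (illustration only): on a target that supports DImode but has
   no native V2DI patterns, vector_mode_valid_p still returns nonzero for
   V2DImode; later lowering is then expected to open-code such operations
   using a pair of DImode operations.  */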
9333
9334 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9335 static rtx
9336 const_vector_from_tree (tree exp)
9337 {
9338 rtvec v;
9339 int units, i;
9340 tree link, elt;
9341 enum machine_mode inner, mode;
9342
9343 mode = TYPE_MODE (TREE_TYPE (exp));
9344
9345 if (initializer_zerop (exp))
9346 return CONST0_RTX (mode);
9347
9348 units = GET_MODE_NUNITS (mode);
9349 inner = GET_MODE_INNER (mode);
9350
9351 v = rtvec_alloc (units);
9352
9353 link = TREE_VECTOR_CST_ELTS (exp);
9354 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9355 {
9356 elt = TREE_VALUE (link);
9357
9358 if (TREE_CODE (elt) == REAL_CST)
9359 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9360 inner);
9361 else
9362 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9363 TREE_INT_CST_HIGH (elt),
9364 inner);
9365 }
9366
9367 /* Initialize remaining elements to 0. */
9368 for (; i < units; ++i)
9369 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9370
9371 return gen_rtx_CONST_VECTOR (mode, v);
9372 }
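
/* For example (illustration only): a VECTOR_CST of type V2SI with elements
   { 1, 2 } becomes (const_vector:V2SI [(const_int 1) (const_int 2)]).  If
   the constant supplies fewer elements than the mode has, the remaining
   elements are filled with zero, and an all-zero constant is returned
   directly as CONST0_RTX of the vector mode.  */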
9373 #include "gt-expr.h"