1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
55
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
58
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
61
62 #ifdef PUSH_ROUNDING
63
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
69
70 #endif
71
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
79
80
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
88
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces
92 {
93   rtx to;			/* Destination MEM, or 0 when pushing to the stack.  */
94   rtx to_addr;		/* Address of the destination.  */
95   int autinc_to;		/* Nonzero if TO_ADDR is an autoincrement address.  */
96   int explicit_inc_to;	/* -1/+1 if explicit adds to TO_ADDR must be emitted.  */
97   rtx from;			/* Source MEM.  */
98   rtx from_addr;		/* Address of the source.  */
99   int autinc_from;		/* Nonzero if FROM_ADDR is an autoincrement address.  */
100   int explicit_inc_from;	/* -1/+1 if explicit adds to FROM_ADDR must be emitted.  */
101   unsigned HOST_WIDE_INT len;	/* Number of bytes left to move.  */
102   HOST_WIDE_INT offset;	/* Current offset into the blocks.  */
103   int reverse;		/* Nonzero to move from high addresses downward.  */
104 };
105
106 /* This structure is used by store_by_pieces to describe the clear to
107 be performed. */
108
109 struct store_by_pieces
110 {
111   rtx to;			/* Destination MEM.  */
112   rtx to_addr;		/* Address of the destination.  */
113   int autinc_to;		/* Nonzero if TO_ADDR is an autoincrement address.  */
114   int explicit_inc_to;	/* -1/+1 if explicit adds to TO_ADDR must be emitted.  */
115   unsigned HOST_WIDE_INT len;	/* Number of bytes left to store.  */
116   HOST_WIDE_INT offset;	/* Current offset into the block.  */
117   rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);  /* Returns the constant to store at a given offset in a given mode.  */
118   void *constfundata;	/* Data passed through to CONSTFUN.  */
119   int reverse;		/* Nonzero to store from high addresses downward.  */
120 };
121
122 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
123 unsigned int,
124 unsigned int);
125 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
129 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
138 static rtx clear_storage_via_libcall (rtx, rtx);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, tree, int);
148
149 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
150 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
151
152 static int is_aligning_offset (tree, tree);
153 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
154 enum expand_modifier);
155 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
156 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
157 #ifdef PUSH_ROUNDING
158 static void emit_single_push_insn (enum machine_mode, rtx, tree);
159 #endif
160 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
161 static rtx const_vector_from_tree (tree);
162
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
166
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
169
170 /* Record for each mode whether we can float-extend from memory. */
171
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179 < (unsigned int) MOVE_RATIO)
180 #endif
181
182 /* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) CLEAR_RATIO)
188 #endif
189
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero, or
192 to "memcpy" storage when the source is a constant string. */
193 #ifndef STORE_BY_PIECES_P
194 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
195 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196 < (unsigned int) MOVE_RATIO)
197 #endif
198
199 /* This array records the insn_code of insns to perform block moves. */
200 enum insn_code movmem_optab[NUM_MACHINE_MODES];
201
202 /* This array records the insn_code of insns to perform block clears. */
203 enum insn_code clrmem_optab[NUM_MACHINE_MODES];
204
205 /* These arrays record the insn_code of two different kinds of insns
206 to perform block compares. */
207 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
209
210 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
211
212 #ifndef SLOW_UNALIGNED_ACCESS
213 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
214 #endif
215 \f
216 /* This is run once per compilation to set up which modes can be used
217 directly in memory and to initialize the block move optab. */
218
219 void
220 init_expr_once (void)
221 {
222 rtx insn, pat;
223 enum machine_mode mode;
224 int num_clobbers;
225 rtx mem, mem1;
226 rtx reg;
227
228 /* Try indexing by frame ptr and try by stack ptr.
229 It is known that on the Convex the stack ptr isn't a valid index.
230 With luck, one or the other is valid on any machine. */
231 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
232 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
233
234 /* A scratch register we can modify in-place below to avoid
235 useless RTL allocations. */
236 reg = gen_rtx_REG (VOIDmode, -1);
237
238 insn = rtx_alloc (INSN);
239 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
240 PATTERN (insn) = pat;
241
242 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
243 mode = (enum machine_mode) ((int) mode + 1))
244 {
245 int regno;
246
247 direct_load[(int) mode] = direct_store[(int) mode] = 0;
248 PUT_MODE (mem, mode);
249 PUT_MODE (mem1, mode);
250 PUT_MODE (reg, mode);
251
252 /* See if there is some register that can be used in this mode and
253 directly loaded or stored from memory. */
254
255 if (mode != VOIDmode && mode != BLKmode)
256 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
257 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
258 regno++)
259 {
260 if (! HARD_REGNO_MODE_OK (regno, mode))
261 continue;
262
263 REGNO (reg) = regno;
264
265 SET_SRC (pat) = mem;
266 SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
269
270 SET_SRC (pat) = mem1;
271 SET_DEST (pat) = reg;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_load[(int) mode] = 1;
274
275 SET_SRC (pat) = reg;
276 SET_DEST (pat) = mem;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
279
280 SET_SRC (pat) = reg;
281 SET_DEST (pat) = mem1;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_store[(int) mode] = 1;
284 }
285 }
286
287 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
288
289 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
290 mode = GET_MODE_WIDER_MODE (mode))
291 {
292 enum machine_mode srcmode;
293 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
294 srcmode = GET_MODE_WIDER_MODE (srcmode))
295 {
296 enum insn_code ic;
297
298 ic = can_extend_p (mode, srcmode, 0);
299 if (ic == CODE_FOR_nothing)
300 continue;
301
302 PUT_MODE (mem, srcmode);
303
304 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
305 float_extend_from_mem[mode][srcmode] = true;
306 }
307 }
308 }
309
310 /* This is run at the start of compiling a function. */
311
312 void
313 init_expr (void)
314 {
315 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
316 }
317 \f
318 /* Copy data from FROM to TO, where the machine modes are not the same.
319 Both modes may be integer, or both may be floating.
320 UNSIGNEDP should be nonzero if FROM is an unsigned type.
321 This causes zero-extension instead of sign-extension. */
322
323 void
324 convert_move (rtx to, rtx from, int unsignedp)
325 {
326 enum machine_mode to_mode = GET_MODE (to);
327 enum machine_mode from_mode = GET_MODE (from);
328 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
329 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
330 enum insn_code code;
331 rtx libcall;
332
333 /* rtx code for making an equivalent value. */
334 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
335 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
336
337
338 gcc_assert (to_real == from_real);
339
340 /* If the source and destination are already the same, then there's
341 nothing to do. */
342 if (to == from)
343 return;
344
345 /* If FROM is a SUBREG that indicates that we have already done at least
346 the required extension, strip it. We don't handle such SUBREGs as
347 TO here. */
348
349 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
350 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
351 >= GET_MODE_SIZE (to_mode))
352 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
353 from = gen_lowpart (to_mode, from), from_mode = to_mode;
354
355 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
356
357 if (to_mode == from_mode
358 || (from_mode == VOIDmode && CONSTANT_P (from)))
359 {
360 emit_move_insn (to, from);
361 return;
362 }
363
364 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
365 {
366 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
367
368 if (VECTOR_MODE_P (to_mode))
369 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
370 else
371 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
372
373 emit_move_insn (to, from);
374 return;
375 }
376
377 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
378 {
379 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
380 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
381 return;
382 }
383
384 if (to_real)
385 {
386 rtx value, insns;
387 convert_optab tab;
388
389 gcc_assert (GET_MODE_PRECISION (from_mode)
390 != GET_MODE_PRECISION (to_mode));
391
392 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
393 tab = sext_optab;
394 else
395 tab = trunc_optab;
396
397 /* Try converting directly if the insn is supported. */
398
399 code = tab->handlers[to_mode][from_mode].insn_code;
400 if (code != CODE_FOR_nothing)
401 {
402 emit_unop_insn (code, to, from,
403 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
404 return;
405 }
406
407 /* Otherwise use a libcall. */
408 libcall = tab->handlers[to_mode][from_mode].libfunc;
409
410 /* Is this conversion implemented yet? */
411 gcc_assert (libcall);
412
413 start_sequence ();
414 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
415 1, from, from_mode);
416 insns = get_insns ();
417 end_sequence ();
418 emit_libcall_block (insns, to, value,
419 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
420 from)
421 : gen_rtx_FLOAT_EXTEND (to_mode, from));
422 return;
423 }
424
425 /* Handle pointer conversion. */ /* SPEE 900220. */
426 /* Targets are expected to provide conversion insns between PxImode and
427 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
428 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
429 {
430 enum machine_mode full_mode
431 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
432
433 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
434 != CODE_FOR_nothing);
435
436 if (full_mode != from_mode)
437 from = convert_to_mode (full_mode, from, unsignedp);
438 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
439 to, from, UNKNOWN);
440 return;
441 }
442 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
443 {
444 enum machine_mode full_mode
445 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
446
447 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
448 != CODE_FOR_nothing);
449
450 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
451 to, from, UNKNOWN);
452 if (to_mode == full_mode)
453 return;
454
455 /* else proceed to integer conversions below. */
456 from_mode = full_mode;
457 }
458
459 /* Now both modes are integers. */
460
461 /* Handle expanding beyond a word. */
462 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
463 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
464 {
465 rtx insns;
466 rtx lowpart;
467 rtx fill_value;
468 rtx lowfrom;
469 int i;
470 enum machine_mode lowpart_mode;
471 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
472
473 /* Try converting directly if the insn is supported. */
474 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
475 != CODE_FOR_nothing)
476 {
477 /* If FROM is a SUBREG, put it into a register. Do this
478 so that we always generate the same set of insns for
479 better cse'ing; if an intermediate assignment occurred,
480 we won't be doing the operation directly on the SUBREG. */
481 if (optimize > 0 && GET_CODE (from) == SUBREG)
482 from = force_reg (from_mode, from);
483 emit_unop_insn (code, to, from, equiv_code);
484 return;
485 }
486 /* Next, try converting via full word. */
487 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
488 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
489 != CODE_FOR_nothing))
490 {
491 if (REG_P (to))
492 {
493 if (reg_overlap_mentioned_p (to, from))
494 from = force_reg (from_mode, from);
495 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
496 }
497 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
498 emit_unop_insn (code, to,
499 gen_lowpart (word_mode, to), equiv_code);
500 return;
501 }
502
503 /* No special multiword conversion insn; do it by hand. */
504 start_sequence ();
505
506 /* Since we will turn this into a no conflict block, we must ensure
507 that the source does not overlap the target. */
508
509 if (reg_overlap_mentioned_p (to, from))
510 from = force_reg (from_mode, from);
511
512 /* Get a copy of FROM widened to a word, if necessary. */
513 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
514 lowpart_mode = word_mode;
515 else
516 lowpart_mode = from_mode;
517
518 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
519
520 lowpart = gen_lowpart (lowpart_mode, to);
521 emit_move_insn (lowpart, lowfrom);
522
523 /* Compute the value to put in each remaining word. */
524 if (unsignedp)
525 fill_value = const0_rtx;
526 else
527 {
528 #ifdef HAVE_slt
529 if (HAVE_slt
530 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
531 && STORE_FLAG_VALUE == -1)
532 {
533 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
534 lowpart_mode, 0);
535 fill_value = gen_reg_rtx (word_mode);
536 emit_insn (gen_slt (fill_value));
537 }
538 else
539 #endif
540 {
541 fill_value
542 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
543 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
544 NULL_RTX, 0);
545 fill_value = convert_to_mode (word_mode, fill_value, 1);
546 }
547 }
548
549 /* Fill the remaining words. */
550 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
551 {
552 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
553 rtx subword = operand_subword (to, index, 1, to_mode);
554
555 gcc_assert (subword);
556
557 if (fill_value != subword)
558 emit_move_insn (subword, fill_value);
559 }
560
561 insns = get_insns ();
562 end_sequence ();
563
564 emit_no_conflict_block (insns, to, from, NULL_RTX,
565 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
566 return;
567 }
568
569 /* Truncating multi-word to a word or less. */
570 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
571 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
572 {
573 if (!((MEM_P (from)
574 && ! MEM_VOLATILE_P (from)
575 && direct_load[(int) to_mode]
576 && ! mode_dependent_address_p (XEXP (from, 0)))
577 || REG_P (from)
578 || GET_CODE (from) == SUBREG))
579 from = force_reg (from_mode, from);
580 convert_move (to, gen_lowpart (word_mode, from), 0);
581 return;
582 }
583
584 /* Now follow all the conversions between integers
585 no more than a word long. */
586
587 /* For truncation, usually we can just refer to FROM in a narrower mode. */
588 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
589 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
590 GET_MODE_BITSIZE (from_mode)))
591 {
592 if (!((MEM_P (from)
593 && ! MEM_VOLATILE_P (from)
594 && direct_load[(int) to_mode]
595 && ! mode_dependent_address_p (XEXP (from, 0)))
596 || REG_P (from)
597 || GET_CODE (from) == SUBREG))
598 from = force_reg (from_mode, from);
599 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
600 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
601 from = copy_to_reg (from);
602 emit_move_insn (to, gen_lowpart (to_mode, from));
603 return;
604 }
605
606 /* Handle extension. */
607 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
608 {
609 /* Convert directly if that works. */
610 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
611 != CODE_FOR_nothing)
612 {
613 if (flag_force_mem)
614 from = force_not_mem (from);
615
616 emit_unop_insn (code, to, from, equiv_code);
617 return;
618 }
619 else
620 {
621 enum machine_mode intermediate;
622 rtx tmp;
623 tree shift_amount;
624
625 /* Search for a mode to convert via. */
626 for (intermediate = from_mode; intermediate != VOIDmode;
627 intermediate = GET_MODE_WIDER_MODE (intermediate))
628 if (((can_extend_p (to_mode, intermediate, unsignedp)
629 != CODE_FOR_nothing)
630 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
631 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
632 GET_MODE_BITSIZE (intermediate))))
633 && (can_extend_p (intermediate, from_mode, unsignedp)
634 != CODE_FOR_nothing))
635 {
636 convert_move (to, convert_to_mode (intermediate, from,
637 unsignedp), unsignedp);
638 return;
639 }
640
641 /* No suitable intermediate mode.
642 Generate what we need with shifts. */
643 shift_amount = build_int_cst (NULL_TREE,
644 GET_MODE_BITSIZE (to_mode)
645 - GET_MODE_BITSIZE (from_mode));
646 from = gen_lowpart (to_mode, force_reg (from_mode, from));
647 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
648 to, unsignedp);
649 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
650 to, unsignedp);
651 if (tmp != to)
652 emit_move_insn (to, tmp);
653 return;
654 }
655 }
656
657 /* Support special truncate insns for certain modes. */
658 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
659 {
660 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
661 to, from, UNKNOWN);
662 return;
663 }
664
665 /* Handle truncation of volatile memrefs, and so on;
666 the things that couldn't be truncated directly,
667 and for which there was no special instruction.
668
669 ??? Code above formerly short-circuited this, for most integer
670 mode pairs, with a force_reg in from_mode followed by a recursive
671 call to this routine. Appears always to have been wrong. */
672 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
673 {
674 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
675 emit_move_insn (to, temp);
676 return;
677 }
678
679 /* Mode combination is not recognized. */
680 gcc_unreachable ();
681 }
682
683 /* Return an rtx for a value that would result
684 from converting X to mode MODE.
685 Both X and MODE may be floating, or both integer.
686 UNSIGNEDP is nonzero if X is an unsigned value.
687 This can be done by referring to a part of X in place
688 or by copying to a new temporary with conversion. */
689
690 rtx
691 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
692 {
693 return convert_modes (mode, VOIDmode, x, unsignedp);
694 }
695
696 /* Return an rtx for a value that would result
697 from converting X from mode OLDMODE to mode MODE.
698 Both modes may be floating, or both integer.
699 UNSIGNEDP is nonzero if X is an unsigned value.
700
701 This can be done by referring to a part of X in place
702 or by copying to a new temporary with conversion.
703
704 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
705
706 rtx
707 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
708 {
709 rtx temp;
710
711 /* If FROM is a SUBREG that indicates that we have already done at least
712 the required extension, strip it. */
713
714 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
715 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
716 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
717 x = gen_lowpart (mode, x);
718
719 if (GET_MODE (x) != VOIDmode)
720 oldmode = GET_MODE (x);
721
722 if (mode == oldmode)
723 return x;
724
725 /* There is one case that we must handle specially: If we are converting
726 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
727 we are to interpret the constant as unsigned, gen_lowpart will do
728      the wrong thing if the constant appears negative.  What we want to do is
729 make the high-order word of the constant zero, not all ones. */
730
731 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
732 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
733 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
734 {
735 HOST_WIDE_INT val = INTVAL (x);
736
737 if (oldmode != VOIDmode
738 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
739 {
740 int width = GET_MODE_BITSIZE (oldmode);
741
742 /* We need to zero extend VAL. */
743 val &= ((HOST_WIDE_INT) 1 << width) - 1;
744 }
745
746 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
747 }
748
749 /* We can do this with a gen_lowpart if both desired and current modes
750 are integer, and this is either a constant integer, a register, or a
751 non-volatile MEM. Except for the constant case where MODE is no
752 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
753
754 if ((GET_CODE (x) == CONST_INT
755 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
756 || (GET_MODE_CLASS (mode) == MODE_INT
757 && GET_MODE_CLASS (oldmode) == MODE_INT
758 && (GET_CODE (x) == CONST_DOUBLE
759 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
760 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
761 && direct_load[(int) mode])
762 || (REG_P (x)
763 && (! HARD_REGISTER_P (x)
764 || HARD_REGNO_MODE_OK (REGNO (x), mode))
765 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
766 GET_MODE_BITSIZE (GET_MODE (x)))))))))
767 {
768 /* ?? If we don't know OLDMODE, we have to assume here that
769 X does not need sign- or zero-extension. This may not be
770 the case, but it's the best we can do. */
771 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
772 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
773 {
774 HOST_WIDE_INT val = INTVAL (x);
775 int width = GET_MODE_BITSIZE (oldmode);
776
777 /* We must sign or zero-extend in this case. Start by
778 zero-extending, then sign extend if we need to. */
779 val &= ((HOST_WIDE_INT) 1 << width) - 1;
780 if (! unsignedp
781 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
782 val |= (HOST_WIDE_INT) (-1) << width;
783
784 return gen_int_mode (val, mode);
785 }
786
787 return gen_lowpart (mode, x);
788 }
789
790   /* Converting an integer constant into MODE is always equivalent to a
791 subreg operation. */
792 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
793 {
794 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
795 return simplify_gen_subreg (mode, x, oldmode, 0);
796 }
797
798 temp = gen_reg_rtx (mode);
799 convert_move (temp, x, unsignedp);
800 return temp;
801 }
802 \f
803 /* STORE_MAX_PIECES is the number of bytes at a time that we can
804 store efficiently. Due to internal GCC limitations, this is
805 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
806 for an immediate constant. */
807
808 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
809
810 /* Determine whether the LEN bytes can be moved by using several move
811 instructions. Return nonzero if a call to move_by_pieces should
812 succeed. */
813
814 int
815 can_move_by_pieces (unsigned HOST_WIDE_INT len,
816 unsigned int align ATTRIBUTE_UNUSED)
817 {
818 return MOVE_BY_PIECES_P (len, align);
819 }
820
821 /* Generate several move instructions to copy LEN bytes from block FROM to
822 block TO. (These are MEM rtx's with BLKmode).
823
824 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
825 used to push FROM to the stack.
826
827 ALIGN is maximum stack alignment we can assume.
828
829 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
830    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
831 stpcpy. */
832
833 rtx
834 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
835 unsigned int align, int endp)
836 {
837 struct move_by_pieces data;
838 rtx to_addr, from_addr = XEXP (from, 0);
839 unsigned int max_size = MOVE_MAX_PIECES + 1;
840 enum machine_mode mode = VOIDmode, tmode;
841 enum insn_code icode;
842
843 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
844
845 data.offset = 0;
846 data.from_addr = from_addr;
847 if (to)
848 {
849 to_addr = XEXP (to, 0);
850 data.to = to;
851 data.autinc_to
852 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
853 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
854 data.reverse
855 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
856 }
857 else
858 {
859 to_addr = NULL_RTX;
860 data.to = NULL_RTX;
861 data.autinc_to = 1;
862 #ifdef STACK_GROWS_DOWNWARD
863 data.reverse = 1;
864 #else
865 data.reverse = 0;
866 #endif
867 }
868 data.to_addr = to_addr;
869 data.from = from;
870 data.autinc_from
871 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
872 || GET_CODE (from_addr) == POST_INC
873 || GET_CODE (from_addr) == POST_DEC);
874
875 data.explicit_inc_from = 0;
876 data.explicit_inc_to = 0;
877 if (data.reverse) data.offset = len;
878 data.len = len;
879
880 /* If copying requires more than two move insns,
881 copy addresses to registers (to make displacements shorter)
882 and use post-increment if available. */
883 if (!(data.autinc_from && data.autinc_to)
884 && move_by_pieces_ninsns (len, align, max_size) > 2)
885 {
886 /* Find the mode of the largest move... */
887 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
888 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
889 if (GET_MODE_SIZE (tmode) < max_size)
890 mode = tmode;
891
892 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
893 {
894 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
895 data.autinc_from = 1;
896 data.explicit_inc_from = -1;
897 }
898 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
899 {
900 data.from_addr = copy_addr_to_reg (from_addr);
901 data.autinc_from = 1;
902 data.explicit_inc_from = 1;
903 }
904 if (!data.autinc_from && CONSTANT_P (from_addr))
905 data.from_addr = copy_addr_to_reg (from_addr);
906 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
907 {
908 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
909 data.autinc_to = 1;
910 data.explicit_inc_to = -1;
911 }
912 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
913 {
914 data.to_addr = copy_addr_to_reg (to_addr);
915 data.autinc_to = 1;
916 data.explicit_inc_to = 1;
917 }
918 if (!data.autinc_to && CONSTANT_P (to_addr))
919 data.to_addr = copy_addr_to_reg (to_addr);
920 }
921
922 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
923 if (align >= GET_MODE_ALIGNMENT (tmode))
924 align = GET_MODE_ALIGNMENT (tmode);
925 else
926 {
927 enum machine_mode xmode;
928
929 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
930 tmode != VOIDmode;
931 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
932 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
933 || SLOW_UNALIGNED_ACCESS (tmode, align))
934 break;
935
936 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
937 }
938
939 /* First move what we can in the largest integer mode, then go to
940 successively smaller modes. */
941
942 while (max_size > 1)
943 {
944 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
945 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
946 if (GET_MODE_SIZE (tmode) < max_size)
947 mode = tmode;
948
949 if (mode == VOIDmode)
950 break;
951
952 icode = mov_optab->handlers[(int) mode].insn_code;
953 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
954 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
955
956 max_size = GET_MODE_SIZE (mode);
957 }
958
959 /* The code above should have handled everything. */
960 gcc_assert (!data.len);
961
962 if (endp)
963 {
964 rtx to1;
965
966 gcc_assert (!data.reverse);
967 if (data.autinc_to)
968 {
969 if (endp == 2)
970 {
971 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
972 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
973 else
974 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
975 -1));
976 }
977 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
978 data.offset);
979 }
980 else
981 {
982 if (endp == 2)
983 --data.offset;
984 to1 = adjust_address (data.to, QImode, data.offset);
985 }
986 return to1;
987 }
988 else
989 return data.to;
990 }
991
992 /* Return number of insns required to move L bytes by pieces.
993 ALIGN (in bits) is maximum alignment we can assume. */
994
995 static unsigned HOST_WIDE_INT
996 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
997 unsigned int max_size)
998 {
999 unsigned HOST_WIDE_INT n_insns = 0;
1000 enum machine_mode tmode;
1001
1002 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1003 if (align >= GET_MODE_ALIGNMENT (tmode))
1004 align = GET_MODE_ALIGNMENT (tmode);
1005 else
1006 {
1007 enum machine_mode tmode, xmode;
1008
1009 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1010 tmode != VOIDmode;
1011 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1012 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1013 || SLOW_UNALIGNED_ACCESS (tmode, align))
1014 break;
1015
1016 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1017 }
1018
1019 while (max_size > 1)
1020 {
1021 enum machine_mode mode = VOIDmode;
1022 enum insn_code icode;
1023
1024 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1025 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1026 if (GET_MODE_SIZE (tmode) < max_size)
1027 mode = tmode;
1028
1029 if (mode == VOIDmode)
1030 break;
1031
1032 icode = mov_optab->handlers[(int) mode].insn_code;
1033 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1034 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1035
1036 max_size = GET_MODE_SIZE (mode);
1037 }
1038
1039 gcc_assert (!l);
1040 return n_insns;
1041 }
1042
1043 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1044 with move instructions for mode MODE. GENFUN is the gen_... function
1045 to make a move insn for that mode. DATA has all the other info. */
1046
1047 static void
1048 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1049 struct move_by_pieces *data)
1050 {
1051 unsigned int size = GET_MODE_SIZE (mode);
1052 rtx to1 = NULL_RTX, from1;
1053
1054 while (data->len >= size)
1055 {
1056 if (data->reverse)
1057 data->offset -= size;
1058
1059 if (data->to)
1060 {
1061 if (data->autinc_to)
1062 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1063 data->offset);
1064 else
1065 to1 = adjust_address (data->to, mode, data->offset);
1066 }
1067
1068 if (data->autinc_from)
1069 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1070 data->offset);
1071 else
1072 from1 = adjust_address (data->from, mode, data->offset);
1073
1074 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1075 emit_insn (gen_add2_insn (data->to_addr,
1076 GEN_INT (-(HOST_WIDE_INT)size)));
1077 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1078 emit_insn (gen_add2_insn (data->from_addr,
1079 GEN_INT (-(HOST_WIDE_INT)size)));
1080
1081 if (data->to)
1082 emit_insn ((*genfun) (to1, from1));
1083 else
1084 {
1085 #ifdef PUSH_ROUNDING
1086 emit_single_push_insn (mode, from1, NULL);
1087 #else
1088 gcc_unreachable ();
1089 #endif
1090 }
1091
1092 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1093 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1094 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1095 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1096
1097 if (! data->reverse)
1098 data->offset += size;
1099
1100 data->len -= size;
1101 }
1102 }
1103 \f
1104 /* Emit code to move a block Y to a block X. This may be done with
1105 string-move instructions, with multiple scalar move instructions,
1106 or with a library call.
1107
1108 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1109 SIZE is an rtx that says how long they are.
1110 ALIGN is the maximum alignment we can assume they have.
1111 METHOD describes what kind of copy this is, and what mechanisms may be used.
1112
1113 Return the address of the new block, if memcpy is called and returns it,
1114 0 otherwise. */
1115
1116 rtx
1117 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1118 {
1119 bool may_use_call;
1120 rtx retval = 0;
1121 unsigned int align;
1122
1123 switch (method)
1124 {
1125 case BLOCK_OP_NORMAL:
1126 may_use_call = true;
1127 break;
1128
1129 case BLOCK_OP_CALL_PARM:
1130 may_use_call = block_move_libcall_safe_for_call_parm ();
1131
1132 /* Make inhibit_defer_pop nonzero around the library call
1133 to force it to pop the arguments right away. */
1134 NO_DEFER_POP;
1135 break;
1136
1137 case BLOCK_OP_NO_LIBCALL:
1138 may_use_call = false;
1139 break;
1140
1141 default:
1142 gcc_unreachable ();
1143 }
1144
1145 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1146
1147 gcc_assert (MEM_P (x));
1148 gcc_assert (MEM_P (y));
1149 gcc_assert (size);
1150
1151 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1152 block copy is more efficient for other large modes, e.g. DCmode. */
1153 x = adjust_address (x, BLKmode, 0);
1154 y = adjust_address (y, BLKmode, 0);
1155
1156 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1157 can be incorrect is coming from __builtin_memcpy. */
1158 if (GET_CODE (size) == CONST_INT)
1159 {
1160 if (INTVAL (size) == 0)
1161 return 0;
1162
1163 x = shallow_copy_rtx (x);
1164 y = shallow_copy_rtx (y);
1165 set_mem_size (x, size);
1166 set_mem_size (y, size);
1167 }
1168
1169 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1170 move_by_pieces (x, y, INTVAL (size), align, 0);
1171 else if (emit_block_move_via_movmem (x, y, size, align))
1172 ;
1173 else if (may_use_call)
1174 retval = emit_block_move_via_libcall (x, y, size);
1175 else
1176 emit_block_move_via_loop (x, y, size, align);
1177
1178 if (method == BLOCK_OP_CALL_PARM)
1179 OK_DEFER_POP;
1180
1181 return retval;
1182 }
1183
1184 /* A subroutine of emit_block_move. Returns true if calling the
1185 block move libcall will not clobber any parameters which may have
1186 already been placed on the stack. */
1187
1188 static bool
1189 block_move_libcall_safe_for_call_parm (void)
1190 {
1191 /* If arguments are pushed on the stack, then they're safe. */
1192 if (PUSH_ARGS)
1193 return true;
1194
1195 /* If registers go on the stack anyway, any argument is sure to clobber
1196 an outgoing argument. */
1197 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1198 {
1199 tree fn = emit_block_move_libcall_fn (false);
1200 (void) fn;
1201 if (REG_PARM_STACK_SPACE (fn) != 0)
1202 return false;
1203 }
1204 #endif
1205
1206 /* If any argument goes in memory, then it might clobber an outgoing
1207 argument. */
1208 {
1209 CUMULATIVE_ARGS args_so_far;
1210 tree fn, arg;
1211
1212 fn = emit_block_move_libcall_fn (false);
1213 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1214
1215 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1216 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1217 {
1218 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1219 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1220 if (!tmp || !REG_P (tmp))
1221 return false;
1222 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1223 NULL_TREE, 1))
1224 return false;
1225 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1226 }
1227 }
1228 return true;
1229 }
1230
1231 /* A subroutine of emit_block_move. Expand a movmem pattern;
1232 return true if successful. */
1233
1234 static bool
1235 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1236 {
1237 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1238 int save_volatile_ok = volatile_ok;
1239 enum machine_mode mode;
1240
1241 /* Since this is a move insn, we don't care about volatility. */
1242 volatile_ok = 1;
1243
1244 /* Try the most limited insn first, because there's no point
1245 including more than one in the machine description unless
1246 the more limited one has some advantage. */
1247
1248 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1249 mode = GET_MODE_WIDER_MODE (mode))
1250 {
1251 enum insn_code code = movmem_optab[(int) mode];
1252 insn_operand_predicate_fn pred;
1253
1254 if (code != CODE_FOR_nothing
1255 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1256 here because if SIZE is less than the mode mask, as it is
1257 returned by the macro, it will definitely be less than the
1258 actual mode mask. */
1259 && ((GET_CODE (size) == CONST_INT
1260 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1261 <= (GET_MODE_MASK (mode) >> 1)))
1262 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1263 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1264 || (*pred) (x, BLKmode))
1265 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1266 || (*pred) (y, BLKmode))
1267 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1268 || (*pred) (opalign, VOIDmode)))
1269 {
1270 rtx op2;
1271 rtx last = get_last_insn ();
1272 rtx pat;
1273
1274 op2 = convert_to_mode (mode, size, 1);
1275 pred = insn_data[(int) code].operand[2].predicate;
1276 if (pred != 0 && ! (*pred) (op2, mode))
1277 op2 = copy_to_mode_reg (mode, op2);
1278
1279 /* ??? When called via emit_block_move_for_call, it'd be
1280 nice if there were some way to inform the backend, so
1281 that it doesn't fail the expansion because it thinks
1282 emitting the libcall would be more efficient. */
1283
1284 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1285 if (pat)
1286 {
1287 emit_insn (pat);
1288 volatile_ok = save_volatile_ok;
1289 return true;
1290 }
1291 else
1292 delete_insns_since (last);
1293 }
1294 }
1295
1296 volatile_ok = save_volatile_ok;
1297 return false;
1298 }
1299
1300 /* A subroutine of emit_block_move. Expand a call to memcpy.
1301 Return the return value from memcpy, 0 otherwise. */
1302
1303 static rtx
1304 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1305 {
1306 rtx dst_addr, src_addr;
1307 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1308 enum machine_mode size_mode;
1309 rtx retval;
1310
1311 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1312 pseudos. We can then place those new pseudos into a VAR_DECL and
1313 use them later. */
1314
1315 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1316 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1317
1318 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1319 src_addr = convert_memory_address (ptr_mode, src_addr);
1320
1321 dst_tree = make_tree (ptr_type_node, dst_addr);
1322 src_tree = make_tree (ptr_type_node, src_addr);
1323
1324 size_mode = TYPE_MODE (sizetype);
1325
1326 size = convert_to_mode (size_mode, size, 1);
1327 size = copy_to_mode_reg (size_mode, size);
1328
1329 /* It is incorrect to use the libcall calling conventions to call
1330 memcpy in this context. This could be a user call to memcpy and
1331 the user may wish to examine the return value from memcpy. For
1332 targets where libcalls and normal calls have different conventions
1333 for returning pointers, we could end up generating incorrect code. */
1334
1335 size_tree = make_tree (sizetype, size);
1336
1337 fn = emit_block_move_libcall_fn (true);
1338 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1339 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1340 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1341
1342 /* Now we have to build up the CALL_EXPR itself. */
1343 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1344 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1345 call_expr, arg_list, NULL_TREE);
1346
1347 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1348
1349 return retval;
1350 }
1351
1352 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1353 for the function we use for block copies. The first time FOR_CALL
1354 is true, we call assemble_external. */
1355
1356 static GTY(()) tree block_move_fn;
1357
1358 void
1359 init_block_move_fn (const char *asmspec)
1360 {
1361 if (!block_move_fn)
1362 {
1363 tree args, fn;
1364
1365 fn = get_identifier ("memcpy");
1366 args = build_function_type_list (ptr_type_node, ptr_type_node,
1367 const_ptr_type_node, sizetype,
1368 NULL_TREE);
1369
1370 fn = build_decl (FUNCTION_DECL, fn, args);
1371 DECL_EXTERNAL (fn) = 1;
1372 TREE_PUBLIC (fn) = 1;
1373 DECL_ARTIFICIAL (fn) = 1;
1374 TREE_NOTHROW (fn) = 1;
1375
1376 block_move_fn = fn;
1377 }
1378
1379 if (asmspec)
1380 set_user_assembler_name (block_move_fn, asmspec);
1381 }
1382
1383 static tree
1384 emit_block_move_libcall_fn (int for_call)
1385 {
1386 static bool emitted_extern;
1387
1388 if (!block_move_fn)
1389 init_block_move_fn (NULL);
1390
1391 if (for_call && !emitted_extern)
1392 {
1393 emitted_extern = true;
1394 make_decl_rtl (block_move_fn);
1395 assemble_external (block_move_fn);
1396 }
1397
1398 return block_move_fn;
1399 }
1400
1401 /* A subroutine of emit_block_move. Copy the data via an explicit
1402 loop. This is used only when libcalls are forbidden. */
1403 /* ??? It'd be nice to copy in hunks larger than QImode. */
1404
1405 static void
1406 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1407 unsigned int align ATTRIBUTE_UNUSED)
1408 {
1409 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1410 enum machine_mode iter_mode;
1411
1412 iter_mode = GET_MODE (size);
1413 if (iter_mode == VOIDmode)
1414 iter_mode = word_mode;
1415
1416 top_label = gen_label_rtx ();
1417 cmp_label = gen_label_rtx ();
1418 iter = gen_reg_rtx (iter_mode);
1419
1420 emit_move_insn (iter, const0_rtx);
1421
1422 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1423 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1424 do_pending_stack_adjust ();
1425
1426 emit_jump (cmp_label);
1427 emit_label (top_label);
1428
1429 tmp = convert_modes (Pmode, iter_mode, iter, true);
1430 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1431 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1432 x = change_address (x, QImode, x_addr);
1433 y = change_address (y, QImode, y_addr);
1434
1435 emit_move_insn (x, y);
1436
1437 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1438 true, OPTAB_LIB_WIDEN);
1439 if (tmp != iter)
1440 emit_move_insn (iter, tmp);
1441
1442 emit_label (cmp_label);
1443
1444 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1445 true, top_label);
1446 }
1447 \f
1448 /* Copy all or part of a value X into registers starting at REGNO.
1449 The number of registers to be filled is NREGS. */
1450
1451 void
1452 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1453 {
1454 int i;
1455 #ifdef HAVE_load_multiple
1456 rtx pat;
1457 rtx last;
1458 #endif
1459
1460 if (nregs == 0)
1461 return;
1462
1463 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1464 x = validize_mem (force_const_mem (mode, x));
1465
1466 /* See if the machine can do this with a load multiple insn. */
1467 #ifdef HAVE_load_multiple
1468 if (HAVE_load_multiple)
1469 {
1470 last = get_last_insn ();
1471 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1472 GEN_INT (nregs));
1473 if (pat)
1474 {
1475 emit_insn (pat);
1476 return;
1477 }
1478 else
1479 delete_insns_since (last);
1480 }
1481 #endif
1482
1483 for (i = 0; i < nregs; i++)
1484 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1485 operand_subword_force (x, i, mode));
1486 }
1487
1488 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1489 The number of registers to be filled is NREGS. */
1490
1491 void
1492 move_block_from_reg (int regno, rtx x, int nregs)
1493 {
1494 int i;
1495
1496 if (nregs == 0)
1497 return;
1498
1499 /* See if the machine can do this with a store multiple insn. */
1500 #ifdef HAVE_store_multiple
1501 if (HAVE_store_multiple)
1502 {
1503 rtx last = get_last_insn ();
1504 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1505 GEN_INT (nregs));
1506 if (pat)
1507 {
1508 emit_insn (pat);
1509 return;
1510 }
1511 else
1512 delete_insns_since (last);
1513 }
1514 #endif
1515
1516 for (i = 0; i < nregs; i++)
1517 {
1518 rtx tem = operand_subword (x, i, 1, BLKmode);
1519
1520 gcc_assert (tem);
1521
1522 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1523 }
1524 }
1525
1526 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1527 ORIG, where ORIG is a non-consecutive group of registers represented by
1528 a PARALLEL. The clone is identical to the original except in that the
1529 original set of registers is replaced by a new set of pseudo registers.
1530 The new set has the same modes as the original set. */
1531
1532 rtx
1533 gen_group_rtx (rtx orig)
1534 {
1535 int i, length;
1536 rtx *tmps;
1537
1538 gcc_assert (GET_CODE (orig) == PARALLEL);
1539
1540 length = XVECLEN (orig, 0);
1541 tmps = alloca (sizeof (rtx) * length);
1542
1543 /* Skip a NULL entry in first slot. */
1544 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1545
1546 if (i)
1547 tmps[0] = 0;
1548
1549 for (; i < length; i++)
1550 {
1551 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1552 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1553
1554 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1555 }
1556
1557 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1558 }
1559
1560 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1561 except that values are placed in TMPS[i], and must later be moved
1562 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1563
1564 static void
1565 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1566 {
1567 rtx src;
1568 int start, i;
1569 enum machine_mode m = GET_MODE (orig_src);
1570
1571 gcc_assert (GET_CODE (dst) == PARALLEL);
1572
1573 if (!SCALAR_INT_MODE_P (m)
1574 && !MEM_P (orig_src) && GET_CODE (orig_src) != CONCAT)
1575 {
1576 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1577 if (imode == BLKmode)
1578 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1579 else
1580 src = gen_reg_rtx (imode);
1581 if (imode != BLKmode)
1582 src = gen_lowpart (GET_MODE (orig_src), src);
1583 emit_move_insn (src, orig_src);
1584 /* ...and back again. */
1585 if (imode != BLKmode)
1586 src = gen_lowpart (imode, src);
1587 emit_group_load_1 (tmps, dst, src, type, ssize);
1588 return;
1589 }
1590
1591 /* Check for a NULL entry, used to indicate that the parameter goes
1592 both on the stack and in registers. */
1593 if (XEXP (XVECEXP (dst, 0, 0), 0))
1594 start = 0;
1595 else
1596 start = 1;
1597
1598 /* Process the pieces. */
1599 for (i = start; i < XVECLEN (dst, 0); i++)
1600 {
1601 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1602 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1603 unsigned int bytelen = GET_MODE_SIZE (mode);
1604 int shift = 0;
1605
1606 /* Handle trailing fragments that run over the size of the struct. */
1607 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1608 {
1609 /* Arrange to shift the fragment to where it belongs.
1610 extract_bit_field loads to the lsb of the reg. */
1611 if (
1612 #ifdef BLOCK_REG_PADDING
1613 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1614 == (BYTES_BIG_ENDIAN ? upward : downward)
1615 #else
1616 BYTES_BIG_ENDIAN
1617 #endif
1618 )
1619 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1620 bytelen = ssize - bytepos;
1621 gcc_assert (bytelen > 0);
1622 }
1623
1624 /* If we won't be loading directly from memory, protect the real source
1625 from strange tricks we might play; but make sure that the source can
1626 be loaded directly into the destination. */
1627 src = orig_src;
1628 if (!MEM_P (orig_src)
1629 && (!CONSTANT_P (orig_src)
1630 || (GET_MODE (orig_src) != mode
1631 && GET_MODE (orig_src) != VOIDmode)))
1632 {
1633 if (GET_MODE (orig_src) == VOIDmode)
1634 src = gen_reg_rtx (mode);
1635 else
1636 src = gen_reg_rtx (GET_MODE (orig_src));
1637
1638 emit_move_insn (src, orig_src);
1639 }
1640
1641 /* Optimize the access just a bit. */
1642 if (MEM_P (src)
1643 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1644 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1645 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1646 && bytelen == GET_MODE_SIZE (mode))
1647 {
1648 tmps[i] = gen_reg_rtx (mode);
1649 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1650 }
1651 else if (GET_CODE (src) == CONCAT)
1652 {
1653 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1654 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1655
1656 if ((bytepos == 0 && bytelen == slen0)
1657 || (bytepos != 0 && bytepos + bytelen <= slen))
1658 {
1659 /* The following assumes that the concatenated objects all
1660 have the same size. In this case, a simple calculation
1661 can be used to determine the object and the bit field
1662 to be extracted. */
1663 tmps[i] = XEXP (src, bytepos / slen0);
1664 if (! CONSTANT_P (tmps[i])
1665 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1666 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1667 (bytepos % slen0) * BITS_PER_UNIT,
1668 1, NULL_RTX, mode, mode);
1669 }
1670 else
1671 {
1672 rtx mem;
1673
1674 gcc_assert (!bytepos);
1675 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1676 emit_move_insn (mem, src);
1677 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1678 0, 1, NULL_RTX, mode, mode);
1679 }
1680 }
1681 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1682          SIMD register, which is currently broken.  Until we get GCC
1683 to emit proper RTL for these cases, let's dump to memory. */
1684 else if (VECTOR_MODE_P (GET_MODE (dst))
1685 && REG_P (src))
1686 {
1687 int slen = GET_MODE_SIZE (GET_MODE (src));
1688 rtx mem;
1689
1690 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1691 emit_move_insn (mem, src);
1692 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1693 }
1694 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1695 && XVECLEN (dst, 0) > 1)
1696 	  tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1697 else if (CONSTANT_P (src)
1698 || (REG_P (src) && GET_MODE (src) == mode))
1699 tmps[i] = src;
1700 else
1701 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1702 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1703 mode, mode);
1704
1705 if (shift)
1706 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1707 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1708 }
1709 }
1710
1711 /* Emit code to move a block SRC of type TYPE to a block DST,
1712 where DST is non-consecutive registers represented by a PARALLEL.
1713 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1714 if not known. */
1715
1716 void
1717 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1718 {
1719 rtx *tmps;
1720 int i;
1721
1722 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1723 emit_group_load_1 (tmps, dst, src, type, ssize);
1724
1725 /* Copy the extracted pieces into the proper (probable) hard regs. */
1726 for (i = 0; i < XVECLEN (dst, 0); i++)
1727 {
1728 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1729 if (d == NULL)
1730 continue;
1731 emit_move_insn (d, tmps[i]);
1732 }
1733 }
1734
1735 /* Similar, but load SRC into new pseudos in a format that looks like
1736 PARALLEL. This can later be fed to emit_group_move to get things
1737 in the right place. */
1738
1739 rtx
1740 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1741 {
1742 rtvec vec;
1743 int i;
1744
1745 vec = rtvec_alloc (XVECLEN (parallel, 0));
1746 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1747
1748 /* Convert the vector to look just like the original PARALLEL, except
1749 with the computed values. */
1750 for (i = 0; i < XVECLEN (parallel, 0); i++)
1751 {
1752 rtx e = XVECEXP (parallel, 0, i);
1753 rtx d = XEXP (e, 0);
1754
1755 if (d)
1756 {
1757 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1758 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1759 }
1760 RTVEC_ELT (vec, i) = e;
1761 }
1762
1763 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1764 }
1765
1766 /* Emit code to move a block SRC to block DST, where SRC and DST are
1767 non-consecutive groups of registers, each represented by a PARALLEL. */
1768
1769 void
1770 emit_group_move (rtx dst, rtx src)
1771 {
1772 int i;
1773
1774 gcc_assert (GET_CODE (src) == PARALLEL
1775 && GET_CODE (dst) == PARALLEL
1776 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1777
1778 /* Skip first entry if NULL. */
1779 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1780 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1781 XEXP (XVECEXP (src, 0, i), 0));
1782 }
1783
1784 /* Move a group of registers represented by a PARALLEL into pseudos. */
1785
1786 rtx
1787 emit_group_move_into_temps (rtx src)
1788 {
1789 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1790 int i;
1791
1792 for (i = 0; i < XVECLEN (src, 0); i++)
1793 {
1794 rtx e = XVECEXP (src, 0, i);
1795 rtx d = XEXP (e, 0);
1796
1797 if (d)
1798 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1799 RTVEC_ELT (vec, i) = e;
1800 }
1801
1802 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1803 }
1804
1805 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1806 where SRC is non-consecutive registers represented by a PARALLEL.
1807 SSIZE represents the total size of block ORIG_DST, or -1 if not
1808 known. */
1809
1810 void
1811 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1812 {
1813 rtx *tmps, dst;
1814 int start, i;
1815 enum machine_mode m = GET_MODE (orig_dst);
1816
1817 gcc_assert (GET_CODE (src) == PARALLEL);
1818
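/* If ORIG_DST is neither memory, a CONCAT, nor an object in a scalar
   integer mode, assemble the pieces in an integer-mode pseudo of the
   same size (or in a stack temporary when no such integer mode
   exists) and then copy the result into ORIG_DST.  */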
1819 if (!SCALAR_INT_MODE_P (m)
1820 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1821 {
1822 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1823 if (imode == BLKmode)
1824 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1825 else
1826 dst = gen_reg_rtx (imode);
1827 emit_group_store (dst, src, type, ssize);
1828 if (imode != BLKmode)
1829 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1830 emit_move_insn (orig_dst, dst);
1831 return;
1832 }
1833
1834 /* Check for a NULL entry, used to indicate that the parameter goes
1835 both on the stack and in registers. */
1836 if (XEXP (XVECEXP (src, 0, 0), 0))
1837 start = 0;
1838 else
1839 start = 1;
1840
1841 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1842
1843 /* Copy the (probable) hard regs into pseudos. */
1844 for (i = start; i < XVECLEN (src, 0); i++)
1845 {
1846 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1847 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1848 emit_move_insn (tmps[i], reg);
1849 }
1850
1851 /* If we won't be storing directly into memory, protect the real destination
1852 from strange tricks we might play. */
1853 dst = orig_dst;
1854 if (GET_CODE (dst) == PARALLEL)
1855 {
1856 rtx temp;
1857
1858 /* We can get a PARALLEL dst if there is a conditional expression in
1859 a return statement. In that case, the dst and src are the same,
1860 so no action is necessary. */
1861 if (rtx_equal_p (dst, src))
1862 return;
1863
1864 /* It is unclear if we can ever reach here, but we may as well handle
1865 it. Allocate a temporary, and split this into a store/load to/from
1866 the temporary. */
1867
1868 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1869 emit_group_store (temp, src, type, ssize);
1870 emit_group_load (dst, temp, type, ssize);
1871 return;
1872 }
1873 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1874 {
1875 dst = gen_reg_rtx (GET_MODE (orig_dst));
1876 /* Make life a bit easier for combine. */
1877 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
1878 }
1879
1880 /* Process the pieces. */
1881 for (i = start; i < XVECLEN (src, 0); i++)
1882 {
1883 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1884 enum machine_mode mode = GET_MODE (tmps[i]);
1885 unsigned int bytelen = GET_MODE_SIZE (mode);
1886 rtx dest = dst;
1887
1888 /* Handle trailing fragments that run over the size of the struct. */
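/* Illustrative example: a DImode piece at byte offset 8 of a 12-byte
   struct has only 4 meaningful bytes; when the meaningful data sits
   at the most significant end (e.g. big-endian padding), it is
   shifted down to the lsb and BYTELEN is then reduced to 4.  */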
1889 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1890 {
1891 /* store_bit_field always takes its value from the lsb.
1892 Move the fragment to the lsb if it's not already there. */
1893 if (
1894 #ifdef BLOCK_REG_PADDING
1895 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1896 == (BYTES_BIG_ENDIAN ? upward : downward)
1897 #else
1898 BYTES_BIG_ENDIAN
1899 #endif
1900 )
1901 {
1902 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1903 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1904 build_int_cst (NULL_TREE, shift),
1905 tmps[i], 0);
1906 }
1907 bytelen = ssize - bytepos;
1908 }
1909
1910 if (GET_CODE (dst) == CONCAT)
1911 {
1912 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1913 dest = XEXP (dst, 0);
1914 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1915 {
1916 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1917 dest = XEXP (dst, 1);
1918 }
1919 else
1920 {
1921 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
1922 dest = assign_stack_temp (GET_MODE (dest),
1923 GET_MODE_SIZE (GET_MODE (dest)), 0);
1924 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1925 tmps[i]);
1926 dst = dest;
1927 break;
1928 }
1929 }
1930
1931 /* Optimize the access just a bit. */
1932 if (MEM_P (dest)
1933 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1934 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1935 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1936 && bytelen == GET_MODE_SIZE (mode))
1937 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1938 else
1939 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1940 mode, tmps[i]);
1941 }
1942
1943 /* Copy from the pseudo into the (probable) hard reg. */
1944 if (orig_dst != dst)
1945 emit_move_insn (orig_dst, dst);
1946 }
1947
1948 /* Generate code to copy a BLKmode object of TYPE out of a
1949 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1950 is null, a stack temporary is created. TGTBLK is returned.
1951
1952 The purpose of this routine is to handle functions that return
1953 BLKmode structures in registers. Some machines (the PA for example)
1954 want to return all small structures in registers regardless of the
1955 structure's alignment. */
1956
1957 rtx
1958 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
1959 {
1960 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1961 rtx src = NULL, dst = NULL;
1962 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
1963 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
1964
1965 if (tgtblk == 0)
1966 {
1967 tgtblk = assign_temp (build_qualified_type (type,
1968 (TYPE_QUALS (type)
1969 | TYPE_QUAL_CONST)),
1970 0, 1, 1);
1971 preserve_temp_slots (tgtblk);
1972 }
1973
1974 /* This code assumes srcreg is at least a full word. If it isn't, copy it
1975 into a new pseudo which is a full word. */
1976
1977 if (GET_MODE (srcreg) != BLKmode
1978 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
1979 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
1980
1981 /* If the structure doesn't take up a whole number of words, see whether
1982 SRCREG is padded on the left or on the right. If it's on the left,
1983 set PADDING_CORRECTION to the number of bits to skip.
1984
1985 In most ABIs, the structure will be returned at the least significant
1986 end of the register, which translates to right padding on little-endian
1987 targets and left padding on big-endian targets. The opposite
1988 holds if the structure is returned at the most significant
1989 end of the register. */
1990 if (bytes % UNITS_PER_WORD != 0
1991 && (targetm.calls.return_in_msb (type)
1992 ? !BYTES_BIG_ENDIAN
1993 : BYTES_BIG_ENDIAN))
1994 padding_correction
1995 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
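/* Illustrative example: a 6-byte structure with 4-byte words that is
   left-padded gets a PADDING_CORRECTION of 32 - 2 * 8 = 16 bits, so
   the copy below starts 16 bits into the first source word.  */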
1996
1997 /* Copy the structure BITSIZE bits at a time.
1998
1999 We could probably emit more efficient code for machines which do not use
2000 strict alignment, but it doesn't seem worth the effort at the current
2001 time. */
2002 for (bitpos = 0, xbitpos = padding_correction;
2003 bitpos < bytes * BITS_PER_UNIT;
2004 bitpos += bitsize, xbitpos += bitsize)
2005 {
2006 /* We need a new source operand each time xbitpos is on a
2007 word boundary and when xbitpos == padding_correction
2008 (the first time through). */
2009 if (xbitpos % BITS_PER_WORD == 0
2010 || xbitpos == padding_correction)
2011 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2012 GET_MODE (srcreg));
2013
2014 /* We need a new destination operand each time bitpos is on
2015 a word boundary. */
2016 if (bitpos % BITS_PER_WORD == 0)
2017 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2018
2019 /* Use xbitpos for the source extraction (right justified) and
2020 bitpos for the destination store (left justified). */
2021 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2022 extract_bit_field (src, bitsize,
2023 xbitpos % BITS_PER_WORD, 1,
2024 NULL_RTX, word_mode, word_mode));
2025 }
2026
2027 return tgtblk;
2028 }
2029
2030 /* Add a USE expression for REG to the (possibly empty) list pointed
2031 to by CALL_FUSAGE. REG must denote a hard register. */
2032
2033 void
2034 use_reg (rtx *call_fusage, rtx reg)
2035 {
2036 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2037
2038 *call_fusage
2039 = gen_rtx_EXPR_LIST (VOIDmode,
2040 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2041 }
2042
2043 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2044 starting at REGNO. All of these registers must be hard registers. */
2045
2046 void
2047 use_regs (rtx *call_fusage, int regno, int nregs)
2048 {
2049 int i;
2050
2051 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2052
2053 for (i = 0; i < nregs; i++)
2054 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2055 }
2056
2057 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2058 PARALLEL REGS. This is for calls that pass values in multiple
2059 non-contiguous locations. The Irix 6 ABI has examples of this. */
2060
2061 void
2062 use_group_regs (rtx *call_fusage, rtx regs)
2063 {
2064 int i;
2065
2066 for (i = 0; i < XVECLEN (regs, 0); i++)
2067 {
2068 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2069
2070 /* A NULL entry means the parameter goes both on the stack and in
2071 registers. This can also be a MEM for targets that pass values
2072 partially on the stack and partially in registers. */
2073 if (reg != 0 && REG_P (reg))
2074 use_reg (call_fusage, reg);
2075 }
2076 }
2077 \f
2078
2079 /* Determine whether the LEN bytes generated by CONSTFUN can be
2080 stored to memory using several move instructions. CONSTFUNDATA is
2081 a pointer which will be passed as argument in every CONSTFUN call.
2082 ALIGN is maximum alignment we can assume. Return nonzero if a
2083 call to store_by_pieces should succeed. */
2084
2085 int
2086 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2087 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2088 void *constfundata, unsigned int align)
2089 {
2090 unsigned HOST_WIDE_INT l;
2091 unsigned int max_size;
2092 HOST_WIDE_INT offset = 0;
2093 enum machine_mode mode, tmode;
2094 enum insn_code icode;
2095 int reverse;
2096 rtx cst;
2097
2098 if (len == 0)
2099 return 1;
2100
2101 if (! STORE_BY_PIECES_P (len, align))
2102 return 0;
2103
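/* Cap ALIGN at the alignment of the widest mode store_by_pieces could
   use; if ALIGN is smaller than that, raise it to the alignment of
   the widest integer mode that fits in STORE_MAX_PIECES and is not
   slow to access unaligned at ALIGN.  */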
2104 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2105 if (align >= GET_MODE_ALIGNMENT (tmode))
2106 align = GET_MODE_ALIGNMENT (tmode);
2107 else
2108 {
2109 enum machine_mode xmode;
2110
2111 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2112 tmode != VOIDmode;
2113 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2114 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2115 || SLOW_UNALIGNED_ACCESS (tmode, align))
2116 break;
2117
2118 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2119 }
2120
2121 /* We would first store what we can in the largest integer mode, then go to
2122 successively smaller modes. */
2123
2124 for (reverse = 0;
2125 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2126 reverse++)
2127 {
2128 l = len;
2129 mode = VOIDmode;
2130 max_size = STORE_MAX_PIECES + 1;
2131 while (max_size > 1)
2132 {
2133 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2134 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2135 if (GET_MODE_SIZE (tmode) < max_size)
2136 mode = tmode;
2137
2138 if (mode == VOIDmode)
2139 break;
2140
2141 icode = mov_optab->handlers[(int) mode].insn_code;
2142 if (icode != CODE_FOR_nothing
2143 && align >= GET_MODE_ALIGNMENT (mode))
2144 {
2145 unsigned int size = GET_MODE_SIZE (mode);
2146
2147 while (l >= size)
2148 {
2149 if (reverse)
2150 offset -= size;
2151
2152 cst = (*constfun) (constfundata, offset, mode);
2153 if (!LEGITIMATE_CONSTANT_P (cst))
2154 return 0;
2155
2156 if (!reverse)
2157 offset += size;
2158
2159 l -= size;
2160 }
2161 }
2162
2163 max_size = GET_MODE_SIZE (mode);
2164 }
2165
2166 /* The code above should have handled everything. */
2167 gcc_assert (!l);
2168 }
2169
2170 return 1;
2171 }
2172
2173 /* Generate several move instructions to store LEN bytes generated by
2174 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2175 pointer which will be passed as argument in every CONSTFUN call.
2176 ALIGN is maximum alignment we can assume.
2177 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2178 mempcpy, and if ENDP is 2 return memory at the end minus one byte
2179 ala stpcpy. */
2180
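/* For example, the builtin expanders for memset and strncpy call this
   with a CONSTFUN that materializes the constant byte pattern (or the
   relevant chunk of the source string) for each (offset, mode) pair
   requested.  */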
2181 rtx
2182 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2183 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2184 void *constfundata, unsigned int align, int endp)
2185 {
2186 struct store_by_pieces data;
2187
2188 if (len == 0)
2189 {
2190 gcc_assert (endp != 2);
2191 return to;
2192 }
2193
2194 gcc_assert (STORE_BY_PIECES_P (len, align));
2195 data.constfun = constfun;
2196 data.constfundata = constfundata;
2197 data.len = len;
2198 data.to = to;
2199 store_by_pieces_1 (&data, align);
2200 if (endp)
2201 {
2202 rtx to1;
2203
2204 gcc_assert (!data.reverse);
2205 if (data.autinc_to)
2206 {
2207 if (endp == 2)
2208 {
2209 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2210 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2211 else
2212 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2213 -1));
2214 }
2215 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2216 data.offset);
2217 }
2218 else
2219 {
2220 if (endp == 2)
2221 --data.offset;
2222 to1 = adjust_address (data.to, QImode, data.offset);
2223 }
2224 return to1;
2225 }
2226 else
2227 return data.to;
2228 }
2229
2230 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2231 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2232
2233 static void
2234 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2235 {
2236 struct store_by_pieces data;
2237
2238 if (len == 0)
2239 return;
2240
2241 data.constfun = clear_by_pieces_1;
2242 data.constfundata = NULL;
2243 data.len = len;
2244 data.to = to;
2245 store_by_pieces_1 (&data, align);
2246 }
2247
2248 /* Callback routine for clear_by_pieces.
2249 Return const0_rtx unconditionally. */
2250
2251 static rtx
2252 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2253 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2254 enum machine_mode mode ATTRIBUTE_UNUSED)
2255 {
2256 return const0_rtx;
2257 }
2258
2259 /* Subroutine of clear_by_pieces and store_by_pieces.
2260 Generate several move instructions to store LEN bytes of block TO. (A MEM
2261 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2262
2263 static void
2264 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2265 unsigned int align ATTRIBUTE_UNUSED)
2266 {
2267 rtx to_addr = XEXP (data->to, 0);
2268 unsigned int max_size = STORE_MAX_PIECES + 1;
2269 enum machine_mode mode = VOIDmode, tmode;
2270 enum insn_code icode;
2271
2272 data->offset = 0;
2273 data->to_addr = to_addr;
2274 data->autinc_to
2275 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2276 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2277
2278 data->explicit_inc_to = 0;
2279 data->reverse
2280 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2281 if (data->reverse)
2282 data->offset = data->len;
2283
2284 /* If storing requires more than two move insns,
2285 copy addresses to registers (to make displacements shorter)
2286 and use post-increment if available. */
2287 if (!data->autinc_to
2288 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2289 {
2290 /* Determine the main mode we'll be using. */
2291 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2292 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2293 if (GET_MODE_SIZE (tmode) < max_size)
2294 mode = tmode;
2295
2296 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2297 {
2298 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2299 data->autinc_to = 1;
2300 data->explicit_inc_to = -1;
2301 }
2302
2303 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2304 && ! data->autinc_to)
2305 {
2306 data->to_addr = copy_addr_to_reg (to_addr);
2307 data->autinc_to = 1;
2308 data->explicit_inc_to = 1;
2309 }
2310
2311 if (!data->autinc_to && CONSTANT_P (to_addr))
2312 data->to_addr = copy_addr_to_reg (to_addr);
2313 }
2314
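/* As in can_store_by_pieces, cap ALIGN at the alignment of the widest
   usable integer mode.  */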
2315 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2316 if (align >= GET_MODE_ALIGNMENT (tmode))
2317 align = GET_MODE_ALIGNMENT (tmode);
2318 else
2319 {
2320 enum machine_mode xmode;
2321
2322 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2323 tmode != VOIDmode;
2324 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2325 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2326 || SLOW_UNALIGNED_ACCESS (tmode, align))
2327 break;
2328
2329 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2330 }
2331
2332 /* First store what we can in the largest integer mode, then go to
2333 successively smaller modes. */
2334
2335 while (max_size > 1)
2336 {
2337 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2338 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2339 if (GET_MODE_SIZE (tmode) < max_size)
2340 mode = tmode;
2341
2342 if (mode == VOIDmode)
2343 break;
2344
2345 icode = mov_optab->handlers[(int) mode].insn_code;
2346 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2347 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2348
2349 max_size = GET_MODE_SIZE (mode);
2350 }
2351
2352 /* The code above should have handled everything. */
2353 gcc_assert (!data->len);
2354 }
2355
2356 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2357 with move instructions for mode MODE. GENFUN is the gen_... function
2358 to make a move insn for that mode. DATA has all the other info. */
2359
2360 static void
2361 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2362 struct store_by_pieces *data)
2363 {
2364 unsigned int size = GET_MODE_SIZE (mode);
2365 rtx to1, cst;
2366
2367 while (data->len >= size)
2368 {
2369 if (data->reverse)
2370 data->offset -= size;
2371
2372 if (data->autinc_to)
2373 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2374 data->offset);
2375 else
2376 to1 = adjust_address (data->to, mode, data->offset);
2377
2378 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2379 emit_insn (gen_add2_insn (data->to_addr,
2380 GEN_INT (-(HOST_WIDE_INT) size)));
2381
2382 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2383 emit_insn ((*genfun) (to1, cst));
2384
2385 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2386 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2387
2388 if (! data->reverse)
2389 data->offset += size;
2390
2391 data->len -= size;
2392 }
2393 }
2394 \f
2395 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2396 its length in bytes. */
2397
2398 rtx
2399 clear_storage (rtx object, rtx size)
2400 {
2401 rtx retval = 0;
2402 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2403 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2404
2405 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2406 just move a zero. Otherwise, do this a piece at a time. */
2407 if (GET_MODE (object) != BLKmode
2408 && GET_CODE (size) == CONST_INT
2409 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2410 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2411 else
2412 {
2413 if (size == const0_rtx)
2414 ;
2415 else if (GET_CODE (size) == CONST_INT
2416 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2417 clear_by_pieces (object, INTVAL (size), align);
2418 else if (clear_storage_via_clrmem (object, size, align))
2419 ;
2420 else
2421 retval = clear_storage_via_libcall (object, size);
2422 }
2423
2424 return retval;
2425 }
2426
2427 /* A subroutine of clear_storage. Expand a clrmem pattern;
2428 return true if successful. */
2429
2430 static bool
2431 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2432 {
2433 /* Try the most limited insn first, because there's no point
2434 including more than one in the machine description unless
2435 the more limited one has some advantage. */
2436
2437 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2438 enum machine_mode mode;
2439
2440 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2441 mode = GET_MODE_WIDER_MODE (mode))
2442 {
2443 enum insn_code code = clrmem_optab[(int) mode];
2444 insn_operand_predicate_fn pred;
2445
2446 if (code != CODE_FOR_nothing
2447 /* We don't need MODE to be narrower than
2448 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2449 the mode mask, as it is returned by the macro, it will
2450 definitely be less than the actual mode mask. */
2451 && ((GET_CODE (size) == CONST_INT
2452 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2453 <= (GET_MODE_MASK (mode) >> 1)))
2454 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2455 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2456 || (*pred) (object, BLKmode))
2457 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2458 || (*pred) (opalign, VOIDmode)))
2459 {
2460 rtx op1;
2461 rtx last = get_last_insn ();
2462 rtx pat;
2463
2464 op1 = convert_to_mode (mode, size, 1);
2465 pred = insn_data[(int) code].operand[1].predicate;
2466 if (pred != 0 && ! (*pred) (op1, mode))
2467 op1 = copy_to_mode_reg (mode, op1);
2468
2469 pat = GEN_FCN ((int) code) (object, op1, opalign);
2470 if (pat)
2471 {
2472 emit_insn (pat);
2473 return true;
2474 }
2475 else
2476 delete_insns_since (last);
2477 }
2478 }
2479
2480 return false;
2481 }
2482
2483 /* A subroutine of clear_storage. Expand a call to memset.
2484 Return the return value of memset, 0 otherwise. */
2485
2486 static rtx
2487 clear_storage_via_libcall (rtx object, rtx size)
2488 {
2489 tree call_expr, arg_list, fn, object_tree, size_tree;
2490 enum machine_mode size_mode;
2491 rtx retval;
2492
2493 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2494 wrap those pseudos in tree nodes and use them later. */
2495
2496 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2497
2498 size_mode = TYPE_MODE (sizetype);
2499 size = convert_to_mode (size_mode, size, 1);
2500 size = copy_to_mode_reg (size_mode, size);
2501
2502 /* It is incorrect to use the libcall calling conventions to call
2503 memset in this context. This could be a user call to memset and
2504 the user may wish to examine the return value from memset. For
2505 targets where libcalls and normal calls have different conventions
2506 for returning pointers, we could end up generating incorrect code. */
2507
2508 object_tree = make_tree (ptr_type_node, object);
2509 size_tree = make_tree (sizetype, size);
2510
2511 fn = clear_storage_libcall_fn (true);
2512 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2513 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2514 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2515
2516 /* Now we have to build up the CALL_EXPR itself. */
2517 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2518 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2519 call_expr, arg_list, NULL_TREE);
2520
2521 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2522
2523 return retval;
2524 }
2525
2526 /* A subroutine of clear_storage_via_libcall. Create the tree node
2527 for the function we use for block clears. The first time FOR_CALL
2528 is true, we call assemble_external. */
2529
2530 static GTY(()) tree block_clear_fn;
2531
2532 void
2533 init_block_clear_fn (const char *asmspec)
2534 {
2535 if (!block_clear_fn)
2536 {
2537 tree fn, args;
2538
2539 fn = get_identifier ("memset");
2540 args = build_function_type_list (ptr_type_node, ptr_type_node,
2541 integer_type_node, sizetype,
2542 NULL_TREE);
2543
2544 fn = build_decl (FUNCTION_DECL, fn, args);
2545 DECL_EXTERNAL (fn) = 1;
2546 TREE_PUBLIC (fn) = 1;
2547 DECL_ARTIFICIAL (fn) = 1;
2548 TREE_NOTHROW (fn) = 1;
2549
2550 block_clear_fn = fn;
2551 }
2552
2553 if (asmspec)
2554 set_user_assembler_name (block_clear_fn, asmspec);
2555 }
2556
2557 static tree
2558 clear_storage_libcall_fn (int for_call)
2559 {
2560 static bool emitted_extern;
2561
2562 if (!block_clear_fn)
2563 init_block_clear_fn (NULL);
2564
2565 if (for_call && !emitted_extern)
2566 {
2567 emitted_extern = true;
2568 make_decl_rtl (block_clear_fn);
2569 assemble_external (block_clear_fn);
2570 }
2571
2572 return block_clear_fn;
2573 }
2574 \f
2575 /* Generate code to copy Y into X.
2576 Both Y and X must have the same mode, except that
2577 Y can be a constant with VOIDmode.
2578 This mode cannot be BLKmode; use emit_block_move for that.
2579
2580 Return the last instruction emitted. */
2581
2582 rtx
2583 emit_move_insn (rtx x, rtx y)
2584 {
2585 enum machine_mode mode = GET_MODE (x);
2586 rtx y_cst = NULL_RTX;
2587 rtx last_insn, set;
2588
2589 gcc_assert (mode != BLKmode
2590 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
2591
2592 if (CONSTANT_P (y))
2593 {
2594 if (optimize
2595 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2596 && (last_insn = compress_float_constant (x, y)))
2597 return last_insn;
2598
2599 y_cst = y;
2600
2601 if (!LEGITIMATE_CONSTANT_P (y))
2602 {
2603 y = force_const_mem (mode, y);
2604
2605 /* If the target's cannot_force_const_mem prevented the spill,
2606 assume that the target's move expanders will also take care
2607 of the non-legitimate constant. */
2608 if (!y)
2609 y = y_cst;
2610 }
2611 }
2612
2613 /* If X or Y are memory references, verify that their addresses are valid
2614 for the machine. */
2615 if (MEM_P (x)
2616 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2617 && ! push_operand (x, GET_MODE (x)))
2618 || (flag_force_addr
2619 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2620 x = validize_mem (x);
2621
2622 if (MEM_P (y)
2623 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2624 || (flag_force_addr
2625 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2626 y = validize_mem (y);
2627
2628 gcc_assert (mode != BLKmode);
2629
2630 last_insn = emit_move_insn_1 (x, y);
2631
2632 if (y_cst && REG_P (x)
2633 && (set = single_set (last_insn)) != NULL_RTX
2634 && SET_DEST (set) == x
2635 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2636 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2637
2638 return last_insn;
2639 }
2640
2641 /* Low level part of emit_move_insn.
2642 Called just like emit_move_insn, but assumes X and Y
2643 are basically valid. */
2644
2645 rtx
2646 emit_move_insn_1 (rtx x, rtx y)
2647 {
2648 enum machine_mode mode = GET_MODE (x);
2649 enum machine_mode submode;
2650
2651 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
2652
2653 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2654 return
2655 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2656
2657 /* Expand complex moves by moving real part and imag part, if possible. */
2658 else if (COMPLEX_MODE_P (mode)
2659 && BLKmode != (submode = GET_MODE_INNER (mode))
2660 && (mov_optab->handlers[(int) submode].insn_code
2661 != CODE_FOR_nothing))
2662 {
2663 unsigned int modesize = GET_MODE_SIZE (mode);
2664 unsigned int submodesize = GET_MODE_SIZE (submode);
2665
2666 /* Don't split destination if it is a stack push. */
2667 int stack = push_operand (x, mode);
2668
2669 #ifdef PUSH_ROUNDING
2670 /* If we are pushing onto the stack, but the size is smaller than what
2671 the machine can push exactly, we need to use move instructions. */
2672 if (stack && PUSH_ROUNDING (submodesize) != submodesize)
2673 {
2674 rtx temp;
2675 HOST_WIDE_INT offset1, offset2;
2676
2677 /* Do not use anti_adjust_stack, since we don't want to update
2678 stack_pointer_delta. */
2679 temp = expand_binop (Pmode,
2680 #ifdef STACK_GROWS_DOWNWARD
2681 sub_optab,
2682 #else
2683 add_optab,
2684 #endif
2685 stack_pointer_rtx,
2686 GEN_INT (PUSH_ROUNDING (modesize)),
2687 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2688
2689 if (temp != stack_pointer_rtx)
2690 emit_move_insn (stack_pointer_rtx, temp);
2691
2692 #ifdef STACK_GROWS_DOWNWARD
2693 offset1 = 0;
2694 offset2 = submodesize;
2695 #else
2696 offset1 = -PUSH_ROUNDING (modesize);
2697 offset2 = -PUSH_ROUNDING (modesize) + submodesize;
2698 #endif
2699
2700 emit_move_insn (change_address (x, submode,
2701 gen_rtx_PLUS (Pmode,
2702 stack_pointer_rtx,
2703 GEN_INT (offset1))),
2704 gen_realpart (submode, y));
2705 emit_move_insn (change_address (x, submode,
2706 gen_rtx_PLUS (Pmode,
2707 stack_pointer_rtx,
2708 GEN_INT (offset2))),
2709 gen_imagpart (submode, y));
2710 }
2711 else
2712 #endif
2713 /* If this is a stack push, push the highpart first, so it
2714 will end up in argument order.
2715
2716 In that case, change_address is used only to convert
2717 the mode, not to change the address. */
2718 if (stack)
2719 {
2720 /* Note that the real part always precedes the imaginary part in memory,
2721 regardless of the machine's endianness. */
2722 #ifdef STACK_GROWS_DOWNWARD
2723 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2724 gen_imagpart (submode, y));
2725 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2726 gen_realpart (submode, y));
2727 #else
2728 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2729 gen_realpart (submode, y));
2730 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2731 gen_imagpart (submode, y));
2732 #endif
2733 }
2734 else
2735 {
2736 rtx realpart_x, realpart_y;
2737 rtx imagpart_x, imagpart_y;
2738
2739 /* If this is a complex value with each part being smaller than a
2740 word, the usual calling sequence will likely pack the pieces into
2741 a single register. Unfortunately, SUBREG of hard registers only
2742 deals in terms of words, so we have a problem converting input
2743 arguments to the CONCAT of two registers that is used elsewhere
2744 for complex values. If this is before reload, we can copy it into
2745 memory and reload. FIXME, we should see about using extract and
2746 insert on integer registers, but complex short and complex char
2747 variables should be rarely used. */
2748 if ((reload_in_progress | reload_completed) == 0
2749 && (!validate_subreg (submode, mode, NULL, submodesize)
2750 || !validate_subreg (submode, mode, NULL, 0)))
2751 {
2752 if (REG_P (x) || REG_P (y))
2753 {
2754 rtx mem, cmem;
2755 enum machine_mode reg_mode
2756 = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 1);
2757
2758 gcc_assert (reg_mode != BLKmode);
2759
2760 mem = assign_stack_temp (reg_mode, modesize, 0);
2761 cmem = adjust_address (mem, mode, 0);
2762
2763 if (REG_P (x))
2764 {
2765 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2766 emit_move_insn_1 (cmem, y);
2767 return emit_move_insn_1 (sreg, mem);
2768 }
2769 else
2770 {
2771 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2772 emit_move_insn_1 (mem, sreg);
2773 return emit_move_insn_1 (x, cmem);
2774 }
2775 }
2776 }
2777
2778 realpart_x = gen_realpart (submode, x);
2779 realpart_y = gen_realpart (submode, y);
2780 imagpart_x = gen_imagpart (submode, x);
2781 imagpart_y = gen_imagpart (submode, y);
2782
2783 /* Show the output dies here. This is necessary for SUBREGs
2784 of pseudos since we cannot track their lifetimes correctly;
2785 hard regs shouldn't appear here except as return values.
2786 We never want to emit such a clobber after reload. */
2787 if (x != y
2788 && ! (reload_in_progress || reload_completed)
2789 && (GET_CODE (realpart_x) == SUBREG
2790 || GET_CODE (imagpart_x) == SUBREG))
2791 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2792
2793 emit_move_insn (realpart_x, realpart_y);
2794 emit_move_insn (imagpart_x, imagpart_y);
2795 }
2796
2797 return get_last_insn ();
2798 }
2799
2800 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2801 find a mode to do it in. If we have a movcc, use it. Otherwise,
2802 find the MODE_INT mode of the same width. */
2803 else if (GET_MODE_CLASS (mode) == MODE_CC
2804 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2805 {
2806 enum insn_code insn_code;
2807 enum machine_mode tmode = VOIDmode;
2808 rtx x1 = x, y1 = y;
2809
2810 if (mode != CCmode
2811 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2812 tmode = CCmode;
2813 else
2814 for (tmode = QImode; tmode != VOIDmode;
2815 tmode = GET_MODE_WIDER_MODE (tmode))
2816 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2817 break;
2818
2819 gcc_assert (tmode != VOIDmode);
2820
2821 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2822 may call change_address which is not appropriate if we were
2823 called when a reload was in progress. We don't have to worry
2824 about changing the address since the size in bytes is supposed to
2825 be the same. Copy the MEM to change the mode and move any
2826 substitutions from the old MEM to the new one. */
2827
2828 if (reload_in_progress)
2829 {
2830 x = gen_lowpart_common (tmode, x1);
2831 if (x == 0 && MEM_P (x1))
2832 {
2833 x = adjust_address_nv (x1, tmode, 0);
2834 copy_replacements (x1, x);
2835 }
2836
2837 y = gen_lowpart_common (tmode, y1);
2838 if (y == 0 && MEM_P (y1))
2839 {
2840 y = adjust_address_nv (y1, tmode, 0);
2841 copy_replacements (y1, y);
2842 }
2843 }
2844 else
2845 {
2846 x = gen_lowpart (tmode, x);
2847 y = gen_lowpart (tmode, y);
2848 }
2849
2850 insn_code = mov_optab->handlers[(int) tmode].insn_code;
2851 return emit_insn (GEN_FCN (insn_code) (x, y));
2852 }
2853
2854 /* Try using a move pattern for the corresponding integer mode. This is
2855 only safe when simplify_subreg can convert MODE constants into integer
2856 constants. At present, it can only do this reliably if the value
2857 fits within a HOST_WIDE_INT. */
2858 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2859 && (submode = int_mode_for_mode (mode)) != BLKmode
2860 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
2861 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
2862 (simplify_gen_subreg (submode, x, mode, 0),
2863 simplify_gen_subreg (submode, y, mode, 0)));
2864
2865 /* This will handle any multi-word or full-word mode that lacks a move_insn
2866 pattern. However, you will get better code if you define such patterns,
2867 even if they must turn into multiple assembler instructions. */
2868 else
2869 {
2870 rtx last_insn = 0;
2871 rtx seq, inner;
2872 int need_clobber;
2873 int i;
2874
2875 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
2876
2877 #ifdef PUSH_ROUNDING
2878
2879 /* If X is a push on the stack, do the push now and replace
2880 X with a reference to the stack pointer. */
2881 if (push_operand (x, GET_MODE (x)))
2882 {
2883 rtx temp;
2884 enum rtx_code code;
2885
2886 /* Do not use anti_adjust_stack, since we don't want to update
2887 stack_pointer_delta. */
2888 temp = expand_binop (Pmode,
2889 #ifdef STACK_GROWS_DOWNWARD
2890 sub_optab,
2891 #else
2892 add_optab,
2893 #endif
2894 stack_pointer_rtx,
2895 GEN_INT
2896 (PUSH_ROUNDING
2897 (GET_MODE_SIZE (GET_MODE (x)))),
2898 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2899
2900 if (temp != stack_pointer_rtx)
2901 emit_move_insn (stack_pointer_rtx, temp);
2902
2903 code = GET_CODE (XEXP (x, 0));
2904
2905 /* Just hope that small offsets off SP are OK. */
2906 if (code == POST_INC)
2907 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2908 GEN_INT (-((HOST_WIDE_INT)
2909 GET_MODE_SIZE (GET_MODE (x)))));
2910 else if (code == POST_DEC)
2911 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2912 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2913 else
2914 temp = stack_pointer_rtx;
2915
2916 x = change_address (x, VOIDmode, temp);
2917 }
2918 #endif
2919
2920 /* If we are in reload, see if either operand is a MEM whose address
2921 is scheduled for replacement. */
2922 if (reload_in_progress && MEM_P (x)
2923 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2924 x = replace_equiv_address_nv (x, inner);
2925 if (reload_in_progress && MEM_P (y)
2926 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2927 y = replace_equiv_address_nv (y, inner);
2928
2929 start_sequence ();
2930
2931 need_clobber = 0;
2932 for (i = 0;
2933 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2934 i++)
2935 {
2936 rtx xpart = operand_subword (x, i, 1, mode);
2937 rtx ypart = operand_subword (y, i, 1, mode);
2938
2939 /* If we can't get a part of Y, put Y into memory if it is a
2940 constant. Otherwise, force it into a register. If we still
2941 can't get a part of Y, abort. */
2942 if (ypart == 0 && CONSTANT_P (y))
2943 {
2944 y = force_const_mem (mode, y);
2945 ypart = operand_subword (y, i, 1, mode);
2946 }
2947 else if (ypart == 0)
2948 ypart = operand_subword_force (y, i, mode);
2949
2950 gcc_assert (xpart && ypart);
2951
2952 need_clobber |= (GET_CODE (xpart) == SUBREG);
2953
2954 last_insn = emit_move_insn (xpart, ypart);
2955 }
2956
2957 seq = get_insns ();
2958 end_sequence ();
2959
2960 /* Show the output dies here. This is necessary for SUBREGs
2961 of pseudos since we cannot track their lifetimes correctly;
2962 hard regs shouldn't appear here except as return values.
2963 We never want to emit such a clobber after reload. */
2964 if (x != y
2965 && ! (reload_in_progress || reload_completed)
2966 && need_clobber != 0)
2967 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2968
2969 emit_insn (seq);
2970
2971 return last_insn;
2972 }
2973 }
2974
2975 /* If Y is representable exactly in a narrower mode, and the target can
2976 perform the extension directly from constant or memory, then emit the
2977 move as an extension. */
2978
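/* For example, on a target that can extend directly from memory or
   from an immediate, a DFmode move of the constant 1.0 may be emitted
   as an SFmode load of 1.0f followed by a float_extend.  */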
2979 static rtx
2980 compress_float_constant (rtx x, rtx y)
2981 {
2982 enum machine_mode dstmode = GET_MODE (x);
2983 enum machine_mode orig_srcmode = GET_MODE (y);
2984 enum machine_mode srcmode;
2985 REAL_VALUE_TYPE r;
2986
2987 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
2988
2989 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
2990 srcmode != orig_srcmode;
2991 srcmode = GET_MODE_WIDER_MODE (srcmode))
2992 {
2993 enum insn_code ic;
2994 rtx trunc_y, last_insn;
2995
2996 /* Skip if the target can't extend this way. */
2997 ic = can_extend_p (dstmode, srcmode, 0);
2998 if (ic == CODE_FOR_nothing)
2999 continue;
3000
3001 /* Skip if the narrowed value isn't exact. */
3002 if (! exact_real_truncate (srcmode, &r))
3003 continue;
3004
3005 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3006
3007 if (LEGITIMATE_CONSTANT_P (trunc_y))
3008 {
3009 /* Skip if the target needs extra instructions to perform
3010 the extension. */
3011 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3012 continue;
3013 }
3014 else if (float_extend_from_mem[dstmode][srcmode])
3015 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3016 else
3017 continue;
3018
3019 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3020 last_insn = get_last_insn ();
3021
3022 if (REG_P (x))
3023 set_unique_reg_note (last_insn, REG_EQUAL, y);
3024
3025 return last_insn;
3026 }
3027
3028 return NULL_RTX;
3029 }
3030 \f
3031 /* Pushing data onto the stack. */
3032
3033 /* Push a block of length SIZE (perhaps variable)
3034 and return an rtx to address the beginning of the block.
3035 The value may be virtual_outgoing_args_rtx.
3036
3037 EXTRA is the number of bytes of padding to push in addition to SIZE.
3038 BELOW nonzero means this padding comes at low addresses;
3039 otherwise, the padding comes at high addresses. */
3040
3041 rtx
3042 push_block (rtx size, int extra, int below)
3043 {
3044 rtx temp;
3045
3046 size = convert_modes (Pmode, ptr_mode, size, 1);
3047 if (CONSTANT_P (size))
3048 anti_adjust_stack (plus_constant (size, extra));
3049 else if (REG_P (size) && extra == 0)
3050 anti_adjust_stack (size);
3051 else
3052 {
3053 temp = copy_to_mode_reg (Pmode, size);
3054 if (extra != 0)
3055 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3056 temp, 0, OPTAB_LIB_WIDEN);
3057 anti_adjust_stack (temp);
3058 }
3059
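/* The preprocessor conditional below simply selects one branch at
   compile time according to STACK_GROWS_DOWNWARD while keeping both
   arms syntactically checked.  */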
3060 #ifndef STACK_GROWS_DOWNWARD
3061 if (0)
3062 #else
3063 if (1)
3064 #endif
3065 {
3066 temp = virtual_outgoing_args_rtx;
3067 if (extra != 0 && below)
3068 temp = plus_constant (temp, extra);
3069 }
3070 else
3071 {
3072 if (GET_CODE (size) == CONST_INT)
3073 temp = plus_constant (virtual_outgoing_args_rtx,
3074 -INTVAL (size) - (below ? 0 : extra));
3075 else if (extra != 0 && !below)
3076 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3077 negate_rtx (Pmode, plus_constant (size, extra)));
3078 else
3079 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3080 negate_rtx (Pmode, size));
3081 }
3082
3083 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3084 }
3085
3086 #ifdef PUSH_ROUNDING
3087
3088 /* Emit single push insn. */
3089
3090 static void
3091 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3092 {
3093 rtx dest_addr;
3094 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3095 rtx dest;
3096 enum insn_code icode;
3097 insn_operand_predicate_fn pred;
3098
3099 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3100 /* If there is a push pattern, use it. Otherwise fall back to the old
3101 way of handing a MEM for the push operation to the move expander. */
3102 icode = push_optab->handlers[(int) mode].insn_code;
3103 if (icode != CODE_FOR_nothing)
3104 {
3105 if (((pred = insn_data[(int) icode].operand[0].predicate)
3106 && !((*pred) (x, mode))))
3107 x = force_reg (mode, x);
3108 emit_insn (GEN_FCN (icode) (x));
3109 return;
3110 }
3111 if (GET_MODE_SIZE (mode) == rounded_size)
3112 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3113 /* If we are to pad downward, adjust the stack pointer first and
3114 then store X into the stack location using an offset. This is
3115 because emit_move_insn does not know how to pad; it does not have
3116 access to type. */
3117 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3118 {
3119 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3120 HOST_WIDE_INT offset;
3121
3122 emit_move_insn (stack_pointer_rtx,
3123 expand_binop (Pmode,
3124 #ifdef STACK_GROWS_DOWNWARD
3125 sub_optab,
3126 #else
3127 add_optab,
3128 #endif
3129 stack_pointer_rtx,
3130 GEN_INT (rounded_size),
3131 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3132
3133 offset = (HOST_WIDE_INT) padding_size;
3134 #ifdef STACK_GROWS_DOWNWARD
3135 if (STACK_PUSH_CODE == POST_DEC)
3136 /* We have already decremented the stack pointer, so get the
3137 previous value. */
3138 offset += (HOST_WIDE_INT) rounded_size;
3139 #else
3140 if (STACK_PUSH_CODE == POST_INC)
3141 /* We have already incremented the stack pointer, so get the
3142 previous value. */
3143 offset -= (HOST_WIDE_INT) rounded_size;
3144 #endif
3145 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3146 }
3147 else
3148 {
3149 #ifdef STACK_GROWS_DOWNWARD
3150 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3151 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3152 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3153 #else
3154 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3155 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3156 GEN_INT (rounded_size));
3157 #endif
3158 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3159 }
3160
3161 dest = gen_rtx_MEM (mode, dest_addr);
3162
3163 if (type != 0)
3164 {
3165 set_mem_attributes (dest, type, 1);
3166
3167 if (flag_optimize_sibling_calls)
3168 /* Function incoming arguments may overlap with sibling call
3169 outgoing arguments and we cannot allow reordering of reads
3170 from function arguments with stores to outgoing arguments
3171 of sibling calls. */
3172 set_mem_alias_set (dest, 0);
3173 }
3174 emit_move_insn (dest, x);
3175 }
3176 #endif
3177
3178 /* Generate code to push X onto the stack, assuming it has mode MODE and
3179 type TYPE.
3180 MODE is redundant except when X is a CONST_INT (since they don't
3181 carry mode info).
3182 SIZE is an rtx for the size of data to be copied (in bytes),
3183 needed only if X is BLKmode.
3184
3185 ALIGN (in bits) is maximum alignment we can assume.
3186
3187 If PARTIAL and REG are both nonzero, then copy that many of the first
3188 words of X into registers starting with REG, and push the rest of X.
3189 The amount of space pushed is decreased by PARTIAL words,
3190 rounded *down* to a multiple of PARM_BOUNDARY.
3191 REG must be a hard register in this case.
3192 If REG is zero but PARTIAL is not, take all other actions for an
3193 argument partially in registers, but do not actually load any
3194 registers.
3195
3196 EXTRA is the amount in bytes of extra space to leave next to this arg.
3197 This is ignored if an argument block has already been allocated.
3198
3199 On a machine that lacks real push insns, ARGS_ADDR is the address of
3200 the bottom of the argument block for this call. We use indexing off there
3201 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3202 argument block has not been preallocated.
3203
3204 ARGS_SO_FAR is the size of args previously pushed for this call.
3205
3206 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3207 for arguments passed in registers. If nonzero, it will be the number
3208 of bytes required. */
3209
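/* Illustrative note: calls.c uses this routine to push each outgoing
   argument, either with real push insns or by storing into a
   preallocated argument block addressed by ARGS_ADDR.  */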
3210 void
3211 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3212 unsigned int align, int partial, rtx reg, int extra,
3213 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3214 rtx alignment_pad)
3215 {
3216 rtx xinner;
3217 enum direction stack_direction
3218 #ifdef STACK_GROWS_DOWNWARD
3219 = downward;
3220 #else
3221 = upward;
3222 #endif
3223
3224 /* Decide where to pad the argument: `downward' for below,
3225 `upward' for above, or `none' for don't pad it.
3226 Default is below for small data on big-endian machines; else above. */
3227 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3228
3229 /* Invert direction if stack is post-decrement.
3230 FIXME: why? */
3231 if (STACK_PUSH_CODE == POST_DEC)
3232 if (where_pad != none)
3233 where_pad = (where_pad == downward ? upward : downward);
3234
3235 xinner = x;
3236
3237 if (mode == BLKmode)
3238 {
3239 /* Copy a block into the stack, entirely or partially. */
3240
3241 rtx temp;
3242 int used = partial * UNITS_PER_WORD;
3243 int offset;
3244 int skip;
3245
3246 if (reg && GET_CODE (reg) == PARALLEL)
3247 {
3248 /* Use the size of the elt to compute offset. */
3249 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3250 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3251 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3252 }
3253 else
3254 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3255
3256 gcc_assert (size);
3257
3258 used -= offset;
3259
3260 /* USED is now the # of bytes we need not copy to the stack
3261 because registers will take care of them. */
3262
3263 if (partial != 0)
3264 xinner = adjust_address (xinner, BLKmode, used);
3265
3266 /* If the partial register-part of the arg counts in its stack size,
3267 skip the part of stack space corresponding to the registers.
3268 Otherwise, start copying to the beginning of the stack space,
3269 by setting SKIP to 0. */
3270 skip = (reg_parm_stack_space == 0) ? 0 : used;
3271
3272 #ifdef PUSH_ROUNDING
3273 /* Do it with several push insns if that doesn't take lots of insns
3274 and if there is no difficulty with push insns that skip bytes
3275 on the stack for alignment purposes. */
3276 if (args_addr == 0
3277 && PUSH_ARGS
3278 && GET_CODE (size) == CONST_INT
3279 && skip == 0
3280 && MEM_ALIGN (xinner) >= align
3281 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3282 /* Here we avoid the case of a structure whose weak alignment
3283 forces many pushes of a small amount of data,
3284 and such small pushes do rounding that causes trouble. */
3285 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3286 || align >= BIGGEST_ALIGNMENT
3287 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3288 == (align / BITS_PER_UNIT)))
3289 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3290 {
3291 /* Push padding now if padding above and stack grows down,
3292 or if padding below and stack grows up.
3293 But if space already allocated, this has already been done. */
3294 if (extra && args_addr == 0
3295 && where_pad != none && where_pad != stack_direction)
3296 anti_adjust_stack (GEN_INT (extra));
3297
3298 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3299 }
3300 else
3301 #endif /* PUSH_ROUNDING */
3302 {
3303 rtx target;
3304
3305 /* Otherwise make space on the stack and copy the data
3306 to the address of that space. */
3307
3308 /* Deduct words put into registers from the size we must copy. */
3309 if (partial != 0)
3310 {
3311 if (GET_CODE (size) == CONST_INT)
3312 size = GEN_INT (INTVAL (size) - used);
3313 else
3314 size = expand_binop (GET_MODE (size), sub_optab, size,
3315 GEN_INT (used), NULL_RTX, 0,
3316 OPTAB_LIB_WIDEN);
3317 }
3318
3319 /* Get the address of the stack space.
3320 In this case, we do not deal with EXTRA separately.
3321 A single stack adjust will do. */
3322 if (! args_addr)
3323 {
3324 temp = push_block (size, extra, where_pad == downward);
3325 extra = 0;
3326 }
3327 else if (GET_CODE (args_so_far) == CONST_INT)
3328 temp = memory_address (BLKmode,
3329 plus_constant (args_addr,
3330 skip + INTVAL (args_so_far)));
3331 else
3332 temp = memory_address (BLKmode,
3333 plus_constant (gen_rtx_PLUS (Pmode,
3334 args_addr,
3335 args_so_far),
3336 skip));
3337
3338 if (!ACCUMULATE_OUTGOING_ARGS)
3339 {
3340 /* If the source is referenced relative to the stack pointer,
3341 copy it to another register to stabilize it. We do not need
3342 to do this if we know that we won't be changing sp. */
3343
3344 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3345 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3346 temp = copy_to_reg (temp);
3347 }
3348
3349 target = gen_rtx_MEM (BLKmode, temp);
3350
3351 /* We do *not* set_mem_attributes here, because incoming arguments
3352 may overlap with sibling call outgoing arguments and we cannot
3353 allow reordering of reads from function arguments with stores
3354 to outgoing arguments of sibling calls. We do, however, want
3355 to record the alignment of the stack slot. */
3356 /* ALIGN may well be better aligned than TYPE, e.g. due to
3357 PARM_BOUNDARY. Assume the caller isn't lying. */
3358 set_mem_align (target, align);
3359
3360 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3361 }
3362 }
3363 else if (partial > 0)
3364 {
3365 /* Scalar partly in registers. */
3366
3367 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3368 int i;
3369 int not_stack;
3370 /* # words of start of argument
3371 that we must make space for but need not store. */
3372 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3373 int args_offset = INTVAL (args_so_far);
3374 int skip;
3375
3376 /* Push padding now if padding above and stack grows down,
3377 or if padding below and stack grows up.
3378 But if space already allocated, this has already been done. */
3379 if (extra && args_addr == 0
3380 && where_pad != none && where_pad != stack_direction)
3381 anti_adjust_stack (GEN_INT (extra));
3382
3383 /* If we make space by pushing it, we might as well push
3384 the real data. Otherwise, we can leave OFFSET nonzero
3385 and leave the space uninitialized. */
3386 if (args_addr == 0)
3387 offset = 0;
3388
3389 /* Now NOT_STACK gets the number of words that we don't need to
3390 allocate on the stack. */
3391 not_stack = partial - offset;
3392
3393 /* If the partial register-part of the arg counts in its stack size,
3394 skip the part of stack space corresponding to the registers.
3395 Otherwise, start copying to the beginning of the stack space,
3396 by setting SKIP to 0. */
3397 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3398
3399 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3400 x = validize_mem (force_const_mem (mode, x));
3401
3402 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3403 SUBREGs of such registers are not allowed. */
3404 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3405 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3406 x = copy_to_reg (x);
3407
3408 /* Loop over all the words allocated on the stack for this arg. */
3409 /* We can do it by words, because any scalar bigger than a word
3410 has a size that is a multiple of a word. */
3411 #ifndef PUSH_ARGS_REVERSED
3412 for (i = not_stack; i < size; i++)
3413 #else
3414 for (i = size - 1; i >= not_stack; i--)
3415 #endif
3416 if (i >= not_stack + offset)
3417 emit_push_insn (operand_subword_force (x, i, mode),
3418 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3419 0, args_addr,
3420 GEN_INT (args_offset + ((i - not_stack + skip)
3421 * UNITS_PER_WORD)),
3422 reg_parm_stack_space, alignment_pad);
3423 }
3424 else
3425 {
3426 rtx addr;
3427 rtx dest;
3428
3429 /* Push padding now if padding above and stack grows down,
3430 or if padding below and stack grows up.
3431 But if space already allocated, this has already been done. */
3432 if (extra && args_addr == 0
3433 && where_pad != none && where_pad != stack_direction)
3434 anti_adjust_stack (GEN_INT (extra));
3435
3436 #ifdef PUSH_ROUNDING
3437 if (args_addr == 0 && PUSH_ARGS)
3438 emit_single_push_insn (mode, x, type);
3439 else
3440 #endif
3441 {
3442 if (GET_CODE (args_so_far) == CONST_INT)
3443 addr
3444 = memory_address (mode,
3445 plus_constant (args_addr,
3446 INTVAL (args_so_far)));
3447 else
3448 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3449 args_so_far));
3450 dest = gen_rtx_MEM (mode, addr);
3451
3452 /* We do *not* set_mem_attributes here, because incoming arguments
3453 may overlap with sibling call outgoing arguments and we cannot
3454 allow reordering of reads from function arguments with stores
3455 to outgoing arguments of sibling calls. We do, however, want
3456 to record the alignment of the stack slot. */
3457 /* ALIGN may well be better aligned than TYPE, e.g. due to
3458 PARM_BOUNDARY. Assume the caller isn't lying. */
3459 set_mem_align (dest, align);
3460
3461 emit_move_insn (dest, x);
3462 }
3463 }
3464
3465 /* If part should go in registers, copy that part
3466 into the appropriate registers. Do this now, at the end,
3467 since mem-to-mem copies above may do function calls. */
3468 if (partial > 0 && reg != 0)
3469 {
3470 /* Handle calls that pass values in multiple non-contiguous locations.
3471 The Irix 6 ABI has examples of this. */
3472 if (GET_CODE (reg) == PARALLEL)
3473 emit_group_load (reg, x, type, -1);
3474 else
3475 move_block_to_reg (REGNO (reg), x, partial, mode);
3476 }
3477
3478 if (extra && args_addr == 0 && where_pad == stack_direction)
3479 anti_adjust_stack (GEN_INT (extra));
3480
3481 if (alignment_pad && args_addr == 0)
3482 anti_adjust_stack (alignment_pad);
3483 }
3484 \f
3485 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3486 operations. */
3487
3488 static rtx
3489 get_subtarget (rtx x)
3490 {
3491 return (optimize
3492 || x == 0
3493 /* Only registers can be subtargets. */
3494 || !REG_P (x)
3495 /* Don't use hard regs to avoid extending their life. */
3496 || REGNO (x) < FIRST_PSEUDO_REGISTER
3497 ? 0 : x);
3498 }
3499
3500 /* Expand an assignment that stores the value of FROM into TO. */
3501
3502 void
3503 expand_assignment (tree to, tree from)
3504 {
3505 rtx to_rtx = 0;
3506 rtx result;
3507
3508 /* Don't crash if the lhs of the assignment was erroneous. */
3509
3510 if (TREE_CODE (to) == ERROR_MARK)
3511 {
3512 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3513 return;
3514 }
3515
3516 /* Assignment of a structure component needs special treatment
3517 if the structure component's rtx is not simply a MEM.
3518 Assignment of an array element at a constant index, and assignment of
3519 an array element in an unaligned packed structure field, has the same
3520 problem. */
3521
3522 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3523 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3524 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3525 {
3526 enum machine_mode mode1;
3527 HOST_WIDE_INT bitsize, bitpos;
3528 rtx orig_to_rtx;
3529 tree offset;
3530 int unsignedp;
3531 int volatilep = 0;
3532 tree tem;
3533
3534 push_temp_slots ();
3535 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3536 &unsignedp, &volatilep);
3537
3538 /* If we are going to use store_bit_field and extract_bit_field,
3539 make sure to_rtx will be safe for multiple use. */
3540
3541 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3542
3543 if (offset != 0)
3544 {
3545 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3546
3547 gcc_assert (MEM_P (to_rtx));
3548
3549 #ifdef POINTERS_EXTEND_UNSIGNED
3550 if (GET_MODE (offset_rtx) != Pmode)
3551 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3552 #else
3553 if (GET_MODE (offset_rtx) != ptr_mode)
3554 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3555 #endif
3556
3557 /* A constant address in TO_RTX can have VOIDmode; we must not try
3558 to call force_reg in that case, so avoid it. */
3559 if (MEM_P (to_rtx)
3560 && GET_MODE (to_rtx) == BLKmode
3561 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3562 && bitsize > 0
3563 && (bitpos % bitsize) == 0
3564 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3565 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3566 {
3567 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3568 bitpos = 0;
3569 }
3570
3571 to_rtx = offset_address (to_rtx, offset_rtx,
3572 highest_pow2_factor_for_target (to,
3573 offset));
3574 }
3575
3576 if (MEM_P (to_rtx))
3577 {
3578 /* If the field is at offset zero, we could have been given the
3579 DECL_RTX of the parent struct. Don't munge it. */
3580 to_rtx = shallow_copy_rtx (to_rtx);
3581
3582 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3583 }
3584
3585 /* Deal with volatile and readonly fields. The former is only done
3586 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3587 if (volatilep && MEM_P (to_rtx))
3588 {
3589 if (to_rtx == orig_to_rtx)
3590 to_rtx = copy_rtx (to_rtx);
3591 MEM_VOLATILE_P (to_rtx) = 1;
3592 }
3593
3594 if (MEM_P (to_rtx) && ! can_address_p (to))
3595 {
3596 if (to_rtx == orig_to_rtx)
3597 to_rtx = copy_rtx (to_rtx);
3598 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3599 }
3600
3601 /* Optimize bitfld op= val in certain cases. */
3602 while (mode1 == VOIDmode
3603 && bitsize > 0 && bitsize < BITS_PER_WORD
3604 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3605 && !TREE_SIDE_EFFECTS (to)
3606 && !TREE_THIS_VOLATILE (to))
3607 {
3608 tree src, op0, op1;
3609 rtx value, str_rtx = to_rtx;
3610 HOST_WIDE_INT bitpos1 = bitpos;
3611 optab binop;
3612
3613 src = from;
3614 STRIP_NOPS (src);
3615 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3616 || !BINARY_CLASS_P (src))
3617 break;
3618
3619 op0 = TREE_OPERAND (src, 0);
3620 op1 = TREE_OPERAND (src, 1);
3621 STRIP_NOPS (op0);
3622
3623 if (! operand_equal_p (to, op0, 0))
3624 break;
3625
3626 if (MEM_P (str_rtx))
3627 {
3628 enum machine_mode mode = GET_MODE (str_rtx);
3629 HOST_WIDE_INT offset1;
3630
3631 if (GET_MODE_BITSIZE (mode) == 0
3632 || GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
3633 mode = word_mode;
3634 mode = get_best_mode (bitsize, bitpos1, MEM_ALIGN (str_rtx),
3635 mode, 0);
3636 if (mode == VOIDmode)
3637 break;
3638
3639 offset1 = bitpos1;
3640 bitpos1 %= GET_MODE_BITSIZE (mode);
3641 offset1 = (offset1 - bitpos1) / BITS_PER_UNIT;
3642 str_rtx = adjust_address (str_rtx, mode, offset1);
3643 }
3644 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3645 break;
3646
3647 /* If the bit field covers the whole REG/MEM, store_field
3648 will likely generate better code. */
3649 if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3650 break;
3651
3652 /* We can't handle fields split across multiple entities. */
3653 if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3654 break;
3655
3656 if (BYTES_BIG_ENDIAN)
3657 bitpos1 = GET_MODE_BITSIZE (GET_MODE (str_rtx)) - bitpos1
3658 - bitsize;
3659
3660 /* Special case some bitfield op= exp. */
3661 switch (TREE_CODE (src))
3662 {
3663 case PLUS_EXPR:
3664 case MINUS_EXPR:
3665 /* For now, just optimize the case of the topmost bitfield
3666 where we don't need to do any masking and also
3667 1-bit bitfields where xor can be used.
3668 We might win by one instruction for the other bitfields
3669 too if insv/extv instructions aren't used, so that
3670 can be added later. */
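/* Illustrative example (hypothetical source, not part of GCC): with

       struct s { unsigned int lo : 12; unsigned int hi : 20; };
       void f (struct s *p) { p->hi += 1; }

   HI ends up in the most significant bits of a 32-bit word on a
   typical little-endian layout, so the increment needs no masking
   and is handled by the code below.  */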
3671 if (bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))
3672 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3673 break;
3674 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3675 value = convert_modes (GET_MODE (str_rtx),
3676 TYPE_MODE (TREE_TYPE (op1)), value,
3677 TYPE_UNSIGNED (TREE_TYPE (op1)));
3678
3679 /* We may be accessing data outside the field, which means
3680 we can alias adjacent data. */
3681 if (MEM_P (str_rtx))
3682 {
3683 str_rtx = shallow_copy_rtx (str_rtx);
3684 set_mem_alias_set (str_rtx, 0);
3685 set_mem_expr (str_rtx, 0);
3686 }
3687
3688 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3689 if (bitsize == 1
3690 && bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3691 {
3692 value = expand_and (GET_MODE (str_rtx), value, const1_rtx,
3693 NULL_RTX);
3694 binop = xor_optab;
3695 }
3696 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3697 build_int_cst (NULL_TREE, bitpos1),
3698 NULL_RTX, 1);
3699 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3700 value, str_rtx, 1, OPTAB_WIDEN);
3701 if (result != str_rtx)
3702 emit_move_insn (str_rtx, result);
3703 free_temp_slots ();
3704 pop_temp_slots ();
3705 return;
3706
3707 default:
3708 break;
3709 }
3710
3711 break;
3712 }
3713
3714 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3715 TREE_TYPE (tem), get_alias_set (to));
3716
3717 preserve_temp_slots (result);
3718 free_temp_slots ();
3719 pop_temp_slots ();
3720
3721 /* If the value is meaningful, convert RESULT to the proper mode.
3722 Otherwise, return nothing. */
3723 return;
3724 }
3725
3726 /* If the rhs is a function call and its value is not an aggregate,
3727 call the function before we start to compute the lhs.
3728 This is needed for correct code for cases such as
3729 val = setjmp (buf) on machines where reference to val
3730 requires loading up part of an address in a separate insn.
3731
3732 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3733 since it might be a promoted variable where the zero- or sign- extension
3734 needs to be done. Handling this in the normal way is safe because no
3735 computation is done before the call. */
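/* Illustrative example (hypothetical user code, not part of GCC):

       #include <setjmp.h>
       jmp_buf buf;
       int val;
       void g (void) { val = setjmp (buf); }

   on a machine where referring to VAL requires loading part of an
   address in a separate insn, which is why the call is expanded
   before the lhs here.  */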
3736 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3737 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3738 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3739 && REG_P (DECL_RTL (to))))
3740 {
3741 rtx value;
3742
3743 push_temp_slots ();
3744 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3745 if (to_rtx == 0)
3746 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3747
3748 /* Handle calls that return values in multiple non-contiguous locations.
3749 The Irix 6 ABI has examples of this. */
3750 if (GET_CODE (to_rtx) == PARALLEL)
3751 emit_group_load (to_rtx, value, TREE_TYPE (from),
3752 int_size_in_bytes (TREE_TYPE (from)));
3753 else if (GET_MODE (to_rtx) == BLKmode)
3754 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3755 else
3756 {
3757 if (POINTER_TYPE_P (TREE_TYPE (to)))
3758 value = convert_memory_address (GET_MODE (to_rtx), value);
3759 emit_move_insn (to_rtx, value);
3760 }
3761 preserve_temp_slots (to_rtx);
3762 free_temp_slots ();
3763 pop_temp_slots ();
3764 return;
3765 }
3766
3767 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3768 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3769
3770 if (to_rtx == 0)
3771 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3772
3773 /* Don't move directly into a return register. */
3774 if (TREE_CODE (to) == RESULT_DECL
3775 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3776 {
3777 rtx temp;
3778
3779 push_temp_slots ();
3780 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3781
3782 if (GET_CODE (to_rtx) == PARALLEL)
3783 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3784 int_size_in_bytes (TREE_TYPE (from)));
3785 else
3786 emit_move_insn (to_rtx, temp);
3787
3788 preserve_temp_slots (to_rtx);
3789 free_temp_slots ();
3790 pop_temp_slots ();
3791 return;
3792 }
3793
3794 /* In case we are returning the contents of an object which overlaps
3795 the place the value is being stored, use a safe function when copying
3796 a value through a pointer into a structure value return block. */
3797 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3798 && current_function_returns_struct
3799 && !current_function_returns_pcc_struct)
3800 {
3801 rtx from_rtx, size;
3802
3803 push_temp_slots ();
3804 size = expr_size (from);
3805 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3806
3807 emit_library_call (memmove_libfunc, LCT_NORMAL,
3808 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3809 XEXP (from_rtx, 0), Pmode,
3810 convert_to_mode (TYPE_MODE (sizetype),
3811 size, TYPE_UNSIGNED (sizetype)),
3812 TYPE_MODE (sizetype));
3813
3814 preserve_temp_slots (to_rtx);
3815 free_temp_slots ();
3816 pop_temp_slots ();
3817 return;
3818 }
3819
3820 /* Compute FROM and store the value in the rtx we got. */
3821
3822 push_temp_slots ();
3823 result = store_expr (from, to_rtx, 0);
3824 preserve_temp_slots (result);
3825 free_temp_slots ();
3826 pop_temp_slots ();
3827 return;
3828 }
3829
3830 /* Generate code for computing expression EXP,
3831 and storing the value into TARGET.
3832
3833 If the mode is BLKmode then we may return TARGET itself.
3834 It turns out that in BLKmode it doesn't cause a problem,
3835 because C has no operators that could combine two different
3836 assignments into the same BLKmode object with different values
3837 with no sequence point. Will other languages need this to
3838 be more thorough?
3839
3840 If CALL_PARAM_P is nonzero, this is a store into a call param on the
3841 stack, and block moves may need to be treated specially. */
3842
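/* Illustrative note (hypothetical example, not part of GCC): a scalar
   assignment such as

       int i;
       ...
       i = (i = 2) + 1;

   can combine two stores to the same object in one expression with no
   sequence point, but C has no way of doing that with two whole-struct
   (BLKmode) assignments, which is why returning TARGET is acceptable
   here.  */
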
3843 rtx
3844 store_expr (tree exp, rtx target, int call_param_p)
3845 {
3846 rtx temp;
3847 rtx alt_rtl = NULL_RTX;
3848 int dont_return_target = 0;
3849
3850 if (VOID_TYPE_P (TREE_TYPE (exp)))
3851 {
3852 /* C++ can generate ?: expressions with a throw expression in one
3853 branch and an rvalue in the other. Here, we resolve attempts to
3854 store the throw expression's nonexistent result. */
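/* Illustrative example (hypothetical C++ source, not part of GCC):

       struct X { int a[4]; };
       void f (bool flag, X &x, const X &y) { x = flag ? throw 0 : y; }

   When the two arms of such a conditional are expanded separately, the
   throw arm has void type and is resolved by this path.  */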
3855 gcc_assert (!call_param_p);
3856 expand_expr (exp, const0_rtx, VOIDmode, 0);
3857 return NULL_RTX;
3858 }
3859 if (TREE_CODE (exp) == COMPOUND_EXPR)
3860 {
3861 /* Perform first part of compound expression, then assign from second
3862 part. */
3863 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3864 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3865 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3866 }
3867 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3868 {
3869 /* For a conditional expression, get a safe form of the target. Then
3870 test the condition, doing the appropriate assignment on either
3871 side. This avoids the creation of unnecessary temporaries.
3872 For non-BLKmode, it is more efficient not to do this. */
3873
3874 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3875
3876 do_pending_stack_adjust ();
3877 NO_DEFER_POP;
3878 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3879 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3880 emit_jump_insn (gen_jump (lab2));
3881 emit_barrier ();
3882 emit_label (lab1);
3883 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
3884 emit_label (lab2);
3885 OK_DEFER_POP;
3886
3887 return NULL_RTX;
3888 }
3889 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3890 /* If this is a scalar in a register that is stored in a wider mode
3891 than the declared mode, compute the result into its declared mode
3892 and then convert to the wider mode. Our value is the computed
3893 expression. */
3894 {
3895 rtx inner_target = 0;
3896
3897 /* We can do the conversion inside EXP, which will often result
3898 in some optimizations. Do the conversion in two steps: first
3899 change the signedness, if needed, then do the extension. But don't
3900 do this if the type of EXP is a subtype of something else
3901 since then the conversion might involve more than just
3902 converting modes. */
3903 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
3904 && TREE_TYPE (TREE_TYPE (exp)) == 0
3905 && (!lang_hooks.reduce_bit_field_operations
3906 || (GET_MODE_PRECISION (GET_MODE (target))
3907 == TYPE_PRECISION (TREE_TYPE (exp)))))
3908 {
3909 if (TYPE_UNSIGNED (TREE_TYPE (exp))
3910 != SUBREG_PROMOTED_UNSIGNED_P (target))
3911 exp = convert
3912 (lang_hooks.types.signed_or_unsigned_type
3913 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
3914
3915 exp = convert (lang_hooks.types.type_for_mode
3916 (GET_MODE (SUBREG_REG (target)),
3917 SUBREG_PROMOTED_UNSIGNED_P (target)),
3918 exp);
3919
3920 inner_target = SUBREG_REG (target);
3921 }
3922
3923 temp = expand_expr (exp, inner_target, VOIDmode,
3924 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3925
3926 /* If TEMP is a VOIDmode constant, use convert_modes to make
3927 sure that we properly convert it. */
3928 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3929 {
3930 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3931 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3932 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3933 GET_MODE (target), temp,
3934 SUBREG_PROMOTED_UNSIGNED_P (target));
3935 }
3936
3937 convert_move (SUBREG_REG (target), temp,
3938 SUBREG_PROMOTED_UNSIGNED_P (target));
3939
3940 return NULL_RTX;
3941 }
3942 else
3943 {
3944 temp = expand_expr_real (exp, target, GET_MODE (target),
3945 (call_param_p
3946 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
3947 &alt_rtl);
3948 /* Return TARGET if it's a specified hardware register.
3949 If TARGET is a volatile mem ref, either return TARGET
3950 or return a reg copied *from* TARGET; ANSI requires this.
3951
3952 Otherwise, if TEMP is not TARGET, return TEMP
3953 if it is constant (for efficiency),
3954 or if we really want the correct value. */
3955 if (!(target && REG_P (target)
3956 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3957 && !(MEM_P (target) && MEM_VOLATILE_P (target))
3958 && ! rtx_equal_p (temp, target)
3959 && CONSTANT_P (temp))
3960 dont_return_target = 1;
3961 }
3962
3963 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3964 the same as that of TARGET, adjust the constant. This is needed, for
3965 example, in case it is a CONST_DOUBLE and we want only a word-sized
3966 value. */
3967 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3968 && TREE_CODE (exp) != ERROR_MARK
3969 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3970 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3971 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
3972
3973 /* If value was not generated in the target, store it there.
3974 Convert the value to TARGET's type first if necessary and emit the
3975 pending incrementations that have been queued when expanding EXP.
3976 Note that we cannot emit the whole queue blindly because this will
3977 effectively disable the POST_INC optimization later.
3978
3979 If TEMP and TARGET compare equal according to rtx_equal_p, but
3980 one or both of them are volatile memory refs, we have to distinguish
3981 two cases:
3982 - expand_expr has used TARGET. In this case, we must not generate
3983 another copy. This can be detected by TARGET being equal according
3984 to == .
3985 - expand_expr has not used TARGET - that means that the source just
3986 happens to have the same RTX form. Since temp will have been created
3987 by expand_expr, it will compare unequal according to == .
3988 We must generate a copy in this case, to reach the correct number
3989 of volatile memory references. */
3990
3991 if ((! rtx_equal_p (temp, target)
3992 || (temp != target && (side_effects_p (temp)
3993 || side_effects_p (target))))
3994 && TREE_CODE (exp) != ERROR_MARK
3995 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
3996 but TARGET is not valid memory reference, TEMP will differ
3997 from TARGET although it is really the same location. */
3998 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
3999 /* If there's nothing to copy, don't bother. Don't call expr_size
4000 unless necessary, because some front-ends' (e.g. C++) expr_size hook
4001 aborts on objects that are not supposed to be bit-copied or
4002 bit-initialized. */
4003 && expr_size (exp) != const0_rtx)
4004 {
4005 if (GET_MODE (temp) != GET_MODE (target)
4006 && GET_MODE (temp) != VOIDmode)
4007 {
4008 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4009 if (dont_return_target)
4010 {
4011 /* In this case, we will return TEMP,
4012 so make sure it has the proper mode.
4013 But don't forget to store the value into TARGET. */
4014 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4015 emit_move_insn (target, temp);
4016 }
4017 else
4018 convert_move (target, temp, unsignedp);
4019 }
4020
4021 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4022 {
4023 /* Handle copying a string constant into an array. The string
4024 constant may be shorter than the array. So copy just the string's
4025 actual length, and clear the rest. First get the size of the data
4026 type of the string, which is actually the size of the target. */
4027 rtx size = expr_size (exp);
4028
4029 if (GET_CODE (size) == CONST_INT
4030 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4031 emit_block_move (target, temp, size,
4032 (call_param_p
4033 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4034 else
4035 {
4036 /* Compute the size of the data to copy from the string. */
4037 tree copy_size
4038 = size_binop (MIN_EXPR,
4039 make_tree (sizetype, size),
4040 size_int (TREE_STRING_LENGTH (exp)));
4041 rtx copy_size_rtx
4042 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4043 (call_param_p
4044 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4045 rtx label = 0;
4046
4047 /* Copy that much. */
4048 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4049 TYPE_UNSIGNED (sizetype));
4050 emit_block_move (target, temp, copy_size_rtx,
4051 (call_param_p
4052 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4053
4054 /* Figure out how much is left in TARGET that we have to clear.
4055 Do all calculations in ptr_mode. */
4056 if (GET_CODE (copy_size_rtx) == CONST_INT)
4057 {
4058 size = plus_constant (size, -INTVAL (copy_size_rtx));
4059 target = adjust_address (target, BLKmode,
4060 INTVAL (copy_size_rtx));
4061 }
4062 else
4063 {
4064 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4065 copy_size_rtx, NULL_RTX, 0,
4066 OPTAB_LIB_WIDEN);
4067
4068 #ifdef POINTERS_EXTEND_UNSIGNED
4069 if (GET_MODE (copy_size_rtx) != Pmode)
4070 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4071 TYPE_UNSIGNED (sizetype));
4072 #endif
4073
4074 target = offset_address (target, copy_size_rtx,
4075 highest_pow2_factor (copy_size));
4076 label = gen_label_rtx ();
4077 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4078 GET_MODE (size), 0, label);
4079 }
4080
4081 if (size != const0_rtx)
4082 clear_storage (target, size);
4083
4084 if (label)
4085 emit_label (label);
4086 }
4087 }
4088 /* Handle calls that return values in multiple non-contiguous locations.
4089 The Irix 6 ABI has examples of this. */
4090 else if (GET_CODE (target) == PARALLEL)
4091 emit_group_load (target, temp, TREE_TYPE (exp),
4092 int_size_in_bytes (TREE_TYPE (exp)));
4093 else if (GET_MODE (temp) == BLKmode)
4094 emit_block_move (target, temp, expr_size (exp),
4095 (call_param_p
4096 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4097 else
4098 {
4099 temp = force_operand (temp, target);
4100 if (temp != target)
4101 emit_move_insn (target, temp);
4102 }
4103 }
4104
4105 return NULL_RTX;
4106 }
4107 \f
4108 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4109 values and place that count in *P_NZ_ELTS. Discover how many scalar
4110 fields are set to non-constant values and place that count in *P_NC_ELTS. */
4111
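/* Illustrative example (hypothetical initializer, not part of GCC): for

       struct s { int a, b, c; };
       extern int get_c (void);
       void f (void) { struct s v = { 1, 0, get_c () }; }

   two elements are nonzero (1 and the call) and one is non-constant
   (the call), so this would add 2 to *P_NZ_ELTS and 1 to *P_NC_ELTS.  */
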
4112 static void
4113 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4114 HOST_WIDE_INT *p_nc_elts)
4115 {
4116 HOST_WIDE_INT nz_elts, nc_elts;
4117 tree list;
4118
4119 nz_elts = 0;
4120 nc_elts = 0;
4121
4122 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4123 {
4124 tree value = TREE_VALUE (list);
4125 tree purpose = TREE_PURPOSE (list);
4126 HOST_WIDE_INT mult;
4127
4128 mult = 1;
4129 if (TREE_CODE (purpose) == RANGE_EXPR)
4130 {
4131 tree lo_index = TREE_OPERAND (purpose, 0);
4132 tree hi_index = TREE_OPERAND (purpose, 1);
4133
4134 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4135 mult = (tree_low_cst (hi_index, 1)
4136 - tree_low_cst (lo_index, 1) + 1);
4137 }
4138
4139 switch (TREE_CODE (value))
4140 {
4141 case CONSTRUCTOR:
4142 {
4143 HOST_WIDE_INT nz = 0, nc = 0;
4144 categorize_ctor_elements_1 (value, &nz, &nc);
4145 nz_elts += mult * nz;
4146 nc_elts += mult * nc;
4147 }
4148 break;
4149
4150 case INTEGER_CST:
4151 case REAL_CST:
4152 if (!initializer_zerop (value))
4153 nz_elts += mult;
4154 break;
4155
4156 case STRING_CST:
4157 nz_elts += mult * TREE_STRING_LENGTH (value);
4158 break;
4159
4160 case COMPLEX_CST:
4161 if (!initializer_zerop (TREE_REALPART (value)))
4162 nz_elts += mult;
4163 if (!initializer_zerop (TREE_IMAGPART (value)))
4164 nz_elts += mult;
4165 break;
4166
4167 case VECTOR_CST:
4168 {
4169 tree v;
4170 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4171 if (!initializer_zerop (TREE_VALUE (v)))
4172 nz_elts += mult;
4173 }
4174 break;
4175
4176 default:
4177 nz_elts += mult;
4178 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4179 nc_elts += mult;
4180 break;
4181 }
4182 }
4183
4184 *p_nz_elts += nz_elts;
4185 *p_nc_elts += nc_elts;
4186 }
4187
4188 void
4189 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4190 HOST_WIDE_INT *p_nc_elts)
4191 {
4192 *p_nz_elts = 0;
4193 *p_nc_elts = 0;
4194 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4195 }
4196
4197 /* Count the number of scalars in TYPE. Return -1 if the count
4198 overflows or TYPE is variable-sized. */
4199
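/* Illustrative example (hypothetical type, not part of GCC): for

       struct s { int i; double d[4]; _Complex double z; };

   the result is 1 (for I) + 4 (one per array element) + 2 (for the
   complex field) = 7 scalars.  */
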
4200 HOST_WIDE_INT
4201 count_type_elements (tree type)
4202 {
4203 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4204 switch (TREE_CODE (type))
4205 {
4206 case ARRAY_TYPE:
4207 {
4208 tree telts = array_type_nelts (type);
4209 if (telts && host_integerp (telts, 1))
4210 {
4211 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4212 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4213 if (n == 0)
4214 return 0;
4215 else if (max / n > m)
4216 return n * m;
4217 }
4218 return -1;
4219 }
4220
4221 case RECORD_TYPE:
4222 {
4223 HOST_WIDE_INT n = 0, t;
4224 tree f;
4225
4226 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4227 if (TREE_CODE (f) == FIELD_DECL)
4228 {
4229 t = count_type_elements (TREE_TYPE (f));
4230 if (t < 0)
4231 return -1;
4232 n += t;
4233 }
4234
4235 return n;
4236 }
4237
4238 case UNION_TYPE:
4239 case QUAL_UNION_TYPE:
4240 {
4241 /* Ho hum. How in the world do we guess here? Clearly it isn't
4242 right to count the fields. Guess based on the number of words. */
4243 HOST_WIDE_INT n = int_size_in_bytes (type);
4244 if (n < 0)
4245 return -1;
4246 return n / UNITS_PER_WORD;
4247 }
4248
4249 case COMPLEX_TYPE:
4250 return 2;
4251
4252 case VECTOR_TYPE:
4253 return TYPE_VECTOR_SUBPARTS (type);
4254
4255 case INTEGER_TYPE:
4256 case REAL_TYPE:
4257 case ENUMERAL_TYPE:
4258 case BOOLEAN_TYPE:
4259 case CHAR_TYPE:
4260 case POINTER_TYPE:
4261 case OFFSET_TYPE:
4262 case REFERENCE_TYPE:
4263 return 1;
4264
4265 case VOID_TYPE:
4266 case METHOD_TYPE:
4267 case FILE_TYPE:
4268 case SET_TYPE:
4269 case FUNCTION_TYPE:
4270 case LANG_TYPE:
4271 default:
4272 gcc_unreachable ();
4273 }
4274 }
4275
4276 /* Return 1 if EXP contains mostly (3/4) zeros. */
4277
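/* Illustrative examples (hypothetical initializers, not part of GCC):
   "int a[8] = { 0, 0, 3 };" counts as mostly zeros (1 nonzero element
   against a threshold of 8/4 = 2), while "int b[4] = { 1, 2, 0, 0 };"
   does not (2 nonzero elements is not below 4/4 = 1).  */
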
4278 static int
4279 mostly_zeros_p (tree exp)
4280 {
4281 if (TREE_CODE (exp) == CONSTRUCTOR)
4282
4283 {
4284 HOST_WIDE_INT nz_elts, nc_elts, elts;
4285
4286 /* If there are no ranges of true bits, it is all zero. */
4287 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4288 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4289
4290 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4291 elts = count_type_elements (TREE_TYPE (exp));
4292
4293 return nz_elts < elts / 4;
4294 }
4295
4296 return initializer_zerop (exp);
4297 }
4298 \f
4299 /* Helper function for store_constructor.
4300 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4301 TYPE is the type of the CONSTRUCTOR, not the element type.
4302 CLEARED is as for store_constructor.
4303 ALIAS_SET is the alias set to use for any stores.
4304
4305 This provides a recursive shortcut back to store_constructor when it isn't
4306 necessary to go through store_field. This is so that we can pass through
4307 the cleared field to let store_constructor know that we may not have to
4308 clear a substructure if the outer structure has already been cleared. */
4309
4310 static void
4311 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4312 HOST_WIDE_INT bitpos, enum machine_mode mode,
4313 tree exp, tree type, int cleared, int alias_set)
4314 {
4315 if (TREE_CODE (exp) == CONSTRUCTOR
4316 /* We can only call store_constructor recursively if the size and
4317 bit position are on a byte boundary. */
4318 && bitpos % BITS_PER_UNIT == 0
4319 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4320 /* If we have a nonzero bitpos for a register target, then we just
4321 let store_field do the bitfield handling. This is unlikely to
4322 generate unnecessary clear instructions anyways. */
4323 && (bitpos == 0 || MEM_P (target)))
4324 {
4325 if (MEM_P (target))
4326 target
4327 = adjust_address (target,
4328 GET_MODE (target) == BLKmode
4329 || 0 != (bitpos
4330 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4331 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4332
4333
4334 /* Update the alias set, if required. */
4335 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4336 && MEM_ALIAS_SET (target) != 0)
4337 {
4338 target = copy_rtx (target);
4339 set_mem_alias_set (target, alias_set);
4340 }
4341
4342 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4343 }
4344 else
4345 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4346 }
4347
4348 /* Store the value of constructor EXP into the rtx TARGET.
4349 TARGET is either a REG or a MEM; we know it cannot conflict, since
4350 safe_from_p has been called.
4351 CLEARED is true if TARGET is known to have been zero'd.
4352 SIZE is the number of bytes of TARGET we are allowed to modify: this
4353 may not be the same as the size of EXP if we are assigning to a field
4354 which has been packed to exclude padding bits. */
4355
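/* Illustrative example (hypothetical source, not part of GCC): for

       struct point { int x, y; };
       void f (struct point *p) { *p = (struct point) { 3, 4 }; }

   EXP would be the CONSTRUCTOR { 3, 4 }, TARGET the MEM for *p,
   CLEARED normally zero, and SIZE 8 with 4-byte ints.  */
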
4356 static void
4357 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4358 {
4359 tree type = TREE_TYPE (exp);
4360 #ifdef WORD_REGISTER_OPERATIONS
4361 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4362 #endif
4363
4364 switch (TREE_CODE (type))
4365 {
4366 case RECORD_TYPE:
4367 case UNION_TYPE:
4368 case QUAL_UNION_TYPE:
4369 {
4370 tree elt;
4371
4372 /* If size is zero or the target is already cleared, do nothing. */
4373 if (size == 0 || cleared)
4374 cleared = 1;
4375 /* We either clear the aggregate or indicate the value is dead. */
4376 else if ((TREE_CODE (type) == UNION_TYPE
4377 || TREE_CODE (type) == QUAL_UNION_TYPE)
4378 && ! CONSTRUCTOR_ELTS (exp))
4379 /* If the constructor is empty, clear the union. */
4380 {
4381 clear_storage (target, expr_size (exp));
4382 cleared = 1;
4383 }
4384
4385 /* If we are building a static constructor into a register,
4386 set the initial value as zero so we can fold the value into
4387 a constant. But if more than one register is involved,
4388 this probably loses. */
4389 else if (REG_P (target) && TREE_STATIC (exp)
4390 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4391 {
4392 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4393 cleared = 1;
4394 }
4395
4396 /* If the constructor has fewer fields than the structure or
4397 if we are initializing the structure to mostly zeros, clear
4398 the whole structure first. Don't do this if TARGET is a
4399 register whose mode size isn't equal to SIZE since
4400 clear_storage can't handle this case. */
4401 else if (size > 0
4402 && ((list_length (CONSTRUCTOR_ELTS (exp))
4403 != fields_length (type))
4404 || mostly_zeros_p (exp))
4405 && (!REG_P (target)
4406 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4407 == size)))
4408 {
4409 clear_storage (target, GEN_INT (size));
4410 cleared = 1;
4411 }
4412
4413 if (! cleared)
4414 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4415
4416 /* Store each element of the constructor into the
4417 corresponding field of TARGET. */
4418
4419 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4420 {
4421 tree field = TREE_PURPOSE (elt);
4422 tree value = TREE_VALUE (elt);
4423 enum machine_mode mode;
4424 HOST_WIDE_INT bitsize;
4425 HOST_WIDE_INT bitpos = 0;
4426 tree offset;
4427 rtx to_rtx = target;
4428
4429 /* Just ignore missing fields. We cleared the whole
4430 structure, above, if any fields are missing. */
4431 if (field == 0)
4432 continue;
4433
4434 if (cleared && initializer_zerop (value))
4435 continue;
4436
4437 if (host_integerp (DECL_SIZE (field), 1))
4438 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4439 else
4440 bitsize = -1;
4441
4442 mode = DECL_MODE (field);
4443 if (DECL_BIT_FIELD (field))
4444 mode = VOIDmode;
4445
4446 offset = DECL_FIELD_OFFSET (field);
4447 if (host_integerp (offset, 0)
4448 && host_integerp (bit_position (field), 0))
4449 {
4450 bitpos = int_bit_position (field);
4451 offset = 0;
4452 }
4453 else
4454 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4455
4456 if (offset)
4457 {
4458 rtx offset_rtx;
4459
4460 offset
4461 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4462 make_tree (TREE_TYPE (exp),
4463 target));
4464
4465 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4466 gcc_assert (MEM_P (to_rtx));
4467
4468 #ifdef POINTERS_EXTEND_UNSIGNED
4469 if (GET_MODE (offset_rtx) != Pmode)
4470 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4471 #else
4472 if (GET_MODE (offset_rtx) != ptr_mode)
4473 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4474 #endif
4475
4476 to_rtx = offset_address (to_rtx, offset_rtx,
4477 highest_pow2_factor (offset));
4478 }
4479
4480 #ifdef WORD_REGISTER_OPERATIONS
4481 /* If this initializes a field that is smaller than a
4482 word, at the start of a word, try to widen it to a full
4483 word. This special case allows us to output C++ member
4484 function initializations in a form that the optimizers
4485 can understand. */
4486 if (REG_P (target)
4487 && bitsize < BITS_PER_WORD
4488 && bitpos % BITS_PER_WORD == 0
4489 && GET_MODE_CLASS (mode) == MODE_INT
4490 && TREE_CODE (value) == INTEGER_CST
4491 && exp_size >= 0
4492 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4493 {
4494 tree type = TREE_TYPE (value);
4495
4496 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4497 {
4498 type = lang_hooks.types.type_for_size
4499 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4500 value = convert (type, value);
4501 }
4502
4503 if (BYTES_BIG_ENDIAN)
4504 value
4505 = fold (build2 (LSHIFT_EXPR, type, value,
4506 build_int_cst (NULL_TREE,
4507 BITS_PER_WORD - bitsize)));
4508 bitsize = BITS_PER_WORD;
4509 mode = word_mode;
4510 }
4511 #endif
4512
4513 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4514 && DECL_NONADDRESSABLE_P (field))
4515 {
4516 to_rtx = copy_rtx (to_rtx);
4517 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4518 }
4519
4520 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4521 value, type, cleared,
4522 get_alias_set (TREE_TYPE (field)));
4523 }
4524 break;
4525 }
4526 case ARRAY_TYPE:
4527 {
4528 tree elt;
4529 int i;
4530 int need_to_clear;
4531 tree domain;
4532 tree elttype = TREE_TYPE (type);
4533 int const_bounds_p;
4534 HOST_WIDE_INT minelt = 0;
4535 HOST_WIDE_INT maxelt = 0;
4536
4537 domain = TYPE_DOMAIN (type);
4538 const_bounds_p = (TYPE_MIN_VALUE (domain)
4539 && TYPE_MAX_VALUE (domain)
4540 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4541 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4542
4543 /* If we have constant bounds for the range of the type, get them. */
4544 if (const_bounds_p)
4545 {
4546 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4547 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4548 }
4549
4550 /* If the constructor has fewer elements than the array, clear
4551 the whole array first. Similarly if this is a static
4552 constructor of a non-BLKmode object. */
4553 if (cleared)
4554 need_to_clear = 0;
4555 else if (REG_P (target) && TREE_STATIC (exp))
4556 need_to_clear = 1;
4557 else
4558 {
4559 HOST_WIDE_INT count = 0, zero_count = 0;
4560 need_to_clear = ! const_bounds_p;
4561
4562 /* This loop is a more accurate version of the loop in
4563 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4564 is also needed to check for missing elements. */
4565 for (elt = CONSTRUCTOR_ELTS (exp);
4566 elt != NULL_TREE && ! need_to_clear;
4567 elt = TREE_CHAIN (elt))
4568 {
4569 tree index = TREE_PURPOSE (elt);
4570 HOST_WIDE_INT this_node_count;
4571
4572 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4573 {
4574 tree lo_index = TREE_OPERAND (index, 0);
4575 tree hi_index = TREE_OPERAND (index, 1);
4576
4577 if (! host_integerp (lo_index, 1)
4578 || ! host_integerp (hi_index, 1))
4579 {
4580 need_to_clear = 1;
4581 break;
4582 }
4583
4584 this_node_count = (tree_low_cst (hi_index, 1)
4585 - tree_low_cst (lo_index, 1) + 1);
4586 }
4587 else
4588 this_node_count = 1;
4589
4590 count += this_node_count;
4591 if (mostly_zeros_p (TREE_VALUE (elt)))
4592 zero_count += this_node_count;
4593 }
4594
4595 /* Clear the entire array first if there are any missing
4596 elements, or if the incidence of zero elements is >=
4597 75%. */
4598 if (! need_to_clear
4599 && (count < maxelt - minelt + 1
4600 || 4 * zero_count >= 3 * count))
4601 need_to_clear = 1;
4602 }
4603
4604 if (need_to_clear && size > 0)
4605 {
4606 if (REG_P (target))
4607 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4608 else
4609 clear_storage (target, GEN_INT (size));
4610 cleared = 1;
4611 }
4612
4613 if (!cleared && REG_P (target))
4614 /* Inform later passes that the old value is dead. */
4615 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4616
4617 /* Store each element of the constructor into the
4618 corresponding element of TARGET, determined by counting the
4619 elements. */
4620 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4621 elt;
4622 elt = TREE_CHAIN (elt), i++)
4623 {
4624 enum machine_mode mode;
4625 HOST_WIDE_INT bitsize;
4626 HOST_WIDE_INT bitpos;
4627 int unsignedp;
4628 tree value = TREE_VALUE (elt);
4629 tree index = TREE_PURPOSE (elt);
4630 rtx xtarget = target;
4631
4632 if (cleared && initializer_zerop (value))
4633 continue;
4634
4635 unsignedp = TYPE_UNSIGNED (elttype);
4636 mode = TYPE_MODE (elttype);
4637 if (mode == BLKmode)
4638 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4639 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4640 : -1);
4641 else
4642 bitsize = GET_MODE_BITSIZE (mode);
4643
4644 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4645 {
4646 tree lo_index = TREE_OPERAND (index, 0);
4647 tree hi_index = TREE_OPERAND (index, 1);
4648 rtx index_r, pos_rtx;
4649 HOST_WIDE_INT lo, hi, count;
4650 tree position;
4651
4652 /* If the range is constant and "small", unroll the loop. */
4653 if (const_bounds_p
4654 && host_integerp (lo_index, 0)
4655 && host_integerp (hi_index, 0)
4656 && (lo = tree_low_cst (lo_index, 0),
4657 hi = tree_low_cst (hi_index, 0),
4658 count = hi - lo + 1,
4659 (!MEM_P (target)
4660 || count <= 2
4661 || (host_integerp (TYPE_SIZE (elttype), 1)
4662 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4663 <= 40 * 8)))))
4664 {
4665 lo -= minelt; hi -= minelt;
4666 for (; lo <= hi; lo++)
4667 {
4668 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4669
4670 if (MEM_P (target)
4671 && !MEM_KEEP_ALIAS_SET_P (target)
4672 && TREE_CODE (type) == ARRAY_TYPE
4673 && TYPE_NONALIASED_COMPONENT (type))
4674 {
4675 target = copy_rtx (target);
4676 MEM_KEEP_ALIAS_SET_P (target) = 1;
4677 }
4678
4679 store_constructor_field
4680 (target, bitsize, bitpos, mode, value, type, cleared,
4681 get_alias_set (elttype));
4682 }
4683 }
4684 else
4685 {
4686 rtx loop_start = gen_label_rtx ();
4687 rtx loop_end = gen_label_rtx ();
4688 tree exit_cond;
4689
4690 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4691 unsignedp = TYPE_UNSIGNED (domain);
4692
4693 index = build_decl (VAR_DECL, NULL_TREE, domain);
4694
4695 index_r
4696 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4697 &unsignedp, 0));
4698 SET_DECL_RTL (index, index_r);
4699 store_expr (lo_index, index_r, 0);
4700
4701 /* Build the head of the loop. */
4702 do_pending_stack_adjust ();
4703 emit_label (loop_start);
4704
4705 /* Assign value to element index. */
4706 position
4707 = convert (ssizetype,
4708 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4709 index, TYPE_MIN_VALUE (domain))));
4710 position = size_binop (MULT_EXPR, position,
4711 convert (ssizetype,
4712 TYPE_SIZE_UNIT (elttype)));
4713
4714 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4715 xtarget = offset_address (target, pos_rtx,
4716 highest_pow2_factor (position));
4717 xtarget = adjust_address (xtarget, mode, 0);
4718 if (TREE_CODE (value) == CONSTRUCTOR)
4719 store_constructor (value, xtarget, cleared,
4720 bitsize / BITS_PER_UNIT);
4721 else
4722 store_expr (value, xtarget, 0);
4723
4724 /* Generate a conditional jump to exit the loop. */
4725 exit_cond = build2 (LT_EXPR, integer_type_node,
4726 index, hi_index);
4727 jumpif (exit_cond, loop_end);
4728
4729 /* Update the loop counter, and jump to the head of
4730 the loop. */
4731 expand_assignment (index,
4732 build2 (PLUS_EXPR, TREE_TYPE (index),
4733 index, integer_one_node));
4734
4735 emit_jump (loop_start);
4736
4737 /* Build the end of the loop. */
4738 emit_label (loop_end);
4739 }
4740 }
4741 else if ((index != 0 && ! host_integerp (index, 0))
4742 || ! host_integerp (TYPE_SIZE (elttype), 1))
4743 {
4744 tree position;
4745
4746 if (index == 0)
4747 index = ssize_int (1);
4748
4749 if (minelt)
4750 index = fold_convert (ssizetype,
4751 fold (build2 (MINUS_EXPR,
4752 TREE_TYPE (index),
4753 index,
4754 TYPE_MIN_VALUE (domain))));
4755
4756 position = size_binop (MULT_EXPR, index,
4757 convert (ssizetype,
4758 TYPE_SIZE_UNIT (elttype)));
4759 xtarget = offset_address (target,
4760 expand_expr (position, 0, VOIDmode, 0),
4761 highest_pow2_factor (position));
4762 xtarget = adjust_address (xtarget, mode, 0);
4763 store_expr (value, xtarget, 0);
4764 }
4765 else
4766 {
4767 if (index != 0)
4768 bitpos = ((tree_low_cst (index, 0) - minelt)
4769 * tree_low_cst (TYPE_SIZE (elttype), 1));
4770 else
4771 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4772
4773 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4774 && TREE_CODE (type) == ARRAY_TYPE
4775 && TYPE_NONALIASED_COMPONENT (type))
4776 {
4777 target = copy_rtx (target);
4778 MEM_KEEP_ALIAS_SET_P (target) = 1;
4779 }
4780 store_constructor_field (target, bitsize, bitpos, mode, value,
4781 type, cleared, get_alias_set (elttype));
4782 }
4783 }
4784 break;
4785 }
4786
4787 case VECTOR_TYPE:
4788 {
4789 tree elt;
4790 int i;
4791 int need_to_clear;
4792 int icode = 0;
4793 tree elttype = TREE_TYPE (type);
4794 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4795 enum machine_mode eltmode = TYPE_MODE (elttype);
4796 HOST_WIDE_INT bitsize;
4797 HOST_WIDE_INT bitpos;
4798 rtx *vector = NULL;
4799 unsigned n_elts;
4800
4801 gcc_assert (eltmode != BLKmode);
4802
4803 n_elts = TYPE_VECTOR_SUBPARTS (type);
4804 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4805 {
4806 enum machine_mode mode = GET_MODE (target);
4807
4808 icode = (int) vec_init_optab->handlers[mode].insn_code;
4809 if (icode != CODE_FOR_nothing)
4810 {
4811 unsigned int i;
4812
4813 vector = alloca (n_elts);
4814 for (i = 0; i < n_elts; i++)
4815 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4816 }
4817 }
4818
4819 /* If the constructor has fewer elements than the vector,
4820 clear the whole vector first. Similarly if this is a static
4821 constructor of a non-BLKmode object. */
4822 if (cleared)
4823 need_to_clear = 0;
4824 else if (REG_P (target) && TREE_STATIC (exp))
4825 need_to_clear = 1;
4826 else
4827 {
4828 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4829
4830 for (elt = CONSTRUCTOR_ELTS (exp);
4831 elt != NULL_TREE;
4832 elt = TREE_CHAIN (elt))
4833 {
4834 int n_elts_here = tree_low_cst
4835 (int_const_binop (TRUNC_DIV_EXPR,
4836 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4837 TYPE_SIZE (elttype), 0), 1);
4838
4839 count += n_elts_here;
4840 if (mostly_zeros_p (TREE_VALUE (elt)))
4841 zero_count += n_elts_here;
4842 }
4843
4844 /* Clear the entire vector first if there are any missing elements,
4845 or if the incidence of zero elements is >= 75%. */
4846 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4847 }
4848
4849 if (need_to_clear && size > 0 && !vector)
4850 {
4851 if (REG_P (target))
4852 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4853 else
4854 clear_storage (target, GEN_INT (size));
4855 cleared = 1;
4856 }
4857
4858 if (!cleared && REG_P (target))
4859 /* Inform later passes that the old value is dead. */
4860 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4861
4862 /* Store each element of the constructor into the corresponding
4863 element of TARGET, determined by counting the elements. */
4864 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4865 elt;
4866 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4867 {
4868 tree value = TREE_VALUE (elt);
4869 tree index = TREE_PURPOSE (elt);
4870 HOST_WIDE_INT eltpos;
4871
4872 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
4873 if (cleared && initializer_zerop (value))
4874 continue;
4875
4876 if (index != 0)
4877 eltpos = tree_low_cst (index, 1);
4878 else
4879 eltpos = i;
4880
4881 if (vector)
4882 {
4883 /* Vector CONSTRUCTORs should only be built from smaller
4884 vectors in the case of BLKmode vectors. */
4885 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
4886 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4887 }
4888 else
4889 {
4890 enum machine_mode value_mode =
4891 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4892 ? TYPE_MODE (TREE_TYPE (value))
4893 : eltmode;
4894 bitpos = eltpos * elt_size;
4895 store_constructor_field (target, bitsize, bitpos,
4896 value_mode, value, type,
4897 cleared, get_alias_set (elttype));
4898 }
4899 }
4900
4901 if (vector)
4902 emit_insn (GEN_FCN (icode)
4903 (target,
4904 gen_rtx_PARALLEL (GET_MODE (target),
4905 gen_rtvec_v (n_elts, vector))));
4906 break;
4907 }
4908
4909 /* Set constructor assignments. */
4910 case SET_TYPE:
4911 {
4912 tree elt = CONSTRUCTOR_ELTS (exp);
4913 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4914 tree domain = TYPE_DOMAIN (type);
4915 tree domain_min, domain_max, bitlength;
4916
4917 /* The default implementation strategy is to extract the
4918 constant parts of the constructor, use that to initialize
4919 the target, and then "or" in whatever non-constant ranges
4920 we need in addition.
4921
4922 If a large set is all zero or all ones, it is probably
4923 better to set it using memset. Also, if a large set has
4924 just a single range, it may also be better to first clear the
4925 whole set (using memset) and then set just the bits we want. */
4927
4928 /* Check for all zeros. */
4929 if (elt == NULL_TREE && size > 0)
4930 {
4931 if (!cleared)
4932 clear_storage (target, GEN_INT (size));
4933 return;
4934 }
4935
4936 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4937 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4938 bitlength = size_binop (PLUS_EXPR,
4939 size_diffop (domain_max, domain_min),
4940 ssize_int (1));
4941
4942 nbits = tree_low_cst (bitlength, 1);
4943
4944 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets
4945 that are "complicated" (more than one range), initialize
4946 (the constant parts) by copying from a constant. */
4947 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4948 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4949 {
4950 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4951 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4952 char *bit_buffer = alloca (nbits);
4953 HOST_WIDE_INT word = 0;
4954 unsigned int bit_pos = 0;
4955 unsigned int ibit = 0;
4956 unsigned int offset = 0; /* In bytes from beginning of set. */
4957
4958 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4959 for (;;)
4960 {
4961 if (bit_buffer[ibit])
4962 {
4963 if (BYTES_BIG_ENDIAN)
4964 word |= (1 << (set_word_size - 1 - bit_pos));
4965 else
4966 word |= 1 << bit_pos;
4967 }
4968
4969 bit_pos++; ibit++;
4970 if (bit_pos >= set_word_size || ibit == nbits)
4971 {
4972 if (word != 0 || ! cleared)
4973 {
4974 rtx datum = gen_int_mode (word, mode);
4975 rtx to_rtx;
4976
4977 /* The assumption here is that it is safe to
4978 use XEXP if the set is multi-word, but not
4979 if it's single-word. */
4980 if (MEM_P (target))
4981 to_rtx = adjust_address (target, mode, offset);
4982 else
4983 {
4984 gcc_assert (!offset);
4985 to_rtx = target;
4986 }
4987 emit_move_insn (to_rtx, datum);
4988 }
4989
4990 if (ibit == nbits)
4991 break;
4992 word = 0;
4993 bit_pos = 0;
4994 offset += set_word_size / BITS_PER_UNIT;
4995 }
4996 }
4997 }
4998 else if (!cleared)
4999 /* Don't bother clearing storage if the set is all ones. */
5000 if (TREE_CHAIN (elt) != NULL_TREE
5001 || (TREE_PURPOSE (elt) == NULL_TREE
5002 ? nbits != 1
5003 : ( ! host_integerp (TREE_VALUE (elt), 0)
5004 || ! host_integerp (TREE_PURPOSE (elt), 0)
5005 || (tree_low_cst (TREE_VALUE (elt), 0)
5006 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5007 != (HOST_WIDE_INT) nbits))))
5008 clear_storage (target, expr_size (exp));
5009
5010 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5011 {
5012 /* Start of range of element or NULL. */
5013 tree startbit = TREE_PURPOSE (elt);
5014 /* End of range of element, or element value. */
5015 tree endbit = TREE_VALUE (elt);
5016 HOST_WIDE_INT startb, endb;
5017 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5018
5019 bitlength_rtx = expand_expr (bitlength,
5020 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5021
5022 /* Handle non-range tuple element like [ expr ]. */
5023 if (startbit == NULL_TREE)
5024 {
5025 startbit = save_expr (endbit);
5026 endbit = startbit;
5027 }
5028
5029 startbit = convert (sizetype, startbit);
5030 endbit = convert (sizetype, endbit);
5031 if (! integer_zerop (domain_min))
5032 {
5033 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5034 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5035 }
5036 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5037 EXPAND_CONST_ADDRESS);
5038 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5039 EXPAND_CONST_ADDRESS);
5040
5041 if (REG_P (target))
5042 {
5043 targetx
5044 = assign_temp
5045 ((build_qualified_type (lang_hooks.types.type_for_mode
5046 (GET_MODE (target), 0),
5047 TYPE_QUAL_CONST)),
5048 0, 1, 1);
5049 emit_move_insn (targetx, target);
5050 }
5051
5052 else
5053 {
5054 gcc_assert (MEM_P (target));
5055 targetx = target;
5056 }
5057
5058 /* Optimization: If startbit and endbit are constants divisible
5059 by BITS_PER_UNIT, call memset instead. */
5060 if (TREE_CODE (startbit) == INTEGER_CST
5061 && TREE_CODE (endbit) == INTEGER_CST
5062 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5063 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5064 {
5065 emit_library_call (memset_libfunc, LCT_NORMAL,
5066 VOIDmode, 3,
5067 plus_constant (XEXP (targetx, 0),
5068 startb / BITS_PER_UNIT),
5069 Pmode,
5070 constm1_rtx, TYPE_MODE (integer_type_node),
5071 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5072 TYPE_MODE (sizetype));
5073 }
5074 else
5075 emit_library_call (setbits_libfunc, LCT_NORMAL,
5076 VOIDmode, 4, XEXP (targetx, 0),
5077 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5078 startbit_rtx, TYPE_MODE (sizetype),
5079 endbit_rtx, TYPE_MODE (sizetype));
5080
5081 if (REG_P (target))
5082 emit_move_insn (target, targetx);
5083 }
5084 break;
5085 }
5086 default:
5087 gcc_unreachable ();
5088 }
5089 }
5090
5091 /* Store the value of EXP (an expression tree)
5092 into a subfield of TARGET which has mode MODE and occupies
5093 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5094 If MODE is VOIDmode, it means that we are storing into a bit-field.
5095
5096 Always return const0_rtx unless we have something particular to
5097 return.
5098
5099 TYPE is the type of the underlying object,
5100
5101 ALIAS_SET is the alias set for the destination. This value will
5102 (in general) be different from that for TARGET, since TARGET is a
5103 reference to the containing structure. */
5104
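/* Illustrative example (hypothetical source, not part of GCC): a
   bit-field store such as

       struct s { unsigned int f : 3; };
       void set (struct s *p) { p->f = 5; }

   would arrive here with BITSIZE == 3, BITPOS == 0 (on a typical
   little-endian layout) and MODE == VOIDmode, and be written out with
   store_bit_field.  */
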
5105 static rtx
5106 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5107 enum machine_mode mode, tree exp, tree type, int alias_set)
5108 {
5109 HOST_WIDE_INT width_mask = 0;
5110
5111 if (TREE_CODE (exp) == ERROR_MARK)
5112 return const0_rtx;
5113
5114 /* If we have nothing to store, do nothing unless the expression has
5115 side-effects. */
5116 if (bitsize == 0)
5117 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5118 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5119 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5120
5121 /* If we are storing into an unaligned field of an aligned union that is
5122 in a register, we may have the mode of TARGET being an integer mode but
5123 MODE == BLKmode. In that case, get an aligned object whose size and
5124 alignment are the same as TARGET and store TARGET into it (we can avoid
5125 the store if the field being stored is the entire width of TARGET). Then
5126 call ourselves recursively to store the field into a BLKmode version of
5127 that object. Finally, load from the object into TARGET. This is not
5128 very efficient in general, but should only be slightly more expensive
5129 than the otherwise-required unaligned accesses. Perhaps this can be
5130 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5131 twice, once with emit_move_insn and once via store_field. */
5132
5133 if (mode == BLKmode
5134 && (REG_P (target) || GET_CODE (target) == SUBREG))
5135 {
5136 rtx object = assign_temp (type, 0, 1, 1);
5137 rtx blk_object = adjust_address (object, BLKmode, 0);
5138
5139 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5140 emit_move_insn (object, target);
5141
5142 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5143
5144 emit_move_insn (target, object);
5145
5146 /* We want to return the BLKmode version of the data. */
5147 return blk_object;
5148 }
5149
5150 if (GET_CODE (target) == CONCAT)
5151 {
5152 /* We're storing into a struct containing a single __complex. */
5153
5154 gcc_assert (!bitpos);
5155 return store_expr (exp, target, 0);
5156 }
5157
5158 /* If the structure is in a register or if the component
5159 is a bit field, we cannot use addressing to access it.
5160 Use bit-field techniques or SUBREG to store in it. */
5161
5162 if (mode == VOIDmode
5163 || (mode != BLKmode && ! direct_store[(int) mode]
5164 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5165 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5166 || REG_P (target)
5167 || GET_CODE (target) == SUBREG
5168 /* If the field isn't aligned enough to store as an ordinary memref,
5169 store it as a bit field. */
5170 || (mode != BLKmode
5171 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5172 || bitpos % GET_MODE_ALIGNMENT (mode))
5173 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5174 || (bitpos % BITS_PER_UNIT != 0)))
5175 /* If the RHS and field are a constant size and the size of the
5176 RHS isn't the same size as the bitfield, we must use bitfield
5177 operations. */
5178 || (bitsize >= 0
5179 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5180 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5181 {
5182 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5183
5184 /* If BITSIZE is narrower than the size of the type of EXP
5185 we will be narrowing TEMP. Normally, what's wanted are the
5186 low-order bits. However, if EXP's type is a record and this is a
5187 big-endian machine, we want the upper BITSIZE bits. */
5188 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5189 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5190 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5191 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5192 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5193 - bitsize),
5194 NULL_RTX, 1);
5195
5196 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5197 MODE. */
5198 if (mode != VOIDmode && mode != BLKmode
5199 && mode != TYPE_MODE (TREE_TYPE (exp)))
5200 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5201
5202 /* If the modes of TARGET and TEMP are both BLKmode, both
5203 must be in memory and BITPOS must be aligned on a byte
5204 boundary. If so, we simply do a block copy. */
5205 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5206 {
5207 gcc_assert (MEM_P (target) && MEM_P (temp)
5208 && !(bitpos % BITS_PER_UNIT));
5209
5210 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5211 emit_block_move (target, temp,
5212 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5213 / BITS_PER_UNIT),
5214 BLOCK_OP_NORMAL);
5215
5216 return const0_rtx;
5217 }
5218
5219 /* Store the value in the bitfield. */
5220 store_bit_field (target, bitsize, bitpos, mode, temp);
5221
5222 return const0_rtx;
5223 }
5224 else
5225 {
5226 /* Now build a reference to just the desired component. */
5227 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5228
5229 if (to_rtx == target)
5230 to_rtx = copy_rtx (to_rtx);
5231
5232 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5233 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5234 set_mem_alias_set (to_rtx, alias_set);
5235
5236 return store_expr (exp, to_rtx, 0);
5237 }
5238 }
5239 \f
5240 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5241 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5242 codes and find the ultimate containing object, which we return.
5243
5244 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5245 bit position, and *PUNSIGNEDP to the signedness of the field.
5246 If the position of the field is variable, we store a tree
5247 giving the variable offset (in units) in *POFFSET.
5248 This offset is in addition to the bit position.
5249 If the position is not variable, we store 0 in *POFFSET.
5250
5251 If any of the extraction expressions is volatile,
5252 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5253
5254 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5255 is a mode that can be used to access the field. In that case, *PBITSIZE
5256 is redundant.
5257
5258 If the field describes a variable-sized object, *PMODE is set to
5259 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5260 this case, but the address of the object can be found. */
5261
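/* Illustrative example (hypothetical source, not part of GCC): for a
   reference such as

       struct s { int pad; unsigned int f : 5; } *p;
       ... p->f ...

   the COMPONENT_REF would yield the containing object *p with
   *PBITSIZE == 5, *PBITPOS == 32 (given 32-bit int), *POFFSET == 0,
   *PMODE == VOIDmode and *PUNSIGNEDP nonzero.  */
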
5262 tree
5263 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5264 HOST_WIDE_INT *pbitpos, tree *poffset,
5265 enum machine_mode *pmode, int *punsignedp,
5266 int *pvolatilep)
5267 {
5268 tree size_tree = 0;
5269 enum machine_mode mode = VOIDmode;
5270 tree offset = size_zero_node;
5271 tree bit_offset = bitsize_zero_node;
5272 tree tem;
5273
5274 /* First get the mode, signedness, and size. We do this from just the
5275 outermost expression. */
5276 if (TREE_CODE (exp) == COMPONENT_REF)
5277 {
5278 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5279 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5280 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5281
5282 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5283 }
5284 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5285 {
5286 size_tree = TREE_OPERAND (exp, 1);
5287 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5288 }
5289 else
5290 {
5291 mode = TYPE_MODE (TREE_TYPE (exp));
5292 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5293
5294 if (mode == BLKmode)
5295 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5296 else
5297 *pbitsize = GET_MODE_BITSIZE (mode);
5298 }
5299
5300 if (size_tree != 0)
5301 {
5302 if (! host_integerp (size_tree, 1))
5303 mode = BLKmode, *pbitsize = -1;
5304 else
5305 *pbitsize = tree_low_cst (size_tree, 1);
5306 }
5307
5308 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5309 and find the ultimate containing object. */
5310 while (1)
5311 {
5312 if (TREE_CODE (exp) == BIT_FIELD_REF)
5313 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5314 else if (TREE_CODE (exp) == COMPONENT_REF)
5315 {
5316 tree field = TREE_OPERAND (exp, 1);
5317 tree this_offset = component_ref_field_offset (exp);
5318
5319 /* If this field hasn't been filled in yet, don't go
5320 past it. This should only happen when folding expressions
5321 made during type construction. */
5322 if (this_offset == 0)
5323 break;
5324
5325 offset = size_binop (PLUS_EXPR, offset, this_offset);
5326 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5327 DECL_FIELD_BIT_OFFSET (field));
5328
5329 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5330 }
5331
5332 else if (TREE_CODE (exp) == ARRAY_REF
5333 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5334 {
5335 tree index = TREE_OPERAND (exp, 1);
5336 tree low_bound = array_ref_low_bound (exp);
5337 tree unit_size = array_ref_element_size (exp);
5338
5339 /* We assume all arrays have sizes that are a multiple of a byte.
5340 First subtract the lower bound, if any, in the type of the
5341 index, then convert to sizetype and multiply by the size of the
5342 array element. */
5343 if (! integer_zerop (low_bound))
5344 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5345 index, low_bound));
5346
5347 offset = size_binop (PLUS_EXPR, offset,
5348 size_binop (MULT_EXPR,
5349 convert (sizetype, index),
5350 unit_size));
5351 }
5352
5353       /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5354 conversions that don't change the mode, and all view conversions
5355 except those that need to "step up" the alignment. */
5356 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5357 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5358 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5359 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5360 && STRICT_ALIGNMENT
5361 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5362 < BIGGEST_ALIGNMENT)
5363 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5364 || TYPE_ALIGN_OK (TREE_TYPE
5365 (TREE_OPERAND (exp, 0))))))
5366 && ! ((TREE_CODE (exp) == NOP_EXPR
5367 || TREE_CODE (exp) == CONVERT_EXPR)
5368 && (TYPE_MODE (TREE_TYPE (exp))
5369 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5370 break;
5371
5372 /* If any reference in the chain is volatile, the effect is volatile. */
5373 if (TREE_THIS_VOLATILE (exp))
5374 *pvolatilep = 1;
5375
5376 exp = TREE_OPERAND (exp, 0);
5377 }
5378
5379 /* If OFFSET is constant, see if we can return the whole thing as a
5380 constant bit position. Otherwise, split it up. */
5381 if (host_integerp (offset, 0)
5382 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5383 bitsize_unit_node))
5384 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5385 && host_integerp (tem, 0))
5386 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5387 else
5388 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5389
5390 *pmode = mode;
5391 return exp;
5392 }
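
/* Illustrative sketch (not part of the original source): a typical caller
   pattern for get_inner_reference.  The function and variable names below
   are hypothetical and exist only for this example.  */
#if 0
static void
example_get_inner_reference (tree ref)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;
  tree base;

  /* For a reference such as s.f or a[i], peel off the COMPONENT_REFs and
     ARRAY_REFs and get back the containing object, along with the constant
     bit position and any variable byte offset.  */
  base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
			      &mode, &unsignedp, &volatilep);
  (void) base;
}
#endif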
5393
5394 /* Return a tree of sizetype representing the size, in bytes, of the element
5395 of EXP, an ARRAY_REF. */
5396
5397 tree
5398 array_ref_element_size (tree exp)
5399 {
5400 tree aligned_size = TREE_OPERAND (exp, 3);
5401 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5402
5403 /* If a size was specified in the ARRAY_REF, it's the size measured
5404 in alignment units of the element type. So multiply by that value. */
5405 if (aligned_size)
5406 {
5407 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5408 sizetype from another type of the same width and signedness. */
5409 if (TREE_TYPE (aligned_size) != sizetype)
5410 aligned_size = fold_convert (sizetype, aligned_size);
5411 return size_binop (MULT_EXPR, aligned_size,
5412 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5413 }
5414
5415 /* Otherwise, take the size from that of the element type. Substitute
5416 any PLACEHOLDER_EXPR that we have. */
5417 else
5418 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5419 }
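
/* Worked example (illustrative, not from the original source): if the
   element type has a 4-byte alignment unit and operand 3 of the ARRAY_REF
   is 3, the element size computed above is 3 * 4 = 12 bytes.  */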
5420
5421 /* Return a tree representing the lower bound of the array mentioned in
5422 EXP, an ARRAY_REF. */
5423
5424 tree
5425 array_ref_low_bound (tree exp)
5426 {
5427 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5428
5429 /* If a lower bound is specified in EXP, use it. */
5430 if (TREE_OPERAND (exp, 2))
5431 return TREE_OPERAND (exp, 2);
5432
5433 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5434 substituting for a PLACEHOLDER_EXPR as needed. */
5435 if (domain_type && TYPE_MIN_VALUE (domain_type))
5436 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5437
5438 /* Otherwise, return a zero of the appropriate type. */
5439 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5440 }
5441
5442 /* Return a tree representing the upper bound of the array mentioned in
5443 EXP, an ARRAY_REF. */
5444
5445 tree
5446 array_ref_up_bound (tree exp)
5447 {
5448 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5449
5450 /* If there is a domain type and it has an upper bound, use it, substituting
5451 for a PLACEHOLDER_EXPR as needed. */
5452 if (domain_type && TYPE_MAX_VALUE (domain_type))
5453 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5454
5455 /* Otherwise fail. */
5456 return NULL_TREE;
5457 }
5458
5459 /* Return a tree representing the offset, in bytes, of the field referenced
5460 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5461
5462 tree
5463 component_ref_field_offset (tree exp)
5464 {
5465 tree aligned_offset = TREE_OPERAND (exp, 2);
5466 tree field = TREE_OPERAND (exp, 1);
5467
5468 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5469 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5470 value. */
5471 if (aligned_offset)
5472 {
5473 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5474 sizetype from another type of the same width and signedness. */
5475 if (TREE_TYPE (aligned_offset) != sizetype)
5476 aligned_offset = fold_convert (sizetype, aligned_offset);
5477 return size_binop (MULT_EXPR, aligned_offset,
5478 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5479 }
5480
5481 /* Otherwise, take the offset from that of the field. Substitute
5482 any PLACEHOLDER_EXPR that we have. */
5483 else
5484 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5485 }
5486
5487 /* Return 1 if T is an expression that get_inner_reference handles. */
5488
5489 int
5490 handled_component_p (tree t)
5491 {
5492 switch (TREE_CODE (t))
5493 {
5494 case BIT_FIELD_REF:
5495 case COMPONENT_REF:
5496 case ARRAY_REF:
5497 case ARRAY_RANGE_REF:
5498 case NON_LVALUE_EXPR:
5499 case VIEW_CONVERT_EXPR:
5500 return 1;
5501
5502 /* ??? Sure they are handled, but get_inner_reference may return
5503 a different PBITSIZE, depending upon whether the expression is
5504 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5505 case NOP_EXPR:
5506 case CONVERT_EXPR:
5507 return (TYPE_MODE (TREE_TYPE (t))
5508 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5509
5510 default:
5511 return 0;
5512 }
5513 }
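
/* Illustrative sketch (not part of the original source): a caller would
   typically use handled_component_p to strip a reference chain before
   calling get_inner_reference.  The helper name is hypothetical.  */
#if 0
static tree
example_strip_handled_components (tree t)
{
  /* For an access such as a.b[i].c, this walks down through the
     COMPONENT_REF and ARRAY_REF nodes until the base object A.  */
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);
  return t;
}
#endif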
5514 \f
5515 /* Given an rtx VALUE that may contain additions and multiplications, return
5516 an equivalent value that just refers to a register, memory, or constant.
5517 This is done by generating instructions to perform the arithmetic and
5518 returning a pseudo-register containing the value.
5519
5520 The returned value may be a REG, SUBREG, MEM or constant. */
5521
5522 rtx
5523 force_operand (rtx value, rtx target)
5524 {
5525 rtx op1, op2;
5526 /* Use subtarget as the target for operand 0 of a binary operation. */
5527 rtx subtarget = get_subtarget (target);
5528 enum rtx_code code = GET_CODE (value);
5529
5530   /* Check for a subreg applied to an expression produced by the loop optimizer.  */
5531 if (code == SUBREG
5532 && !REG_P (SUBREG_REG (value))
5533 && !MEM_P (SUBREG_REG (value)))
5534 {
5535 value = simplify_gen_subreg (GET_MODE (value),
5536 force_reg (GET_MODE (SUBREG_REG (value)),
5537 force_operand (SUBREG_REG (value),
5538 NULL_RTX)),
5539 GET_MODE (SUBREG_REG (value)),
5540 SUBREG_BYTE (value));
5541 code = GET_CODE (value);
5542 }
5543
5544 /* Check for a PIC address load. */
5545 if ((code == PLUS || code == MINUS)
5546 && XEXP (value, 0) == pic_offset_table_rtx
5547 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5548 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5549 || GET_CODE (XEXP (value, 1)) == CONST))
5550 {
5551 if (!subtarget)
5552 subtarget = gen_reg_rtx (GET_MODE (value));
5553 emit_move_insn (subtarget, value);
5554 return subtarget;
5555 }
5556
5557 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5558 {
5559 if (!target)
5560 target = gen_reg_rtx (GET_MODE (value));
5561 convert_move (target, force_operand (XEXP (value, 0), NULL),
5562 code == ZERO_EXTEND);
5563 return target;
5564 }
5565
5566 if (ARITHMETIC_P (value))
5567 {
5568 op2 = XEXP (value, 1);
5569 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5570 subtarget = 0;
5571 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5572 {
5573 code = PLUS;
5574 op2 = negate_rtx (GET_MODE (value), op2);
5575 }
5576
5577 /* Check for an addition with OP2 a constant integer and our first
5578 operand a PLUS of a virtual register and something else. In that
5579 case, we want to emit the sum of the virtual register and the
5580 constant first and then add the other value. This allows virtual
5581 register instantiation to simply modify the constant rather than
5582 creating another one around this addition. */
5583 if (code == PLUS && GET_CODE (op2) == CONST_INT
5584 && GET_CODE (XEXP (value, 0)) == PLUS
5585 && REG_P (XEXP (XEXP (value, 0), 0))
5586 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5587 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5588 {
5589 rtx temp = expand_simple_binop (GET_MODE (value), code,
5590 XEXP (XEXP (value, 0), 0), op2,
5591 subtarget, 0, OPTAB_LIB_WIDEN);
5592 return expand_simple_binop (GET_MODE (value), code, temp,
5593 force_operand (XEXP (XEXP (value,
5594 0), 1), 0),
5595 target, 0, OPTAB_LIB_WIDEN);
5596 }
5597
5598 op1 = force_operand (XEXP (value, 0), subtarget);
5599 op2 = force_operand (op2, NULL_RTX);
5600 switch (code)
5601 {
5602 case MULT:
5603 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5604 case DIV:
5605 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5606 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5607 target, 1, OPTAB_LIB_WIDEN);
5608 else
5609 return expand_divmod (0,
5610 FLOAT_MODE_P (GET_MODE (value))
5611 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5612 GET_MODE (value), op1, op2, target, 0);
5613 break;
5614 case MOD:
5615 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5616 target, 0);
5617 break;
5618 case UDIV:
5619 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5620 target, 1);
5621 break;
5622 case UMOD:
5623 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5624 target, 1);
5625 break;
5626 case ASHIFTRT:
5627 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5628 target, 0, OPTAB_LIB_WIDEN);
5629 break;
5630 default:
5631 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5632 target, 1, OPTAB_LIB_WIDEN);
5633 }
5634 }
5635 if (UNARY_P (value))
5636 {
5637 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5638 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5639 }
5640
5641 #ifdef INSN_SCHEDULING
5642   /* On machines that have insn scheduling, we want all memory references to be
5643 explicit, so we need to deal with such paradoxical SUBREGs. */
5644 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5645 && (GET_MODE_SIZE (GET_MODE (value))
5646 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5647 value
5648 = simplify_gen_subreg (GET_MODE (value),
5649 force_reg (GET_MODE (SUBREG_REG (value)),
5650 force_operand (SUBREG_REG (value),
5651 NULL_RTX)),
5652 GET_MODE (SUBREG_REG (value)),
5653 SUBREG_BYTE (value));
5654 #endif
5655
5656 return value;
5657 }
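
/* Illustrative sketch (not part of the original source): reducing a
   register-plus-constant expression to a single operand with
   force_operand.  The helper name is hypothetical.  */
#if 0
static rtx
example_force_sum (rtx reg, HOST_WIDE_INT c)
{
  rtx sum = gen_rtx_PLUS (GET_MODE (reg), reg, GEN_INT (c));

  /* force_operand emits any needed arithmetic insns and returns a REG,
     SUBREG, MEM or constant that can be used as an operand.  */
  return force_operand (sum, NULL_RTX);
}
#endif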
5658 \f
5659 /* Subroutine of expand_expr: return nonzero iff there is no way that
5660 EXP can reference X, which is being modified. TOP_P is nonzero if this
5661 call is going to be used to determine whether we need a temporary
5662 for EXP, as opposed to a recursive call to this function.
5663
5664 It is always safe for this routine to return zero since it merely
5665 searches for optimization opportunities. */
5666
5667 int
5668 safe_from_p (rtx x, tree exp, int top_p)
5669 {
5670 rtx exp_rtl = 0;
5671 int i, nops;
5672
5673 if (x == 0
5674 /* If EXP has varying size, we MUST use a target since we currently
5675 have no way of allocating temporaries of variable size
5676 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5677 So we assume here that something at a higher level has prevented a
5678 clash. This is somewhat bogus, but the best we can do. Only
5679 do this when X is BLKmode and when we are at the top level. */
5680 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5681 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5682 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5683 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5684 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5685 != INTEGER_CST)
5686 && GET_MODE (x) == BLKmode)
5687 /* If X is in the outgoing argument area, it is always safe. */
5688 || (MEM_P (x)
5689 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5690 || (GET_CODE (XEXP (x, 0)) == PLUS
5691 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5692 return 1;
5693
5694 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5695 find the underlying pseudo. */
5696 if (GET_CODE (x) == SUBREG)
5697 {
5698 x = SUBREG_REG (x);
5699 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5700 return 0;
5701 }
5702
5703 /* Now look at our tree code and possibly recurse. */
5704 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5705 {
5706 case tcc_declaration:
5707 exp_rtl = DECL_RTL_IF_SET (exp);
5708 break;
5709
5710 case tcc_constant:
5711 return 1;
5712
5713 case tcc_exceptional:
5714 if (TREE_CODE (exp) == TREE_LIST)
5715 {
5716 while (1)
5717 {
5718 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5719 return 0;
5720 exp = TREE_CHAIN (exp);
5721 if (!exp)
5722 return 1;
5723 if (TREE_CODE (exp) != TREE_LIST)
5724 return safe_from_p (x, exp, 0);
5725 }
5726 }
5727 else if (TREE_CODE (exp) == ERROR_MARK)
5728 return 1; /* An already-visited SAVE_EXPR? */
5729 else
5730 return 0;
5731
5732 case tcc_statement:
5733 /* The only case we look at here is the DECL_INITIAL inside a
5734 DECL_EXPR. */
5735 return (TREE_CODE (exp) != DECL_EXPR
5736 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5737 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5738 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5739
5740 case tcc_binary:
5741 case tcc_comparison:
5742 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5743 return 0;
5744 /* Fall through. */
5745
5746 case tcc_unary:
5747 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5748
5749 case tcc_expression:
5750 case tcc_reference:
5751 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5752 the expression. If it is set, we conflict iff we are that rtx or
5753 both are in memory. Otherwise, we check all operands of the
5754 expression recursively. */
5755
5756 switch (TREE_CODE (exp))
5757 {
5758 case ADDR_EXPR:
5759 /* If the operand is static or we are static, we can't conflict.
5760 Likewise if we don't conflict with the operand at all. */
5761 if (staticp (TREE_OPERAND (exp, 0))
5762 || TREE_STATIC (exp)
5763 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5764 return 1;
5765
5766 /* Otherwise, the only way this can conflict is if we are taking
5767 	     the address of a DECL whose address is part of X, which is
5768 very rare. */
5769 exp = TREE_OPERAND (exp, 0);
5770 if (DECL_P (exp))
5771 {
5772 if (!DECL_RTL_SET_P (exp)
5773 || !MEM_P (DECL_RTL (exp)))
5774 return 0;
5775 else
5776 exp_rtl = XEXP (DECL_RTL (exp), 0);
5777 }
5778 break;
5779
5780 case MISALIGNED_INDIRECT_REF:
5781 case ALIGN_INDIRECT_REF:
5782 case INDIRECT_REF:
5783 if (MEM_P (x)
5784 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5785 get_alias_set (exp)))
5786 return 0;
5787 break;
5788
5789 case CALL_EXPR:
5790 /* Assume that the call will clobber all hard registers and
5791 all of memory. */
5792 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5793 || MEM_P (x))
5794 return 0;
5795 break;
5796
5797 case WITH_CLEANUP_EXPR:
5798 case CLEANUP_POINT_EXPR:
5799 /* Lowered by gimplify.c. */
5800 gcc_unreachable ();
5801
5802 case SAVE_EXPR:
5803 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5804
5805 default:
5806 break;
5807 }
5808
5809 /* If we have an rtx, we do not need to scan our operands. */
5810 if (exp_rtl)
5811 break;
5812
5813 nops = first_rtl_op (TREE_CODE (exp));
5814 for (i = 0; i < nops; i++)
5815 if (TREE_OPERAND (exp, i) != 0
5816 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5817 return 0;
5818
5819 /* If this is a language-specific tree code, it may require
5820 special handling. */
5821 if ((unsigned int) TREE_CODE (exp)
5822 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5823 && !lang_hooks.safe_from_p (x, exp))
5824 return 0;
5825 break;
5826
5827 case tcc_type:
5828 /* Should never get a type here. */
5829 gcc_unreachable ();
5830 }
5831
5832 /* If we have an rtl, find any enclosed object. Then see if we conflict
5833 with it. */
5834 if (exp_rtl)
5835 {
5836 if (GET_CODE (exp_rtl) == SUBREG)
5837 {
5838 exp_rtl = SUBREG_REG (exp_rtl);
5839 if (REG_P (exp_rtl)
5840 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5841 return 0;
5842 }
5843
5844 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5845 are memory and they conflict. */
5846 return ! (rtx_equal_p (x, exp_rtl)
5847 || (MEM_P (x) && MEM_P (exp_rtl)
5848 && true_dependence (exp_rtl, VOIDmode, x,
5849 rtx_addr_varies_p)));
5850 }
5851
5852 /* If we reach here, it is safe. */
5853 return 1;
5854 }
5855
5856 \f
5857 /* Return the highest power of two that EXP is known to be a multiple of.
5858 This is used in updating alignment of MEMs in array references. */
5859
5860 static unsigned HOST_WIDE_INT
5861 highest_pow2_factor (tree exp)
5862 {
5863 unsigned HOST_WIDE_INT c0, c1;
5864
5865 switch (TREE_CODE (exp))
5866 {
5867 case INTEGER_CST:
5868 /* We can find the lowest bit that's a one. If the low
5869 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5870 We need to handle this case since we can find it in a COND_EXPR,
5871 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5872 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5873 later ICE. */
5874 if (TREE_CONSTANT_OVERFLOW (exp))
5875 return BIGGEST_ALIGNMENT;
5876 else
5877 {
5878 /* Note: tree_low_cst is intentionally not used here,
5879 we don't care about the upper bits. */
5880 c0 = TREE_INT_CST_LOW (exp);
5881 c0 &= -c0;
5882 return c0 ? c0 : BIGGEST_ALIGNMENT;
5883 }
5884 break;
5885
5886 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5887 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5888 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5889 return MIN (c0, c1);
5890
5891 case MULT_EXPR:
5892 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5893 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5894 return c0 * c1;
5895
5896 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5897 case CEIL_DIV_EXPR:
5898 if (integer_pow2p (TREE_OPERAND (exp, 1))
5899 && host_integerp (TREE_OPERAND (exp, 1), 1))
5900 {
5901 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5902 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5903 return MAX (1, c0 / c1);
5904 }
5905 break;
5906
5907 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5908 case SAVE_EXPR:
5909 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5910
5911 case COMPOUND_EXPR:
5912 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5913
5914 case COND_EXPR:
5915 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5916 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5917 return MIN (c0, c1);
5918
5919 default:
5920 break;
5921 }
5922
5923 return 1;
5924 }
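
/* Worked example (illustrative, not from the original source): for the
   offset expression i * 12 + 8, the variable index contributes a factor
   of 1 and the constant 12 contributes 4 (its largest power-of-two
   divisor), so the MULT_EXPR yields 1 * 4 = 4; the constant 8 contributes
   8, and the PLUS_EXPR takes the minimum, giving a final factor of 4.  */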
5925
5926 /* Similar, except that the alignment requirements of TARGET are
5927 taken into account. Assume it is at least as aligned as its
5928 type, unless it is a COMPONENT_REF in which case the layout of
5929 the structure gives the alignment. */
5930
5931 static unsigned HOST_WIDE_INT
5932 highest_pow2_factor_for_target (tree target, tree exp)
5933 {
5934 unsigned HOST_WIDE_INT target_align, factor;
5935
5936 factor = highest_pow2_factor (exp);
5937 if (TREE_CODE (target) == COMPONENT_REF)
5938 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
5939 else
5940 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
5941 return MAX (factor, target_align);
5942 }
5943 \f
5944 /* Expands variable VAR. */
5945
5946 void
5947 expand_var (tree var)
5948 {
5949 if (DECL_EXTERNAL (var))
5950 return;
5951
5952 if (TREE_STATIC (var))
5953 /* If this is an inlined copy of a static local variable,
5954 look up the original decl. */
5955 var = DECL_ORIGIN (var);
5956
5957 if (TREE_STATIC (var)
5958 ? !TREE_ASM_WRITTEN (var)
5959 : !DECL_RTL_SET_P (var))
5960 {
5961 if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
5962 /* Should be ignored. */;
5963 else if (lang_hooks.expand_decl (var))
5964 /* OK. */;
5965 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
5966 expand_decl (var);
5967 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
5968 rest_of_decl_compilation (var, 0, 0);
5969 else
5970 /* No expansion needed. */
5971 gcc_assert (TREE_CODE (var) == TYPE_DECL
5972 || TREE_CODE (var) == CONST_DECL
5973 || TREE_CODE (var) == FUNCTION_DECL
5974 || TREE_CODE (var) == LABEL_DECL);
5975 }
5976 }
5977
5978 /* Subroutine of expand_expr. Expand the two operands of a binary
5979 expression EXP0 and EXP1 placing the results in OP0 and OP1.
5980 The value may be stored in TARGET if TARGET is nonzero. The
5981 MODIFIER argument is as documented by expand_expr. */
5982
5983 static void
5984 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
5985 enum expand_modifier modifier)
5986 {
5987 if (! safe_from_p (target, exp1, 1))
5988 target = 0;
5989 if (operand_equal_p (exp0, exp1, 0))
5990 {
5991 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5992 *op1 = copy_rtx (*op0);
5993 }
5994 else
5995 {
5996 /* If we need to preserve evaluation order, copy exp0 into its own
5997 temporary variable so that it can't be clobbered by exp1. */
5998 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
5999 exp0 = save_expr (exp0);
6000 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6001 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6002 }
6003 }
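
/* For example (illustrative, not from the original source): when expanding
   X + X the two operand trees compare equal, so the operand is expanded
   only once and OP1 becomes a copy_rtx of OP0 instead of a second
   expansion.  */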
6004
6005 \f
6006 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6007 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6008
6009 static rtx
6010 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6011 enum expand_modifier modifier)
6012 {
6013 rtx result, subtarget;
6014 tree inner, offset;
6015 HOST_WIDE_INT bitsize, bitpos;
6016 int volatilep, unsignedp;
6017 enum machine_mode mode1;
6018
6019 /* If we are taking the address of a constant and are at the top level,
6020 we have to use output_constant_def since we can't call force_const_mem
6021 at top level. */
6022 /* ??? This should be considered a front-end bug. We should not be
6023 generating ADDR_EXPR of something that isn't an LVALUE. The only
6024 exception here is STRING_CST. */
6025 if (TREE_CODE (exp) == CONSTRUCTOR
6026 || CONSTANT_CLASS_P (exp))
6027 return XEXP (output_constant_def (exp, 0), 0);
6028
6029 /* Everything must be something allowed by is_gimple_addressable. */
6030 switch (TREE_CODE (exp))
6031 {
6032 case INDIRECT_REF:
6033 /* This case will happen via recursion for &a->b. */
6034 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
6035
6036 case CONST_DECL:
6037 /* Recurse and make the output_constant_def clause above handle this. */
6038 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6039 tmode, modifier);
6040
6041 case REALPART_EXPR:
6042 /* The real part of the complex number is always first, therefore
6043 the address is the same as the address of the parent object. */
6044 offset = 0;
6045 bitpos = 0;
6046 inner = TREE_OPERAND (exp, 0);
6047 break;
6048
6049 case IMAGPART_EXPR:
6050 /* The imaginary part of the complex number is always second.
6051 The expression is therefore always offset by the size of the
6052 scalar type. */
6053 offset = 0;
6054 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6055 inner = TREE_OPERAND (exp, 0);
6056 break;
6057
6058 default:
6059 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6060 	 expand_expr, as that can have various side effects; LABEL_DECLs,
6061 	 for example, may not have their DECL_RTL set yet.  Assume language
6062 specific tree nodes can be expanded in some interesting way. */
6063 if (DECL_P (exp)
6064 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6065 {
6066 result = expand_expr (exp, target, tmode,
6067 modifier == EXPAND_INITIALIZER
6068 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6069
6070 /* If the DECL isn't in memory, then the DECL wasn't properly
6071 marked TREE_ADDRESSABLE, which will be either a front-end
6072 or a tree optimizer bug. */
6073 gcc_assert (GET_CODE (result) == MEM);
6074 result = XEXP (result, 0);
6075
6076 /* ??? Is this needed anymore? */
6077 	  if (DECL_P (exp) && ! TREE_USED (exp))
6078 {
6079 assemble_external (exp);
6080 TREE_USED (exp) = 1;
6081 }
6082
6083 if (modifier != EXPAND_INITIALIZER
6084 && modifier != EXPAND_CONST_ADDRESS)
6085 result = force_operand (result, target);
6086 return result;
6087 }
6088
6089 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6090 &mode1, &unsignedp, &volatilep);
6091 break;
6092 }
6093
6094 /* We must have made progress. */
6095 gcc_assert (inner != exp);
6096
6097 subtarget = offset || bitpos ? NULL_RTX : target;
6098 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6099
6100 if (offset)
6101 {
6102 rtx tmp;
6103
6104 if (modifier != EXPAND_NORMAL)
6105 result = force_operand (result, NULL);
6106 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6107
6108 result = convert_memory_address (tmode, result);
6109 tmp = convert_memory_address (tmode, tmp);
6110
6111 if (modifier == EXPAND_SUM)
6112 result = gen_rtx_PLUS (tmode, result, tmp);
6113 else
6114 {
6115 subtarget = bitpos ? NULL_RTX : target;
6116 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6117 1, OPTAB_LIB_WIDEN);
6118 }
6119 }
6120
6121 if (bitpos)
6122 {
6123 /* Someone beforehand should have rejected taking the address
6124 of such an object. */
6125 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6126
6127 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6128 if (modifier < EXPAND_SUM)
6129 result = force_operand (result, target);
6130 }
6131
6132 return result;
6133 }
6134
6135 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6136 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6137
6138 static rtx
6139 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6140 enum expand_modifier modifier)
6141 {
6142 enum machine_mode rmode;
6143 rtx result;
6144
6145 /* Target mode of VOIDmode says "whatever's natural". */
6146 if (tmode == VOIDmode)
6147 tmode = TYPE_MODE (TREE_TYPE (exp));
6148
6149 /* We can get called with some Weird Things if the user does silliness
6150 like "(short) &a". In that case, convert_memory_address won't do
6151 the right thing, so ignore the given target mode. */
6152 if (tmode != Pmode && tmode != ptr_mode)
6153 tmode = Pmode;
6154
6155 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6156 tmode, modifier);
6157
6158   /* Despite expand_expr's claims about ignoring TMODE when it is not
6159      strictly convenient, things break if we don't honor it.  Note
6160 that combined with the above, we only do this for pointer modes. */
6161 rmode = GET_MODE (result);
6162 if (rmode == VOIDmode)
6163 rmode = tmode;
6164 if (rmode != tmode)
6165 result = convert_memory_address (tmode, result);
6166
6167 return result;
6168 }
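
/* Worked example (illustrative, not from the original source): for &s.f,
   where F is a field at byte offset 8 of S, expand_expr_addr_expr_1
   recursively takes the address of S and then adds the constant 8 via
   plus_constant; a variable offset, as in &a[i], is instead added with
   expand_simple_binop, or kept as a PLUS rtx under EXPAND_SUM.  */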
6169
6170
6171 /* expand_expr: generate code for computing expression EXP.
6172 An rtx for the computed value is returned. The value is never null.
6173 In the case of a void EXP, const0_rtx is returned.
6174
6175 The value may be stored in TARGET if TARGET is nonzero.
6176 TARGET is just a suggestion; callers must assume that
6177 the rtx returned may not be the same as TARGET.
6178
6179 If TARGET is CONST0_RTX, it means that the value will be ignored.
6180
6181 If TMODE is not VOIDmode, it suggests generating the
6182 result in mode TMODE. But this is done only when convenient.
6183    Otherwise, TMODE is ignored and the value is generated in its natural mode.
6184 TMODE is just a suggestion; callers must assume that
6185 the rtx returned may not have mode TMODE.
6186
6187 Note that TARGET may have neither TMODE nor MODE. In that case, it
6188 probably will not be used.
6189
6190 If MODIFIER is EXPAND_SUM then when EXP is an addition
6191 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6192 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6193 products as above, or REG or MEM, or constant.
6194 Ordinarily in such cases we would output mul or add instructions
6195 and then return a pseudo reg containing the sum.
6196
6197 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6198 it also marks a label as absolutely required (it can't be dead).
6199 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6200 This is used for outputting expressions used in initializers.
6201
6202 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6203 with a constant address even if that address is not normally legitimate.
6204 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6205
6206 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6207 a call parameter. Such targets require special care as we haven't yet
6208 marked TARGET so that it's safe from being trashed by libcalls. We
6209 don't want to use TARGET for anything but the final result;
6210 Intermediate values must go elsewhere. Additionally, calls to
6211    intermediate values must go elsewhere.  Additionally, calls to
6212
6213 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6214 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6215 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6216 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6217 recursively. */
6218
6219 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6220 enum expand_modifier, rtx *);
6221
6222 rtx
6223 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6224 enum expand_modifier modifier, rtx *alt_rtl)
6225 {
6226 int rn = -1;
6227 rtx ret, last = NULL;
6228
6229 /* Handle ERROR_MARK before anybody tries to access its type. */
6230 if (TREE_CODE (exp) == ERROR_MARK
6231 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6232 {
6233 ret = CONST0_RTX (tmode);
6234 return ret ? ret : const0_rtx;
6235 }
6236
6237 if (flag_non_call_exceptions)
6238 {
6239 rn = lookup_stmt_eh_region (exp);
6240       /* If rn < 0, then either (1) tree-ssa is not used or
	     (2) the expression doesn't throw.  */
6241 if (rn >= 0)
6242 last = get_last_insn ();
6243 }
6244
6245 /* If this is an expression of some kind and it has an associated line
6246 number, then emit the line number before expanding the expression.
6247
6248 We need to save and restore the file and line information so that
6249 errors discovered during expansion are emitted with the right
6250      information.  It would be better if the diagnostic routines
6251 used the file/line information embedded in the tree nodes rather
6252 than globals. */
6253 if (cfun && EXPR_HAS_LOCATION (exp))
6254 {
6255 location_t saved_location = input_location;
6256 input_location = EXPR_LOCATION (exp);
6257 emit_line_note (input_location);
6258
6259 /* Record where the insns produced belong. */
6260 record_block_change (TREE_BLOCK (exp));
6261
6262 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6263
6264 input_location = saved_location;
6265 }
6266 else
6267 {
6268 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6269 }
6270
6271 /* If using non-call exceptions, mark all insns that may trap.
6272 expand_call() will mark CALL_INSNs before we get to this code,
6273 but it doesn't handle libcalls, and these may trap. */
6274 if (rn >= 0)
6275 {
6276 rtx insn;
6277 for (insn = next_real_insn (last); insn;
6278 insn = next_real_insn (insn))
6279 {
6280 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6281 /* If we want exceptions for non-call insns, any
6282 may_trap_p instruction may throw. */
6283 && GET_CODE (PATTERN (insn)) != CLOBBER
6284 && GET_CODE (PATTERN (insn)) != USE
6285 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6286 {
6287 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6288 REG_NOTES (insn));
6289 }
6290 }
6291 }
6292
6293 return ret;
6294 }
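
/* Illustrative sketch (not part of the original source): the usual way a
   caller obtains RTL for a tree expression.  EXPAND_NORMAL requests the
   value in its natural mode, and the returned rtx need not be the
   suggested target.  The helper name is hypothetical.  */
#if 0
static rtx
example_expand_value (tree exp)
{
  return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
}
#endif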
6295
6296 static rtx
6297 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6298 enum expand_modifier modifier, rtx *alt_rtl)
6299 {
6300 rtx op0, op1, temp;
6301 tree type = TREE_TYPE (exp);
6302 int unsignedp;
6303 enum machine_mode mode;
6304 enum tree_code code = TREE_CODE (exp);
6305 optab this_optab;
6306 rtx subtarget, original_target;
6307 int ignore;
6308 tree context;
6309 bool reduce_bit_field = false;
6310 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6311 ? reduce_to_bit_field_precision ((expr), \
6312 target, \
6313 type) \
6314 : (expr))
6315
6316 mode = TYPE_MODE (type);
6317 unsignedp = TYPE_UNSIGNED (type);
6318 if (lang_hooks.reduce_bit_field_operations
6319 && TREE_CODE (type) == INTEGER_TYPE
6320 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6321 {
6322 /* An operation in what may be a bit-field type needs the
6323 result to be reduced to the precision of the bit-field type,
6324 which is narrower than that of the type's mode. */
6325 reduce_bit_field = true;
6326 if (modifier == EXPAND_STACK_PARM)
6327 target = 0;
6328 }
6329
6330 /* Use subtarget as the target for operand 0 of a binary operation. */
6331 subtarget = get_subtarget (target);
6332 original_target = target;
6333 ignore = (target == const0_rtx
6334 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6335 || code == CONVERT_EXPR || code == COND_EXPR
6336 || code == VIEW_CONVERT_EXPR)
6337 && TREE_CODE (type) == VOID_TYPE));
6338
6339 /* If we are going to ignore this result, we need only do something
6340 if there is a side-effect somewhere in the expression. If there
6341 is, short-circuit the most common cases here. Note that we must
6342 not call expand_expr with anything but const0_rtx in case this
6343 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6344
6345 if (ignore)
6346 {
6347 if (! TREE_SIDE_EFFECTS (exp))
6348 return const0_rtx;
6349
6350       /* Ensure we reference a volatile object even if the value is ignored, but
6351 don't do this if all we are doing is taking its address. */
6352 if (TREE_THIS_VOLATILE (exp)
6353 && TREE_CODE (exp) != FUNCTION_DECL
6354 && mode != VOIDmode && mode != BLKmode
6355 && modifier != EXPAND_CONST_ADDRESS)
6356 {
6357 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6358 if (MEM_P (temp))
6359 temp = copy_to_reg (temp);
6360 return const0_rtx;
6361 }
6362
6363 if (TREE_CODE_CLASS (code) == tcc_unary
6364 || code == COMPONENT_REF || code == INDIRECT_REF)
6365 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6366 modifier);
6367
6368 else if (TREE_CODE_CLASS (code) == tcc_binary
6369 || TREE_CODE_CLASS (code) == tcc_comparison
6370 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6371 {
6372 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6373 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6374 return const0_rtx;
6375 }
6376 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6377 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6378 /* If the second operand has no side effects, just evaluate
6379 the first. */
6380 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6381 modifier);
6382 else if (code == BIT_FIELD_REF)
6383 {
6384 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6385 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6386 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6387 return const0_rtx;
6388 }
6389
6390 target = 0;
6391 }
6392
6393   /* If we will do cse, generate all results into pseudo registers
6394 since 1) that allows cse to find more things
6395 and 2) otherwise cse could produce an insn the machine
6396 cannot support. An exception is a CONSTRUCTOR into a multi-word
6397 MEM: that's much more likely to be most efficient into the MEM.
6398 Another is a CALL_EXPR which must return in memory. */
6399
6400 if (! cse_not_expected && mode != BLKmode && target
6401 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6402 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6403 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6404 target = 0;
6405
6406 switch (code)
6407 {
6408 case LABEL_DECL:
6409 {
6410 tree function = decl_function_context (exp);
6411
6412 temp = label_rtx (exp);
6413 temp = gen_rtx_LABEL_REF (Pmode, temp);
6414
6415 if (function != current_function_decl
6416 && function != 0)
6417 LABEL_REF_NONLOCAL_P (temp) = 1;
6418
6419 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6420 return temp;
6421 }
6422
6423 case SSA_NAME:
6424 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6425 NULL);
6426
6427 case PARM_DECL:
6428 case VAR_DECL:
6429 /* If a static var's type was incomplete when the decl was written,
6430 but the type is complete now, lay out the decl now. */
6431 if (DECL_SIZE (exp) == 0
6432 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6433 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6434 layout_decl (exp, 0);
6435
6436 /* ... fall through ... */
6437
6438 case FUNCTION_DECL:
6439 case RESULT_DECL:
6440 gcc_assert (DECL_RTL (exp));
6441
6442       /* Ensure the variable is marked as used even if it doesn't go
6443 	 through a parser.  If it hasn't been used yet, write out an external
6444 definition. */
6445 if (! TREE_USED (exp))
6446 {
6447 assemble_external (exp);
6448 TREE_USED (exp) = 1;
6449 }
6450
6451 /* Show we haven't gotten RTL for this yet. */
6452 temp = 0;
6453
6454 /* Variables inherited from containing functions should have
6455 been lowered by this point. */
6456 context = decl_function_context (exp);
6457 gcc_assert (!context
6458 || context == current_function_decl
6459 || TREE_STATIC (exp)
6460 /* ??? C++ creates functions that are not TREE_STATIC. */
6461 || TREE_CODE (exp) == FUNCTION_DECL);
6462
6463 /* This is the case of an array whose size is to be determined
6464 from its initializer, while the initializer is still being parsed.
6465 See expand_decl. */
6466
6467 if (MEM_P (DECL_RTL (exp))
6468 && REG_P (XEXP (DECL_RTL (exp), 0)))
6469 temp = validize_mem (DECL_RTL (exp));
6470
6471       /* If DECL_RTL is memory, we are in the normal case: if either
6472 	 the address is not valid, or it is not a register and -fforce-addr
6473 	 is specified, get the address into a register.  */
6474
6475 else if (MEM_P (DECL_RTL (exp))
6476 && modifier != EXPAND_CONST_ADDRESS
6477 && modifier != EXPAND_SUM
6478 && modifier != EXPAND_INITIALIZER
6479 && (! memory_address_p (DECL_MODE (exp),
6480 XEXP (DECL_RTL (exp), 0))
6481 || (flag_force_addr
6482 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6483 {
6484 if (alt_rtl)
6485 *alt_rtl = DECL_RTL (exp);
6486 temp = replace_equiv_address (DECL_RTL (exp),
6487 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6488 }
6489
6490 /* If we got something, return it. But first, set the alignment
6491 if the address is a register. */
6492 if (temp != 0)
6493 {
6494 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6495 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6496
6497 return temp;
6498 }
6499
6500 /* If the mode of DECL_RTL does not match that of the decl, it
6501 must be a promoted value. We return a SUBREG of the wanted mode,
6502 but mark it so that we know that it was already extended. */
6503
6504 if (REG_P (DECL_RTL (exp))
6505 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6506 {
6507 enum machine_mode pmode;
6508
6509 /* Get the signedness used for this variable. Ensure we get the
6510 same mode we got when the variable was declared. */
6511 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6512 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6513 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6514
6515 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6516 SUBREG_PROMOTED_VAR_P (temp) = 1;
6517 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6518 return temp;
6519 }
6520
6521 return DECL_RTL (exp);
6522
6523 case INTEGER_CST:
6524 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6525 TREE_INT_CST_HIGH (exp), mode);
6526
6527 /* ??? If overflow is set, fold will have done an incomplete job,
6528 which can result in (plus xx (const_int 0)), which can get
6529 simplified by validate_replace_rtx during virtual register
6530 instantiation, which can result in unrecognizable insns.
6531 Avoid this by forcing all overflows into registers. */
6532 if (TREE_CONSTANT_OVERFLOW (exp)
6533 && modifier != EXPAND_INITIALIZER)
6534 temp = force_reg (mode, temp);
6535
6536 return temp;
6537
6538 case VECTOR_CST:
6539 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6540 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6541 return const_vector_from_tree (exp);
6542 else
6543 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6544 TREE_VECTOR_CST_ELTS (exp)),
6545 ignore ? const0_rtx : target, tmode, modifier);
6546
6547 case CONST_DECL:
6548 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6549
6550 case REAL_CST:
6551 /* If optimized, generate immediate CONST_DOUBLE
6552 which will be turned into memory by reload if necessary.
6553
6554 We used to force a register so that loop.c could see it. But
6555 this does not allow gen_* patterns to perform optimizations with
6556 the constants. It also produces two insns in cases like "x = 1.0;".
6557 On most machines, floating-point constants are not permitted in
6558 many insns, so we'd end up copying it to a register in any case.
6559
6560 Now, we do the copying in expand_binop, if appropriate. */
6561 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6562 TYPE_MODE (TREE_TYPE (exp)));
6563
6564 case COMPLEX_CST:
6565 /* Handle evaluating a complex constant in a CONCAT target. */
6566 if (original_target && GET_CODE (original_target) == CONCAT)
6567 {
6568 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6569 rtx rtarg, itarg;
6570
6571 rtarg = XEXP (original_target, 0);
6572 itarg = XEXP (original_target, 1);
6573
6574 /* Move the real and imaginary parts separately. */
6575 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6576 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6577
6578 if (op0 != rtarg)
6579 emit_move_insn (rtarg, op0);
6580 if (op1 != itarg)
6581 emit_move_insn (itarg, op1);
6582
6583 return original_target;
6584 }
6585
6586 /* ... fall through ... */
6587
6588 case STRING_CST:
6589 temp = output_constant_def (exp, 1);
6590
6591 /* temp contains a constant address.
6592 On RISC machines where a constant address isn't valid,
6593 make some insns to get that address into a register. */
6594 if (modifier != EXPAND_CONST_ADDRESS
6595 && modifier != EXPAND_INITIALIZER
6596 && modifier != EXPAND_SUM
6597 && (! memory_address_p (mode, XEXP (temp, 0))
6598 || flag_force_addr))
6599 return replace_equiv_address (temp,
6600 copy_rtx (XEXP (temp, 0)));
6601 return temp;
6602
6603 case SAVE_EXPR:
6604 {
6605 tree val = TREE_OPERAND (exp, 0);
6606 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6607
6608 if (!SAVE_EXPR_RESOLVED_P (exp))
6609 {
6610 /* We can indeed still hit this case, typically via builtin
6611 expanders calling save_expr immediately before expanding
6612 something. Assume this means that we only have to deal
6613 with non-BLKmode values. */
6614 gcc_assert (GET_MODE (ret) != BLKmode);
6615
6616 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6617 DECL_ARTIFICIAL (val) = 1;
6618 DECL_IGNORED_P (val) = 1;
6619 TREE_OPERAND (exp, 0) = val;
6620 SAVE_EXPR_RESOLVED_P (exp) = 1;
6621
6622 if (!CONSTANT_P (ret))
6623 ret = copy_to_reg (ret);
6624 SET_DECL_RTL (val, ret);
6625 }
6626
6627 return ret;
6628 }
6629
6630 case GOTO_EXPR:
6631 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6632 expand_goto (TREE_OPERAND (exp, 0));
6633 else
6634 expand_computed_goto (TREE_OPERAND (exp, 0));
6635 return const0_rtx;
6636
6637 case CONSTRUCTOR:
6638 /* If we don't need the result, just ensure we evaluate any
6639 subexpressions. */
6640 if (ignore)
6641 {
6642 tree elt;
6643
6644 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6645 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6646
6647 return const0_rtx;
6648 }
6649
6650 /* All elts simple constants => refer to a constant in memory. But
6651 if this is a non-BLKmode mode, let it store a field at a time
6652 since that should make a CONST_INT or CONST_DOUBLE when we
6653 fold. Likewise, if we have a target we can use, it is best to
6654 store directly into the target unless the type is large enough
6655 that memcpy will be used. If we are making an initializer and
6656 all operands are constant, put it in memory as well.
6657
6658 FIXME: Avoid trying to fill vector constructors piece-meal.
6659 Output them with output_constant_def below unless we're sure
6660 they're zeros. This should go away when vector initializers
6661 are treated like VECTOR_CST instead of arrays.
6662 */
6663 else if ((TREE_STATIC (exp)
6664 && ((mode == BLKmode
6665 && ! (target != 0 && safe_from_p (target, exp, 1)))
6666 || TREE_ADDRESSABLE (exp)
6667 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6668 && (! MOVE_BY_PIECES_P
6669 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6670 TYPE_ALIGN (type)))
6671 && ! mostly_zeros_p (exp))))
6672 || ((modifier == EXPAND_INITIALIZER
6673 || modifier == EXPAND_CONST_ADDRESS)
6674 && TREE_CONSTANT (exp)))
6675 {
6676 rtx constructor = output_constant_def (exp, 1);
6677
6678 if (modifier != EXPAND_CONST_ADDRESS
6679 && modifier != EXPAND_INITIALIZER
6680 && modifier != EXPAND_SUM)
6681 constructor = validize_mem (constructor);
6682
6683 return constructor;
6684 }
6685 else
6686 {
6687 /* Handle calls that pass values in multiple non-contiguous
6688 locations. The Irix 6 ABI has examples of this. */
6689 if (target == 0 || ! safe_from_p (target, exp, 1)
6690 || GET_CODE (target) == PARALLEL
6691 || modifier == EXPAND_STACK_PARM)
6692 target
6693 = assign_temp (build_qualified_type (type,
6694 (TYPE_QUALS (type)
6695 | (TREE_READONLY (exp)
6696 * TYPE_QUAL_CONST))),
6697 0, TREE_ADDRESSABLE (exp), 1);
6698
6699 store_constructor (exp, target, 0, int_expr_size (exp));
6700 return target;
6701 }
6702
6703 case MISALIGNED_INDIRECT_REF:
6704 case ALIGN_INDIRECT_REF:
6705 case INDIRECT_REF:
6706 {
6707 tree exp1 = TREE_OPERAND (exp, 0);
6708 tree orig;
6709
6710 if (code == MISALIGNED_INDIRECT_REF
6711 && !targetm.vectorize.misaligned_mem_ok (mode))
6712 abort ();
6713
6714 if (modifier != EXPAND_WRITE)
6715 {
6716 tree t;
6717
6718 t = fold_read_from_constant_string (exp);
6719 if (t)
6720 return expand_expr (t, target, tmode, modifier);
6721 }
6722
6723 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6724 op0 = memory_address (mode, op0);
6725
6726 if (code == ALIGN_INDIRECT_REF)
6727 {
6728 int align = TYPE_ALIGN_UNIT (type);
6729 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6730 op0 = memory_address (mode, op0);
6731 }
6732
6733 temp = gen_rtx_MEM (mode, op0);
6734
6735 orig = REF_ORIGINAL (exp);
6736 if (!orig)
6737 orig = exp;
6738 set_mem_attributes (temp, orig, 0);
6739
6740 return temp;
6741 }
6742
6743 case ARRAY_REF:
6744
6745 {
6746 tree array = TREE_OPERAND (exp, 0);
6747 tree index = TREE_OPERAND (exp, 1);
6748
6749 /* Fold an expression like: "foo"[2].
6750 This is not done in fold so it won't happen inside &.
6751 Don't fold if this is for wide characters since it's too
6752 difficult to do correctly and this is a very rare case. */
6753
6754 if (modifier != EXPAND_CONST_ADDRESS
6755 && modifier != EXPAND_INITIALIZER
6756 && modifier != EXPAND_MEMORY)
6757 {
6758 tree t = fold_read_from_constant_string (exp);
6759
6760 if (t)
6761 return expand_expr (t, target, tmode, modifier);
6762 }
6763
6764 /* If this is a constant index into a constant array,
6765 just get the value from the array. Handle both the cases when
6766 we have an explicit constructor and when our operand is a variable
6767 that was declared const. */
6768
6769 if (modifier != EXPAND_CONST_ADDRESS
6770 && modifier != EXPAND_INITIALIZER
6771 && modifier != EXPAND_MEMORY
6772 && TREE_CODE (array) == CONSTRUCTOR
6773 && ! TREE_SIDE_EFFECTS (array)
6774 && TREE_CODE (index) == INTEGER_CST)
6775 {
6776 tree elem;
6777
6778 for (elem = CONSTRUCTOR_ELTS (array);
6779 (elem && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6780 elem = TREE_CHAIN (elem))
6781 ;
6782
6783 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6784 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6785 modifier);
6786 }
6787
6788 else if (optimize >= 1
6789 && modifier != EXPAND_CONST_ADDRESS
6790 && modifier != EXPAND_INITIALIZER
6791 && modifier != EXPAND_MEMORY
6792 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6793 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6794 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6795 && targetm.binds_local_p (array))
6796 {
6797 if (TREE_CODE (index) == INTEGER_CST)
6798 {
6799 tree init = DECL_INITIAL (array);
6800
6801 if (TREE_CODE (init) == CONSTRUCTOR)
6802 {
6803 tree elem;
6804
6805 for (elem = CONSTRUCTOR_ELTS (init);
6806 (elem
6807 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6808 elem = TREE_CHAIN (elem))
6809 ;
6810
6811 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6812 return expand_expr (fold (TREE_VALUE (elem)), target,
6813 tmode, modifier);
6814 }
6815 else if (TREE_CODE (init) == STRING_CST
6816 && 0 > compare_tree_int (index,
6817 TREE_STRING_LENGTH (init)))
6818 {
6819 tree type = TREE_TYPE (TREE_TYPE (init));
6820 enum machine_mode mode = TYPE_MODE (type);
6821
6822 if (GET_MODE_CLASS (mode) == MODE_INT
6823 && GET_MODE_SIZE (mode) == 1)
6824 return gen_int_mode (TREE_STRING_POINTER (init)
6825 [TREE_INT_CST_LOW (index)], mode);
6826 }
6827 }
6828 }
6829 }
6830 goto normal_inner_ref;
6831
6832 case COMPONENT_REF:
6833 /* If the operand is a CONSTRUCTOR, we can just extract the
6834 appropriate field if it is present. */
6835 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6836 {
6837 tree elt;
6838
6839 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6840 elt = TREE_CHAIN (elt))
6841 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6842 /* We can normally use the value of the field in the
6843 CONSTRUCTOR. However, if this is a bitfield in
6844 an integral mode that we can fit in a HOST_WIDE_INT,
6845 we must mask only the number of bits in the bitfield,
6846 since this is done implicitly by the constructor. If
6847 the bitfield does not meet either of those conditions,
6848 we can't do this optimization. */
6849 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6850 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6851 == MODE_INT)
6852 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6853 <= HOST_BITS_PER_WIDE_INT))))
6854 {
6855 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6856 && modifier == EXPAND_STACK_PARM)
6857 target = 0;
6858 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6859 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6860 {
6861 HOST_WIDE_INT bitsize
6862 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6863 enum machine_mode imode
6864 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6865
6866 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6867 {
6868 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6869 op0 = expand_and (imode, op0, op1, target);
6870 }
6871 else
6872 {
6873 tree count
6874 = build_int_cst (NULL_TREE,
6875 GET_MODE_BITSIZE (imode) - bitsize);
6876
6877 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6878 target, 0);
6879 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6880 target, 0);
6881 }
6882 }
6883
6884 return op0;
6885 }
6886 }
6887 goto normal_inner_ref;
6888
6889 case BIT_FIELD_REF:
6890 case ARRAY_RANGE_REF:
6891 normal_inner_ref:
6892 {
6893 enum machine_mode mode1;
6894 HOST_WIDE_INT bitsize, bitpos;
6895 tree offset;
6896 int volatilep = 0;
6897 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6898 &mode1, &unsignedp, &volatilep);
6899 rtx orig_op0;
6900
6901 /* If we got back the original object, something is wrong. Perhaps
6902 we are evaluating an expression too early. In any event, don't
6903 infinitely recurse. */
6904 gcc_assert (tem != exp);
6905
6906 /* If TEM's type is a union of variable size, pass TARGET to the inner
6907 	   computation, since it will need a temporary and TARGET will
6908 	   have to do.  This occurs in unchecked conversion in Ada.  */
6909
6910 orig_op0 = op0
6911 = expand_expr (tem,
6912 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6913 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6914 != INTEGER_CST)
6915 && modifier != EXPAND_STACK_PARM
6916 ? target : NULL_RTX),
6917 VOIDmode,
6918 (modifier == EXPAND_INITIALIZER
6919 || modifier == EXPAND_CONST_ADDRESS
6920 || modifier == EXPAND_STACK_PARM)
6921 ? modifier : EXPAND_NORMAL);
6922
6923 /* If this is a constant, put it into a register if it is a
6924 	 legitimate constant and OFFSET is 0, and into memory if it isn't.  */
6925 if (CONSTANT_P (op0))
6926 {
6927 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6928 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6929 && offset == 0)
6930 op0 = force_reg (mode, op0);
6931 else
6932 op0 = validize_mem (force_const_mem (mode, op0));
6933 }
6934
6935       /* Otherwise, if this object is not in memory and we either have an
6936 offset or a BLKmode result, put it there. This case can't occur in
6937 C, but can in Ada if we have unchecked conversion of an expression
6938 from a scalar type to an array or record type or for an
6939 ARRAY_RANGE_REF whose type is BLKmode. */
6940 else if (!MEM_P (op0)
6941 && (offset != 0
6942 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6943 {
6944 tree nt = build_qualified_type (TREE_TYPE (tem),
6945 (TYPE_QUALS (TREE_TYPE (tem))
6946 | TYPE_QUAL_CONST));
6947 rtx memloc = assign_temp (nt, 1, 1, 1);
6948
6949 emit_move_insn (memloc, op0);
6950 op0 = memloc;
6951 }
6952
6953 if (offset != 0)
6954 {
6955 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6956 EXPAND_SUM);
6957
6958 gcc_assert (MEM_P (op0));
6959
6960 #ifdef POINTERS_EXTEND_UNSIGNED
6961 if (GET_MODE (offset_rtx) != Pmode)
6962 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6963 #else
6964 if (GET_MODE (offset_rtx) != ptr_mode)
6965 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6966 #endif
6967
6968 if (GET_MODE (op0) == BLKmode
6969 		 /* A constant address in OP0 can have VOIDmode; we must
6970 not try to call force_reg in that case. */
6971 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6972 && bitsize != 0
6973 && (bitpos % bitsize) == 0
6974 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6975 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6976 {
6977 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6978 bitpos = 0;
6979 }
6980
6981 op0 = offset_address (op0, offset_rtx,
6982 highest_pow2_factor (offset));
6983 }
6984
6985 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6986 record its alignment as BIGGEST_ALIGNMENT. */
6987 if (MEM_P (op0) && bitpos == 0 && offset != 0
6988 && is_aligning_offset (offset, tem))
6989 set_mem_align (op0, BIGGEST_ALIGNMENT);
6990
6991 /* Don't forget about volatility even if this is a bitfield. */
6992 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
6993 {
6994 if (op0 == orig_op0)
6995 op0 = copy_rtx (op0);
6996
6997 MEM_VOLATILE_P (op0) = 1;
6998 }
6999
7000 /* The following code doesn't handle CONCAT.
7001 Assume only bitpos == 0 can be used for CONCAT, due to
7002 one-element arrays having the same mode as their element. */
7003 if (GET_CODE (op0) == CONCAT)
7004 {
7005 gcc_assert (bitpos == 0
7006 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7007 return op0;
7008 }
7009
7010 /* In cases where an aligned union has an unaligned object
7011 as a field, we might be extracting a BLKmode value from
7012 an integer-mode (e.g., SImode) object. Handle this case
7013 by doing the extract into an object as wide as the field
7014 (which we know to be the width of a basic mode), then
7015 storing into memory, and changing the mode to BLKmode. */
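/* An illustrative sketch (not from the original sources): given
     union u { int whole; struct { char c[3]; } part; } x;
   the COMPONENT_REF x.part has BLKmode while x itself may be held in an
   SImode register, so the bits are extracted in SImode, spilled to a
   stack temporary, and the temporary is then accessed in BLKmode.  */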
7016 if (mode1 == VOIDmode
7017 || REG_P (op0) || GET_CODE (op0) == SUBREG
7018 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7019 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7020 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7021 && modifier != EXPAND_CONST_ADDRESS
7022 && modifier != EXPAND_INITIALIZER)
7023 /* If the field isn't aligned enough to fetch as a memref,
7024 fetch it as a bit field. */
7025 || (mode1 != BLKmode
7026 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7027 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7028 || (MEM_P (op0)
7029 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7030 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7031 && ((modifier == EXPAND_CONST_ADDRESS
7032 || modifier == EXPAND_INITIALIZER)
7033 ? STRICT_ALIGNMENT
7034 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7035 || (bitpos % BITS_PER_UNIT != 0)))
7036 /* If the type and the field are a constant size and the
7037 size of the type isn't the same as the size of the bitfield,
7038 we must use bitfield operations. */
7039 || (bitsize >= 0
7040 && TYPE_SIZE (TREE_TYPE (exp))
7041 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7042 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7043 bitsize)))
7044 {
7045 enum machine_mode ext_mode = mode;
7046
7047 if (ext_mode == BLKmode
7048 && ! (target != 0 && MEM_P (op0)
7049 && MEM_P (target)
7050 && bitpos % BITS_PER_UNIT == 0))
7051 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7052
7053 if (ext_mode == BLKmode)
7054 {
7055 if (target == 0)
7056 target = assign_temp (type, 0, 1, 1);
7057
7058 if (bitsize == 0)
7059 return target;
7060
7061 /* In this case, BITPOS must start at a byte boundary and
7062 TARGET, if specified, must be a MEM. */
7063 gcc_assert (MEM_P (op0)
7064 && (!target || MEM_P (target))
7065 && !(bitpos % BITS_PER_UNIT));
7066
7067 emit_block_move (target,
7068 adjust_address (op0, VOIDmode,
7069 bitpos / BITS_PER_UNIT),
7070 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7071 / BITS_PER_UNIT),
7072 (modifier == EXPAND_STACK_PARM
7073 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7074
7075 return target;
7076 }
7077
7078 op0 = validize_mem (op0);
7079
7080 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7081 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7082
7083 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7084 (modifier == EXPAND_STACK_PARM
7085 ? NULL_RTX : target),
7086 ext_mode, ext_mode);
7087
7088 /* If the result is a record type and BITSIZE is narrower than
7089 the mode of OP0, an integral mode, and this is a big endian
7090 machine, we must put the field into the high-order bits. */
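/* Illustrative example: on a 32-bit big-endian target, an 8-bit field
   extracted into an SImode OP0 lands in the low-order bits and must be
   shifted left by 32 - 8 = 24 so that a later BLKmode store sees it in
   the leading byte of the record.  */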
7091 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7092 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7093 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7094 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7095 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7096 - bitsize),
7097 op0, 1);
7098
7099 /* If the result type is BLKmode, store the data into a temporary
7100 of the appropriate type, but with the mode corresponding to the
7101 mode for the data we have (op0's mode). It's tempting to make
7102 this a constant type, since we know it's only being stored once,
7103 but that can cause problems if we are taking the address of this
7104 COMPONENT_REF because the MEM of any reference via that address
7105 will have flags corresponding to the type, which will not
7106 necessarily be constant. */
7107 if (mode == BLKmode)
7108 {
7109 rtx new
7110 = assign_stack_temp_for_type
7111 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7112
7113 emit_move_insn (new, op0);
7114 op0 = copy_rtx (new);
7115 PUT_MODE (op0, BLKmode);
7116 set_mem_attributes (op0, exp, 1);
7117 }
7118
7119 return op0;
7120 }
7121
7122 /* If the result is BLKmode, use that to access the object
7123 now as well. */
7124 if (mode == BLKmode)
7125 mode1 = BLKmode;
7126
7127 /* Get a reference to just this component. */
7128 if (modifier == EXPAND_CONST_ADDRESS
7129 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7130 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7131 else
7132 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7133
7134 if (op0 == orig_op0)
7135 op0 = copy_rtx (op0);
7136
7137 set_mem_attributes (op0, exp, 0);
7138 if (REG_P (XEXP (op0, 0)))
7139 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7140
7141 MEM_VOLATILE_P (op0) |= volatilep;
7142 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7143 || modifier == EXPAND_CONST_ADDRESS
7144 || modifier == EXPAND_INITIALIZER)
7145 return op0;
7146 else if (target == 0)
7147 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7148
7149 convert_move (target, op0, unsignedp);
7150 return target;
7151 }
7152
7153 case OBJ_TYPE_REF:
7154 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7155
7156 case CALL_EXPR:
7157 /* Check for a built-in function. */
7158 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7159 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7160 == FUNCTION_DECL)
7161 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7162 {
7163 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7164 == BUILT_IN_FRONTEND)
7165 return lang_hooks.expand_expr (exp, original_target,
7166 tmode, modifier,
7167 alt_rtl);
7168 else
7169 return expand_builtin (exp, target, subtarget, tmode, ignore);
7170 }
7171
7172 return expand_call (exp, target, ignore);
7173
7174 case NON_LVALUE_EXPR:
7175 case NOP_EXPR:
7176 case CONVERT_EXPR:
7177 if (TREE_OPERAND (exp, 0) == error_mark_node)
7178 return const0_rtx;
7179
7180 if (TREE_CODE (type) == UNION_TYPE)
7181 {
7182 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7183
7184 /* If both input and output are BLKmode, this conversion isn't doing
7185 anything except possibly changing memory attributes. */
7186 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7187 {
7188 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7189 modifier);
7190
7191 result = copy_rtx (result);
7192 set_mem_attributes (result, exp, 0);
7193 return result;
7194 }
7195
7196 if (target == 0)
7197 {
7198 if (TYPE_MODE (type) != BLKmode)
7199 target = gen_reg_rtx (TYPE_MODE (type));
7200 else
7201 target = assign_temp (type, 0, 1, 1);
7202 }
7203
7204 if (MEM_P (target))
7205 /* Store data into beginning of memory target. */
7206 store_expr (TREE_OPERAND (exp, 0),
7207 adjust_address (target, TYPE_MODE (valtype), 0),
7208 modifier == EXPAND_STACK_PARM);
7209
7210 else
7211 {
7212 gcc_assert (REG_P (target));
7213
7214 /* Store this field into a union of the proper type. */
7215 store_field (target,
7216 MIN ((int_size_in_bytes (TREE_TYPE
7217 (TREE_OPERAND (exp, 0)))
7218 * BITS_PER_UNIT),
7219 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7220 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7221 type, 0);
7222 }
7223
7224 /* Return the entire union. */
7225 return target;
7226 }
7227
7228 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7229 {
7230 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7231 modifier);
7232
7233 /* If the signedness of the conversion differs and OP0 is
7234 a promoted SUBREG, clear that indication since we now
7235 have to do the proper extension. */
7236 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7237 && GET_CODE (op0) == SUBREG)
7238 SUBREG_PROMOTED_VAR_P (op0) = 0;
7239
7240 return REDUCE_BIT_FIELD (op0);
7241 }
7242
7243 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7244 if (GET_MODE (op0) == mode)
7245 ;
7246
7247 /* If OP0 is a constant, just convert it into the proper mode. */
7248 else if (CONSTANT_P (op0))
7249 {
7250 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7251 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7252
7253 if (modifier == EXPAND_INITIALIZER)
7254 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7255 subreg_lowpart_offset (mode,
7256 inner_mode));
7257 else
7258 op0 = convert_modes (mode, inner_mode, op0,
7259 TYPE_UNSIGNED (inner_type));
7260 }
7261
7262 else if (modifier == EXPAND_INITIALIZER)
7263 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7264
7265 else if (target == 0)
7266 op0 = convert_to_mode (mode, op0,
7267 TYPE_UNSIGNED (TREE_TYPE
7268 (TREE_OPERAND (exp, 0))));
7269 else
7270 {
7271 convert_move (target, op0,
7272 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7273 op0 = target;
7274 }
7275
7276 return REDUCE_BIT_FIELD (op0);
7277
7278 case VIEW_CONVERT_EXPR:
7279 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7280
7281 /* If the input and output modes are both the same, we are done.
7282 Otherwise, if neither mode is BLKmode and both are integral and within
7283 a word, we can use gen_lowpart. If neither is true, make sure the
7284 operand is in memory and convert the MEM to the new mode. */
7285 if (TYPE_MODE (type) == GET_MODE (op0))
7286 ;
7287 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7288 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7289 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7290 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7291 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7292 op0 = gen_lowpart (TYPE_MODE (type), op0);
7293 else if (!MEM_P (op0))
7294 {
7295 /* If the operand is not a MEM, force it into memory. Since we
7296 are going to be changing the mode of the MEM, don't call
7297 force_const_mem for constants because we don't allow pool
7298 constants to change mode. */
7299 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7300
7301 gcc_assert (!TREE_ADDRESSABLE (exp));
7302
7303 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7304 target
7305 = assign_stack_temp_for_type
7306 (TYPE_MODE (inner_type),
7307 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7308
7309 emit_move_insn (target, op0);
7310 op0 = target;
7311 }
7312
7313 /* At this point, OP0 is in the correct mode. If the output type is such
7314 that the operand is known to be aligned, indicate that it is.
7315 Otherwise, we need only be concerned about alignment for non-BLKmode
7316 results. */
7317 if (MEM_P (op0))
7318 {
7319 op0 = copy_rtx (op0);
7320
7321 if (TYPE_ALIGN_OK (type))
7322 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7323 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7324 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7325 {
7326 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7327 HOST_WIDE_INT temp_size
7328 = MAX (int_size_in_bytes (inner_type),
7329 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7330 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7331 temp_size, 0, type);
7332 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7333
7334 gcc_assert (!TREE_ADDRESSABLE (exp));
7335
7336 if (GET_MODE (op0) == BLKmode)
7337 emit_block_move (new_with_op0_mode, op0,
7338 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7339 (modifier == EXPAND_STACK_PARM
7340 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7341 else
7342 emit_move_insn (new_with_op0_mode, op0);
7343
7344 op0 = new;
7345 }
7346
7347 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7348 }
7349
7350 return op0;
7351
7352 case PLUS_EXPR:
7353 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7354 something else, make sure we add the register to the constant and
7355 then to the other thing. This case can occur during strength
7356 reduction and doing it this way will produce better code if the
7357 frame pointer or argument pointer is eliminated.
7358
7359 fold-const.c will ensure that the constant is always in the inner
7360 PLUS_EXPR, so the only case we need to do anything about is if
7361 sp, ap, or fp is our second argument, in which case we must swap
7362 the innermost first argument and our second argument. */
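/* Illustrative sketch: a tree of the form (X + C) + FP, where FP is a
   VAR_DECL whose DECL_RTL is the frame pointer, is rewritten below as
   (FP + C) + X, so that FP + C can be folded into a single offset once
   the frame pointer is eliminated.  */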
7363
7364 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7365 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7366 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7367 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7368 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7369 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7370 {
7371 tree t = TREE_OPERAND (exp, 1);
7372
7373 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7374 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7375 }
7376
7377 /* If the result is to be ptr_mode and we are adding an integer to
7378 something, we might be forming a constant. So try to use
7379 plus_constant. If it produces a sum and we can't accept it,
7380 use force_operand. This allows P = &ARR[const] to generate
7381 efficient code on machines where a SYMBOL_REF is not a valid
7382 address.
7383
7384 If this is an EXPAND_SUM call, always return the sum. */
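/* For instance (illustrative): for P = &ARR[3] with 4-byte elements,
   the address expands to a SYMBOL_REF and plus_constant folds the byte
   offset 12 into a single constant sum; if that sum is not a valid
   address on its own, force_operand legitimizes it below.  */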
7385 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7386 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7387 {
7388 if (modifier == EXPAND_STACK_PARM)
7389 target = 0;
7390 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7391 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7392 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7393 {
7394 rtx constant_part;
7395
7396 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7397 EXPAND_SUM);
7398 /* Use immed_double_const to ensure that the constant is
7399 truncated according to the mode of OP1, then sign extended
7400 to a HOST_WIDE_INT. Using the constant directly can result
7401 in non-canonical RTL in a 64x32 cross compile. */
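/* Concretely (illustrative): on a 64-bit host targeting 32 bits, the
   value 0xffffffff must be represented as (const_int -1) after
   sign-extension from SImode; using the tree's low word directly would
   yield the non-canonical (const_int 0xffffffff).  */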
7402 constant_part
7403 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7404 (HOST_WIDE_INT) 0,
7405 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7406 op1 = plus_constant (op1, INTVAL (constant_part));
7407 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7408 op1 = force_operand (op1, target);
7409 return REDUCE_BIT_FIELD (op1);
7410 }
7411
7412 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7413 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7414 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7415 {
7416 rtx constant_part;
7417
7418 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7419 (modifier == EXPAND_INITIALIZER
7420 ? EXPAND_INITIALIZER : EXPAND_SUM));
7421 if (! CONSTANT_P (op0))
7422 {
7423 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7424 VOIDmode, modifier);
7425 /* Return a PLUS if modifier says it's OK. */
7426 if (modifier == EXPAND_SUM
7427 || modifier == EXPAND_INITIALIZER)
7428 return simplify_gen_binary (PLUS, mode, op0, op1);
7429 goto binop2;
7430 }
7431 /* Use immed_double_const to ensure that the constant is
7432 truncated according to the mode of OP1, then sign extended
7433 to a HOST_WIDE_INT. Using the constant directly can result
7434 in non-canonical RTL in a 64x32 cross compile. */
7435 constant_part
7436 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7437 (HOST_WIDE_INT) 0,
7438 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7439 op0 = plus_constant (op0, INTVAL (constant_part));
7440 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7441 op0 = force_operand (op0, target);
7442 return REDUCE_BIT_FIELD (op0);
7443 }
7444 }
7445
7446 /* No sense saving up arithmetic to be done
7447 if it's all in the wrong mode to form part of an address.
7448 And force_operand won't know whether to sign-extend or
7449 zero-extend. */
7450 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7451 || mode != ptr_mode)
7452 {
7453 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7454 subtarget, &op0, &op1, 0);
7455 if (op0 == const0_rtx)
7456 return op1;
7457 if (op1 == const0_rtx)
7458 return op0;
7459 goto binop2;
7460 }
7461
7462 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7463 subtarget, &op0, &op1, modifier);
7464 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7465
7466 case MINUS_EXPR:
7467 /* For initializers, we are allowed to return a MINUS of two
7468 symbolic constants. Here we handle all cases when both operands
7469 are constant. */
7470 /* Handle difference of two symbolic constants,
7471 for the sake of an initializer. */
7472 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7473 && really_constant_p (TREE_OPERAND (exp, 0))
7474 && really_constant_p (TREE_OPERAND (exp, 1)))
7475 {
7476 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7477 NULL_RTX, &op0, &op1, modifier);
7478
7479 /* If the last operand is a CONST_INT, use plus_constant of
7480 the negated constant. Else make the MINUS. */
7481 if (GET_CODE (op1) == CONST_INT)
7482 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7483 else
7484 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7485 }
7486
7487 /* No sense saving up arithmetic to be done
7488 if it's all in the wrong mode to form part of an address.
7489 And force_operand won't know whether to sign-extend or
7490 zero-extend. */
7491 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7492 || mode != ptr_mode)
7493 goto binop;
7494
7495 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7496 subtarget, &op0, &op1, modifier);
7497
7498 /* Convert A - const to A + (-const). */
7499 if (GET_CODE (op1) == CONST_INT)
7500 {
7501 op1 = negate_rtx (mode, op1);
7502 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7503 }
7504
7505 goto binop2;
7506
7507 case MULT_EXPR:
7508 /* If first operand is constant, swap them.
7509 Thus the following special case checks need only
7510 check the second operand. */
7511 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7512 {
7513 tree t1 = TREE_OPERAND (exp, 0);
7514 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7515 TREE_OPERAND (exp, 1) = t1;
7516 }
7517
7518 /* Attempt to return something suitable for generating an
7519 indexed address, for machines that support that. */
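/* For example (illustrative): when the i * 4 term of an address such as
   p + i * 4 is expanded under EXPAND_SUM, returning
   (mult (reg I) (const_int 4)) lets the caller fold it into an indexed
   address like (plus (reg P) (mult (reg I) (const_int 4))) on targets
   with scaled addressing modes.  */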
7520
7521 if (modifier == EXPAND_SUM && mode == ptr_mode
7522 && host_integerp (TREE_OPERAND (exp, 1), 0))
7523 {
7524 tree exp1 = TREE_OPERAND (exp, 1);
7525
7526 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7527 EXPAND_SUM);
7528
7529 if (!REG_P (op0))
7530 op0 = force_operand (op0, NULL_RTX);
7531 if (!REG_P (op0))
7532 op0 = copy_to_mode_reg (mode, op0);
7533
7534 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7535 gen_int_mode (tree_low_cst (exp1, 0),
7536 TYPE_MODE (TREE_TYPE (exp1)))));
7537 }
7538
7539 if (modifier == EXPAND_STACK_PARM)
7540 target = 0;
7541
7542 /* Check for multiplying things that have been extended
7543 from a narrower type. If this machine supports multiplying
7544 in that narrower type with a result in the desired type,
7545 do it that way, and avoid the explicit type-conversion. */
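/* For example (illustrative): with `short a, b;', the expression
   (int) a * (int) b can use a widening HImode x HImode -> SImode
   multiply pattern (mulhisi3 or umulhisi3, where the target provides
   one) instead of extending both operands to SImode first.  */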
7546 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7547 && TREE_CODE (type) == INTEGER_TYPE
7548 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7549 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7550 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7551 && int_fits_type_p (TREE_OPERAND (exp, 1),
7552 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7553 /* Don't use a widening multiply if a shift will do. */
7554 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7555 > HOST_BITS_PER_WIDE_INT)
7556 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7557 ||
7558 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7559 && (TYPE_PRECISION (TREE_TYPE
7560 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7561 == TYPE_PRECISION (TREE_TYPE
7562 (TREE_OPERAND
7563 (TREE_OPERAND (exp, 0), 0))))
7564 /* If both operands are extended, they must either both
7565 be zero-extended or both be sign-extended. */
7566 && (TYPE_UNSIGNED (TREE_TYPE
7567 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7568 == TYPE_UNSIGNED (TREE_TYPE
7569 (TREE_OPERAND
7570 (TREE_OPERAND (exp, 0), 0)))))))
7571 {
7572 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7573 enum machine_mode innermode = TYPE_MODE (op0type);
7574 bool zextend_p = TYPE_UNSIGNED (op0type);
7575 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7576 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7577
7578 if (mode == GET_MODE_WIDER_MODE (innermode))
7579 {
7580 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7581 {
7582 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7583 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7584 TREE_OPERAND (exp, 1),
7585 NULL_RTX, &op0, &op1, 0);
7586 else
7587 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7588 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7589 NULL_RTX, &op0, &op1, 0);
7590 goto binop3;
7591 }
7592 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7593 && innermode == word_mode)
7594 {
7595 rtx htem, hipart;
7596 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7597 NULL_RTX, VOIDmode, 0);
7598 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7599 op1 = convert_modes (innermode, mode,
7600 expand_expr (TREE_OPERAND (exp, 1),
7601 NULL_RTX, VOIDmode, 0),
7602 unsignedp);
7603 else
7604 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7605 NULL_RTX, VOIDmode, 0);
7606 temp = expand_binop (mode, other_optab, op0, op1, target,
7607 unsignedp, OPTAB_LIB_WIDEN);
7608 hipart = gen_highpart (innermode, temp);
7609 htem = expand_mult_highpart_adjust (innermode, hipart,
7610 op0, op1, hipart,
7611 zextend_p);
7612 if (htem != hipart)
7613 emit_move_insn (hipart, htem);
7614 return REDUCE_BIT_FIELD (temp);
7615 }
7616 }
7617 }
7618 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7619 subtarget, &op0, &op1, 0);
7620 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7621
7622 case TRUNC_DIV_EXPR:
7623 case FLOOR_DIV_EXPR:
7624 case CEIL_DIV_EXPR:
7625 case ROUND_DIV_EXPR:
7626 case EXACT_DIV_EXPR:
7627 if (modifier == EXPAND_STACK_PARM)
7628 target = 0;
7629 /* Possible optimization: compute the dividend with EXPAND_SUM
7630 then if the divisor is constant we can optimize the case
7631 where some terms of the dividend have coeffs divisible by it. */
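/* E.g. (illustrative) for (4 * i + 8) / 4, keeping the dividend as a
   sum would let a constant divisor of 4 reduce the whole expression
   to i + 2.  */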
7632 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7633 subtarget, &op0, &op1, 0);
7634 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7635
7636 case RDIV_EXPR:
7637 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal, saving
7638 an expensive divide. If not, combine will rebuild the original
7639 computation. */
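/* E.g. (illustrative) x / b + y / b becomes x * (1/b) + y * (1/b), so
   CSE can compute 1/b once and replace two divisions with one division
   and two multiplications.  */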
7640 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7641 && TREE_CODE (type) == REAL_TYPE
7642 && !real_onep (TREE_OPERAND (exp, 0)))
7643 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7644 build2 (RDIV_EXPR, type,
7645 build_real (type, dconst1),
7646 TREE_OPERAND (exp, 1))),
7647 target, tmode, modifier);
7648
7649 goto binop;
7650
7651 case TRUNC_MOD_EXPR:
7652 case FLOOR_MOD_EXPR:
7653 case CEIL_MOD_EXPR:
7654 case ROUND_MOD_EXPR:
7655 if (modifier == EXPAND_STACK_PARM)
7656 target = 0;
7657 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7658 subtarget, &op0, &op1, 0);
7659 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7660
7661 case FIX_ROUND_EXPR:
7662 case FIX_FLOOR_EXPR:
7663 case FIX_CEIL_EXPR:
7664 gcc_unreachable (); /* Not used for C. */
7665
7666 case FIX_TRUNC_EXPR:
7667 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7668 if (target == 0 || modifier == EXPAND_STACK_PARM)
7669 target = gen_reg_rtx (mode);
7670 expand_fix (target, op0, unsignedp);
7671 return target;
7672
7673 case FLOAT_EXPR:
7674 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7675 if (target == 0 || modifier == EXPAND_STACK_PARM)
7676 target = gen_reg_rtx (mode);
7677 /* expand_float can't figure out what to do if FROM has VOIDmode.
7678 So give it the correct mode. With -O, cse will optimize this. */
7679 if (GET_MODE (op0) == VOIDmode)
7680 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7681 op0);
7682 expand_float (target, op0,
7683 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7684 return target;
7685
7686 case NEGATE_EXPR:
7687 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7688 if (modifier == EXPAND_STACK_PARM)
7689 target = 0;
7690 temp = expand_unop (mode,
7691 optab_for_tree_code (NEGATE_EXPR, type),
7692 op0, target, 0);
7693 gcc_assert (temp);
7694 return REDUCE_BIT_FIELD (temp);
7695
7696 case ABS_EXPR:
7697 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7698 if (modifier == EXPAND_STACK_PARM)
7699 target = 0;
7700
7701 /* ABS_EXPR is not valid for complex arguments. */
7702 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7703 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7704
7705 /* Unsigned abs is simply the operand. Testing here means we don't
7706 risk generating incorrect code below. */
7707 if (TYPE_UNSIGNED (type))
7708 return op0;
7709
7710 return expand_abs (mode, op0, target, unsignedp,
7711 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7712
7713 case MAX_EXPR:
7714 case MIN_EXPR:
7715 target = original_target;
7716 if (target == 0
7717 || modifier == EXPAND_STACK_PARM
7718 || (MEM_P (target) && MEM_VOLATILE_P (target))
7719 || GET_MODE (target) != mode
7720 || (REG_P (target)
7721 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7722 target = gen_reg_rtx (mode);
7723 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7724 target, &op0, &op1, 0);
7725
7726 /* First try to do it with a special MIN or MAX instruction.
7727 If that does not win, use a conditional jump to select the proper
7728 value. */
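/* Roughly (illustrative), the fallback below emits, for MAX_EXPR:
       target = op0;
       if (target >= op1) goto done;
       target = op1;
     done:
   and the mirror image with <= for MIN_EXPR.  */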
7729 this_optab = optab_for_tree_code (code, type);
7730 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7731 OPTAB_WIDEN);
7732 if (temp != 0)
7733 return temp;
7734
7735 /* At this point, a MEM target is no longer useful; we will get better
7736 code without it. */
7737
7738 if (MEM_P (target))
7739 target = gen_reg_rtx (mode);
7740
7741 /* If op1 was placed in target, swap op0 and op1. */
7742 if (target != op0 && target == op1)
7743 {
7744 rtx tem = op0;
7745 op0 = op1;
7746 op1 = tem;
7747 }
7748
7749 if (target != op0)
7750 emit_move_insn (target, op0);
7751
7752 op0 = gen_label_rtx ();
7753
7754 /* If this mode is an integer too wide to compare properly,
7755 compare word by word. Rely on cse to optimize constant cases. */
7756 if (GET_MODE_CLASS (mode) == MODE_INT
7757 && ! can_compare_p (GE, mode, ccp_jump))
7758 {
7759 if (code == MAX_EXPR)
7760 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7761 NULL_RTX, op0);
7762 else
7763 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7764 NULL_RTX, op0);
7765 }
7766 else
7767 {
7768 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7769 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7770 }
7771 emit_move_insn (target, op1);
7772 emit_label (op0);
7773 return target;
7774
7775 case BIT_NOT_EXPR:
7776 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7777 if (modifier == EXPAND_STACK_PARM)
7778 target = 0;
7779 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7780 gcc_assert (temp);
7781 return temp;
7782
7783 /* ??? Can optimize bitwise operations with one arg constant.
7784 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7785 and (a bitwise1 b) bitwise2 b (etc)
7786 but that is probably not worth while. */
7787
7788 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7789 boolean values when we want in all cases to compute both of them. In
7790 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7791 as actual zero-or-1 values and then bitwise anding. In cases where
7792 there cannot be any side effects, better code would be made by
7793 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7794 how to recognize those cases. */
7795
7796 case TRUTH_AND_EXPR:
7797 code = BIT_AND_EXPR;
7798 case BIT_AND_EXPR:
7799 goto binop;
7800
7801 case TRUTH_OR_EXPR:
7802 code = BIT_IOR_EXPR;
7803 case BIT_IOR_EXPR:
7804 goto binop;
7805
7806 case TRUTH_XOR_EXPR:
7807 code = BIT_XOR_EXPR;
7808 case BIT_XOR_EXPR:
7809 goto binop;
7810
7811 case LSHIFT_EXPR:
7812 case RSHIFT_EXPR:
7813 case LROTATE_EXPR:
7814 case RROTATE_EXPR:
7815 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7816 subtarget = 0;
7817 if (modifier == EXPAND_STACK_PARM)
7818 target = 0;
7819 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7820 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7821 unsignedp);
7822
7823 /* Could determine the answer when only additive constants differ. Also,
7824 the addition of one can be handled by changing the condition. */
7825 case LT_EXPR:
7826 case LE_EXPR:
7827 case GT_EXPR:
7828 case GE_EXPR:
7829 case EQ_EXPR:
7830 case NE_EXPR:
7831 case UNORDERED_EXPR:
7832 case ORDERED_EXPR:
7833 case UNLT_EXPR:
7834 case UNLE_EXPR:
7835 case UNGT_EXPR:
7836 case UNGE_EXPR:
7837 case UNEQ_EXPR:
7838 case LTGT_EXPR:
7839 temp = do_store_flag (exp,
7840 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7841 tmode != VOIDmode ? tmode : mode, 0);
7842 if (temp != 0)
7843 return temp;
7844
7845 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7846 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7847 && original_target
7848 && REG_P (original_target)
7849 && (GET_MODE (original_target)
7850 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7851 {
7852 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7853 VOIDmode, 0);
7854
7855 /* If temp is constant, we can just compute the result. */
7856 if (GET_CODE (temp) == CONST_INT)
7857 {
7858 if (INTVAL (temp) != 0)
7859 emit_move_insn (target, const1_rtx);
7860 else
7861 emit_move_insn (target, const0_rtx);
7862
7863 return target;
7864 }
7865
7866 if (temp != original_target)
7867 {
7868 enum machine_mode mode1 = GET_MODE (temp);
7869 if (mode1 == VOIDmode)
7870 mode1 = tmode != VOIDmode ? tmode : mode;
7871
7872 temp = copy_to_mode_reg (mode1, temp);
7873 }
7874
7875 op1 = gen_label_rtx ();
7876 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7877 GET_MODE (temp), unsignedp, op1);
7878 emit_move_insn (temp, const1_rtx);
7879 emit_label (op1);
7880 return temp;
7881 }
7882
7883 /* If no set-flag instruction, must generate a conditional store
7884 into a temporary variable. Drop through and handle this
7885 like && and ||. */
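/* Roughly (illustrative), the fallback emits:
       target = 0;
       if (!(exp)) goto over;
       target = 1;
     over:
   so TARGET ends up holding the zero-or-one value of EXP.  */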
7886
7887 if (! ignore
7888 && (target == 0
7889 || modifier == EXPAND_STACK_PARM
7890 || ! safe_from_p (target, exp, 1)
7891 /* Make sure we don't have a hard reg (such as the function's return
7892 value) live across basic blocks, if not optimizing. */
7893 || (!optimize && REG_P (target)
7894 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7895 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7896
7897 if (target)
7898 emit_move_insn (target, const0_rtx);
7899
7900 op1 = gen_label_rtx ();
7901 jumpifnot (exp, op1);
7902
7903 if (target)
7904 emit_move_insn (target, const1_rtx);
7905
7906 emit_label (op1);
7907 return ignore ? const0_rtx : target;
7908
7909 case TRUTH_NOT_EXPR:
7910 if (modifier == EXPAND_STACK_PARM)
7911 target = 0;
7912 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7913 /* The parser is careful to generate TRUTH_NOT_EXPR
7914 only with operands that are always zero or one. */
7915 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7916 target, 1, OPTAB_LIB_WIDEN);
7917 gcc_assert (temp);
7918 return temp;
7919
7920 case STATEMENT_LIST:
7921 {
7922 tree_stmt_iterator iter;
7923
7924 gcc_assert (ignore);
7925
7926 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7927 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7928 }
7929 return const0_rtx;
7930
7931 case COND_EXPR:
7932 /* If it's void, we don't need to worry about computing a value. */
7933 if (VOID_TYPE_P (TREE_TYPE (exp)))
7934 {
7935 tree pred = TREE_OPERAND (exp, 0);
7936 tree then_ = TREE_OPERAND (exp, 1);
7937 tree else_ = TREE_OPERAND (exp, 2);
7938
7939 gcc_assert (TREE_CODE (then_) == GOTO_EXPR
7940 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL
7941 && TREE_CODE (else_) == GOTO_EXPR
7942 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL);
7943
7944 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
7945 return expand_expr (else_, const0_rtx, VOIDmode, 0);
7946 }
7947
7948 /* Note that COND_EXPRs whose type is a structure or union
7949 are required to be constructed to contain assignments to
7950 a temporary variable, so that we can evaluate them here
7951 for side effects only. If type is void, we must do likewise. */
7952
7953 gcc_assert (!TREE_ADDRESSABLE (type)
7954 && !ignore
7955 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
7956 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
7957
7958 /* If we are not to produce a result, we have no target. Otherwise,
7959 if a target was specified use it; it will not be used as an
7960 intermediate target unless it is safe. If no target, use a
7961 temporary. */
7962
7963 if (modifier != EXPAND_STACK_PARM
7964 && original_target
7965 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7966 && GET_MODE (original_target) == mode
7967 #ifdef HAVE_conditional_move
7968 && (! can_conditionally_move_p (mode)
7969 || REG_P (original_target))
7970 #endif
7971 && !MEM_P (original_target))
7972 temp = original_target;
7973 else
7974 temp = assign_temp (type, 0, 0, 1);
7975
7976 do_pending_stack_adjust ();
7977 NO_DEFER_POP;
7978 op0 = gen_label_rtx ();
7979 op1 = gen_label_rtx ();
7980 jumpifnot (TREE_OPERAND (exp, 0), op0);
7981 store_expr (TREE_OPERAND (exp, 1), temp,
7982 modifier == EXPAND_STACK_PARM);
7983
7984 emit_jump_insn (gen_jump (op1));
7985 emit_barrier ();
7986 emit_label (op0);
7987 store_expr (TREE_OPERAND (exp, 2), temp,
7988 modifier == EXPAND_STACK_PARM);
7989
7990 emit_label (op1);
7991 OK_DEFER_POP;
7992 return temp;
7993
7994 case VEC_COND_EXPR:
7995 target = expand_vec_cond_expr (exp, target);
7996 return target;
7997
7998 case MODIFY_EXPR:
7999 {
8000 tree lhs = TREE_OPERAND (exp, 0);
8001 tree rhs = TREE_OPERAND (exp, 1);
8002
8003 gcc_assert (ignore);
8004
8005 /* Check for |= or &= of a bitfield of size 1 into another bitfield
8006 of size 1. In this case (unless we need the result of the
8007 assignment), we can do this more efficiently with a
8008 test followed by an assignment, if necessary.
8009
8010 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8011 things change so we do, this code should be enhanced to
8012 support it. */
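/* For example (illustrative): for `s.a |= s.b;' with 1-bit fields a
   and b, the code below emits roughly `if (s.b) s.a = 1;', and for
   `s.a &= s.b;' roughly `if (!s.b) s.a = 0;', instead of a full
   read-modify-write of the destination bitfield.  */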
8013 if (TREE_CODE (lhs) == COMPONENT_REF
8014 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8015 || TREE_CODE (rhs) == BIT_AND_EXPR)
8016 && TREE_OPERAND (rhs, 0) == lhs
8017 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8018 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8019 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8020 {
8021 rtx label = gen_label_rtx ();
8022
8023 do_jump (TREE_OPERAND (rhs, 1),
8024 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8025 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8026 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8027 (TREE_CODE (rhs) == BIT_IOR_EXPR
8028 ? integer_one_node
8029 : integer_zero_node)));
8030 do_pending_stack_adjust ();
8031 emit_label (label);
8032 return const0_rtx;
8033 }
8034
8035 expand_assignment (lhs, rhs);
8036
8037 return const0_rtx;
8038 }
8039
8040 case RETURN_EXPR:
8041 if (!TREE_OPERAND (exp, 0))
8042 expand_null_return ();
8043 else
8044 expand_return (TREE_OPERAND (exp, 0));
8045 return const0_rtx;
8046
8047 case ADDR_EXPR:
8048 return expand_expr_addr_expr (exp, target, tmode, modifier);
8049
8050 /* COMPLEX type for Extended Pascal & Fortran */
8051 case COMPLEX_EXPR:
8052 {
8053 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8054 rtx insns;
8055
8056 /* Get the rtx for the operands. */
8057 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8058 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8059
8060 if (! target)
8061 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8062
8063 start_sequence ();
8064
8065 /* Move the real (op0) and imaginary (op1) parts to their location. */
8066 emit_move_insn (gen_realpart (mode, target), op0);
8067 emit_move_insn (gen_imagpart (mode, target), op1);
8068
8069 insns = get_insns ();
8070 end_sequence ();
8071
8072 /* Complex construction should appear as a single unit. */
8073 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8074 each with a separate pseudo as destination.
8075 It's not correct for flow to treat them as a unit. */
8076 if (GET_CODE (target) != CONCAT)
8077 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8078 else
8079 emit_insn (insns);
8080
8081 return target;
8082 }
8083
8084 case REALPART_EXPR:
8085 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8086 return gen_realpart (mode, op0);
8087
8088 case IMAGPART_EXPR:
8089 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8090 return gen_imagpart (mode, op0);
8091
8092 case RESX_EXPR:
8093 expand_resx_expr (exp);
8094 return const0_rtx;
8095
8096 case TRY_CATCH_EXPR:
8097 case CATCH_EXPR:
8098 case EH_FILTER_EXPR:
8099 case TRY_FINALLY_EXPR:
8100 /* Lowered by tree-eh.c. */
8101 gcc_unreachable ();
8102
8103 case WITH_CLEANUP_EXPR:
8104 case CLEANUP_POINT_EXPR:
8105 case TARGET_EXPR:
8106 case CASE_LABEL_EXPR:
8107 case VA_ARG_EXPR:
8108 case BIND_EXPR:
8109 case INIT_EXPR:
8110 case CONJ_EXPR:
8111 case COMPOUND_EXPR:
8112 case PREINCREMENT_EXPR:
8113 case PREDECREMENT_EXPR:
8114 case POSTINCREMENT_EXPR:
8115 case POSTDECREMENT_EXPR:
8116 case LOOP_EXPR:
8117 case EXIT_EXPR:
8118 case TRUTH_ANDIF_EXPR:
8119 case TRUTH_ORIF_EXPR:
8120 /* Lowered by gimplify.c. */
8121 gcc_unreachable ();
8122
8123 case EXC_PTR_EXPR:
8124 return get_exception_pointer (cfun);
8125
8126 case FILTER_EXPR:
8127 return get_exception_filter (cfun);
8128
8129 case FDESC_EXPR:
8130 /* Function descriptors are not valid except as
8131 initialization constants, and should not be expanded. */
8132 gcc_unreachable ();
8133
8134 case SWITCH_EXPR:
8135 expand_case (exp);
8136 return const0_rtx;
8137
8138 case LABEL_EXPR:
8139 expand_label (TREE_OPERAND (exp, 0));
8140 return const0_rtx;
8141
8142 case ASM_EXPR:
8143 expand_asm_expr (exp);
8144 return const0_rtx;
8145
8146 case WITH_SIZE_EXPR:
8147 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8148 have pulled out the size to use in whatever context it needed. */
8149 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8150 modifier, alt_rtl);
8151
8152 case REALIGN_LOAD_EXPR:
8153 {
8154 tree oprnd0 = TREE_OPERAND (exp, 0);
8155 tree oprnd1 = TREE_OPERAND (exp, 1);
8156 tree oprnd2 = TREE_OPERAND (exp, 2);
8157 rtx op2;
8158
8159 this_optab = optab_for_tree_code (code, type);
8160 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8161 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8162 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8163 target, unsignedp);
8164 gcc_assert (temp);
8166 return temp;
8167 }
8168
8169
8170 default:
8171 return lang_hooks.expand_expr (exp, original_target, tmode,
8172 modifier, alt_rtl);
8173 }
8174
8175 /* Here to do an ordinary binary operator. */
8176 binop:
8177 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8178 subtarget, &op0, &op1, 0);
8179 binop2:
8180 this_optab = optab_for_tree_code (code, type);
8181 binop3:
8182 if (modifier == EXPAND_STACK_PARM)
8183 target = 0;
8184 temp = expand_binop (mode, this_optab, op0, op1, target,
8185 unsignedp, OPTAB_LIB_WIDEN);
8186 gcc_assert (temp);
8187 return REDUCE_BIT_FIELD (temp);
8188 }
8189 #undef REDUCE_BIT_FIELD
8190 \f
8191 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8192 signedness of TYPE), possibly returning the result in TARGET. */
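/* A worked example (illustrative): for a 5-bit field held in SImode,
   the unsigned case below masks with (1 << 5) - 1 = 0x1f, while the
   signed case shifts left by 32 - 5 = 27 and then arithmetically
   shifts right by 27, replicating bit 4 as the sign bit.  */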
8193 static rtx
8194 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8195 {
8196 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8197 if (target && GET_MODE (target) != GET_MODE (exp))
8198 target = 0;
8199 if (TYPE_UNSIGNED (type))
8200 {
8201 rtx mask;
8202 if (prec < HOST_BITS_PER_WIDE_INT)
8203 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8204 GET_MODE (exp));
8205 else
8206 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8207 ((unsigned HOST_WIDE_INT) 1
8208 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8209 GET_MODE (exp));
8210 return expand_and (GET_MODE (exp), exp, mask, target);
8211 }
8212 else
8213 {
8214 tree count = build_int_cst (NULL_TREE,
8215 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8216 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8217 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8218 }
8219 }
8220 \f
8221 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8222 when applied to the address of EXP produces an address known to be
8223 aligned more than BIGGEST_ALIGNMENT. */
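/* For instance (illustrative), an OFFSET of the form
     (- (sizetype) &EXP) & (N - 1)
   for a power of two N exceeding BIGGEST_ALIGNMENT / BITS_PER_UNIT
   bytes makes &EXP + OFFSET the address of EXP rounded up to the next
   N-byte boundary, so the sum is known to be N-aligned.  */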
8224
8225 static int
8226 is_aligning_offset (tree offset, tree exp)
8227 {
8228 /* Strip off any conversions. */
8229 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8230 || TREE_CODE (offset) == NOP_EXPR
8231 || TREE_CODE (offset) == CONVERT_EXPR)
8232 offset = TREE_OPERAND (offset, 0);
8233
8234 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8235 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8236 if (TREE_CODE (offset) != BIT_AND_EXPR
8237 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8238 || compare_tree_int (TREE_OPERAND (offset, 1),
8239 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8240 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0
8241 return 0;
8242
8243 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8244 It must be NEGATE_EXPR. Then strip any more conversions. */
8245 offset = TREE_OPERAND (offset, 0);
8246 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8247 || TREE_CODE (offset) == NOP_EXPR
8248 || TREE_CODE (offset) == CONVERT_EXPR)
8249 offset = TREE_OPERAND (offset, 0);
8250
8251 if (TREE_CODE (offset) != NEGATE_EXPR)
8252 return 0;
8253
8254 offset = TREE_OPERAND (offset, 0);
8255 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8256 || TREE_CODE (offset) == NOP_EXPR
8257 || TREE_CODE (offset) == CONVERT_EXPR)
8258 offset = TREE_OPERAND (offset, 0);
8259
8260 /* This must now be the address of EXP. */
8261 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8262 }
8263 \f
8264 /* Return the tree node if ARG corresponds to a string constant or zero
8265 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8266 in bytes within the string that ARG is accessing. The type of the
8267 offset will be `sizetype'. */
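/* For example (illustrative): for an ARG of the form "hello" + i this
   returns the STRING_CST "hello" and sets *PTR_OFFSET to i, and for
   &"hello"[2] it returns the STRING_CST with *PTR_OFFSET set to 2.  */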
8268
8269 tree
8270 string_constant (tree arg, tree *ptr_offset)
8271 {
8272 tree array, offset;
8273 STRIP_NOPS (arg);
8274
8275 if (TREE_CODE (arg) == ADDR_EXPR)
8276 {
8277 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8278 {
8279 *ptr_offset = size_zero_node;
8280 return TREE_OPERAND (arg, 0);
8281 }
8282 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8283 {
8284 array = TREE_OPERAND (arg, 0);
8285 offset = size_zero_node;
8286 }
8287 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8288 {
8289 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8290 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8291 if (TREE_CODE (array) != STRING_CST
8292 && TREE_CODE (array) != VAR_DECL)
8293 return 0;
8294 }
8295 else
8296 return 0;
8297 }
8298 else if (TREE_CODE (arg) == PLUS_EXPR)
8299 {
8300 tree arg0 = TREE_OPERAND (arg, 0);
8301 tree arg1 = TREE_OPERAND (arg, 1);
8302
8303 STRIP_NOPS (arg0);
8304 STRIP_NOPS (arg1);
8305
8306 if (TREE_CODE (arg0) == ADDR_EXPR
8307 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8308 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8309 {
8310 array = TREE_OPERAND (arg0, 0);
8311 offset = arg1;
8312 }
8313 else if (TREE_CODE (arg1) == ADDR_EXPR
8314 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8315 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8316 {
8317 array = TREE_OPERAND (arg1, 0);
8318 offset = arg0;
8319 }
8320 else
8321 return 0;
8322 }
8323 else
8324 return 0;
8325
8326 if (TREE_CODE (array) == STRING_CST)
8327 {
8328 *ptr_offset = convert (sizetype, offset);
8329 return array;
8330 }
8331 else if (TREE_CODE (array) == VAR_DECL)
8332 {
8333 int length;
8334
8335 /* Variables initialized to string literals can be handled too. */
8336 if (DECL_INITIAL (array) == NULL_TREE
8337 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8338 return 0;
8339
8340 /* The variable must be read-only, have no side effects, and bind locally. */
8341 if (! TREE_READONLY (array)
8342 || TREE_SIDE_EFFECTS (array)
8343 || ! targetm.binds_local_p (array))
8344 return 0;
8345
8346 /* Avoid const char foo[4] = "abcde"; */
8347 if (DECL_SIZE_UNIT (array) == NULL_TREE
8348 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8349 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8350 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8351 return 0;
8352
8353 /* If the variable is bigger than the string literal, OFFSET must be constant
8354 and within the bounds of the string literal. */
8355 offset = convert (sizetype, offset);
8356 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8357 && (! host_integerp (offset, 1)
8358 || compare_tree_int (offset, length) >= 0))
8359 return 0;
8360
8361 *ptr_offset = offset;
8362 return DECL_INITIAL (array);
8363 }
8364
8365 return 0;
8366 }
8367 \f
8368 /* Generate code to calculate EXP using a store-flag instruction
8369 and return an rtx for the result. EXP is either a comparison
8370 or a TRUTH_NOT_EXPR whose operand is a comparison.
8371
8372 If TARGET is nonzero, store the result there if convenient.
8373
8374 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8375 cheap.
8376
8377 Return zero if there is no suitable set-flag instruction
8378 available on this machine.
8379
8380 Once expand_expr has been called on the arguments of the comparison,
8381 we are committed to doing the store flag, since it is not safe to
8382 re-evaluate the expression. We emit the store-flag insn by calling
8383 emit_store_flag, but only expand the arguments if we have a reason
8384 to believe that emit_store_flag will be successful. If we think that
8385 it will, but it isn't, we have to simulate the store-flag with a
8386 set/jump/set sequence. */
8387
8388 static rtx
8389 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8390 {
8391 enum rtx_code code;
8392 tree arg0, arg1, type;
8393 tree tem;
8394 enum machine_mode operand_mode;
8395 int invert = 0;
8396 int unsignedp;
8397 rtx op0, op1;
8398 enum insn_code icode;
8399 rtx subtarget = target;
8400 rtx result, label;
8401
8402 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8403 result at the end. We can't simply invert the test since it would
8404 have already been inverted if it were valid. This case occurs for
8405 some floating-point comparisons. */
8406
8407 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8408 invert = 1, exp = TREE_OPERAND (exp, 0);
8409
8410 arg0 = TREE_OPERAND (exp, 0);
8411 arg1 = TREE_OPERAND (exp, 1);
8412
8413 /* Don't crash if the comparison was erroneous. */
8414 if (arg0 == error_mark_node || arg1 == error_mark_node)
8415 return const0_rtx;
8416
8417 type = TREE_TYPE (arg0);
8418 operand_mode = TYPE_MODE (type);
8419 unsignedp = TYPE_UNSIGNED (type);
8420
8421 /* We won't bother with BLKmode store-flag operations because it would mean
8422 passing a lot of information to emit_store_flag. */
8423 if (operand_mode == BLKmode)
8424 return 0;
8425
8426 /* We won't bother with store-flag operations involving function pointers
8427 when function pointers must be canonicalized before comparisons. */
8428 #ifdef HAVE_canonicalize_funcptr_for_compare
8429 if (HAVE_canonicalize_funcptr_for_compare
8430 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8431 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8432 == FUNCTION_TYPE))
8433 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8434 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8435 == FUNCTION_TYPE))))
8436 return 0;
8437 #endif
8438
8439 STRIP_NOPS (arg0);
8440 STRIP_NOPS (arg1);
8441
8442 /* Get the rtx comparison code to use. We know that EXP is a comparison
8443 operation of some type. Some comparisons against 1 and -1 can be
8444 converted to comparisons with zero. Do so here so that the tests
8445 below will be aware that we have a comparison with zero. These
8446 tests will not catch constants in the first operand, but constants
8447 are rarely passed as the first operand. */
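/* E.g. (illustrative) a signed X < 1 is turned below into X <= 0, and
   a signed X > -1 into X >= 0, so the remaining logic can treat them
   as comparisons against zero.  */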
8448
8449 switch (TREE_CODE (exp))
8450 {
8451 case EQ_EXPR:
8452 code = EQ;
8453 break;
8454 case NE_EXPR:
8455 code = NE;
8456 break;
8457 case LT_EXPR:
8458 if (integer_onep (arg1))
8459 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8460 else
8461 code = unsignedp ? LTU : LT;
8462 break;
8463 case LE_EXPR:
8464 if (! unsignedp && integer_all_onesp (arg1))
8465 arg1 = integer_zero_node, code = LT;
8466 else
8467 code = unsignedp ? LEU : LE;
8468 break;
8469 case GT_EXPR:
8470 if (! unsignedp && integer_all_onesp (arg1))
8471 arg1 = integer_zero_node, code = GE;
8472 else
8473 code = unsignedp ? GTU : GT;
8474 break;
8475 case GE_EXPR:
8476 if (integer_onep (arg1))
8477 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8478 else
8479 code = unsignedp ? GEU : GE;
8480 break;
8481
8482 case UNORDERED_EXPR:
8483 code = UNORDERED;
8484 break;
8485 case ORDERED_EXPR:
8486 code = ORDERED;
8487 break;
8488 case UNLT_EXPR:
8489 code = UNLT;
8490 break;
8491 case UNLE_EXPR:
8492 code = UNLE;
8493 break;
8494 case UNGT_EXPR:
8495 code = UNGT;
8496 break;
8497 case UNGE_EXPR:
8498 code = UNGE;
8499 break;
8500 case UNEQ_EXPR:
8501 code = UNEQ;
8502 break;
8503 case LTGT_EXPR:
8504 code = LTGT;
8505 break;
8506
8507 default:
8508 gcc_unreachable ();
8509 }
8510
8511 /* Put a constant second. */
8512 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8513 {
8514 tem = arg0; arg0 = arg1; arg1 = tem;
8515 code = swap_condition (code);
8516 }
8517
8518 /* If this is an equality or inequality test of a single bit, we can
8519 do this by shifting the bit being tested to the low-order bit and
8520 masking the result with the constant 1. If the condition was EQ,
8521 we xor it with 1. This does not require an scc insn and is faster
8522 than an scc insn even if we have it.
8523
8524 The code to make this transformation was moved into fold_single_bit_test,
8525 so we just call into the folder and expand its result. */
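/* For example (illustrative): (x & 8) != 0 folds to ((x >> 3) & 1),
   and (x & 8) == 0 to ((x >> 3) & 1) ^ 1, so no store-flag instruction
   is needed.  */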
8526
8527 if ((code == NE || code == EQ)
8528 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8529 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8530 {
8531 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8532 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8533 arg0, arg1, type),
8534 target, VOIDmode, EXPAND_NORMAL);
8535 }
8536
8537 /* Now see if we are likely to be able to do this. Return if not. */
8538 if (! can_compare_p (code, operand_mode, ccp_store_flag))
8539 return 0;
8540
8541 icode = setcc_gen_code[(int) code];
8542 if (icode == CODE_FOR_nothing
8543 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8544 {
8545 /* We can only do this if it is one of the special cases that
8546 can be handled without an scc insn. */
8547 if ((code == LT && integer_zerop (arg1))
8548 || (! only_cheap && code == GE && integer_zerop (arg1)))
8549 ;
8550 else if (BRANCH_COST >= 0
8551 && ! only_cheap && (code == NE || code == EQ)
8552 && TREE_CODE (type) != REAL_TYPE
8553 && ((abs_optab->handlers[(int) operand_mode].insn_code
8554 != CODE_FOR_nothing)
8555 || (ffs_optab->handlers[(int) operand_mode].insn_code
8556 != CODE_FOR_nothing)))
8557 ;
8558 else
8559 return 0;
8560 }
8561
8562 if (! get_subtarget (target)
8563 || GET_MODE (subtarget) != operand_mode)
8564 subtarget = 0;
8565
8566 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8567
8568 if (target == 0)
8569 target = gen_reg_rtx (mode);
8570
8571 result = emit_store_flag (target, code, op0, op1,
8572 operand_mode, unsignedp, 1);
8573
8574 if (result)
8575 {
8576 if (invert)
8577 result = expand_binop (mode, xor_optab, result, const1_rtx,
8578 result, 0, OPTAB_LIB_WIDEN);
8579 return result;
8580 }
8581
8582 /* If this failed, we have to do this with set/compare/jump/set code. */
8583 if (!REG_P (target)
8584 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8585 target = gen_reg_rtx (GET_MODE (target));
8586
8587 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8588 result = compare_from_rtx (op0, op1, code, unsignedp,
8589 operand_mode, NULL_RTX);
8590 if (GET_CODE (result) == CONST_INT)
8591 return (((result == const0_rtx && ! invert)
8592 || (result != const0_rtx && invert))
8593 ? const0_rtx : const1_rtx);
8594
8595 /* The code of RESULT may not match CODE if compare_from_rtx
8596 decided to swap its operands and reverse the original code.
8597
8598 We know that compare_from_rtx returns either a CONST_INT or
8599 a new comparison code, so it is safe to just extract the
8600 code from RESULT. */
8601 code = GET_CODE (result);
8602
8603 label = gen_label_rtx ();
8604 gcc_assert (bcc_gen_fctn[(int) code]);
8605
8606 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8607 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8608 emit_label (label);
8609
8610 return target;
8611 }
8612 \f
8613
8614 /* Stubs in case we haven't got a casesi insn. */
8615 #ifndef HAVE_casesi
8616 # define HAVE_casesi 0
8617 # define gen_casesi(a, b, c, d, e) (0)
8618 # define CODE_FOR_casesi CODE_FOR_nothing
8619 #endif
8620
8621 /* If the machine does not have a case insn that compares the bounds,
8622 this means extra overhead for dispatch tables, which raises the
8623 threshold for using them. */
8624 #ifndef CASE_VALUES_THRESHOLD
8625 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8626 #endif /* CASE_VALUES_THRESHOLD */
8627
8628 unsigned int
8629 case_values_threshold (void)
8630 {
8631 return CASE_VALUES_THRESHOLD;
8632 }
8633
8634 /* Attempt to generate a casesi instruction. Returns 1 if successful,
8635 0 otherwise (i.e. if there is no casesi instruction). */
8636 int
8637 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
8638 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
8639 {
8640 enum machine_mode index_mode = SImode;
8641 int index_bits = GET_MODE_BITSIZE (index_mode);
8642 rtx op1, op2, index;
8643 enum machine_mode op_mode;
8644
8645 if (! HAVE_casesi)
8646 return 0;
8647
8648 /* Convert the index to SImode. */
8649 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
8650 {
8651 enum machine_mode omode = TYPE_MODE (index_type);
8652 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
8653
8654 /* We must handle the endpoints in the original mode. */
8655 index_expr = build2 (MINUS_EXPR, index_type,
8656 index_expr, minval);
8657 minval = integer_zero_node;
8658 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8659 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
8660 omode, 1, default_label);
8661 /* Now we can safely truncate. */
8662 index = convert_to_mode (index_mode, index, 0);
8663 }
8664 else
8665 {
8666 if (TYPE_MODE (index_type) != index_mode)
8667 {
8668 index_expr = convert (lang_hooks.types.type_for_size
8669 (index_bits, 0), index_expr);
8670 index_type = TREE_TYPE (index_expr);
8671 }
8672
8673 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8674 }
8675
8676 do_pending_stack_adjust ();
8677
8678 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
8679 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
8680 (index, op_mode))
8681 index = copy_to_mode_reg (op_mode, index);
8682
8683 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
8684
8685 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
8686 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8687 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
8688 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
8689 (op1, op_mode))
8690 op1 = copy_to_mode_reg (op_mode, op1);
8691
8692 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
8693
8694 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
8695 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8696 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
8697 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
8698 (op2, op_mode))
8699 op2 = copy_to_mode_reg (op_mode, op2);
8700
8701 emit_jump_insn (gen_casesi (index, op1, op2,
8702 table_label, default_label));
8703 return 1;
8704 }
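#if 0
/* A minimal sketch, not compiled into GCC, of the intended calling
   pattern: a switch expander (expand_case in stmt.c does roughly
   this) tries the casesi pattern first and falls back to an explicit
   tablejump when the target has none.  The declarations of the trees
   and labels are omitted here.  */
  if (! try_casesi (index_type, index_expr, minval, range,
                    table_label, default_label))
    {
      bool ok = try_tablejump (index_type, index_expr, minval, range,
                               table_label, default_label);
      gcc_assert (ok);
    }
#endif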
8705
8706 /* Attempt to generate a tablejump instruction; same concept. */
8707 #ifndef HAVE_tablejump
8708 #define HAVE_tablejump 0
8709 #define gen_tablejump(x, y) (0)
8710 #endif
8711
8712 /* Subroutine of the next function.
8713
8714 INDEX is the value being switched on, with the lowest value
8715 in the table already subtracted.
8716 MODE is its expected mode (needed if INDEX is constant).
8717 RANGE is the length of the jump table.
8718 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8719
8720 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8721 index value is out of range. */
8722
8723 static void
8724 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
8725 rtx default_label)
8726 {
8727 rtx temp, vector;
8728
8729 if (INTVAL (range) > cfun->max_jumptable_ents)
8730 cfun->max_jumptable_ents = INTVAL (range);
8731
8732 /* Do an unsigned comparison (in the proper mode) between the index
8733 expression and the value which represents the length of the range.
8734 Since we just finished subtracting the lower bound of the range
8735 from the index expression, this comparison allows us to simultaneously
8736 check that the original index expression value is both greater than
8737 or equal to the minimum value of the range and less than or equal to
8738 the maximum value of the range. */
8739
8740 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
8741 default_label);
8742
8743 /* If index is in range, it must fit in Pmode.
8744 Convert to Pmode so we can index with it. */
8745 if (mode != Pmode)
8746 index = convert_to_mode (Pmode, index, 1);
8747
8748 /* Don't let a MEM slip through, because then the INDEX that comes
8749 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
8750 and break_out_memory_refs will go to work on it and mess it up. */
8751 #ifdef PIC_CASE_VECTOR_ADDRESS
8752 if (flag_pic && !REG_P (index))
8753 index = copy_to_mode_reg (Pmode, index);
8754 #endif
8755
8756 /* If flag_force_addr were to affect this address
8757 it could interfere with the tricky assumptions made
8758 about addresses that contain label-refs,
8759 which may be valid only very near the tablejump itself. */
8760 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8761 GET_MODE_SIZE, because this indicates how large insns are. The other
8762 uses should all be Pmode, because they are addresses. This code
8763 could fail if addresses and insns are not the same size. */
8764 index = gen_rtx_PLUS (Pmode,
8765 gen_rtx_MULT (Pmode, index,
8766 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8767 gen_rtx_LABEL_REF (Pmode, table_label));
8768 #ifdef PIC_CASE_VECTOR_ADDRESS
8769 if (flag_pic)
8770 index = PIC_CASE_VECTOR_ADDRESS (index);
8771 else
8772 #endif
8773 index = memory_address_noforce (CASE_VECTOR_MODE, index);
8774 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8775 vector = gen_const_mem (CASE_VECTOR_MODE, index);
8776 convert_move (temp, vector, 0);
8777
8778 emit_jump_insn (gen_tablejump (temp, table_label));
8779
8780 /* If we are generating PIC code or if the table is PC-relative, the
8781 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8782 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
8783 emit_barrier ();
8784 }
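#if 0
/* A minimal sketch, not compiled into GCC, restating two facts used
   above in plain C.  First, once the lower bound has been subtracted,
   the single unsigned comparison

     (unsigned) (index - minval) > (unsigned) (maxval - minval)

   is true exactly when index < minval (the subtraction wraps around)
   or index > maxval, which is why one GTU branch suffices.  Second,
   the dispatch itself is scaled indexing off the table label, shown
   here with GCC's computed-goto extension; all names are made up.  */
static void
tablejump_sketch (unsigned long index, unsigned long range)
{
  static void *table[] = { &&case0, &&case1, &&case2 };
  if (index > range)            /* The emit_cmp_and_jump_insns above.  */
    goto deflt;
  goto *table[index];           /* MEM at table_label + index * entry size.  */
 case0: return;
 case1: return;
 case2: return;
 deflt: return;
}
#endif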
8785
8786 int
8787 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
8788 rtx table_label, rtx default_label)
8789 {
8790 rtx index;
8791
8792 if (! HAVE_tablejump)
8793 return 0;
8794
8795 index_expr = fold (build2 (MINUS_EXPR, index_type,
8796 convert (index_type, index_expr),
8797 convert (index_type, minval)));
8798 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8799 do_pending_stack_adjust ();
8800
8801 do_tablejump (index, TYPE_MODE (index_type),
8802 convert_modes (TYPE_MODE (index_type),
8803 TYPE_MODE (TREE_TYPE (range)),
8804 expand_expr (range, NULL_RTX,
8805 VOIDmode, 0),
8806 TYPE_UNSIGNED (TREE_TYPE (range))),
8807 table_label, default_label);
8808 return 1;
8809 }
8810
8811 /* Nonzero if the mode is a valid vector mode for this architecture.
8812 This returns nonzero even if there is no hardware support for the
8813 vector mode, provided we can emulate it with narrower modes. */
8814
8815 int
8816 vector_mode_valid_p (enum machine_mode mode)
8817 {
8818 enum mode_class class = GET_MODE_CLASS (mode);
8819 enum machine_mode innermode;
8820
8821 /* Only integer and float vector modes can possibly be valid. */
8822 if (class != MODE_VECTOR_INT
8823 && class != MODE_VECTOR_FLOAT)
8824 return 0;
8825
8826 /* The target supports this vector mode directly in hardware. */
8827 if (targetm.vector_mode_supported_p (mode))
8828 return 1;
8829
8830 innermode = GET_MODE_INNER (mode);
8831
8832 /* We should probably return 1 when asked about V4DI if we have no DI
8833 but do have V2DI; that case seems very unlikely in practice, though. */
8834
8835 /* If we have support for the inner mode, we can safely emulate it.
8836 We may not have V2DI, but we can emulate it with a pair of DIs. */
8837 return targetm.scalar_mode_supported_p (innermode);
8838 }
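#if 0
/* A minimal sketch, not compiled into GCC, of what "emulate with
   narrower modes" means: an operation on a V2DI value can be
   open-coded as two independent DImode operations.  The struct and
   helper are hypothetical, plain-C stand-ins for the vector
   lowering.  */
struct v2di_sketch { long long e0, e1; };

static struct v2di_sketch
add_v2di_sketch (struct v2di_sketch a, struct v2di_sketch b)
{
  struct v2di_sketch r;
  r.e0 = a.e0 + b.e0;   /* First DImode addition.  */
  r.e1 = a.e1 + b.e1;   /* Second DImode addition.  */
  return r;
}
#endif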
8839
8840 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
8841 static rtx
8842 const_vector_from_tree (tree exp)
8843 {
8844 rtvec v;
8845 int units, i;
8846 tree link, elt;
8847 enum machine_mode inner, mode;
8848
8849 mode = TYPE_MODE (TREE_TYPE (exp));
8850
8851 if (initializer_zerop (exp))
8852 return CONST0_RTX (mode);
8853
8854 units = GET_MODE_NUNITS (mode);
8855 inner = GET_MODE_INNER (mode);
8856
8857 v = rtvec_alloc (units);
8858
8859 link = TREE_VECTOR_CST_ELTS (exp);
8860 for (i = 0; link; link = TREE_CHAIN (link), ++i)
8861 {
8862 elt = TREE_VALUE (link);
8863
8864 if (TREE_CODE (elt) == REAL_CST)
8865 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
8866 inner);
8867 else
8868 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
8869 TREE_INT_CST_HIGH (elt),
8870 inner);
8871 }
8872
8873 /* Initialize remaining elements to 0. */
8874 for (; i < units; ++i)
8875 RTVEC_ELT (v, i) = CONST0_RTX (inner);
8876
8877 return gen_rtx_CONST_VECTOR (mode, v);
8878 }
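#if 0
/* A minimal sketch, not compiled into GCC, of the padding behavior
   above: elements present in the VECTOR_CST list fill the leading
   lanes and every remaining lane becomes zero.  Names and types are
   illustrative only.  */
static void
fill_lanes_sketch (long *lanes, int nunits, const long *elts, int nelts)
{
  int i;
  for (i = 0; i < nelts && i < nunits; i++)
    lanes[i] = elts[i];         /* Values taken from the constant.  */
  for (; i < nunits; i++)
    lanes[i] = 0;               /* Remaining lanes default to zero.  */
}
#endif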
8879 #include "gt-expr.h"