re PR c++/23167 (internal compiler error: in create_tmp_var)
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
55
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
58
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
61
62 #ifdef PUSH_ROUNDING
63
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
69
70 #endif
71
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
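/* For illustration only (not from the sources): on a target whose
   stack grows downward, a single-word push therefore expands to a
   PRE_DEC address, e.g.

     (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 0))

   while an upward-growing stack uses PRE_INC instead.  */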
79
80
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
88
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces
92 {
93 rtx to;
94 rtx to_addr;
95 int autinc_to;
96 int explicit_inc_to;
97 rtx from;
98 rtx from_addr;
99 int autinc_from;
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
103 int reverse;
104 };
105
106 /* This structure is used by store_by_pieces to describe the clear to
107 be performed. */
108
109 struct store_by_pieces
110 {
111 rtx to;
112 rtx to_addr;
113 int autinc_to;
114 int explicit_inc_to;
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118 void *constfundata;
119 int reverse;
120 };
121
122 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
123 unsigned int,
124 unsigned int);
125 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
129 static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static rtx clear_storage_via_libcall (rtx, rtx, bool);
138 static tree clear_storage_libcall_fn (int);
139 static rtx compress_float_constant (rtx, rtx);
140 static rtx get_subtarget (rtx);
141 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, tree, int, int);
144 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
146 tree, tree, int);
147
148 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
149
150 static int is_aligning_offset (tree, tree);
151 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
152 enum expand_modifier);
153 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
154 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
155 #ifdef PUSH_ROUNDING
156 static void emit_single_push_insn (enum machine_mode, rtx, tree);
157 #endif
158 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
159 static rtx const_vector_from_tree (tree);
160 static void write_complex_part (rtx, rtx, bool);
161
162 /* Record for each mode whether we can move a register directly to or
163 from an object of that mode in memory. If we can't, we won't try
164 to use that mode directly when accessing a field of that mode. */
165
166 static char direct_load[NUM_MACHINE_MODES];
167 static char direct_store[NUM_MACHINE_MODES];
168
169 /* Record for each mode whether we can float-extend from memory. */
170
171 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
172
173 /* This macro is used to determine whether move_by_pieces should be called
174 to perform a structure copy. */
175 #ifndef MOVE_BY_PIECES_P
176 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
177 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
178 < (unsigned int) MOVE_RATIO)
179 #endif
180
181 /* This macro is used to determine whether clear_by_pieces should be
182 called to clear storage. */
183 #ifndef CLEAR_BY_PIECES_P
184 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
185 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
186 < (unsigned int) CLEAR_RATIO)
187 #endif
188
189 /* This macro is used to determine whether store_by_pieces should be
190 called to "memset" storage with byte values other than zero, or
191 to "memcpy" storage when the source is a constant string. */
192 #ifndef STORE_BY_PIECES_P
193 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
195 < (unsigned int) MOVE_RATIO)
196 #endif
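/* As a concrete, purely hypothetical example, a target header that
   wants by-pieces copies only for small, well-aligned blocks could
   override the heuristic above directly:

     #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
       ((SIZE) <= 16 && (ALIGN) >= BITS_PER_WORD)

   Most targets instead tune MOVE_RATIO and MOVE_MAX_PIECES.  */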
197
198 /* This array records the insn_code of insns to perform block moves. */
199 enum insn_code movmem_optab[NUM_MACHINE_MODES];
200
201 /* This array records the insn_code of insns to perform block sets. */
202 enum insn_code setmem_optab[NUM_MACHINE_MODES];
203
204 /* These arrays record the insn_code of three different kinds of insns
205 to perform block compares. */
206 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
207 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
209
210 /* Synchronization primitives. */
211 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
212 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
230 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
231 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
232 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
233
234 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
235
236 #ifndef SLOW_UNALIGNED_ACCESS
237 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
238 #endif
239 \f
240 /* This is run once per compilation to set up which modes can be used
241 directly in memory and to initialize the block move optab. */
242
243 void
244 init_expr_once (void)
245 {
246 rtx insn, pat;
247 enum machine_mode mode;
248 int num_clobbers;
249 rtx mem, mem1;
250 rtx reg;
251
252 /* Try indexing by frame ptr and try by stack ptr.
253 It is known that on the Convex the stack ptr isn't a valid index.
254 With luck, one or the other is valid on any machine. */
255 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
256 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
257
258 /* A scratch register we can modify in-place below to avoid
259 useless RTL allocations. */
260 reg = gen_rtx_REG (VOIDmode, -1);
261
262 insn = rtx_alloc (INSN);
263 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
264 PATTERN (insn) = pat;
265
266 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
267 mode = (enum machine_mode) ((int) mode + 1))
268 {
269 int regno;
270
271 direct_load[(int) mode] = direct_store[(int) mode] = 0;
272 PUT_MODE (mem, mode);
273 PUT_MODE (mem1, mode);
274 PUT_MODE (reg, mode);
275
276 /* See if there is some register that can be used in this mode and
277 directly loaded or stored from memory. */
278
279 if (mode != VOIDmode && mode != BLKmode)
280 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
281 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
282 regno++)
283 {
284 if (! HARD_REGNO_MODE_OK (regno, mode))
285 continue;
286
287 REGNO (reg) = regno;
288
289 SET_SRC (pat) = mem;
290 SET_DEST (pat) = reg;
291 if (recog (pat, insn, &num_clobbers) >= 0)
292 direct_load[(int) mode] = 1;
293
294 SET_SRC (pat) = mem1;
295 SET_DEST (pat) = reg;
296 if (recog (pat, insn, &num_clobbers) >= 0)
297 direct_load[(int) mode] = 1;
298
299 SET_SRC (pat) = reg;
300 SET_DEST (pat) = mem;
301 if (recog (pat, insn, &num_clobbers) >= 0)
302 direct_store[(int) mode] = 1;
303
304 SET_SRC (pat) = reg;
305 SET_DEST (pat) = mem1;
306 if (recog (pat, insn, &num_clobbers) >= 0)
307 direct_store[(int) mode] = 1;
308 }
309 }
310
311 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
312
313 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
314 mode = GET_MODE_WIDER_MODE (mode))
315 {
316 enum machine_mode srcmode;
317 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
318 srcmode = GET_MODE_WIDER_MODE (srcmode))
319 {
320 enum insn_code ic;
321
322 ic = can_extend_p (mode, srcmode, 0);
323 if (ic == CODE_FOR_nothing)
324 continue;
325
326 PUT_MODE (mem, srcmode);
327
328 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
329 float_extend_from_mem[mode][srcmode] = true;
330 }
331 }
332 }
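/* Illustrative sketch (hypothetical helper, not in GCC): how the
   tables built by init_expr_once can be consulted.  */
#if 0
static bool
example_direct_load_p (enum machine_mode mode)
{
  /* Nonzero if some hard register of MODE was found above to be
     directly loadable from memory.  */
  return direct_load[(int) mode] != 0;
}
#endif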
333
334 /* This is run at the start of compiling a function. */
335
336 void
337 init_expr (void)
338 {
339 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
340 }
341 \f
342 /* Copy data from FROM to TO, where the machine modes are not the same.
343 Both modes may be integer, or both may be floating.
344 UNSIGNEDP should be nonzero if FROM is an unsigned type.
345 This causes zero-extension instead of sign-extension. */
346
347 void
348 convert_move (rtx to, rtx from, int unsignedp)
349 {
350 enum machine_mode to_mode = GET_MODE (to);
351 enum machine_mode from_mode = GET_MODE (from);
352 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
353 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
354 enum insn_code code;
355 rtx libcall;
356
357 /* rtx code for making an equivalent value. */
358 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
359 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
360
361
362 gcc_assert (to_real == from_real);
363
364 /* If the source and destination are already the same, then there's
365 nothing to do. */
366 if (to == from)
367 return;
368
369 /* If FROM is a SUBREG that indicates that we have already done at least
370 the required extension, strip it. We don't handle such SUBREGs as
371 TO here. */
372
373 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
374 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
375 >= GET_MODE_SIZE (to_mode))
376 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
377 from = gen_lowpart (to_mode, from), from_mode = to_mode;
378
379 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
380
381 if (to_mode == from_mode
382 || (from_mode == VOIDmode && CONSTANT_P (from)))
383 {
384 emit_move_insn (to, from);
385 return;
386 }
387
388 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
389 {
390 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
391
392 if (VECTOR_MODE_P (to_mode))
393 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
394 else
395 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
396
397 emit_move_insn (to, from);
398 return;
399 }
400
401 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
402 {
403 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
404 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
405 return;
406 }
407
408 if (to_real)
409 {
410 rtx value, insns;
411 convert_optab tab;
412
413 gcc_assert (GET_MODE_PRECISION (from_mode)
414 != GET_MODE_PRECISION (to_mode));
415
416 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
417 tab = sext_optab;
418 else
419 tab = trunc_optab;
420
421 /* Try converting directly if the insn is supported. */
422
423 code = tab->handlers[to_mode][from_mode].insn_code;
424 if (code != CODE_FOR_nothing)
425 {
426 emit_unop_insn (code, to, from,
427 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
428 return;
429 }
430
431 /* Otherwise use a libcall. */
432 libcall = tab->handlers[to_mode][from_mode].libfunc;
433
434 /* Is this conversion implemented yet? */
435 gcc_assert (libcall);
436
437 start_sequence ();
438 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
439 1, from, from_mode);
440 insns = get_insns ();
441 end_sequence ();
442 emit_libcall_block (insns, to, value,
443 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
444 from)
445 : gen_rtx_FLOAT_EXTEND (to_mode, from));
446 return;
447 }
448
449 /* Handle pointer conversion. */ /* SPEE 900220. */
450 /* Targets are expected to provide conversion insns between PxImode and
451 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
452 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
453 {
454 enum machine_mode full_mode
455 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
456
457 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
458 != CODE_FOR_nothing);
459
460 if (full_mode != from_mode)
461 from = convert_to_mode (full_mode, from, unsignedp);
462 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
463 to, from, UNKNOWN);
464 return;
465 }
466 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
467 {
468 rtx new_from;
469 enum machine_mode full_mode
470 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
471
472 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
473 != CODE_FOR_nothing);
474
475 if (to_mode == full_mode)
476 {
477 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
478 to, from, UNKNOWN);
479 return;
480 }
481
482 new_from = gen_reg_rtx (full_mode);
483 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
484 new_from, from, UNKNOWN);
485
486 /* else proceed to integer conversions below. */
487 from_mode = full_mode;
488 from = new_from;
489 }
490
491 /* Now both modes are integers. */
492
493 /* Handle expanding beyond a word. */
494 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
495 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
496 {
497 rtx insns;
498 rtx lowpart;
499 rtx fill_value;
500 rtx lowfrom;
501 int i;
502 enum machine_mode lowpart_mode;
503 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
504
505 /* Try converting directly if the insn is supported. */
506 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
507 != CODE_FOR_nothing)
508 {
509 /* If FROM is a SUBREG, put it into a register. Do this
510 so that we always generate the same set of insns for
511 better cse'ing; if an intermediate assignment occurred,
512 we won't be doing the operation directly on the SUBREG. */
513 if (optimize > 0 && GET_CODE (from) == SUBREG)
514 from = force_reg (from_mode, from);
515 emit_unop_insn (code, to, from, equiv_code);
516 return;
517 }
518 /* Next, try converting via full word. */
519 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
520 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
521 != CODE_FOR_nothing))
522 {
523 if (REG_P (to))
524 {
525 if (reg_overlap_mentioned_p (to, from))
526 from = force_reg (from_mode, from);
527 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
528 }
529 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
530 emit_unop_insn (code, to,
531 gen_lowpart (word_mode, to), equiv_code);
532 return;
533 }
534
535 /* No special multiword conversion insn; do it by hand. */
536 start_sequence ();
537
538 /* Since we will turn this into a no conflict block, we must ensure
539 that the source does not overlap the target. */
540
541 if (reg_overlap_mentioned_p (to, from))
542 from = force_reg (from_mode, from);
543
544 /* Get a copy of FROM widened to a word, if necessary. */
545 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
546 lowpart_mode = word_mode;
547 else
548 lowpart_mode = from_mode;
549
550 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
551
552 lowpart = gen_lowpart (lowpart_mode, to);
553 emit_move_insn (lowpart, lowfrom);
554
555 /* Compute the value to put in each remaining word. */
556 if (unsignedp)
557 fill_value = const0_rtx;
558 else
559 {
560 #ifdef HAVE_slt
561 if (HAVE_slt
562 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
563 && STORE_FLAG_VALUE == -1)
564 {
565 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
566 lowpart_mode, 0);
567 fill_value = gen_reg_rtx (word_mode);
568 emit_insn (gen_slt (fill_value));
569 }
570 else
571 #endif
572 {
573 fill_value
574 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
575 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
576 NULL_RTX, 0);
577 fill_value = convert_to_mode (word_mode, fill_value, 1);
578 }
579 }
580
581 /* Fill the remaining words. */
582 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
583 {
584 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
585 rtx subword = operand_subword (to, index, 1, to_mode);
586
587 gcc_assert (subword);
588
589 if (fill_value != subword)
590 emit_move_insn (subword, fill_value);
591 }
592
593 insns = get_insns ();
594 end_sequence ();
595
596 emit_no_conflict_block (insns, to, from, NULL_RTX,
597 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
598 return;
599 }
600
601 /* Truncating multi-word to a word or less. */
602 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
603 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
604 {
605 if (!((MEM_P (from)
606 && ! MEM_VOLATILE_P (from)
607 && direct_load[(int) to_mode]
608 && ! mode_dependent_address_p (XEXP (from, 0)))
609 || REG_P (from)
610 || GET_CODE (from) == SUBREG))
611 from = force_reg (from_mode, from);
612 convert_move (to, gen_lowpart (word_mode, from), 0);
613 return;
614 }
615
616 /* Now follow all the conversions between integers
617 no more than a word long. */
618
619 /* For truncation, usually we can just refer to FROM in a narrower mode. */
620 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
621 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
622 GET_MODE_BITSIZE (from_mode)))
623 {
624 if (!((MEM_P (from)
625 && ! MEM_VOLATILE_P (from)
626 && direct_load[(int) to_mode]
627 && ! mode_dependent_address_p (XEXP (from, 0)))
628 || REG_P (from)
629 || GET_CODE (from) == SUBREG))
630 from = force_reg (from_mode, from);
631 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
632 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
633 from = copy_to_reg (from);
634 emit_move_insn (to, gen_lowpart (to_mode, from));
635 return;
636 }
637
638 /* Handle extension. */
639 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
640 {
641 /* Convert directly if that works. */
642 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
643 != CODE_FOR_nothing)
644 {
645 emit_unop_insn (code, to, from, equiv_code);
646 return;
647 }
648 else
649 {
650 enum machine_mode intermediate;
651 rtx tmp;
652 tree shift_amount;
653
654 /* Search for a mode to convert via. */
655 for (intermediate = from_mode; intermediate != VOIDmode;
656 intermediate = GET_MODE_WIDER_MODE (intermediate))
657 if (((can_extend_p (to_mode, intermediate, unsignedp)
658 != CODE_FOR_nothing)
659 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
660 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
661 GET_MODE_BITSIZE (intermediate))))
662 && (can_extend_p (intermediate, from_mode, unsignedp)
663 != CODE_FOR_nothing))
664 {
665 convert_move (to, convert_to_mode (intermediate, from,
666 unsignedp), unsignedp);
667 return;
668 }
669
670 /* No suitable intermediate mode.
671 Generate what we need with shifts. */
672 shift_amount = build_int_cst (NULL_TREE,
673 GET_MODE_BITSIZE (to_mode)
674 - GET_MODE_BITSIZE (from_mode));
675 from = gen_lowpart (to_mode, force_reg (from_mode, from));
676 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
677 to, unsignedp);
678 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
679 to, unsignedp);
680 if (tmp != to)
681 emit_move_insn (to, tmp);
682 return;
683 }
684 }
685
686 /* Support special truncate insns for certain modes. */
687 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
688 {
689 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
690 to, from, UNKNOWN);
691 return;
692 }
693
694 /* Handle truncation of volatile memrefs, and so on;
695 the things that couldn't be truncated directly,
696 and for which there was no special instruction.
697
698 ??? Code above formerly short-circuited this, for most integer
699 mode pairs, with a force_reg in from_mode followed by a recursive
700 call to this routine. Appears always to have been wrong. */
701 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
702 {
703 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
704 emit_move_insn (to, temp);
705 return;
706 }
707
708 /* Mode combination is not recognized. */
709 gcc_unreachable ();
710 }
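/* Usage sketch for convert_move (hypothetical helper, not in GCC):
   widen a QImode pseudo into an SImode pseudo with zero-extension.  */
#if 0
static rtx
example_zero_extend_qi_to_si (rtx qi_reg)
{
  rtx si_reg = gen_reg_rtx (SImode);
  /* UNSIGNEDP == 1 requests (zero_extend:SI ...) semantics.  */
  convert_move (si_reg, qi_reg, 1);
  return si_reg;
}
#endif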
711
712 /* Return an rtx for a value that would result
713 from converting X to mode MODE.
714 Both X and MODE may be floating, or both integer.
715 UNSIGNEDP is nonzero if X is an unsigned value.
716 This can be done by referring to a part of X in place
717 or by copying to a new temporary with conversion. */
718
719 rtx
720 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
721 {
722 return convert_modes (mode, VOIDmode, x, unsignedp);
723 }
724
725 /* Return an rtx for a value that would result
726 from converting X from mode OLDMODE to mode MODE.
727 Both modes may be floating, or both integer.
728 UNSIGNEDP is nonzero if X is an unsigned value.
729
730 This can be done by referring to a part of X in place
731 or by copying to a new temporary with conversion.
732
733 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
734
735 rtx
736 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
737 {
738 rtx temp;
739
740 /* If FROM is a SUBREG that indicates that we have already done at least
741 the required extension, strip it. */
742
743 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
744 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
745 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
746 x = gen_lowpart (mode, x);
747
748 if (GET_MODE (x) != VOIDmode)
749 oldmode = GET_MODE (x);
750
751 if (mode == oldmode)
752 return x;
753
754 /* There is one case that we must handle specially: If we are converting
755 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
756 we are to interpret the constant as unsigned, gen_lowpart will do
757 the wrong thing if the constant appears negative. What we want to do is
758 make the high-order word of the constant zero, not all ones. */
759
760 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
761 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
762 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
763 {
764 HOST_WIDE_INT val = INTVAL (x);
765
766 if (oldmode != VOIDmode
767 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
768 {
769 int width = GET_MODE_BITSIZE (oldmode);
770
771 /* We need to zero extend VAL. */
772 val &= ((HOST_WIDE_INT) 1 << width) - 1;
773 }
774
775 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
776 }
777
778 /* We can do this with a gen_lowpart if both desired and current modes
779 are integer, and this is either a constant integer, a register, or a
780 non-volatile MEM. Except for the constant case where MODE is no
781 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
782
783 if ((GET_CODE (x) == CONST_INT
784 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
785 || (GET_MODE_CLASS (mode) == MODE_INT
786 && GET_MODE_CLASS (oldmode) == MODE_INT
787 && (GET_CODE (x) == CONST_DOUBLE
788 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
789 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
790 && direct_load[(int) mode])
791 || (REG_P (x)
792 && (! HARD_REGISTER_P (x)
793 || HARD_REGNO_MODE_OK (REGNO (x), mode))
794 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
795 GET_MODE_BITSIZE (GET_MODE (x)))))))))
796 {
797 /* ??? If we don't know OLDMODE, we have to assume here that
798 X does not need sign- or zero-extension. This may not be
799 the case, but it's the best we can do. */
800 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
801 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
802 {
803 HOST_WIDE_INT val = INTVAL (x);
804 int width = GET_MODE_BITSIZE (oldmode);
805
806 /* We must sign or zero-extend in this case. Start by
807 zero-extending, then sign extend if we need to. */
808 val &= ((HOST_WIDE_INT) 1 << width) - 1;
809 if (! unsignedp
810 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
811 val |= (HOST_WIDE_INT) (-1) << width;
812
813 return gen_int_mode (val, mode);
814 }
815
816 return gen_lowpart (mode, x);
817 }
818
819 /* Converting an integer constant into a vector mode is always
820 equivalent to a subreg operation. */
821 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
822 {
823 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
824 return simplify_gen_subreg (mode, x, oldmode, 0);
825 }
826
827 temp = gen_reg_rtx (mode);
828 convert_move (temp, x, unsignedp);
829 return temp;
830 }
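/* Worked example (hypothetical helper, not in GCC): because of the
   CONST_INT handling above, converting the QImode constant -1 to
   unsigned SImode yields (const_int 255), not a sign-extended value.  */
#if 0
static rtx
example_unsigned_widen (void)
{
  /* -1 is first masked to QImode's 8-bit width, then widened
     without sign extension, giving (const_int 255).  */
  return convert_modes (SImode, QImode, constm1_rtx, 1);
}
#endif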
831 \f
832 /* STORE_MAX_PIECES is the number of bytes at a time that we can
833 store efficiently. Due to internal GCC limitations, this is
834 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
835 for an immediate constant. */
836
837 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
838
839 /* Determine whether the LEN bytes can be moved by using several move
840 instructions. Return nonzero if a call to move_by_pieces should
841 succeed. */
842
843 int
844 can_move_by_pieces (unsigned HOST_WIDE_INT len,
845 unsigned int align ATTRIBUTE_UNUSED)
846 {
847 return MOVE_BY_PIECES_P (len, align);
848 }
849
850 /* Generate several move instructions to copy LEN bytes from block FROM to
851 block TO. (These are MEM rtx's with BLKmode).
852
853 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
854 used to push FROM to the stack.
855
856 ALIGN is the maximum alignment we can assume.
857
858 If ENDP is 0, return TO; if ENDP is 1, return the memory at the end,
859 a la mempcpy; and if ENDP is 2, return the memory at the end minus
860 one byte, a la stpcpy. */
861
862 rtx
863 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
864 unsigned int align, int endp)
865 {
866 struct move_by_pieces data;
867 rtx to_addr, from_addr = XEXP (from, 0);
868 unsigned int max_size = MOVE_MAX_PIECES + 1;
869 enum machine_mode mode = VOIDmode, tmode;
870 enum insn_code icode;
871
872 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
873
874 data.offset = 0;
875 data.from_addr = from_addr;
876 if (to)
877 {
878 to_addr = XEXP (to, 0);
879 data.to = to;
880 data.autinc_to
881 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
882 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
883 data.reverse
884 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
885 }
886 else
887 {
888 to_addr = NULL_RTX;
889 data.to = NULL_RTX;
890 data.autinc_to = 1;
891 #ifdef STACK_GROWS_DOWNWARD
892 data.reverse = 1;
893 #else
894 data.reverse = 0;
895 #endif
896 }
897 data.to_addr = to_addr;
898 data.from = from;
899 data.autinc_from
900 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
901 || GET_CODE (from_addr) == POST_INC
902 || GET_CODE (from_addr) == POST_DEC);
903
904 data.explicit_inc_from = 0;
905 data.explicit_inc_to = 0;
906 if (data.reverse) data.offset = len;
907 data.len = len;
908
909 /* If copying requires more than two move insns,
910 copy addresses to registers (to make displacements shorter)
911 and use post-increment if available. */
912 if (!(data.autinc_from && data.autinc_to)
913 && move_by_pieces_ninsns (len, align, max_size) > 2)
914 {
915 /* Find the mode of the largest move... */
916 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
917 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
918 if (GET_MODE_SIZE (tmode) < max_size)
919 mode = tmode;
920
921 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
922 {
923 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
924 data.autinc_from = 1;
925 data.explicit_inc_from = -1;
926 }
927 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
928 {
929 data.from_addr = copy_addr_to_reg (from_addr);
930 data.autinc_from = 1;
931 data.explicit_inc_from = 1;
932 }
933 if (!data.autinc_from && CONSTANT_P (from_addr))
934 data.from_addr = copy_addr_to_reg (from_addr);
935 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
936 {
937 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
938 data.autinc_to = 1;
939 data.explicit_inc_to = -1;
940 }
941 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
942 {
943 data.to_addr = copy_addr_to_reg (to_addr);
944 data.autinc_to = 1;
945 data.explicit_inc_to = 1;
946 }
947 if (!data.autinc_to && CONSTANT_P (to_addr))
948 data.to_addr = copy_addr_to_reg (to_addr);
949 }
950
951 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
952 if (align >= GET_MODE_ALIGNMENT (tmode))
953 align = GET_MODE_ALIGNMENT (tmode);
954 else
955 {
956 enum machine_mode xmode;
957
958 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
959 tmode != VOIDmode;
960 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
961 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
962 || SLOW_UNALIGNED_ACCESS (tmode, align))
963 break;
964
965 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
966 }
967
968 /* First move what we can in the largest integer mode, then go to
969 successively smaller modes. */
970
971 while (max_size > 1)
972 {
973 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
974 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
975 if (GET_MODE_SIZE (tmode) < max_size)
976 mode = tmode;
977
978 if (mode == VOIDmode)
979 break;
980
981 icode = mov_optab->handlers[(int) mode].insn_code;
982 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
983 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
984
985 max_size = GET_MODE_SIZE (mode);
986 }
987
988 /* The code above should have handled everything. */
989 gcc_assert (!data.len);
990
991 if (endp)
992 {
993 rtx to1;
994
995 gcc_assert (!data.reverse);
996 if (data.autinc_to)
997 {
998 if (endp == 2)
999 {
1000 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1001 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1002 else
1003 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1004 -1));
1005 }
1006 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1007 data.offset);
1008 }
1009 else
1010 {
1011 if (endp == 2)
1012 --data.offset;
1013 to1 = adjust_address (data.to, QImode, data.offset);
1014 }
1015 return to1;
1016 }
1017 else
1018 return data.to;
1019 }
1020
1021 /* Return number of insns required to move L bytes by pieces.
1022 ALIGN (in bits) is maximum alignment we can assume. */
1023
1024 static unsigned HOST_WIDE_INT
1025 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1026 unsigned int max_size)
1027 {
1028 unsigned HOST_WIDE_INT n_insns = 0;
1029 enum machine_mode tmode;
1030
1031 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1032 if (align >= GET_MODE_ALIGNMENT (tmode))
1033 align = GET_MODE_ALIGNMENT (tmode);
1034 else
1035 {
1036 enum machine_mode tmode, xmode;
1037
1038 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1039 tmode != VOIDmode;
1040 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1041 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1042 || SLOW_UNALIGNED_ACCESS (tmode, align))
1043 break;
1044
1045 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1046 }
1047
1048 while (max_size > 1)
1049 {
1050 enum machine_mode mode = VOIDmode;
1051 enum insn_code icode;
1052
1053 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1054 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1055 if (GET_MODE_SIZE (tmode) < max_size)
1056 mode = tmode;
1057
1058 if (mode == VOIDmode)
1059 break;
1060
1061 icode = mov_optab->handlers[(int) mode].insn_code;
1062 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1063 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1064
1065 max_size = GET_MODE_SIZE (mode);
1066 }
1067
1068 gcc_assert (!l);
1069 return n_insns;
1070 }
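/* Worked example (illustrative): on a 32-bit target with
   MOVE_MAX_PIECES == 4, a fully aligned L == 11 decomposes as
   2 SImode moves + 1 HImode move + 1 QImode move, so the loop
   above returns 4.  */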
1071
1072 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1073 with move instructions for mode MODE. GENFUN is the gen_... function
1074 to make a move insn for that mode. DATA has all the other info. */
1075
1076 static void
1077 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1078 struct move_by_pieces *data)
1079 {
1080 unsigned int size = GET_MODE_SIZE (mode);
1081 rtx to1 = NULL_RTX, from1;
1082
1083 while (data->len >= size)
1084 {
1085 if (data->reverse)
1086 data->offset -= size;
1087
1088 if (data->to)
1089 {
1090 if (data->autinc_to)
1091 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1092 data->offset);
1093 else
1094 to1 = adjust_address (data->to, mode, data->offset);
1095 }
1096
1097 if (data->autinc_from)
1098 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1099 data->offset);
1100 else
1101 from1 = adjust_address (data->from, mode, data->offset);
1102
1103 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1104 emit_insn (gen_add2_insn (data->to_addr,
1105 GEN_INT (-(HOST_WIDE_INT)size)));
1106 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1107 emit_insn (gen_add2_insn (data->from_addr,
1108 GEN_INT (-(HOST_WIDE_INT)size)));
1109
1110 if (data->to)
1111 emit_insn ((*genfun) (to1, from1));
1112 else
1113 {
1114 #ifdef PUSH_ROUNDING
1115 emit_single_push_insn (mode, from1, NULL);
1116 #else
1117 gcc_unreachable ();
1118 #endif
1119 }
1120
1121 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1122 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1123 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1124 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1125
1126 if (! data->reverse)
1127 data->offset += size;
1128
1129 data->len -= size;
1130 }
1131 }
1132 \f
1133 /* Emit code to move a block Y to a block X. This may be done with
1134 string-move instructions, with multiple scalar move instructions,
1135 or with a library call.
1136
1137 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1138 SIZE is an rtx that says how long they are.
1139 ALIGN is the maximum alignment we can assume they have.
1140 METHOD describes what kind of copy this is, and what mechanisms may be used.
1141
1142 Return the address of the new block, if memcpy is called and returns it,
1143 0 otherwise. */
1144
1145 rtx
1146 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1147 {
1148 bool may_use_call;
1149 rtx retval = 0;
1150 unsigned int align;
1151
1152 switch (method)
1153 {
1154 case BLOCK_OP_NORMAL:
1155 case BLOCK_OP_TAILCALL:
1156 may_use_call = true;
1157 break;
1158
1159 case BLOCK_OP_CALL_PARM:
1160 may_use_call = block_move_libcall_safe_for_call_parm ();
1161
1162 /* Make inhibit_defer_pop nonzero around the library call
1163 to force it to pop the arguments right away. */
1164 NO_DEFER_POP;
1165 break;
1166
1167 case BLOCK_OP_NO_LIBCALL:
1168 may_use_call = false;
1169 break;
1170
1171 default:
1172 gcc_unreachable ();
1173 }
1174
1175 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1176
1177 gcc_assert (MEM_P (x));
1178 gcc_assert (MEM_P (y));
1179 gcc_assert (size);
1180
1181 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1182 block copy is more efficient for other large modes, e.g. DCmode. */
1183 x = adjust_address (x, BLKmode, 0);
1184 y = adjust_address (y, BLKmode, 0);
1185
1186 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1187 can be incorrect is coming from __builtin_memcpy. */
1188 if (GET_CODE (size) == CONST_INT)
1189 {
1190 if (INTVAL (size) == 0)
1191 return 0;
1192
1193 x = shallow_copy_rtx (x);
1194 y = shallow_copy_rtx (y);
1195 set_mem_size (x, size);
1196 set_mem_size (y, size);
1197 }
1198
1199 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1200 move_by_pieces (x, y, INTVAL (size), align, 0);
1201 else if (emit_block_move_via_movmem (x, y, size, align))
1202 ;
1203 else if (may_use_call)
1204 retval = emit_block_move_via_libcall (x, y, size,
1205 method == BLOCK_OP_TAILCALL);
1206 else
1207 emit_block_move_via_loop (x, y, size, align);
1208
1209 if (method == BLOCK_OP_CALL_PARM)
1210 OK_DEFER_POP;
1211
1212 return retval;
1213 }
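/* Usage sketch (hypothetical helper, not in GCC): copy a fixed
   32-byte block; emit_block_move picks a movmem insn, a by-pieces
   sequence, or a memcpy libcall as appropriate.  */
#if 0
static void
example_copy_32 (rtx dst, rtx src)
{
  emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);
}
#endif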
1214
1215 /* A subroutine of emit_block_move. Returns true if calling the
1216 block move libcall will not clobber any parameters which may have
1217 already been placed on the stack. */
1218
1219 static bool
1220 block_move_libcall_safe_for_call_parm (void)
1221 {
1222 /* If arguments are pushed on the stack, then they're safe. */
1223 if (PUSH_ARGS)
1224 return true;
1225
1226 /* If registers go on the stack anyway, any argument is sure to clobber
1227 an outgoing argument. */
1228 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1229 {
1230 tree fn = emit_block_move_libcall_fn (false);
1231 (void) fn;
1232 if (REG_PARM_STACK_SPACE (fn) != 0)
1233 return false;
1234 }
1235 #endif
1236
1237 /* If any argument goes in memory, then it might clobber an outgoing
1238 argument. */
1239 {
1240 CUMULATIVE_ARGS args_so_far;
1241 tree fn, arg;
1242
1243 fn = emit_block_move_libcall_fn (false);
1244 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1245
1246 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1247 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1248 {
1249 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1250 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1251 if (!tmp || !REG_P (tmp))
1252 return false;
1253 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1254 return false;
1255 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1256 }
1257 }
1258 return true;
1259 }
1260
1261 /* A subroutine of emit_block_move. Expand a movmem pattern;
1262 return true if successful. */
1263
1264 static bool
1265 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1266 {
1267 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1268 int save_volatile_ok = volatile_ok;
1269 enum machine_mode mode;
1270
1271 /* Since this is a move insn, we don't care about volatility. */
1272 volatile_ok = 1;
1273
1274 /* Try the most limited insn first, because there's no point
1275 including more than one in the machine description unless
1276 the more limited one has some advantage. */
1277
1278 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1279 mode = GET_MODE_WIDER_MODE (mode))
1280 {
1281 enum insn_code code = movmem_optab[(int) mode];
1282 insn_operand_predicate_fn pred;
1283
1284 if (code != CODE_FOR_nothing
1285 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1286 here because if SIZE is less than the mode mask, as it is
1287 returned by the macro, it will definitely be less than the
1288 actual mode mask. */
1289 && ((GET_CODE (size) == CONST_INT
1290 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1291 <= (GET_MODE_MASK (mode) >> 1)))
1292 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1293 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1294 || (*pred) (x, BLKmode))
1295 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1296 || (*pred) (y, BLKmode))
1297 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1298 || (*pred) (opalign, VOIDmode)))
1299 {
1300 rtx op2;
1301 rtx last = get_last_insn ();
1302 rtx pat;
1303
1304 op2 = convert_to_mode (mode, size, 1);
1305 pred = insn_data[(int) code].operand[2].predicate;
1306 if (pred != 0 && ! (*pred) (op2, mode))
1307 op2 = copy_to_mode_reg (mode, op2);
1308
1309 /* ??? When called via emit_block_move_for_call, it'd be
1310 nice if there were some way to inform the backend, so
1311 that it doesn't fail the expansion because it thinks
1312 emitting the libcall would be more efficient. */
1313
1314 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1315 if (pat)
1316 {
1317 emit_insn (pat);
1318 volatile_ok = save_volatile_ok;
1319 return true;
1320 }
1321 else
1322 delete_insns_since (last);
1323 }
1324 }
1325
1326 volatile_ok = save_volatile_ok;
1327 return false;
1328 }
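/* For reference (illustrative skeleton, not from a real port): a
   movmemM expander in a target's .md file has this operand layout,
   matching the predicates checked above:

     (define_expand "movmemsi"
       [(match_operand:BLK 0 "memory_operand" "")    ;; destination
        (match_operand:BLK 1 "memory_operand" "")    ;; source
        (match_operand:SI 2 "general_operand" "")    ;; byte count
        (match_operand:SI 3 "const_int_operand" "")] ;; alignment
       ""
       "... expand a copy, or FAIL ...")  */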
1329
1330 /* A subroutine of emit_block_move. Expand a call to memcpy.
1331 Return the return value from memcpy, 0 otherwise. */
1332
1333 static rtx
1334 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1335 {
1336 rtx dst_addr, src_addr;
1337 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1338 enum machine_mode size_mode;
1339 rtx retval;
1340
1341 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1342 pseudos. We can then place those new pseudos into a VAR_DECL and
1343 use them later. */
1344
1345 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1346 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1347
1348 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1349 src_addr = convert_memory_address (ptr_mode, src_addr);
1350
1351 dst_tree = make_tree (ptr_type_node, dst_addr);
1352 src_tree = make_tree (ptr_type_node, src_addr);
1353
1354 size_mode = TYPE_MODE (sizetype);
1355
1356 size = convert_to_mode (size_mode, size, 1);
1357 size = copy_to_mode_reg (size_mode, size);
1358
1359 /* It is incorrect to use the libcall calling conventions to call
1360 memcpy in this context. This could be a user call to memcpy and
1361 the user may wish to examine the return value from memcpy. For
1362 targets where libcalls and normal calls have different conventions
1363 for returning pointers, we could end up generating incorrect code. */
1364
1365 size_tree = make_tree (sizetype, size);
1366
1367 fn = emit_block_move_libcall_fn (true);
1368 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1369 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1370 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1371
1372 /* Now we have to build up the CALL_EXPR itself. */
1373 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1374 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1375 call_expr, arg_list, NULL_TREE);
1376 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1377
1378 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1379
1380 return retval;
1381 }
1382
1383 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1384 for the function we use for block copies. The first time FOR_CALL
1385 is true, we call assemble_external. */
1386
1387 static GTY(()) tree block_move_fn;
1388
1389 void
1390 init_block_move_fn (const char *asmspec)
1391 {
1392 if (!block_move_fn)
1393 {
1394 tree args, fn;
1395
1396 fn = get_identifier ("memcpy");
1397 args = build_function_type_list (ptr_type_node, ptr_type_node,
1398 const_ptr_type_node, sizetype,
1399 NULL_TREE);
1400
1401 fn = build_decl (FUNCTION_DECL, fn, args);
1402 DECL_EXTERNAL (fn) = 1;
1403 TREE_PUBLIC (fn) = 1;
1404 DECL_ARTIFICIAL (fn) = 1;
1405 TREE_NOTHROW (fn) = 1;
1406
1407 block_move_fn = fn;
1408 }
1409
1410 if (asmspec)
1411 set_user_assembler_name (block_move_fn, asmspec);
1412 }
1413
1414 static tree
1415 emit_block_move_libcall_fn (int for_call)
1416 {
1417 static bool emitted_extern;
1418
1419 if (!block_move_fn)
1420 init_block_move_fn (NULL);
1421
1422 if (for_call && !emitted_extern)
1423 {
1424 emitted_extern = true;
1425 make_decl_rtl (block_move_fn);
1426 assemble_external (block_move_fn);
1427 }
1428
1429 return block_move_fn;
1430 }
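/* Usage sketch (hypothetical assembler name): a port that provides
   its own memcpy under a different symbol would call, during target
   initialization,

     init_block_move_fn ("__my_memcpy");

   after which the libcall built above resolves to that symbol.  */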
1431
1432 /* A subroutine of emit_block_move. Copy the data via an explicit
1433 loop. This is used only when libcalls are forbidden. */
1434 /* ??? It'd be nice to copy in hunks larger than QImode. */
1435
1436 static void
1437 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1438 unsigned int align ATTRIBUTE_UNUSED)
1439 {
1440 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1441 enum machine_mode iter_mode;
1442
1443 iter_mode = GET_MODE (size);
1444 if (iter_mode == VOIDmode)
1445 iter_mode = word_mode;
1446
1447 top_label = gen_label_rtx ();
1448 cmp_label = gen_label_rtx ();
1449 iter = gen_reg_rtx (iter_mode);
1450
1451 emit_move_insn (iter, const0_rtx);
1452
1453 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1454 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1455 do_pending_stack_adjust ();
1456
1457 emit_jump (cmp_label);
1458 emit_label (top_label);
1459
1460 tmp = convert_modes (Pmode, iter_mode, iter, true);
1461 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1462 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1463 x = change_address (x, QImode, x_addr);
1464 y = change_address (y, QImode, y_addr);
1465
1466 emit_move_insn (x, y);
1467
1468 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1469 true, OPTAB_LIB_WIDEN);
1470 if (tmp != iter)
1471 emit_move_insn (iter, tmp);
1472
1473 emit_label (cmp_label);
1474
1475 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1476 true, top_label);
1477 }
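/* The RTL emitted above corresponds to this C skeleton (illustrative):

     iter = 0;
     goto cmp;
   top:
     *((char *) x_addr + iter) = *((char *) y_addr + iter);
     iter = iter + 1;
   cmp:
     if (iter < size) goto top;
*/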
1478 \f
1479 /* Copy all or part of a value X into registers starting at REGNO.
1480 The number of registers to be filled is NREGS. */
1481
1482 void
1483 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1484 {
1485 int i;
1486 #ifdef HAVE_load_multiple
1487 rtx pat;
1488 rtx last;
1489 #endif
1490
1491 if (nregs == 0)
1492 return;
1493
1494 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1495 x = validize_mem (force_const_mem (mode, x));
1496
1497 /* See if the machine can do this with a load multiple insn. */
1498 #ifdef HAVE_load_multiple
1499 if (HAVE_load_multiple)
1500 {
1501 last = get_last_insn ();
1502 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1503 GEN_INT (nregs));
1504 if (pat)
1505 {
1506 emit_insn (pat);
1507 return;
1508 }
1509 else
1510 delete_insns_since (last);
1511 }
1512 #endif
1513
1514 for (i = 0; i < nregs; i++)
1515 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1516 operand_subword_force (x, i, mode));
1517 }
1518
1519 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1520 The number of registers to be filled is NREGS. */
1521
1522 void
1523 move_block_from_reg (int regno, rtx x, int nregs)
1524 {
1525 int i;
1526
1527 if (nregs == 0)
1528 return;
1529
1530 /* See if the machine can do this with a store multiple insn. */
1531 #ifdef HAVE_store_multiple
1532 if (HAVE_store_multiple)
1533 {
1534 rtx last = get_last_insn ();
1535 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1536 GEN_INT (nregs));
1537 if (pat)
1538 {
1539 emit_insn (pat);
1540 return;
1541 }
1542 else
1543 delete_insns_since (last);
1544 }
1545 #endif
1546
1547 for (i = 0; i < nregs; i++)
1548 {
1549 rtx tem = operand_subword (x, i, 1, BLKmode);
1550
1551 gcc_assert (tem);
1552
1553 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1554 }
1555 }
1556
1557 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1558 ORIG, where ORIG is a non-consecutive group of registers represented by
1559 a PARALLEL. The clone is identical to the original except in that the
1560 original set of registers is replaced by a new set of pseudo registers.
1561 The new set has the same modes as the original set. */
1562
1563 rtx
1564 gen_group_rtx (rtx orig)
1565 {
1566 int i, length;
1567 rtx *tmps;
1568
1569 gcc_assert (GET_CODE (orig) == PARALLEL);
1570
1571 length = XVECLEN (orig, 0);
1572 tmps = alloca (sizeof (rtx) * length);
1573
1574 /* Skip a NULL entry in first slot. */
1575 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1576
1577 if (i)
1578 tmps[0] = 0;
1579
1580 for (; i < length; i++)
1581 {
1582 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1583 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1584
1585 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1586 }
1587
1588 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1589 }
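/* For illustration: a register group PARALLEL as handled here looks
   like, e.g. for a 16-byte value split across two DImode registers:

     (parallel [(expr_list (reg:DI 0) (const_int 0))
                (expr_list (reg:DI 1) (const_int 8))])

   where the second operand of each EXPR_LIST is the byte offset of
   that piece within the whole value.  */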
1590
1591 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1592 except that values are placed in TMPS[i], and must later be moved
1593 into the corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1594
1595 static void
1596 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1597 {
1598 rtx src;
1599 int start, i;
1600 enum machine_mode m = GET_MODE (orig_src);
1601
1602 gcc_assert (GET_CODE (dst) == PARALLEL);
1603
1604 if (m != VOIDmode
1605 && !SCALAR_INT_MODE_P (m)
1606 && !MEM_P (orig_src)
1607 && GET_CODE (orig_src) != CONCAT)
1608 {
1609 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1610 if (imode == BLKmode)
1611 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1612 else
1613 src = gen_reg_rtx (imode);
1614 if (imode != BLKmode)
1615 src = gen_lowpart (GET_MODE (orig_src), src);
1616 emit_move_insn (src, orig_src);
1617 /* ...and back again. */
1618 if (imode != BLKmode)
1619 src = gen_lowpart (imode, src);
1620 emit_group_load_1 (tmps, dst, src, type, ssize);
1621 return;
1622 }
1623
1624 /* Check for a NULL entry, used to indicate that the parameter goes
1625 both on the stack and in registers. */
1626 if (XEXP (XVECEXP (dst, 0, 0), 0))
1627 start = 0;
1628 else
1629 start = 1;
1630
1631 /* Process the pieces. */
1632 for (i = start; i < XVECLEN (dst, 0); i++)
1633 {
1634 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1635 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1636 unsigned int bytelen = GET_MODE_SIZE (mode);
1637 int shift = 0;
1638
1639 /* Handle trailing fragments that run over the size of the struct. */
1640 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1641 {
1642 /* Arrange to shift the fragment to where it belongs.
1643 extract_bit_field loads to the lsb of the reg. */
1644 if (
1645 #ifdef BLOCK_REG_PADDING
1646 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1647 == (BYTES_BIG_ENDIAN ? upward : downward)
1648 #else
1649 BYTES_BIG_ENDIAN
1650 #endif
1651 )
1652 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1653 bytelen = ssize - bytepos;
1654 gcc_assert (bytelen > 0);
1655 }
1656
1657 /* If we won't be loading directly from memory, protect the real source
1658 from strange tricks we might play; but make sure that the source can
1659 be loaded directly into the destination. */
1660 src = orig_src;
1661 if (!MEM_P (orig_src)
1662 && (!CONSTANT_P (orig_src)
1663 || (GET_MODE (orig_src) != mode
1664 && GET_MODE (orig_src) != VOIDmode)))
1665 {
1666 if (GET_MODE (orig_src) == VOIDmode)
1667 src = gen_reg_rtx (mode);
1668 else
1669 src = gen_reg_rtx (GET_MODE (orig_src));
1670
1671 emit_move_insn (src, orig_src);
1672 }
1673
1674 /* Optimize the access just a bit. */
1675 if (MEM_P (src)
1676 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1677 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1678 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1679 && bytelen == GET_MODE_SIZE (mode))
1680 {
1681 tmps[i] = gen_reg_rtx (mode);
1682 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1683 }
1684 else if (COMPLEX_MODE_P (mode)
1685 && GET_MODE (src) == mode
1686 && bytelen == GET_MODE_SIZE (mode))
1687 /* Let emit_move_complex do the bulk of the work. */
1688 tmps[i] = src;
1689 else if (GET_CODE (src) == CONCAT)
1690 {
1691 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1692 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1693
1694 if ((bytepos == 0 && bytelen == slen0)
1695 || (bytepos != 0 && bytepos + bytelen <= slen))
1696 {
1697 /* The following assumes that the concatenated objects all
1698 have the same size. In this case, a simple calculation
1699 can be used to determine the object and the bit field
1700 to be extracted. */
1701 tmps[i] = XEXP (src, bytepos / slen0);
1702 if (! CONSTANT_P (tmps[i])
1703 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1704 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1705 (bytepos % slen0) * BITS_PER_UNIT,
1706 1, NULL_RTX, mode, mode);
1707 }
1708 else
1709 {
1710 rtx mem;
1711
1712 gcc_assert (!bytepos);
1713 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1714 emit_move_insn (mem, src);
1715 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1716 0, 1, NULL_RTX, mode, mode);
1717 }
1718 }
1719 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1720 SIMD register, which is currently broken. While we get GCC
1721 to emit proper RTL for these cases, let's dump to memory. */
1722 else if (VECTOR_MODE_P (GET_MODE (dst))
1723 && REG_P (src))
1724 {
1725 int slen = GET_MODE_SIZE (GET_MODE (src));
1726 rtx mem;
1727
1728 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1729 emit_move_insn (mem, src);
1730 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1731 }
1732 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1733 && XVECLEN (dst, 0) > 1)
1734 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1735 else if (CONSTANT_P (src)
1736 || (REG_P (src) && GET_MODE (src) == mode))
1737 tmps[i] = src;
1738 else
1739 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1740 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1741 mode, mode);
1742
1743 if (shift)
1744 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1745 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1746 }
1747 }
1748
1749 /* Emit code to move a block SRC of type TYPE to a block DST,
1750 where DST is non-consecutive registers represented by a PARALLEL.
1751 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1752 if not known. */
1753
1754 void
1755 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1756 {
1757 rtx *tmps;
1758 int i;
1759
1760 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1761 emit_group_load_1 (tmps, dst, src, type, ssize);
1762
1763 /* Copy the extracted pieces into the proper (probable) hard regs. */
1764 for (i = 0; i < XVECLEN (dst, 0); i++)
1765 {
1766 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1767 if (d == NULL)
1768 continue;
1769 emit_move_insn (d, tmps[i]);
1770 }
1771 }
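/* Illustrative sketch only (the register numbers and sizes are invented
   for the example, not taken from any target): a backend describing a
   16-byte BLKmode value that lives in two DImode hard registers, say 0
   and 1, could build the PARALLEL and load it from a memory rtx SRC_MEM
   of type TYPE like so:

       rtvec v = rtvec_alloc (2);
       RTVEC_ELT (v, 0) = gen_rtx_EXPR_LIST (VOIDmode,
                                             gen_rtx_REG (DImode, 0),
                                             GEN_INT (0));
       RTVEC_ELT (v, 1) = gen_rtx_EXPR_LIST (VOIDmode,
                                             gen_rtx_REG (DImode, 1),
                                             GEN_INT (8));
       emit_group_load (gen_rtx_PARALLEL (BLKmode, v), src_mem, type, 16);

   The second operand of each EXPR_LIST is the byte offset of that
   register's piece within the whole value.  */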
1772
1773 /* Similar, but load SRC into new pseudos in a format that looks like
1774 PARALLEL. This can later be fed to emit_group_move to get things
1775 in the right place. */
1776
1777 rtx
1778 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1779 {
1780 rtvec vec;
1781 int i;
1782
1783 vec = rtvec_alloc (XVECLEN (parallel, 0));
1784 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1785
1786 /* Convert the vector to look just like the original PARALLEL, except
1787 with the computed values. */
1788 for (i = 0; i < XVECLEN (parallel, 0); i++)
1789 {
1790 rtx e = XVECEXP (parallel, 0, i);
1791 rtx d = XEXP (e, 0);
1792
1793 if (d)
1794 {
1795 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1796 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1797 }
1798 RTVEC_ELT (vec, i) = e;
1799 }
1800
1801 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1802 }
1803
1804 /* Emit code to move a block SRC to block DST, where SRC and DST are
1805 non-consecutive groups of registers, each represented by a PARALLEL. */
1806
1807 void
1808 emit_group_move (rtx dst, rtx src)
1809 {
1810 int i;
1811
1812 gcc_assert (GET_CODE (src) == PARALLEL
1813 && GET_CODE (dst) == PARALLEL
1814 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1815
1816 /* Skip first entry if NULL. */
1817 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1818 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1819 XEXP (XVECEXP (src, 0, i), 0));
1820 }
1821
1822 /* Move a group of registers represented by a PARALLEL into pseudos. */
1823
1824 rtx
1825 emit_group_move_into_temps (rtx src)
1826 {
1827 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1828 int i;
1829
1830 for (i = 0; i < XVECLEN (src, 0); i++)
1831 {
1832 rtx e = XVECEXP (src, 0, i);
1833 rtx d = XEXP (e, 0);
1834
1835 if (d)
1836 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1837 RTVEC_ELT (vec, i) = e;
1838 }
1839
1840 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1841 }
1842
1843 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1844 where SRC is non-consecutive registers represented by a PARALLEL.
1845 SSIZE represents the total size of block ORIG_DST, or -1 if not
1846 known. */
1847
1848 void
1849 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1850 {
1851 rtx *tmps, dst;
1852 int start, i;
1853 enum machine_mode m = GET_MODE (orig_dst);
1854
1855 gcc_assert (GET_CODE (src) == PARALLEL);
1856
1857 if (!SCALAR_INT_MODE_P (m)
1858 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1859 {
1860 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1861 if (imode == BLKmode)
1862 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1863 else
1864 dst = gen_reg_rtx (imode);
1865 emit_group_store (dst, src, type, ssize);
1866 if (imode != BLKmode)
1867 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1868 emit_move_insn (orig_dst, dst);
1869 return;
1870 }
1871
1872 /* Check for a NULL entry, used to indicate that the parameter goes
1873 both on the stack and in registers. */
1874 if (XEXP (XVECEXP (src, 0, 0), 0))
1875 start = 0;
1876 else
1877 start = 1;
1878
1879 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1880
1881 /* Copy the (probable) hard regs into pseudos. */
1882 for (i = start; i < XVECLEN (src, 0); i++)
1883 {
1884 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1885 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1886 emit_move_insn (tmps[i], reg);
1887 }
1888
1889 /* If we won't be storing directly into memory, protect the real destination
1890 from strange tricks we might play. */
1891 dst = orig_dst;
1892 if (GET_CODE (dst) == PARALLEL)
1893 {
1894 rtx temp;
1895
1896 /* We can get a PARALLEL dst if there is a conditional expression in
1897 a return statement. In that case, the dst and src are the same,
1898 so no action is necessary. */
1899 if (rtx_equal_p (dst, src))
1900 return;
1901
1902 /* It is unclear if we can ever reach here, but we may as well handle
1903 it. Allocate a temporary, and split this into a store/load to/from
1904 the temporary. */
1905
1906 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1907 emit_group_store (temp, src, type, ssize);
1908 emit_group_load (dst, temp, type, ssize);
1909 return;
1910 }
1911 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1912 {
1913 dst = gen_reg_rtx (GET_MODE (orig_dst));
1914 /* Make life a bit easier for combine. */
1915 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
1916 }
1917
1918 /* Process the pieces. */
1919 for (i = start; i < XVECLEN (src, 0); i++)
1920 {
1921 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1922 enum machine_mode mode = GET_MODE (tmps[i]);
1923 unsigned int bytelen = GET_MODE_SIZE (mode);
1924 rtx dest = dst;
1925
1926 /* Handle trailing fragments that run over the size of the struct. */
1927 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1928 {
1929 /* store_bit_field always takes its value from the lsb.
1930 Move the fragment to the lsb if it's not already there. */
1931 if (
1932 #ifdef BLOCK_REG_PADDING
1933 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1934 == (BYTES_BIG_ENDIAN ? upward : downward)
1935 #else
1936 BYTES_BIG_ENDIAN
1937 #endif
1938 )
1939 {
1940 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1941 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1942 build_int_cst (NULL_TREE, shift),
1943 tmps[i], 0);
1944 }
1945 bytelen = ssize - bytepos;
1946 }
1947
1948 if (GET_CODE (dst) == CONCAT)
1949 {
1950 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1951 dest = XEXP (dst, 0);
1952 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1953 {
1954 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1955 dest = XEXP (dst, 1);
1956 }
1957 else
1958 {
1959 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
1960 dest = assign_stack_temp (GET_MODE (dest),
1961 GET_MODE_SIZE (GET_MODE (dest)), 0);
1962 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1963 tmps[i]);
1964 dst = dest;
1965 break;
1966 }
1967 }
1968
1969 /* Optimize the access just a bit. */
1970 if (MEM_P (dest)
1971 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1972 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1973 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1974 && bytelen == GET_MODE_SIZE (mode))
1975 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1976 else
1977 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1978 mode, tmps[i]);
1979 }
1980
1981 /* Copy from the pseudo into the (probable) hard reg. */
1982 if (orig_dst != dst)
1983 emit_move_insn (orig_dst, dst);
1984 }
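/* Continuing the sketch given after emit_group_load (same invented
   registers): the reverse direction stores the two-register PARALLEL
   back into a 16-byte memory block:

       emit_group_store (dst_mem, par, type, 16);

   where PAR is the PARALLEL built there and DST_MEM a BLKmode MEM.  */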
1985
1986 /* Generate code to copy a BLKmode object of TYPE out of a
1987 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1988 is null, a stack temporary is created. TGTBLK is returned.
1989
1990 The purpose of this routine is to handle functions that return
1991 BLKmode structures in registers. Some machines (the PA for example)
1992 want to return all small structures in registers regardless of the
1993 structure's alignment. */
1994
1995 rtx
1996 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
1997 {
1998 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1999 rtx src = NULL, dst = NULL;
2000 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2001 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2002
2003 if (tgtblk == 0)
2004 {
2005 tgtblk = assign_temp (build_qualified_type (type,
2006 (TYPE_QUALS (type)
2007 | TYPE_QUAL_CONST)),
2008 0, 1, 1);
2009 preserve_temp_slots (tgtblk);
2010 }
2011
2012 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2013 into a new pseudo which is a full word. */
2014
2015 if (GET_MODE (srcreg) != BLKmode
2016 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2017 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2018
2019 /* If the structure doesn't take up a whole number of words, see whether
2020 SRCREG is padded on the left or on the right. If it's on the left,
2021 set PADDING_CORRECTION to the number of bits to skip.
2022
2023 In most ABIs, the structure will be returned at the least significant end of
2024 the register, which translates to right padding on little-endian
2025 targets and left padding on big-endian targets. The opposite
2026 holds if the structure is returned at the most significant
2027 end of the register. */
2028 if (bytes % UNITS_PER_WORD != 0
2029 && (targetm.calls.return_in_msb (type)
2030 ? !BYTES_BIG_ENDIAN
2031 : BYTES_BIG_ENDIAN))
2032 padding_correction
2033 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
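  /* Worked example with illustrative numbers: on a target with 32-bit
     words (UNITS_PER_WORD == 4, BITS_PER_UNIT == 8), a 6-byte structure
     that is padded on the left yields
     padding_correction = 32 - (6 % 4) * 8 = 16 bits to skip.  */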
2034
2035 /* Copy the structure BITSIZE bits at a time.
2036
2037 We could probably emit more efficient code for machines which do not use
2038 strict alignment, but it doesn't seem worth the effort at the current
2039 time. */
2040 for (bitpos = 0, xbitpos = padding_correction;
2041 bitpos < bytes * BITS_PER_UNIT;
2042 bitpos += bitsize, xbitpos += bitsize)
2043 {
2044 /* We need a new source operand each time xbitpos is on a
2045 word boundary and when xbitpos == padding_correction
2046 (the first time through). */
2047 if (xbitpos % BITS_PER_WORD == 0
2048 || xbitpos == padding_correction)
2049 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2050 GET_MODE (srcreg));
2051
2052 /* We need a new destination operand each time bitpos is on
2053 a word boundary. */
2054 if (bitpos % BITS_PER_WORD == 0)
2055 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2056
2057 /* Use xbitpos for the source extraction (right justified) and
2058 bitpos for the destination store (left justified). */
2059 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2060 extract_bit_field (src, bitsize,
2061 xbitpos % BITS_PER_WORD, 1,
2062 NULL_RTX, word_mode, word_mode));
2063 }
2064
2065 return tgtblk;
2066 }
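/* Usage sketch (the register and mode are hypothetical, for
   illustration): expanding a call whose BLKmode result arrives in hard
   register 0 might do

       rtx blk = copy_blkmode_from_reg (NULL_RTX, gen_rtx_REG (DImode, 0),
                                        type);

   which allocates a preserved stack temporary, fills it word by word as
   above, and returns it.  */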
2067
2068 /* Add a USE expression for REG to the (possibly empty) list pointed
2069 to by CALL_FUSAGE. REG must denote a hard register. */
2070
2071 void
2072 use_reg (rtx *call_fusage, rtx reg)
2073 {
2074 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2075
2076 *call_fusage
2077 = gen_rtx_EXPR_LIST (VOIDmode,
2078 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2079 }
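/* Example (illustrative; assumes hard register 2 exists on the target):

       rtx fusage = NULL_RTX;
       use_reg (&fusage, gen_rtx_REG (SImode, 2));

   FUSAGE can then be attached to a CALL_INSN via
   CALL_INSN_FUNCTION_USAGE so that data-flow sees the register use.  */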
2080
2081 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2082 starting at REGNO. All of these registers must be hard registers. */
2083
2084 void
2085 use_regs (rtx *call_fusage, int regno, int nregs)
2086 {
2087 int i;
2088
2089 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2090
2091 for (i = 0; i < nregs; i++)
2092 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2093 }
2094
2095 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2096 PARALLEL REGS. This is for calls that pass values in multiple
2097 non-contiguous locations. The Irix 6 ABI has examples of this. */
2098
2099 void
2100 use_group_regs (rtx *call_fusage, rtx regs)
2101 {
2102 int i;
2103
2104 for (i = 0; i < XVECLEN (regs, 0); i++)
2105 {
2106 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2107
2108 /* A NULL entry means the parameter goes both on the stack and in
2109 registers. This can also be a MEM for targets that pass values
2110 partially on the stack and partially in registers. */
2111 if (reg != 0 && REG_P (reg))
2112 use_reg (call_fusage, reg);
2113 }
2114 }
2115 \f
2116
2117 /* Determine whether the LEN bytes generated by CONSTFUN can be
2118 stored to memory using several move instructions. CONSTFUNDATA is
2119 a pointer which will be passed as argument in every CONSTFUN call.
2120 ALIGN is maximum alignment we can assume. Return nonzero if a
2121 call to store_by_pieces should succeed. */
2122
2123 int
2124 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2125 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2126 void *constfundata, unsigned int align)
2127 {
2128 unsigned HOST_WIDE_INT l;
2129 unsigned int max_size;
2130 HOST_WIDE_INT offset = 0;
2131 enum machine_mode mode, tmode;
2132 enum insn_code icode;
2133 int reverse;
2134 rtx cst;
2135
2136 if (len == 0)
2137 return 1;
2138
2139 if (! STORE_BY_PIECES_P (len, align))
2140 return 0;
2141
2142 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2143 if (align >= GET_MODE_ALIGNMENT (tmode))
2144 align = GET_MODE_ALIGNMENT (tmode);
2145 else
2146 {
2147 enum machine_mode xmode;
2148
2149 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2150 tmode != VOIDmode;
2151 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2152 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2153 || SLOW_UNALIGNED_ACCESS (tmode, align))
2154 break;
2155
2156 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2157 }
2158
2159 /* We would first store what we can in the largest integer mode, then go to
2160 successively smaller modes. */
2161
2162 for (reverse = 0;
2163 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2164 reverse++)
2165 {
2166 l = len;
2167 mode = VOIDmode;
2168 max_size = STORE_MAX_PIECES + 1;
2169 while (max_size > 1)
2170 {
2171 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2172 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2173 if (GET_MODE_SIZE (tmode) < max_size)
2174 mode = tmode;
2175
2176 if (mode == VOIDmode)
2177 break;
2178
2179 icode = mov_optab->handlers[(int) mode].insn_code;
2180 if (icode != CODE_FOR_nothing
2181 && align >= GET_MODE_ALIGNMENT (mode))
2182 {
2183 unsigned int size = GET_MODE_SIZE (mode);
2184
2185 while (l >= size)
2186 {
2187 if (reverse)
2188 offset -= size;
2189
2190 cst = (*constfun) (constfundata, offset, mode);
2191 if (!LEGITIMATE_CONSTANT_P (cst))
2192 return 0;
2193
2194 if (!reverse)
2195 offset += size;
2196
2197 l -= size;
2198 }
2199 }
2200
2201 max_size = GET_MODE_SIZE (mode);
2202 }
2203
2204 /* The code above should have handled everything. */
2205 gcc_assert (!l);
2206 }
2207
2208 return 1;
2209 }
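/* A minimal CONSTFUN sketch, modeled on clear_by_pieces_1 below: a
   callback that fills memory with a repeated all-ones byte can ignore
   OFFSET and produce the constant directly in each requested mode:

       static rtx
       all_ones_cst (void *data ATTRIBUTE_UNUSED,
                     HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                     enum machine_mode mode)
       {
         return gen_int_mode (-1, mode);
       }

   The name ALL_ONES_CST is invented for the example.  */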
2210
2211 /* Generate several move instructions to store LEN bytes generated by
2212 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2213 pointer which will be passed as argument in every CONSTFUN call.
2214 ALIGN is maximum alignment we can assume.
2215 If ENDP is 0, return TO; if ENDP is 1, return the memory at the end,
2216 a la mempcpy; and if ENDP is 2, return the memory at the end minus
2217 one byte, a la stpcpy. */
2218
2219 rtx
2220 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2221 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2222 void *constfundata, unsigned int align, int endp)
2223 {
2224 struct store_by_pieces data;
2225
2226 if (len == 0)
2227 {
2228 gcc_assert (endp != 2);
2229 return to;
2230 }
2231
2232 gcc_assert (STORE_BY_PIECES_P (len, align));
2233 data.constfun = constfun;
2234 data.constfundata = constfundata;
2235 data.len = len;
2236 data.to = to;
2237 store_by_pieces_1 (&data, align);
2238 if (endp)
2239 {
2240 rtx to1;
2241
2242 gcc_assert (!data.reverse);
2243 if (data.autinc_to)
2244 {
2245 if (endp == 2)
2246 {
2247 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2248 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2249 else
2250 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2251 -1));
2252 }
2253 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2254 data.offset);
2255 }
2256 else
2257 {
2258 if (endp == 2)
2259 --data.offset;
2260 to1 = adjust_address (data.to, QImode, data.offset);
2261 }
2262 return to1;
2263 }
2264 else
2265 return data.to;
2266 }
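/* Typical calling pattern (a sketch; the memset- and strcpy-style
   expanders do essentially this): first ask can_store_by_pieces, then
   commit:

       if (can_store_by_pieces (len, constfun, data, align))
         dest = store_by_pieces (dest, len, constfun, data, align, 0);

   Passing ENDP as 1 or 2 instead of 0 yields the mempcpy- or
   stpcpy-style end address described above.  */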
2267
2268 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2269 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2270
2271 static void
2272 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2273 {
2274 struct store_by_pieces data;
2275
2276 if (len == 0)
2277 return;
2278
2279 data.constfun = clear_by_pieces_1;
2280 data.constfundata = NULL;
2281 data.len = len;
2282 data.to = to;
2283 store_by_pieces_1 (&data, align);
2284 }
2285
2286 /* Callback routine for clear_by_pieces.
2287 Return const0_rtx unconditionally. */
2288
2289 static rtx
2290 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2291 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2292 enum machine_mode mode ATTRIBUTE_UNUSED)
2293 {
2294 return const0_rtx;
2295 }
2296
2297 /* Subroutine of clear_by_pieces and store_by_pieces.
2298 Generate several move instructions to store LEN bytes of block TO. (A MEM
2299 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2300
2301 static void
2302 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2303 unsigned int align ATTRIBUTE_UNUSED)
2304 {
2305 rtx to_addr = XEXP (data->to, 0);
2306 unsigned int max_size = STORE_MAX_PIECES + 1;
2307 enum machine_mode mode = VOIDmode, tmode;
2308 enum insn_code icode;
2309
2310 data->offset = 0;
2311 data->to_addr = to_addr;
2312 data->autinc_to
2313 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2314 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2315
2316 data->explicit_inc_to = 0;
2317 data->reverse
2318 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2319 if (data->reverse)
2320 data->offset = data->len;
2321
2322 /* If storing requires more than two move insns,
2323 copy addresses to registers (to make displacements shorter)
2324 and use post-increment if available. */
2325 if (!data->autinc_to
2326 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2327 {
2328 /* Determine the main mode we'll be using. */
2329 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2330 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2331 if (GET_MODE_SIZE (tmode) < max_size)
2332 mode = tmode;
2333
2334 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2335 {
2336 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2337 data->autinc_to = 1;
2338 data->explicit_inc_to = -1;
2339 }
2340
2341 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2342 && ! data->autinc_to)
2343 {
2344 data->to_addr = copy_addr_to_reg (to_addr);
2345 data->autinc_to = 1;
2346 data->explicit_inc_to = 1;
2347 }
2348
2349 if (!data->autinc_to && CONSTANT_P (to_addr))
2350 data->to_addr = copy_addr_to_reg (to_addr);
2351 }
2352
2353 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2354 if (align >= GET_MODE_ALIGNMENT (tmode))
2355 align = GET_MODE_ALIGNMENT (tmode);
2356 else
2357 {
2358 enum machine_mode xmode;
2359
2360 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2361 tmode != VOIDmode;
2362 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2363 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2364 || SLOW_UNALIGNED_ACCESS (tmode, align))
2365 break;
2366
2367 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2368 }
2369
2370 /* First store what we can in the largest integer mode, then go to
2371 successively smaller modes. */
2372
2373 while (max_size > 1)
2374 {
2375 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2376 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2377 if (GET_MODE_SIZE (tmode) < max_size)
2378 mode = tmode;
2379
2380 if (mode == VOIDmode)
2381 break;
2382
2383 icode = mov_optab->handlers[(int) mode].insn_code;
2384 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2385 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2386
2387 max_size = GET_MODE_SIZE (mode);
2388 }
2389
2390 /* The code above should have handled everything. */
2391 gcc_assert (!data->len);
2392 }
2393
2394 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2395 with move instructions for mode MODE. GENFUN is the gen_... function
2396 to make a move insn for that mode. DATA has all the other info. */
2397
2398 static void
2399 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2400 struct store_by_pieces *data)
2401 {
2402 unsigned int size = GET_MODE_SIZE (mode);
2403 rtx to1, cst;
2404
2405 while (data->len >= size)
2406 {
2407 if (data->reverse)
2408 data->offset -= size;
2409
2410 if (data->autinc_to)
2411 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2412 data->offset);
2413 else
2414 to1 = adjust_address (data->to, mode, data->offset);
2415
2416 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2417 emit_insn (gen_add2_insn (data->to_addr,
2418 GEN_INT (-(HOST_WIDE_INT) size)));
2419
2420 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2421 emit_insn ((*genfun) (to1, cst));
2422
2423 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2424 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2425
2426 if (! data->reverse)
2427 data->offset += size;
2428
2429 data->len -= size;
2430 }
2431 }
2432 \f
2433 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2434 its length in bytes. */
2435
2436 rtx
2437 clear_storage (rtx object, rtx size, enum block_op_methods method)
2438 {
2439 enum machine_mode mode = GET_MODE (object);
2440 unsigned int align;
2441
2442 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2443
2444 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2445 just move a zero. Otherwise, do this a piece at a time. */
2446 if (mode != BLKmode
2447 && GET_CODE (size) == CONST_INT
2448 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2449 {
2450 rtx zero = CONST0_RTX (mode);
2451 if (zero != NULL)
2452 {
2453 emit_move_insn (object, zero);
2454 return NULL;
2455 }
2456
2457 if (COMPLEX_MODE_P (mode))
2458 {
2459 zero = CONST0_RTX (GET_MODE_INNER (mode));
2460 if (zero != NULL)
2461 {
2462 write_complex_part (object, zero, 0);
2463 write_complex_part (object, zero, 1);
2464 return NULL;
2465 }
2466 }
2467 }
2468
2469 if (size == const0_rtx)
2470 return NULL;
2471
2472 align = MEM_ALIGN (object);
2473
2474 if (GET_CODE (size) == CONST_INT
2475 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2476 clear_by_pieces (object, INTVAL (size), align);
2477 else if (set_storage_via_setmem (object, size, const0_rtx, align))
2478 ;
2479 else
2480 return clear_storage_via_libcall (object, size,
2481 method == BLOCK_OP_TAILCALL);
2482
2483 return NULL;
2484 }
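/* Example (illustrative): zeroing a 32-byte BLKmode MEM called OBJ:

       clear_storage (obj, GEN_INT (32), BLOCK_OP_NORMAL);

   The return value is NULL unless the memset libcall path was taken, in
   which case the libcall's result rtx is returned.  */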
2485
2486 /* A subroutine of clear_storage. Expand a call to memset.
2487 Return the return value of memset, 0 otherwise. */
2488
2489 static rtx
2490 clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2491 {
2492 tree call_expr, arg_list, fn, object_tree, size_tree;
2493 enum machine_mode size_mode;
2494 rtx retval;
2495
2496 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2497 place those pseudos into a VAR_DECL and use them later. */
2498
2499 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2500
2501 size_mode = TYPE_MODE (sizetype);
2502 size = convert_to_mode (size_mode, size, 1);
2503 size = copy_to_mode_reg (size_mode, size);
2504
2505 /* It is incorrect to use the libcall calling conventions to call
2506 memset in this context. This could be a user call to memset and
2507 the user may wish to examine the return value from memset. For
2508 targets where libcalls and normal calls have different conventions
2509 for returning pointers, we could end up generating incorrect code. */
2510
2511 object_tree = make_tree (ptr_type_node, object);
2512 size_tree = make_tree (sizetype, size);
2513
2514 fn = clear_storage_libcall_fn (true);
2515 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2516 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2517 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2518
2519 /* Now we have to build up the CALL_EXPR itself. */
2520 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2521 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2522 call_expr, arg_list, NULL_TREE);
2523 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2524
2525 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2526
2527 return retval;
2528 }
2529
2530 /* A subroutine of clear_storage_via_libcall. Create the tree node
2531 for the function we use for block clears. The first time FOR_CALL
2532 is true, we call assemble_external. */
2533
2534 static GTY(()) tree block_clear_fn;
2535
2536 void
2537 init_block_clear_fn (const char *asmspec)
2538 {
2539 if (!block_clear_fn)
2540 {
2541 tree fn, args;
2542
2543 fn = get_identifier ("memset");
2544 args = build_function_type_list (ptr_type_node, ptr_type_node,
2545 integer_type_node, sizetype,
2546 NULL_TREE);
2547
2548 fn = build_decl (FUNCTION_DECL, fn, args);
2549 DECL_EXTERNAL (fn) = 1;
2550 TREE_PUBLIC (fn) = 1;
2551 DECL_ARTIFICIAL (fn) = 1;
2552 TREE_NOTHROW (fn) = 1;
2553
2554 block_clear_fn = fn;
2555 }
2556
2557 if (asmspec)
2558 set_user_assembler_name (block_clear_fn, asmspec);
2559 }
2560
2561 static tree
2562 clear_storage_libcall_fn (int for_call)
2563 {
2564 static bool emitted_extern;
2565
2566 if (!block_clear_fn)
2567 init_block_clear_fn (NULL);
2568
2569 if (for_call && !emitted_extern)
2570 {
2571 emitted_extern = true;
2572 make_decl_rtl (block_clear_fn);
2573 assemble_external (block_clear_fn);
2574 }
2575
2576 return block_clear_fn;
2577 }
2578 \f
2579 /* Expand a setmem pattern; return true if successful. */
2580
2581 bool
2582 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2583 {
2584 /* Try the most limited insn first, because there's no point
2585 including more than one in the machine description unless
2586 the more limited one has some advantage. */
2587
2588 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2589 enum machine_mode mode;
2590
2591 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2592 mode = GET_MODE_WIDER_MODE (mode))
2593 {
2594 enum insn_code code = setmem_optab[(int) mode];
2595 insn_operand_predicate_fn pred;
2596
2597 if (code != CODE_FOR_nothing
2598 /* We don't need MODE to be narrower than
2599 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2600 the mode mask, as it is returned by the macro, it will
2601 definitely be less than the actual mode mask. */
2602 && ((GET_CODE (size) == CONST_INT
2603 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2604 <= (GET_MODE_MASK (mode) >> 1)))
2605 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2606 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2607 || (*pred) (object, BLKmode))
2608 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2609 || (*pred) (opalign, VOIDmode)))
2610 {
2611 rtx opsize, opchar;
2612 enum machine_mode char_mode;
2613 rtx last = get_last_insn ();
2614 rtx pat;
2615
2616 opsize = convert_to_mode (mode, size, 1);
2617 pred = insn_data[(int) code].operand[1].predicate;
2618 if (pred != 0 && ! (*pred) (opsize, mode))
2619 opsize = copy_to_mode_reg (mode, opsize);
2620
2621 opchar = val;
2622 char_mode = insn_data[(int) code].operand[2].mode;
2623 if (char_mode != VOIDmode)
2624 {
2625 opchar = convert_to_mode (char_mode, opchar, 1);
2626 pred = insn_data[(int) code].operand[2].predicate;
2627 if (pred != 0 && ! (*pred) (opchar, char_mode))
2628 opchar = copy_to_mode_reg (char_mode, opchar);
2629 }
2630
2631 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2632 if (pat)
2633 {
2634 emit_insn (pat);
2635 return true;
2636 }
2637 else
2638 delete_insns_since (last);
2639 }
2640 }
2641
2642 return false;
2643 }
2644
2645 \f
2646 /* Write to one of the components of the complex value CPLX. Write VAL to
2647 the real part if IMAG_P is false, and the imaginary part if it's true. */
2648
2649 static void
2650 write_complex_part (rtx cplx, rtx val, bool imag_p)
2651 {
2652 enum machine_mode cmode;
2653 enum machine_mode imode;
2654 unsigned ibitsize;
2655
2656 if (GET_CODE (cplx) == CONCAT)
2657 {
2658 emit_move_insn (XEXP (cplx, imag_p), val);
2659 return;
2660 }
2661
2662 cmode = GET_MODE (cplx);
2663 imode = GET_MODE_INNER (cmode);
2664 ibitsize = GET_MODE_BITSIZE (imode);
2665
2666 /* For MEMs simplify_gen_subreg may generate an invalid new address
2667 because, e.g., the original address is considered mode-dependent
2668 by the target, which restricts simplify_subreg from invoking
2669 adjust_address_nv. Instead of preparing fallback support for an
2670 invalid address, we call adjust_address_nv directly. */
2671 if (MEM_P (cplx))
2672 {
2673 emit_move_insn (adjust_address_nv (cplx, imode,
2674 imag_p ? GET_MODE_SIZE (imode) : 0),
2675 val);
2676 return;
2677 }
2678
2679 /* If the sub-object is at least word sized, then we know that subregging
2680 will work. This special case is important, since store_bit_field
2681 wants to operate on integer modes, and there's rarely an OImode to
2682 correspond to TCmode. */
2683 if (ibitsize >= BITS_PER_WORD
2684 /* For hard regs we have exact predicates. Assume we can split
2685 the original object if it spans an even number of hard regs.
2686 This special case is important for SCmode on 64-bit platforms
2687 where the natural size of floating-point regs is 32-bit. */
2688 || (REG_P (cplx)
2689 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2690 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2691 {
2692 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2693 imag_p ? GET_MODE_SIZE (imode) : 0);
2694 if (part)
2695 {
2696 emit_move_insn (part, val);
2697 return;
2698 }
2699 else
2700 /* simplify_gen_subreg may fail for sub-word MEMs. */
2701 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2702 }
2703
2704 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2705 }
2706
2707 /* Extract one of the components of the complex value CPLX. Extract the
2708 real part if IMAG_P is false, and the imaginary part if it's true. */
2709
2710 static rtx
2711 read_complex_part (rtx cplx, bool imag_p)
2712 {
2713 enum machine_mode cmode, imode;
2714 unsigned ibitsize;
2715
2716 if (GET_CODE (cplx) == CONCAT)
2717 return XEXP (cplx, imag_p);
2718
2719 cmode = GET_MODE (cplx);
2720 imode = GET_MODE_INNER (cmode);
2721 ibitsize = GET_MODE_BITSIZE (imode);
2722
2723 /* Special case reads from complex constants that got spilled to memory. */
2724 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2725 {
2726 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2727 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2728 {
2729 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2730 if (CONSTANT_CLASS_P (part))
2731 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2732 }
2733 }
2734
2735 /* For MEMs simplify_gen_subreg may generate an invalid new address
2736 because, e.g., the original address is considered mode-dependent
2737 by the target, which restricts simplify_subreg from invoking
2738 adjust_address_nv. Instead of preparing fallback support for an
2739 invalid address, we call adjust_address_nv directly. */
2740 if (MEM_P (cplx))
2741 return adjust_address_nv (cplx, imode,
2742 imag_p ? GET_MODE_SIZE (imode) : 0);
2743
2744 /* If the sub-object is at least word sized, then we know that subregging
2745 will work. This special case is important, since extract_bit_field
2746 wants to operate on integer modes, and there's rarely an OImode to
2747 correspond to TCmode. */
2748 if (ibitsize >= BITS_PER_WORD
2749 /* For hard regs we have exact predicates. Assume we can split
2750 the original object if it spans an even number of hard regs.
2751 This special case is important for SCmode on 64-bit platforms
2752 where the natural size of floating-point regs is 32-bit. */
2753 || (REG_P (cplx)
2754 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2755 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2756 {
2757 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2758 imag_p ? GET_MODE_SIZE (imode) : 0);
2759 if (ret)
2760 return ret;
2761 else
2762 /* simplify_gen_subreg may fail for sub-word MEMs. */
2763 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2764 }
2765
2766 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2767 true, NULL_RTX, imode, imode);
2768 }
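/* Sketch of how the two helpers compose (hypothetical use): swapping
   the parts of complex Y into complex X works for any representation of
   either operand:

       write_complex_part (x, read_complex_part (y, true), false);
       write_complex_part (x, read_complex_part (y, false), true);  */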
2769 \f
2770 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2771 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2772 represented in NEW_MODE. If FORCE is true, this will never happen, as
2773 we'll force-create a SUBREG if needed. */
2774
2775 static rtx
2776 emit_move_change_mode (enum machine_mode new_mode,
2777 enum machine_mode old_mode, rtx x, bool force)
2778 {
2779 rtx ret;
2780
2781 if (reload_in_progress && MEM_P (x))
2782 {
2783 /* We can't use gen_lowpart here because it may call change_address
2784 which is not appropriate if we were called when a reload was in
2785 progress. We don't have to worry about changing the address since
2786 the size in bytes is supposed to be the same. Copy the MEM to
2787 change the mode and move any substitutions from the old MEM to
2788 the new one. */
2789
2790 ret = adjust_address_nv (x, new_mode, 0);
2791 copy_replacements (x, ret);
2792 }
2793 else
2794 {
2795 /* Note that we do want simplify_subreg's behavior of validating
2796 that the new mode is ok for a hard register. If we were to use
2797 simplify_gen_subreg, we would create the subreg, but would
2798 probably run into the target not being able to implement it. */
2799 /* Except, of course, when FORCE is true, when this is exactly what
2800 we want. Which is needed for CCmodes on some targets. */
2801 if (force)
2802 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2803 else
2804 ret = simplify_subreg (new_mode, x, old_mode, 0);
2805 }
2806
2807 return ret;
2808 }
2809
2810 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2811 an integer mode of the same size as MODE. Returns the instruction
2812 emitted, or NULL if such a move could not be generated. */
2813
2814 static rtx
2815 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y)
2816 {
2817 enum machine_mode imode;
2818 enum insn_code code;
2819
2820 /* There must exist a mode of the exact size we require. */
2821 imode = int_mode_for_mode (mode);
2822 if (imode == BLKmode)
2823 return NULL_RTX;
2824
2825 /* The target must support moves in this mode. */
2826 code = mov_optab->handlers[imode].insn_code;
2827 if (code == CODE_FOR_nothing)
2828 return NULL_RTX;
2829
2830 x = emit_move_change_mode (imode, mode, x, false);
2831 if (x == NULL_RTX)
2832 return NULL_RTX;
2833 y = emit_move_change_mode (imode, mode, y, false);
2834 if (y == NULL_RTX)
2835 return NULL_RTX;
2836 return emit_insn (GEN_FCN (code) (x, y));
2837 }
2838
2839 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2840 Return an equivalent MEM that does not use an auto-increment. */
2841
2842 static rtx
2843 emit_move_resolve_push (enum machine_mode mode, rtx x)
2844 {
2845 enum rtx_code code = GET_CODE (XEXP (x, 0));
2846 HOST_WIDE_INT adjust;
2847 rtx temp;
2848
2849 adjust = GET_MODE_SIZE (mode);
2850 #ifdef PUSH_ROUNDING
2851 adjust = PUSH_ROUNDING (adjust);
2852 #endif
2853 if (code == PRE_DEC || code == POST_DEC)
2854 adjust = -adjust;
2855
2856 /* Do not use anti_adjust_stack, since we don't want to update
2857 stack_pointer_delta. */
2858 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2859 GEN_INT (adjust), stack_pointer_rtx,
2860 0, OPTAB_LIB_WIDEN);
2861 if (temp != stack_pointer_rtx)
2862 emit_move_insn (stack_pointer_rtx, temp);
2863
2864 switch (code)
2865 {
2866 case PRE_INC:
2867 case PRE_DEC:
2868 temp = stack_pointer_rtx;
2869 break;
2870 case POST_INC:
2871 temp = plus_constant (stack_pointer_rtx, -GET_MODE_SIZE (mode));
2872 break;
2873 case POST_DEC:
2874 temp = plus_constant (stack_pointer_rtx, GET_MODE_SIZE (mode));
2875 break;
2876 default:
2877 gcc_unreachable ();
2878 }
2879
2880 return replace_equiv_address (x, temp);
2881 }
2882
2883 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2884 X is known to satisfy push_operand, and MODE is known to be complex.
2885 Returns the last instruction emitted. */
2886
2887 static rtx
2888 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2889 {
2890 enum machine_mode submode = GET_MODE_INNER (mode);
2891 bool imag_first;
2892
2893 #ifdef PUSH_ROUNDING
2894 unsigned int submodesize = GET_MODE_SIZE (submode);
2895
2896 /* In case we output to the stack, but the size is smaller than the
2897 machine can push exactly, we need to use move instructions. */
2898 if (PUSH_ROUNDING (submodesize) != submodesize)
2899 {
2900 x = emit_move_resolve_push (mode, x);
2901 return emit_move_insn (x, y);
2902 }
2903 #endif
2904
2905 /* Note that the real part always precedes the imag part in memory
2906 regardless of the machine's endianness. */
2907 switch (GET_CODE (XEXP (x, 0)))
2908 {
2909 case PRE_DEC:
2910 case POST_DEC:
2911 imag_first = true;
2912 break;
2913 case PRE_INC:
2914 case POST_INC:
2915 imag_first = false;
2916 break;
2917 default:
2918 gcc_unreachable ();
2919 }
2920
2921 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2922 read_complex_part (y, imag_first));
2923 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2924 read_complex_part (y, !imag_first));
2925 }
2926
2927 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2928 MODE is known to be complex. Returns the last instruction emitted. */
2929
2930 static rtx
2931 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2932 {
2933 bool try_int;
2934
2935 /* Need to take special care for pushes, to maintain proper ordering
2936 of the data, and possibly extra padding. */
2937 if (push_operand (x, mode))
2938 return emit_move_complex_push (mode, x, y);
2939
2940 /* See if we can coerce the target into moving both values at once. */
2941
2942 /* Move floating point as parts. */
2943 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
2944 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
2945 try_int = false;
2946 /* Not possible if the values are inherently not adjacent. */
2947 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
2948 try_int = false;
2949 /* Is possible if both are registers (or subregs of registers). */
2950 else if (register_operand (x, mode) && register_operand (y, mode))
2951 try_int = true;
2952 /* If one of the operands is a memory, and alignment constraints
2953 are friendly enough, we may be able to do combined memory operations.
2954 We do not attempt this if Y is a constant because that combination is
2955 usually better with the by-parts thing below. */
2956 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2957 && (!STRICT_ALIGNMENT
2958 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2959 try_int = true;
2960 else
2961 try_int = false;
2962
2963 if (try_int)
2964 {
2965 rtx ret;
2966
2967 /* For memory to memory moves, optimal behavior can be had with the
2968 existing block move logic. */
2969 if (MEM_P (x) && MEM_P (y))
2970 {
2971 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2972 BLOCK_OP_NO_LIBCALL);
2973 return get_last_insn ();
2974 }
2975
2976 ret = emit_move_via_integer (mode, x, y);
2977 if (ret)
2978 return ret;
2979 }
2980
2981 /* Show the output dies here. This is necessary for SUBREGs
2982 of pseudos since we cannot track their lifetimes correctly;
2983 hard regs shouldn't appear here except as return values. */
2984 if (!reload_completed && !reload_in_progress
2985 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
2986 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2987
2988 write_complex_part (x, read_complex_part (y, false), false);
2989 write_complex_part (x, read_complex_part (y, true), true);
2990 return get_last_insn ();
2991 }
2992
2993 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2994 MODE is known to be MODE_CC. Returns the last instruction emitted. */
2995
2996 static rtx
2997 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
2998 {
2999 rtx ret;
3000
3001 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3002 if (mode != CCmode)
3003 {
3004 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3005 if (code != CODE_FOR_nothing)
3006 {
3007 x = emit_move_change_mode (CCmode, mode, x, true);
3008 y = emit_move_change_mode (CCmode, mode, y, true);
3009 return emit_insn (GEN_FCN (code) (x, y));
3010 }
3011 }
3012
3013 /* Otherwise, find the MODE_INT mode of the same width. */
3014 ret = emit_move_via_integer (mode, x, y);
3015 gcc_assert (ret != NULL);
3016 return ret;
3017 }
3018
3019 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3020 MODE is any multi-word or full-word mode that lacks a move_insn
3021 pattern. Note that you will get better code if you define such
3022 patterns, even if they must turn into multiple assembler instructions. */
3023
3024 static rtx
3025 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3026 {
3027 rtx last_insn = 0;
3028 rtx seq, inner;
3029 bool need_clobber;
3030 int i;
3031
3032 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3033
3034 /* If X is a push on the stack, do the push now and replace
3035 X with a reference to the stack pointer. */
3036 if (push_operand (x, mode))
3037 x = emit_move_resolve_push (mode, x);
3038
3039 /* If we are in reload, see if either operand is a MEM whose address
3040 is scheduled for replacement. */
3041 if (reload_in_progress && MEM_P (x)
3042 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3043 x = replace_equiv_address_nv (x, inner);
3044 if (reload_in_progress && MEM_P (y)
3045 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3046 y = replace_equiv_address_nv (y, inner);
3047
3048 start_sequence ();
3049
3050 need_clobber = false;
3051 for (i = 0;
3052 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3053 i++)
3054 {
3055 rtx xpart = operand_subword (x, i, 1, mode);
3056 rtx ypart = operand_subword (y, i, 1, mode);
3057
3058 /* If we can't get a part of Y, put Y into memory if it is a
3059 constant. Otherwise, force it into a register. Then we must
3060 be able to get a part of Y. */
3061 if (ypart == 0 && CONSTANT_P (y))
3062 {
3063 y = force_const_mem (mode, y);
3064 ypart = operand_subword (y, i, 1, mode);
3065 }
3066 else if (ypart == 0)
3067 ypart = operand_subword_force (y, i, mode);
3068
3069 gcc_assert (xpart && ypart);
3070
3071 need_clobber |= (GET_CODE (xpart) == SUBREG);
3072
3073 last_insn = emit_move_insn (xpart, ypart);
3074 }
3075
3076 seq = get_insns ();
3077 end_sequence ();
3078
3079 /* Show the output dies here. This is necessary for SUBREGs
3080 of pseudos since we cannot track their lifetimes correctly;
3081 hard regs shouldn't appear here except as return values.
3082 We never want to emit such a clobber after reload. */
3083 if (x != y
3084 && ! (reload_in_progress || reload_completed)
3085 && need_clobber != 0)
3086 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3087
3088 emit_insn (seq);
3089
3090 return last_insn;
3091 }
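/* For instance (schematic RTL with invented pseudo numbers; the word
   order and exact form depend on the target): a DImode copy on a 32-bit
   target that lacks a DImode move pattern comes out of here as
   word-sized pieces, preceded by a clobber when SUBREGs are involved:

       (clobber (reg:DI 100))
       (set (subreg:SI (reg:DI 100) 0) (subreg:SI (reg:DI 101) 0))
       (set (subreg:SI (reg:DI 100) 4) (subreg:SI (reg:DI 101) 4))  */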
3092
3093 /* Low level part of emit_move_insn.
3094 Called just like emit_move_insn, but assumes X and Y
3095 are basically valid. */
3096
3097 rtx
3098 emit_move_insn_1 (rtx x, rtx y)
3099 {
3100 enum machine_mode mode = GET_MODE (x);
3101 enum insn_code code;
3102
3103 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3104
3105 code = mov_optab->handlers[mode].insn_code;
3106 if (code != CODE_FOR_nothing)
3107 return emit_insn (GEN_FCN (code) (x, y));
3108
3109 /* Expand complex moves by moving real part and imag part. */
3110 if (COMPLEX_MODE_P (mode))
3111 return emit_move_complex (mode, x, y);
3112
3113 if (GET_MODE_CLASS (mode) == MODE_CC)
3114 return emit_move_ccmode (mode, x, y);
3115
3116 /* Try using a move pattern for the corresponding integer mode. This is
3117 only safe when simplify_subreg can convert MODE constants into integer
3118 constants. At present, it can only do this reliably if the value
3119 fits within a HOST_WIDE_INT. */
3120 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3121 {
3122 rtx ret = emit_move_via_integer (mode, x, y);
3123 if (ret)
3124 return ret;
3125 }
3126
3127 return emit_move_multi_word (mode, x, y);
3128 }
3129
3130 /* Generate code to copy Y into X.
3131 Both Y and X must have the same mode, except that
3132 Y can be a constant with VOIDmode.
3133 This mode cannot be BLKmode; use emit_block_move for that.
3134
3135 Return the last instruction emitted. */
3136
3137 rtx
3138 emit_move_insn (rtx x, rtx y)
3139 {
3140 enum machine_mode mode = GET_MODE (x);
3141 rtx y_cst = NULL_RTX;
3142 rtx last_insn, set;
3143
3144 gcc_assert (mode != BLKmode
3145 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3146
3147 if (CONSTANT_P (y))
3148 {
3149 if (optimize
3150 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3151 && (last_insn = compress_float_constant (x, y)))
3152 return last_insn;
3153
3154 y_cst = y;
3155
3156 if (!LEGITIMATE_CONSTANT_P (y))
3157 {
3158 y = force_const_mem (mode, y);
3159
3160 /* If the target's cannot_force_const_mem prevented the spill,
3161 assume that the target's move expanders will also take care
3162 of the non-legitimate constant. */
3163 if (!y)
3164 y = y_cst;
3165 }
3166 }
3167
3168 /* If X or Y are memory references, verify that their addresses are valid
3169 for the machine. */
3170 if (MEM_P (x)
3171 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3172 && ! push_operand (x, GET_MODE (x)))
3173 || (flag_force_addr
3174 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3175 x = validize_mem (x);
3176
3177 if (MEM_P (y)
3178 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3179 || (flag_force_addr
3180 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3181 y = validize_mem (y);
3182
3183 gcc_assert (mode != BLKmode);
3184
3185 last_insn = emit_move_insn_1 (x, y);
3186
3187 if (y_cst && REG_P (x)
3188 && (set = single_set (last_insn)) != NULL_RTX
3189 && SET_DEST (set) == x
3190 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3191 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3192
3193 return last_insn;
3194 }
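/* Example (illustrative): loading a constant into a fresh pseudo,

       rtx reg = gen_reg_rtx (SImode);
       rtx insn = emit_move_insn (reg, GEN_INT (42));

   If the constant had to be spilled to memory or otherwise rewritten,
   the code above attaches a REG_EQUAL note recording the original
   value, which later passes such as CSE can exploit.  */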
3195
3196 /* If Y is representable exactly in a narrower mode, and the target can
3197 perform the extension directly from constant or memory, then emit the
3198 move as an extension. */
3199
3200 static rtx
3201 compress_float_constant (rtx x, rtx y)
3202 {
3203 enum machine_mode dstmode = GET_MODE (x);
3204 enum machine_mode orig_srcmode = GET_MODE (y);
3205 enum machine_mode srcmode;
3206 REAL_VALUE_TYPE r;
3207 int oldcost, newcost;
3208
3209 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3210
3211 if (LEGITIMATE_CONSTANT_P (y))
3212 oldcost = rtx_cost (y, SET);
3213 else
3214 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3215
3216 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3217 srcmode != orig_srcmode;
3218 srcmode = GET_MODE_WIDER_MODE (srcmode))
3219 {
3220 enum insn_code ic;
3221 rtx trunc_y, last_insn;
3222
3223 /* Skip if the target can't extend this way. */
3224 ic = can_extend_p (dstmode, srcmode, 0);
3225 if (ic == CODE_FOR_nothing)
3226 continue;
3227
3228 /* Skip if the narrowed value isn't exact. */
3229 if (! exact_real_truncate (srcmode, &r))
3230 continue;
3231
3232 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3233
3234 if (LEGITIMATE_CONSTANT_P (trunc_y))
3235 {
3236 /* Skip if the target needs extra instructions to perform
3237 the extension. */
3238 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3239 continue;
3240 /* This is valid, but may not be cheaper than the original. */
3241 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3242 if (oldcost < newcost)
3243 continue;
3244 }
3245 else if (float_extend_from_mem[dstmode][srcmode])
3246 {
3247 trunc_y = force_const_mem (srcmode, trunc_y);
3248 /* This is valid, but may not be cheaper than the original. */
3249 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3250 if (oldcost < newcost)
3251 continue;
3252 trunc_y = validize_mem (trunc_y);
3253 }
3254 else
3255 continue;
3256
3257 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3258 last_insn = get_last_insn ();
3259
3260 if (REG_P (x))
3261 set_unique_reg_note (last_insn, REG_EQUAL, y);
3262
3263 return last_insn;
3264 }
3265
3266 return NULL_RTX;
3267 }
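/* Worked example (assuming the usual IEEE formats): a move of the
   DFmode constant 1.5 can be emitted as an SFmode load plus extension,
   because 1.5 truncates to SFmode exactly; schematically

       (set (reg:DF d) (float_extend:DF (mem:SF (symbol_ref ...))))

   whereas the DFmode approximation of 0.1 does not truncate to SFmode
   exactly and so keeps its DFmode form.  */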
3268 \f
3269 /* Pushing data onto the stack. */
3270
3271 /* Push a block of length SIZE (perhaps variable)
3272 and return an rtx to address the beginning of the block.
3273 The value may be virtual_outgoing_args_rtx.
3274
3275 EXTRA is the number of bytes of padding to push in addition to SIZE.
3276 BELOW nonzero means this padding comes at low addresses;
3277 otherwise, the padding comes at high addresses. */
3278
3279 rtx
3280 push_block (rtx size, int extra, int below)
3281 {
3282 rtx temp;
3283
3284 size = convert_modes (Pmode, ptr_mode, size, 1);
3285 if (CONSTANT_P (size))
3286 anti_adjust_stack (plus_constant (size, extra));
3287 else if (REG_P (size) && extra == 0)
3288 anti_adjust_stack (size);
3289 else
3290 {
3291 temp = copy_to_mode_reg (Pmode, size);
3292 if (extra != 0)
3293 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3294 temp, 0, OPTAB_LIB_WIDEN);
3295 anti_adjust_stack (temp);
3296 }
3297
3298 #ifndef STACK_GROWS_DOWNWARD
3299 if (0)
3300 #else
3301 if (1)
3302 #endif
3303 {
3304 temp = virtual_outgoing_args_rtx;
3305 if (extra != 0 && below)
3306 temp = plus_constant (temp, extra);
3307 }
3308 else
3309 {
3310 if (GET_CODE (size) == CONST_INT)
3311 temp = plus_constant (virtual_outgoing_args_rtx,
3312 -INTVAL (size) - (below ? 0 : extra));
3313 else if (extra != 0 && !below)
3314 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3315 negate_rtx (Pmode, plus_constant (size, extra)));
3316 else
3317 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3318 negate_rtx (Pmode, size));
3319 }
3320
3321 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3322 }
3323
3324 #ifdef PUSH_ROUNDING
3325
3326 /* Emit single push insn. */
3327
3328 static void
3329 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3330 {
3331 rtx dest_addr;
3332 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3333 rtx dest;
3334 enum insn_code icode;
3335 insn_operand_predicate_fn pred;
3336
3337 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3338 /* If there is a push pattern, use it. Otherwise try the old way of
3339 throwing a MEM representing the push operation to the move expander. */
3340 icode = push_optab->handlers[(int) mode].insn_code;
3341 if (icode != CODE_FOR_nothing)
3342 {
3343 if (((pred = insn_data[(int) icode].operand[0].predicate)
3344 && !((*pred) (x, mode))))
3345 x = force_reg (mode, x);
3346 emit_insn (GEN_FCN (icode) (x));
3347 return;
3348 }
3349 if (GET_MODE_SIZE (mode) == rounded_size)
3350 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3351 /* If we are to pad downward, adjust the stack pointer first and
3352 then store X into the stack location using an offset. This is
3353 because emit_move_insn does not know how to pad; it does not have
3354 access to type. */
3355 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3356 {
3357 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3358 HOST_WIDE_INT offset;
3359
3360 emit_move_insn (stack_pointer_rtx,
3361 expand_binop (Pmode,
3362 #ifdef STACK_GROWS_DOWNWARD
3363 sub_optab,
3364 #else
3365 add_optab,
3366 #endif
3367 stack_pointer_rtx,
3368 GEN_INT (rounded_size),
3369 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3370
3371 offset = (HOST_WIDE_INT) padding_size;
3372 #ifdef STACK_GROWS_DOWNWARD
3373 if (STACK_PUSH_CODE == POST_DEC)
3374 /* We have already decremented the stack pointer, so get the
3375 previous value. */
3376 offset += (HOST_WIDE_INT) rounded_size;
3377 #else
3378 if (STACK_PUSH_CODE == POST_INC)
3379 /* We have already incremented the stack pointer, so get the
3380 previous value. */
3381 offset -= (HOST_WIDE_INT) rounded_size;
3382 #endif
3383 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3384 }
3385 else
3386 {
3387 #ifdef STACK_GROWS_DOWNWARD
3388 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3389 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3390 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3391 #else
3392 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3393 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3394 GEN_INT (rounded_size));
3395 #endif
3396 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3397 }
3398
3399 dest = gen_rtx_MEM (mode, dest_addr);
3400
3401 if (type != 0)
3402 {
3403 set_mem_attributes (dest, type, 1);
3404
3405 if (flag_optimize_sibling_calls)
3406 /* Function incoming arguments may overlap with sibling call
3407 outgoing arguments and we cannot allow reordering of reads
3408 from function arguments with stores to outgoing arguments
3409 of sibling calls. */
3410 set_mem_alias_set (dest, 0);
3411 }
3412 emit_move_insn (dest, x);
3413 }
3414 #endif
3415
3416 /* Generate code to push X onto the stack, assuming it has mode MODE and
3417 type TYPE.
3418 MODE is redundant except when X is a CONST_INT (since they don't
3419 carry mode info).
3420 SIZE is an rtx for the size of data to be copied (in bytes),
3421 needed only if X is BLKmode.
3422
3423 ALIGN (in bits) is maximum alignment we can assume.
3424
3425 If PARTIAL and REG are both nonzero, then copy that many of the first
3426 bytes of X into registers starting with REG, and push the rest of X.
3427 The amount of space pushed is decreased by PARTIAL bytes.
3428 REG must be a hard register in this case.
3429 If REG is zero but PARTIAL is not, take all other actions for an
3430 argument partially in registers, but do not actually load any
3431 registers.
3432
3433 EXTRA is the amount in bytes of extra space to leave next to this arg.
3434 This is ignored if an argument block has already been allocated.
3435
3436 On a machine that lacks real push insns, ARGS_ADDR is the address of
3437 the bottom of the argument block for this call. We use indexing off there
3438 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3439 argument block has not been preallocated.
3440
3441 ARGS_SO_FAR is the size of args previously pushed for this call.
3442
3443 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3444 for arguments passed in registers. If nonzero, it will be the number
3445 of bytes required. */
3446
3447 void
3448 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3449 unsigned int align, int partial, rtx reg, int extra,
3450 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3451 rtx alignment_pad)
3452 {
3453 rtx xinner;
3454 enum direction stack_direction
3455 #ifdef STACK_GROWS_DOWNWARD
3456 = downward;
3457 #else
3458 = upward;
3459 #endif
3460
3461 /* Decide where to pad the argument: `downward' for below,
3462 `upward' for above, or `none' for don't pad it.
3463 Default is below for small data on big-endian machines; else above. */
3464 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3465
3466 /* Invert direction if stack is post-decrement.
3467 FIXME: why? */
3468 if (STACK_PUSH_CODE == POST_DEC)
3469 if (where_pad != none)
3470 where_pad = (where_pad == downward ? upward : downward);
3471
3472 xinner = x;
3473
3474 if (mode == BLKmode)
3475 {
3476 /* Copy a block into the stack, entirely or partially. */
3477
3478 rtx temp;
3479 int used;
3480 int offset;
3481 int skip;
3482
3483 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3484 used = partial - offset;
3485
3486 gcc_assert (size);
3487
3488 /* USED is now the # of bytes we need not copy to the stack
3489 because registers will take care of them. */
3490
3491 if (partial != 0)
3492 xinner = adjust_address (xinner, BLKmode, used);
3493
3494 /* If the partial register-part of the arg counts in its stack size,
3495 skip the part of stack space corresponding to the registers.
3496 Otherwise, start copying to the beginning of the stack space,
3497 by setting SKIP to 0. */
3498 skip = (reg_parm_stack_space == 0) ? 0 : used;
3499
3500 #ifdef PUSH_ROUNDING
3501 /* Do it with several push insns if that doesn't take lots of insns
3502 and if there is no difficulty with push insns that skip bytes
3503 on the stack for alignment purposes. */
3504 if (args_addr == 0
3505 && PUSH_ARGS
3506 && GET_CODE (size) == CONST_INT
3507 && skip == 0
3508 && MEM_ALIGN (xinner) >= align
3509 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3510 /* Here we avoid the case of a structure whose weak alignment
3511 forces many pushes of a small amount of data,
3512 and such small pushes do rounding that causes trouble. */
3513 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3514 || align >= BIGGEST_ALIGNMENT
3515 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3516 == (align / BITS_PER_UNIT)))
3517 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3518 {
3519 /* Push padding now if padding above and stack grows down,
3520 or if padding below and stack grows up.
3521 But if space already allocated, this has already been done. */
3522 if (extra && args_addr == 0
3523 && where_pad != none && where_pad != stack_direction)
3524 anti_adjust_stack (GEN_INT (extra));
3525
3526 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3527 }
3528 else
3529 #endif /* PUSH_ROUNDING */
3530 {
3531 rtx target;
3532
3533 /* Otherwise make space on the stack and copy the data
3534 to the address of that space. */
3535
3536 /* Deduct words put into registers from the size we must copy. */
3537 if (partial != 0)
3538 {
3539 if (GET_CODE (size) == CONST_INT)
3540 size = GEN_INT (INTVAL (size) - used);
3541 else
3542 size = expand_binop (GET_MODE (size), sub_optab, size,
3543 GEN_INT (used), NULL_RTX, 0,
3544 OPTAB_LIB_WIDEN);
3545 }
3546
3547 /* Get the address of the stack space.
3548 In this case, we do not deal with EXTRA separately.
3549 A single stack adjust will do. */
3550 if (! args_addr)
3551 {
3552 temp = push_block (size, extra, where_pad == downward);
3553 extra = 0;
3554 }
3555 else if (GET_CODE (args_so_far) == CONST_INT)
3556 temp = memory_address (BLKmode,
3557 plus_constant (args_addr,
3558 skip + INTVAL (args_so_far)));
3559 else
3560 temp = memory_address (BLKmode,
3561 plus_constant (gen_rtx_PLUS (Pmode,
3562 args_addr,
3563 args_so_far),
3564 skip));
3565
3566 if (!ACCUMULATE_OUTGOING_ARGS)
3567 {
3568 /* If the source is referenced relative to the stack pointer,
3569 copy it to another register to stabilize it. We do not need
3570 to do this if we know that we won't be changing sp. */
3571
3572 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3573 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3574 temp = copy_to_reg (temp);
3575 }
3576
3577 target = gen_rtx_MEM (BLKmode, temp);
3578
3579 /* We do *not* set_mem_attributes here, because incoming arguments
3580 may overlap with sibling call outgoing arguments and we cannot
3581 allow reordering of reads from function arguments with stores
3582 to outgoing arguments of sibling calls. We do, however, want
3583 to record the alignment of the stack slot. */
3584 /* ALIGN may well be stricter than TYPE's own alignment, e.g. due to
3585 PARM_BOUNDARY. Assume the caller isn't lying. */
3586 set_mem_align (target, align);
3587
3588 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3589 }
3590 }
3591 else if (partial > 0)
3592 {
3593 /* Scalar partly in registers. */
3594
3595 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3596 int i;
3597 int not_stack;
3598 /* # bytes of start of argument
3599 that we must make space for but need not store. */
3600 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3601 int args_offset = INTVAL (args_so_far);
3602 int skip;
3603
3604 /* Push padding now if padding above and stack grows down,
3605 or if padding below and stack grows up.
3606 But if space already allocated, this has already been done. */
3607 if (extra && args_addr == 0
3608 && where_pad != none && where_pad != stack_direction)
3609 anti_adjust_stack (GEN_INT (extra));
3610
3611 /* If we make space by pushing it, we might as well push
3612 the real data. Otherwise, we can leave OFFSET nonzero
3613 and leave the space uninitialized. */
3614 if (args_addr == 0)
3615 offset = 0;
3616
3617 /* Now NOT_STACK gets the number of words that we don't need to
3618 allocate on the stack. Convert OFFSET to words too. */
3619 not_stack = (partial - offset) / UNITS_PER_WORD;
3620 offset /= UNITS_PER_WORD;
3621
3622 /* If the partial register-part of the arg counts in its stack size,
3623 skip the part of stack space corresponding to the registers.
3624 Otherwise, start copying to the beginning of the stack space,
3625 by setting SKIP to 0. */
3626 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3627
3628 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3629 x = validize_mem (force_const_mem (mode, x));
3630
3631 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3632 SUBREGs of such registers are not allowed. */
3633 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3634 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3635 x = copy_to_reg (x);
3636
3637 /* Loop over all the words allocated on the stack for this arg. */
3638 /* We can do it by words, because any scalar bigger than a word
3639 has a size a multiple of a word. */
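	 /* E.g. a four-word scalar with NOT_STACK == 1 pushes words 1
	    through 3; word 0 lives in the registers.  With OFFSET nonzero,
	    the first OFFSET stack words get space but no store.  */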
3640 #ifndef PUSH_ARGS_REVERSED
3641 for (i = not_stack; i < size; i++)
3642 #else
3643 for (i = size - 1; i >= not_stack; i--)
3644 #endif
3645 if (i >= not_stack + offset)
3646 emit_push_insn (operand_subword_force (x, i, mode),
3647 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3648 0, args_addr,
3649 GEN_INT (args_offset + ((i - not_stack + skip)
3650 * UNITS_PER_WORD)),
3651 reg_parm_stack_space, alignment_pad);
3652 }
3653 else
3654 {
3655 rtx addr;
3656 rtx dest;
3657
3658 /* Push padding now if padding above and stack grows down,
3659 or if padding below and stack grows up.
3660 But if space already allocated, this has already been done. */
3661 if (extra && args_addr == 0
3662 && where_pad != none && where_pad != stack_direction)
3663 anti_adjust_stack (GEN_INT (extra));
3664
3665 #ifdef PUSH_ROUNDING
3666 if (args_addr == 0 && PUSH_ARGS)
3667 emit_single_push_insn (mode, x, type);
3668 else
3669 #endif
3670 {
3671 if (GET_CODE (args_so_far) == CONST_INT)
3672 addr
3673 = memory_address (mode,
3674 plus_constant (args_addr,
3675 INTVAL (args_so_far)));
3676 else
3677 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3678 args_so_far));
3679 dest = gen_rtx_MEM (mode, addr);
3680
3681 /* We do *not* set_mem_attributes here, because incoming arguments
3682 may overlap with sibling call outgoing arguments and we cannot
3683 allow reordering of reads from function arguments with stores
3684 to outgoing arguments of sibling calls. We do, however, want
3685 to record the alignment of the stack slot. */
3686 /* ALIGN may well be stricter than TYPE's own alignment, e.g. due to
3687 PARM_BOUNDARY. Assume the caller isn't lying. */
3688 set_mem_align (dest, align);
3689
3690 emit_move_insn (dest, x);
3691 }
3692 }
3693
3694 /* If part should go in registers, copy that part
3695 into the appropriate registers. Do this now, at the end,
3696 since mem-to-mem copies above may do function calls. */
3697 if (partial > 0 && reg != 0)
3698 {
3699 /* Handle calls that pass values in multiple non-contiguous locations.
3700 The Irix 6 ABI has examples of this. */
3701 if (GET_CODE (reg) == PARALLEL)
3702 emit_group_load (reg, x, type, -1);
3703 else
3704 {
3705 gcc_assert (partial % UNITS_PER_WORD == 0);
3706 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3707 }
3708 }
3709
3710 if (extra && args_addr == 0 && where_pad == stack_direction)
3711 anti_adjust_stack (GEN_INT (extra));
3712
3713 if (alignment_pad && args_addr == 0)
3714 anti_adjust_stack (alignment_pad);
3715 }
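
/* Illustrative sketch, not part of GCC: the PUSH_ROUNDING criterion used
   above, in plain C.  Assume a hypothetical target whose PUSH_ROUNDING
   rounds every push up to a multiple of 4 bytes.  move_by_pieces is only
   safe when that rounding is a no-op for both the total size and the
   alignment unit, so that no padding creeps in between the pieces.  */

static int
example_push_rounding_ok (int size, int align_bytes)
{
  int rounded_size = (size + 3) & ~3;		/* hypothetical PUSH_ROUNDING */
  int rounded_align = (align_bytes + 3) & ~3;
  return rounded_size == size && rounded_align == align_bytes;
}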
3716 \f
3717 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3718 operations. */
3719
3720 static rtx
3721 get_subtarget (rtx x)
3722 {
3723 return (optimize
3724 || x == 0
3725 /* Only registers can be subtargets. */
3726 || !REG_P (x)
3727 /* Don't use hard regs to avoid extending their life. */
3728 || REGNO (x) < FIRST_PSEUDO_REGISTER
3729 ? 0 : x);
3730 }
3731
3732 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3733 FIELD is a bitfield. Returns true if the optimization was successful,
3734 and there's nothing else to do. */
3735
3736 static bool
3737 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3738 unsigned HOST_WIDE_INT bitpos,
3739 enum machine_mode mode1, rtx str_rtx,
3740 tree to, tree src)
3741 {
3742 enum machine_mode str_mode = GET_MODE (str_rtx);
3743 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3744 tree op0, op1;
3745 rtx value, result;
3746 optab binop;
3747
3748 if (mode1 != VOIDmode
3749 || bitsize >= BITS_PER_WORD
3750 || str_bitsize > BITS_PER_WORD
3751 || TREE_SIDE_EFFECTS (to)
3752 || TREE_THIS_VOLATILE (to))
3753 return false;
3754
3755 STRIP_NOPS (src);
3756 if (!BINARY_CLASS_P (src)
3757 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3758 return false;
3759
3760 op0 = TREE_OPERAND (src, 0);
3761 op1 = TREE_OPERAND (src, 1);
3762 STRIP_NOPS (op0);
3763
3764 if (!operand_equal_p (to, op0, 0))
3765 return false;
3766
3767 if (MEM_P (str_rtx))
3768 {
3769 unsigned HOST_WIDE_INT offset1;
3770
3771 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3772 str_mode = word_mode;
3773 str_mode = get_best_mode (bitsize, bitpos,
3774 MEM_ALIGN (str_rtx), str_mode, 0);
3775 if (str_mode == VOIDmode)
3776 return false;
3777 str_bitsize = GET_MODE_BITSIZE (str_mode);
3778
3779 offset1 = bitpos;
3780 bitpos %= str_bitsize;
3781 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
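	  /* E.g. with STR_BITSIZE == 32, BITPOS == 37 becomes BITPOS == 5
	     within the word at byte offset OFFSET1 == 4.  */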
3782 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3783 }
3784 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3785 return false;
3786
3787 /* If the bit field covers the whole REG/MEM, store_field
3788 will likely generate better code. */
3789 if (bitsize >= str_bitsize)
3790 return false;
3791
3792 /* We can't handle fields split across multiple entities. */
3793 if (bitpos + bitsize > str_bitsize)
3794 return false;
3795
3796 if (BYTES_BIG_ENDIAN)
3797 bitpos = str_bitsize - bitpos - bitsize;
3798
3799 switch (TREE_CODE (src))
3800 {
3801 case PLUS_EXPR:
3802 case MINUS_EXPR:
3803 /* For now, just optimize the case of the topmost bitfield
3804 where we don't need to do any masking and also
3805 1 bit bitfields where xor can be used.
3806 We might win by one instruction for the other bitfields
3807 too if insv/extv instructions aren't used, so that
3808 can be added later. */
3809 if (bitpos + bitsize != str_bitsize
3810 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3811 break;
3812
3813 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3814 value = convert_modes (str_mode,
3815 TYPE_MODE (TREE_TYPE (op1)), value,
3816 TYPE_UNSIGNED (TREE_TYPE (op1)));
3817
3818 /* We may be accessing data outside the field, which means
3819 we can alias adjacent data. */
3820 if (MEM_P (str_rtx))
3821 {
3822 str_rtx = shallow_copy_rtx (str_rtx);
3823 set_mem_alias_set (str_rtx, 0);
3824 set_mem_expr (str_rtx, 0);
3825 }
3826
3827 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3828 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3829 {
3830 value = expand_and (str_mode, value, const1_rtx, NULL);
3831 binop = xor_optab;
3832 }
3833 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3834 build_int_cst (NULL_TREE, bitpos),
3835 NULL_RTX, 1);
3836 result = expand_binop (str_mode, binop, str_rtx,
3837 value, str_rtx, 1, OPTAB_WIDEN);
3838 if (result != str_rtx)
3839 emit_move_insn (str_rtx, result);
3840 return true;
3841
3842 case BIT_IOR_EXPR:
3843 case BIT_XOR_EXPR:
3844 if (TREE_CODE (op1) != INTEGER_CST)
3845 break;
3846 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3847 value = convert_modes (GET_MODE (str_rtx),
3848 TYPE_MODE (TREE_TYPE (op1)), value,
3849 TYPE_UNSIGNED (TREE_TYPE (op1)));
3850
3851 /* We may be accessing data outside the field, which means
3852 we can alias adjacent data. */
3853 if (MEM_P (str_rtx))
3854 {
3855 str_rtx = shallow_copy_rtx (str_rtx);
3856 set_mem_alias_set (str_rtx, 0);
3857 set_mem_expr (str_rtx, 0);
3858 }
3859
3860 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3861 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3862 {
3863 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
3864 - 1);
3865 value = expand_and (GET_MODE (str_rtx), value, mask,
3866 NULL_RTX);
3867 }
3868 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3869 build_int_cst (NULL_TREE, bitpos),
3870 NULL_RTX, 1);
3871 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3872 value, str_rtx, 1, OPTAB_WIDEN);
3873 if (result != str_rtx)
3874 emit_move_insn (str_rtx, result);
3875 return true;
3876
3877 default:
3878 break;
3879 }
3880
3881 return false;
3882 }
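
/* Illustrative sketch, not part of GCC: the word-level rewrite the
   function above aims for.  For a 1-bit field at bit position POS of the
   word W containing it, "field ^= 1" (and "field += 1", which is the same
   operation on a single bit) needs no extract/shift/insert sequence:  */

static unsigned int
example_toggle_bitfield_bit (unsigned int w, int pos)
{
  return w ^ (1u << pos);	/* one XOR on the containing word */
}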
3883
3884
3885 /* Expand an assignment that stores the value of FROM into TO. */
3886
3887 void
3888 expand_assignment (tree to, tree from)
3889 {
3890 rtx to_rtx = 0;
3891 rtx result;
3892
3893 /* Don't crash if the lhs of the assignment was erroneous. */
3894
3895 if (TREE_CODE (to) == ERROR_MARK)
3896 {
3897 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3898 return;
3899 }
3900
3901 /* Assignment of a structure component needs special treatment
3902 if the structure component's rtx is not simply a MEM.
3903 Assignment of an array element at a constant index, and assignment of
3904 an array element in an unaligned packed structure field, have the same
3905 problem. */
3906 if (handled_component_p (to)
3907 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3908 {
3909 enum machine_mode mode1;
3910 HOST_WIDE_INT bitsize, bitpos;
3911 tree offset;
3912 int unsignedp;
3913 int volatilep = 0;
3914 tree tem;
3915
3916 push_temp_slots ();
3917 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3918 &unsignedp, &volatilep, true);
3919
3920 /* If we are going to use store_bit_field and extract_bit_field,
3921 make sure to_rtx will be safe for multiple use. */
3922
3923 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3924
3925 if (offset != 0)
3926 {
3927 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3928
3929 gcc_assert (MEM_P (to_rtx));
3930
3931 #ifdef POINTERS_EXTEND_UNSIGNED
3932 if (GET_MODE (offset_rtx) != Pmode)
3933 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3934 #else
3935 if (GET_MODE (offset_rtx) != ptr_mode)
3936 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3937 #endif
3938
3939 /* A constant address in TO_RTX can have VOIDmode, we must not try
3940 to call force_reg for that case. Avoid that case. */
3941 if (MEM_P (to_rtx)
3942 && GET_MODE (to_rtx) == BLKmode
3943 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3944 && bitsize > 0
3945 && (bitpos % bitsize) == 0
3946 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3947 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3948 {
3949 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3950 bitpos = 0;
3951 }
3952
3953 to_rtx = offset_address (to_rtx, offset_rtx,
3954 highest_pow2_factor_for_target (to,
3955 offset));
3956 }
3957
3958 /* Handle expand_expr of a complex value returning a CONCAT. */
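	  /* E.g. for "__real__ c = x" BITPOS is 0 and the store goes to the
	     CONCAT's first operand (the real part).  */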
3959 if (GET_CODE (to_rtx) == CONCAT)
3960 {
3961 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
3962 {
3963 gcc_assert (bitpos == 0);
3964 result = store_expr (from, to_rtx, false);
3965 }
3966 else
3967 {
3968 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3969 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3970 }
3971 }
3972 else
3973 {
3974 if (MEM_P (to_rtx))
3975 {
3976 /* If the field is at offset zero, we could have been given the
3977 DECL_RTX of the parent struct. Don't munge it. */
3978 to_rtx = shallow_copy_rtx (to_rtx);
3979
3980 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3981
3982 /* Deal with volatile and readonly fields. The former is only
3983 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3984 if (volatilep)
3985 MEM_VOLATILE_P (to_rtx) = 1;
3986 if (component_uses_parent_alias_set (to))
3987 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3988 }
3989
3990 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
3991 to_rtx, to, from))
3992 result = NULL;
3993 else
3994 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3995 TREE_TYPE (tem), get_alias_set (to));
3996 }
3997
3998 if (result)
3999 preserve_temp_slots (result);
4000 free_temp_slots ();
4001 pop_temp_slots ();
4002 return;
4003 }
4004
4005 /* If the rhs is a function call and its value is not an aggregate,
4006 call the function before we start to compute the lhs.
4007 This is needed for correct code for cases such as
4008 val = setjmp (buf) on machines where reference to val
4009 requires loading up part of an address in a separate insn.
4010
4011 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4012 since it might be a promoted variable where the zero- or sign- extension
4013 needs to be done. Handling this in the normal way is safe because no
4014 computation is done before the call. */
4015 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4016 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4017 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4018 && REG_P (DECL_RTL (to))))
4019 {
4020 rtx value;
4021
4022 push_temp_slots ();
4023 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4024 if (to_rtx == 0)
4025 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4026
4027 /* Handle calls that return values in multiple non-contiguous locations.
4028 The Irix 6 ABI has examples of this. */
4029 if (GET_CODE (to_rtx) == PARALLEL)
4030 emit_group_load (to_rtx, value, TREE_TYPE (from),
4031 int_size_in_bytes (TREE_TYPE (from)));
4032 else if (GET_MODE (to_rtx) == BLKmode)
4033 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4034 else
4035 {
4036 if (POINTER_TYPE_P (TREE_TYPE (to)))
4037 value = convert_memory_address (GET_MODE (to_rtx), value);
4038 emit_move_insn (to_rtx, value);
4039 }
4040 preserve_temp_slots (to_rtx);
4041 free_temp_slots ();
4042 pop_temp_slots ();
4043 return;
4044 }
4045
4046 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4047 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4048
4049 if (to_rtx == 0)
4050 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4051
4052 /* Don't move directly into a return register. */
4053 if (TREE_CODE (to) == RESULT_DECL
4054 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4055 {
4056 rtx temp;
4057
4058 push_temp_slots ();
4059 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4060
4061 if (GET_CODE (to_rtx) == PARALLEL)
4062 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4063 int_size_in_bytes (TREE_TYPE (from)));
4064 else
4065 emit_move_insn (to_rtx, temp);
4066
4067 preserve_temp_slots (to_rtx);
4068 free_temp_slots ();
4069 pop_temp_slots ();
4070 return;
4071 }
4072
4073 /* In case we are returning the contents of an object which overlaps
4074 the place the value is being stored, use a safe function when copying
4075 a value through a pointer into a structure value return block. */
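  /* E.g. in "struct S f (struct S *p) { return *p; }" the source *p may
     overlap the return slot, so memmove rather than memcpy semantics are
     required.  */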
4076 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4077 && current_function_returns_struct
4078 && !current_function_returns_pcc_struct)
4079 {
4080 rtx from_rtx, size;
4081
4082 push_temp_slots ();
4083 size = expr_size (from);
4084 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4085
4086 emit_library_call (memmove_libfunc, LCT_NORMAL,
4087 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4088 XEXP (from_rtx, 0), Pmode,
4089 convert_to_mode (TYPE_MODE (sizetype),
4090 size, TYPE_UNSIGNED (sizetype)),
4091 TYPE_MODE (sizetype));
4092
4093 preserve_temp_slots (to_rtx);
4094 free_temp_slots ();
4095 pop_temp_slots ();
4096 return;
4097 }
4098
4099 /* Compute FROM and store the value in the rtx we got. */
4100
4101 push_temp_slots ();
4102 result = store_expr (from, to_rtx, 0);
4103 preserve_temp_slots (result);
4104 free_temp_slots ();
4105 pop_temp_slots ();
4106 return;
4107 }
4108
4109 /* Generate code for computing expression EXP,
4110 and storing the value into TARGET.
4111
4112 If the mode is BLKmode then we may return TARGET itself.
4113 It turns out that in BLKmode it doesn't cause a problem,
4114 because C has no operators that could combine two different
4115 assignments into the same BLKmode object with different values
4116 with no sequence point. Will other languages need this to
4117 be more thorough?
4118
4119 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4120 stack, and block moves may need to be treated specially. */
4121
4122 rtx
4123 store_expr (tree exp, rtx target, int call_param_p)
4124 {
4125 rtx temp;
4126 rtx alt_rtl = NULL_RTX;
4127 int dont_return_target = 0;
4128
4129 if (VOID_TYPE_P (TREE_TYPE (exp)))
4130 {
4131 /* C++ can generate ?: expressions with a throw expression in one
4132 branch and an rvalue in the other. Here, we resolve attempts to
4133 store the throw expression's nonexistent result. */
4134 gcc_assert (!call_param_p);
4135 expand_expr (exp, const0_rtx, VOIDmode, 0);
4136 return NULL_RTX;
4137 }
4138 if (TREE_CODE (exp) == COMPOUND_EXPR)
4139 {
4140 /* Perform first part of compound expression, then assign from second
4141 part. */
4142 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4143 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4144 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4145 }
4146 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4147 {
4148 /* For conditional expression, get safe form of the target. Then
4149 test the condition, doing the appropriate assignment on either
4150 side. This avoids the creation of unnecessary temporaries.
4151 For non-BLKmode, it is more efficient not to do this. */
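      /* The emitted shape is:
	     if (!cond) goto lab1;
	     <store arm 1>;  goto lab2;
	   lab1: <store arm 2>;
	   lab2:  */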
4152
4153 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4154
4155 do_pending_stack_adjust ();
4156 NO_DEFER_POP;
4157 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4158 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4159 emit_jump_insn (gen_jump (lab2));
4160 emit_barrier ();
4161 emit_label (lab1);
4162 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4163 emit_label (lab2);
4164 OK_DEFER_POP;
4165
4166 return NULL_RTX;
4167 }
4168 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4169 /* If this is a scalar in a register that is stored in a wider mode
4170 than the declared mode, compute the result into its declared mode
4171 and then convert to the wider mode. Our value is the computed
4172 expression. */
4173 {
4174 rtx inner_target = 0;
4175
4176 /* We can do the conversion inside EXP, which will often result
4177 in some optimizations. Do the conversion in two steps: first
4178 change the signedness, if needed, then the extend. But don't
4179 do this if the type of EXP is a subtype of something else
4180 since then the conversion might involve more than just
4181 converting modes. */
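	  /* E.g. storing a signed char value into a variable promoted to an
	     unsigned SImode register first converts it to unsigned char,
	     then widens that to SImode.  */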
4182 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4183 && TREE_TYPE (TREE_TYPE (exp)) == 0
4184 && (!lang_hooks.reduce_bit_field_operations
4185 || (GET_MODE_PRECISION (GET_MODE (target))
4186 == TYPE_PRECISION (TREE_TYPE (exp)))))
4187 {
4188 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4189 != SUBREG_PROMOTED_UNSIGNED_P (target))
4190 exp = convert
4191 (lang_hooks.types.signed_or_unsigned_type
4192 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4193
4194 exp = convert (lang_hooks.types.type_for_mode
4195 (GET_MODE (SUBREG_REG (target)),
4196 SUBREG_PROMOTED_UNSIGNED_P (target)),
4197 exp);
4198
4199 inner_target = SUBREG_REG (target);
4200 }
4201
4202 temp = expand_expr (exp, inner_target, VOIDmode,
4203 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4204
4205 /* If TEMP is a VOIDmode constant, use convert_modes to make
4206 sure that we properly convert it. */
4207 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4208 {
4209 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4210 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4211 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4212 GET_MODE (target), temp,
4213 SUBREG_PROMOTED_UNSIGNED_P (target));
4214 }
4215
4216 convert_move (SUBREG_REG (target), temp,
4217 SUBREG_PROMOTED_UNSIGNED_P (target));
4218
4219 return NULL_RTX;
4220 }
4221 else
4222 {
4223 temp = expand_expr_real (exp, target, GET_MODE (target),
4224 (call_param_p
4225 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4226 &alt_rtl);
4227 /* Return TARGET if it's a specified hardware register.
4228 If TARGET is a volatile mem ref, either return TARGET
4229 or return a reg copied *from* TARGET; ANSI requires this.
4230
4231 Otherwise, if TEMP is not TARGET, return TEMP
4232 if it is constant (for efficiency),
4233 or if we really want the correct value. */
4234 if (!(target && REG_P (target)
4235 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4236 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4237 && ! rtx_equal_p (temp, target)
4238 && CONSTANT_P (temp))
4239 dont_return_target = 1;
4240 }
4241
4242 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4243 the same as that of TARGET, adjust the constant. This is needed, for
4244 example, in case it is a CONST_DOUBLE and we want only a word-sized
4245 value. */
4246 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4247 && TREE_CODE (exp) != ERROR_MARK
4248 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4249 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4250 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4251
4252 /* If value was not generated in the target, store it there.
4253 Convert the value to TARGET's type first if necessary and emit the
4254 pending incrementations that have been queued when expanding EXP.
4255 Note that we cannot emit the whole queue blindly because this will
4256 effectively disable the POST_INC optimization later.
4257
4258 If TEMP and TARGET compare equal according to rtx_equal_p, but
4259 one or both of them are volatile memory refs, we have to distinguish
4260 two cases:
4261 - expand_expr has used TARGET. In this case, we must not generate
4262 another copy. This can be detected by TARGET being equal according
4263 to == .
4264 - expand_expr has not used TARGET - that means that the source just
4265 happens to have the same RTX form. Since temp will have been created
4266 by expand_expr, it will compare unequal according to == .
4267 We must generate a copy in this case, to reach the correct number
4268 of volatile memory references. */
4269
4270 if ((! rtx_equal_p (temp, target)
4271 || (temp != target && (side_effects_p (temp)
4272 || side_effects_p (target))))
4273 && TREE_CODE (exp) != ERROR_MARK
4274 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4275 but TARGET is not valid memory reference, TEMP will differ
4276 from TARGET although it is really the same location. */
4277 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4278 /* If there's nothing to copy, don't bother. Don't call
4279 expr_size unless necessary, because the expr_size hook of some
4280 front ends (e.g. C++) must not be given objects that are not
4281 supposed to be bit-copied or bit-initialized. */
4282 && expr_size (exp) != const0_rtx)
4283 {
4284 if (GET_MODE (temp) != GET_MODE (target)
4285 && GET_MODE (temp) != VOIDmode)
4286 {
4287 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4288 if (dont_return_target)
4289 {
4290 /* In this case, we will return TEMP,
4291 so make sure it has the proper mode.
4292 But don't forget to store the value into TARGET. */
4293 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4294 emit_move_insn (target, temp);
4295 }
4296 else
4297 convert_move (target, temp, unsignedp);
4298 }
4299
4300 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4301 {
4302 /* Handle copying a string constant into an array. The string
4303 constant may be shorter than the array. So copy just the string's
4304 actual length, and clear the rest. First get the size of the data
4305 type of the string, which is actually the size of the target. */
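	  /* E.g. for "char buf[16] = \"hi\"" the three string bytes
	     (including the terminating NUL) are copied and the remaining
	     thirteen bytes are cleared.  */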
4306 rtx size = expr_size (exp);
4307
4308 if (GET_CODE (size) == CONST_INT
4309 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4310 emit_block_move (target, temp, size,
4311 (call_param_p
4312 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4313 else
4314 {
4315 /* Compute the size of the data to copy from the string. */
4316 tree copy_size
4317 = size_binop (MIN_EXPR,
4318 make_tree (sizetype, size),
4319 size_int (TREE_STRING_LENGTH (exp)));
4320 rtx copy_size_rtx
4321 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4322 (call_param_p
4323 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4324 rtx label = 0;
4325
4326 /* Copy that much. */
4327 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4328 TYPE_UNSIGNED (sizetype));
4329 emit_block_move (target, temp, copy_size_rtx,
4330 (call_param_p
4331 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4332
4333 /* Figure out how much is left in TARGET that we have to clear.
4334 Do all calculations in ptr_mode. */
4335 if (GET_CODE (copy_size_rtx) == CONST_INT)
4336 {
4337 size = plus_constant (size, -INTVAL (copy_size_rtx));
4338 target = adjust_address (target, BLKmode,
4339 INTVAL (copy_size_rtx));
4340 }
4341 else
4342 {
4343 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4344 copy_size_rtx, NULL_RTX, 0,
4345 OPTAB_LIB_WIDEN);
4346
4347 #ifdef POINTERS_EXTEND_UNSIGNED
4348 if (GET_MODE (copy_size_rtx) != Pmode)
4349 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4350 TYPE_UNSIGNED (sizetype));
4351 #endif
4352
4353 target = offset_address (target, copy_size_rtx,
4354 highest_pow2_factor (copy_size));
4355 label = gen_label_rtx ();
4356 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4357 GET_MODE (size), 0, label);
4358 }
4359
4360 if (size != const0_rtx)
4361 clear_storage (target, size, BLOCK_OP_NORMAL);
4362
4363 if (label)
4364 emit_label (label);
4365 }
4366 }
4367 /* Handle calls that return values in multiple non-contiguous locations.
4368 The Irix 6 ABI has examples of this. */
4369 else if (GET_CODE (target) == PARALLEL)
4370 emit_group_load (target, temp, TREE_TYPE (exp),
4371 int_size_in_bytes (TREE_TYPE (exp)));
4372 else if (GET_MODE (temp) == BLKmode)
4373 emit_block_move (target, temp, expr_size (exp),
4374 (call_param_p
4375 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4376 else
4377 {
4378 temp = force_operand (temp, target);
4379 if (temp != target)
4380 emit_move_insn (target, temp);
4381 }
4382 }
4383
4384 return NULL_RTX;
4385 }
4386 \f
4387 /* Examine CTOR to discover:
4388 * how many scalar fields are set to nonzero values,
4389 and place it in *P_NZ_ELTS;
4390 * how many scalar fields are set to non-constant values,
4391 and place it in *P_NC_ELTS; and
4392 * how many scalar fields in total are in CTOR,
4393 and place it in *P_ELT_COUNT.
4394 * if a type is a union, and the initializer from the constructor
4395 is not the largest element in the union, then set *p_must_clear. */
4396
4397 static void
4398 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4399 HOST_WIDE_INT *p_nc_elts,
4400 HOST_WIDE_INT *p_elt_count,
4401 bool *p_must_clear)
4402 {
4403 unsigned HOST_WIDE_INT idx;
4404 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4405 tree value, purpose;
4406
4407 nz_elts = 0;
4408 nc_elts = 0;
4409 elt_count = 0;
4410
4411 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4412 {
4413 HOST_WIDE_INT mult;
4414
4415 mult = 1;
4416 if (TREE_CODE (purpose) == RANGE_EXPR)
4417 {
4418 tree lo_index = TREE_OPERAND (purpose, 0);
4419 tree hi_index = TREE_OPERAND (purpose, 1);
4420
4421 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4422 mult = (tree_low_cst (hi_index, 1)
4423 - tree_low_cst (lo_index, 1) + 1);
4424 }
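      /* E.g. the initializer "[3 ... 7] = v" reaches here with MULT == 5,
	 so each scalar counted for V is counted five times.  */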
4425
4426 switch (TREE_CODE (value))
4427 {
4428 case CONSTRUCTOR:
4429 {
4430 HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
4431 categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
4432 nz_elts += mult * nz;
4433 nc_elts += mult * nc;
4434 elt_count += mult * ic;
4435 }
4436 break;
4437
4438 case INTEGER_CST:
4439 case REAL_CST:
4440 if (!initializer_zerop (value))
4441 nz_elts += mult;
4442 elt_count += mult;
4443 break;
4444
4445 case STRING_CST:
4446 nz_elts += mult * TREE_STRING_LENGTH (value);
4447 elt_count += mult * TREE_STRING_LENGTH (value);
4448 break;
4449
4450 case COMPLEX_CST:
4451 if (!initializer_zerop (TREE_REALPART (value)))
4452 nz_elts += mult;
4453 if (!initializer_zerop (TREE_IMAGPART (value)))
4454 nz_elts += mult;
4455 elt_count += mult;
4456 break;
4457
4458 case VECTOR_CST:
4459 {
4460 tree v;
4461 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4462 {
4463 if (!initializer_zerop (TREE_VALUE (v)))
4464 nz_elts += mult;
4465 elt_count += mult;
4466 }
4467 }
4468 break;
4469
4470 default:
4471 nz_elts += mult;
4472 elt_count += mult;
4473 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4474 nc_elts += mult;
4475 break;
4476 }
4477 }
4478
4479 if (!*p_must_clear
4480 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4481 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4482 {
4483 tree init_sub_type;
4484 bool clear_this = true;
4485
4486 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4487 {
4488 /* We don't expect more than one element of the union to be
4489 initialized. Not sure what we should do otherwise... */
4490 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4491 == 1);
4492
4493 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4494 CONSTRUCTOR_ELTS (ctor),
4495 0)->value);
4496
4497 /* ??? We could look at each element of the union, and find the
4498 largest element. Which would avoid comparing the size of the
4499 initialized element against any tail padding in the union.
4500 Doesn't seem worth the effort... */
4501 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4502 TYPE_SIZE (init_sub_type)) == 1)
4503 {
4504 /* And now we have to find out if the element itself is fully
4505 constructed. E.g. for union { struct { int a, b; } s; } u
4506 = { .s = { .a = 1 } }. */
4507 if (elt_count == count_type_elements (init_sub_type, false))
4508 clear_this = false;
4509 }
4510 }
4511
4512 *p_must_clear = clear_this;
4513 }
4514
4515 *p_nz_elts += nz_elts;
4516 *p_nc_elts += nc_elts;
4517 *p_elt_count += elt_count;
4518 }
4519
4520 void
4521 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4522 HOST_WIDE_INT *p_nc_elts,
4523 HOST_WIDE_INT *p_elt_count,
4524 bool *p_must_clear)
4525 {
4526 *p_nz_elts = 0;
4527 *p_nc_elts = 0;
4528 *p_elt_count = 0;
4529 *p_must_clear = false;
4530 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
4531 p_must_clear);
4532 }
4533
4534 /* Count the number of scalars in TYPE.  Return -1 on overflow or if
4535 TYPE is variable-sized.  If ALLOW_FLEXARR is true, don't count a
4536 flexible array member at the end of the structure. */
4537
4538 HOST_WIDE_INT
4539 count_type_elements (tree type, bool allow_flexarr)
4540 {
4541 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4542 switch (TREE_CODE (type))
4543 {
4544 case ARRAY_TYPE:
4545 {
4546 tree telts = array_type_nelts (type);
4547 if (telts && host_integerp (telts, 1))
4548 {
4549 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4550 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4551 if (n == 0)
4552 return 0;
4553 else if (m >= 0 && max / n > m)
4554 return n * m;
4555 }
4556 return -1;
4557 }
4558
4559 case RECORD_TYPE:
4560 {
4561 HOST_WIDE_INT n = 0, t;
4562 tree f;
4563
4564 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4565 if (TREE_CODE (f) == FIELD_DECL)
4566 {
4567 t = count_type_elements (TREE_TYPE (f), false);
4568 if (t < 0)
4569 {
4570 /* Check for structures with flexible array member. */
4571 tree tf = TREE_TYPE (f);
4572 if (allow_flexarr
4573 && TREE_CHAIN (f) == NULL
4574 && TREE_CODE (tf) == ARRAY_TYPE
4575 && TYPE_DOMAIN (tf)
4576 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4577 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4578 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4579 && int_size_in_bytes (type) >= 0)
4580 break;
4581
4582 return -1;
4583 }
4584 n += t;
4585 }
4586
4587 return n;
4588 }
4589
4590 case UNION_TYPE:
4591 case QUAL_UNION_TYPE:
4592 {
4593 /* Ho hum. How in the world do we guess here? Clearly it isn't
4594 right to count the fields. Guess based on the number of words. */
4595 HOST_WIDE_INT n = int_size_in_bytes (type);
4596 if (n < 0)
4597 return -1;
4598 return n / UNITS_PER_WORD;
4599 }
4600
4601 case COMPLEX_TYPE:
4602 return 2;
4603
4604 case VECTOR_TYPE:
4605 return TYPE_VECTOR_SUBPARTS (type);
4606
4607 case INTEGER_TYPE:
4608 case REAL_TYPE:
4609 case ENUMERAL_TYPE:
4610 case BOOLEAN_TYPE:
4611 case CHAR_TYPE:
4612 case POINTER_TYPE:
4613 case OFFSET_TYPE:
4614 case REFERENCE_TYPE:
4615 return 1;
4616
4617 case VOID_TYPE:
4618 case METHOD_TYPE:
4619 case FUNCTION_TYPE:
4620 case LANG_TYPE:
4621 default:
4622 gcc_unreachable ();
4623 }
4624 }
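
/* For example, "struct { int a; double b[3]; }" has 1 + 3 == 4 scalar
   elements, a union is guessed at from its size in words, and a
   variable-sized array yields -1.  */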
4625
4626 /* Return 1 if EXP consists mostly (more than 3/4) of zero elements. */
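/* E.g. a 16-element initializer qualifies when at most 3 of its
   elements are nonzero: 3 < 16 / 4.  */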
4627
4628 static int
4629 mostly_zeros_p (tree exp)
4630 {
4631 if (TREE_CODE (exp) == CONSTRUCTOR)
4633 {
4634 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
4635 bool must_clear;
4636
4637 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4638 if (must_clear)
4639 return 1;
4640
4641 elts = count_type_elements (TREE_TYPE (exp), false);
4642
4643 return nz_elts < elts / 4;
4644 }
4645
4646 return initializer_zerop (exp);
4647 }
4648
4649 /* Return 1 if EXP contains all zeros. */
4650
4651 static int
4652 all_zeros_p (tree exp)
4653 {
4654 if (TREE_CODE (exp) == CONSTRUCTOR)
4656 {
4657 HOST_WIDE_INT nz_elts, nc_elts, count;
4658 bool must_clear;
4659
4660 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4661 return nz_elts == 0;
4662 }
4663
4664 return initializer_zerop (exp);
4665 }
4666 \f
4667 /* Helper function for store_constructor.
4668 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4669 TYPE is the type of the CONSTRUCTOR, not the element type.
4670 CLEARED is as for store_constructor.
4671 ALIAS_SET is the alias set to use for any stores.
4672
4673 This provides a recursive shortcut back to store_constructor when it isn't
4674 necessary to go through store_field. This is so that we can pass through
4675 the cleared field to let store_constructor know that we may not have to
4676 clear a substructure if the outer structure has already been cleared. */
4677
4678 static void
4679 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4680 HOST_WIDE_INT bitpos, enum machine_mode mode,
4681 tree exp, tree type, int cleared, int alias_set)
4682 {
4683 if (TREE_CODE (exp) == CONSTRUCTOR
4684 /* We can only call store_constructor recursively if the size and
4685 bit position are on a byte boundary. */
4686 && bitpos % BITS_PER_UNIT == 0
4687 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4688 /* If we have a nonzero bitpos for a register target, then we just
4689 let store_field do the bitfield handling. This is unlikely to
4690 generate unnecessary clear instructions anyway. */
4691 && (bitpos == 0 || MEM_P (target)))
4692 {
4693 if (MEM_P (target))
4694 target
4695 = adjust_address (target,
4696 GET_MODE (target) == BLKmode
4697 || 0 != (bitpos
4698 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4699 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4700
4702 /* Update the alias set, if required. */
4703 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4704 && MEM_ALIAS_SET (target) != 0)
4705 {
4706 target = copy_rtx (target);
4707 set_mem_alias_set (target, alias_set);
4708 }
4709
4710 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4711 }
4712 else
4713 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4714 }
4715
4716 /* Store the value of constructor EXP into the rtx TARGET.
4717 TARGET is either a REG or a MEM; we know it cannot conflict, since
4718 safe_from_p has been called.
4719 CLEARED is true if TARGET is known to have been zero'd.
4720 SIZE is the number of bytes of TARGET we are allowed to modify: this
4721 may not be the same as the size of EXP if we are assigning to a field
4722 which has been packed to exclude padding bits. */
4723
4724 static void
4725 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4726 {
4727 tree type = TREE_TYPE (exp);
4728 #ifdef WORD_REGISTER_OPERATIONS
4729 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4730 #endif
4731
4732 switch (TREE_CODE (type))
4733 {
4734 case RECORD_TYPE:
4735 case UNION_TYPE:
4736 case QUAL_UNION_TYPE:
4737 {
4738 unsigned HOST_WIDE_INT idx;
4739 tree field, value;
4740
4741 /* If size is zero or the target is already cleared, do nothing. */
4742 if (size == 0 || cleared)
4743 cleared = 1;
4744 /* We either clear the aggregate or indicate the value is dead. */
4745 else if ((TREE_CODE (type) == UNION_TYPE
4746 || TREE_CODE (type) == QUAL_UNION_TYPE)
4747 && ! CONSTRUCTOR_ELTS (exp))
4748 /* If the constructor is empty, clear the union. */
4749 {
4750 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4751 cleared = 1;
4752 }
4753
4754 /* If we are building a static constructor into a register,
4755 set the initial value as zero so we can fold the value into
4756 a constant. But if more than one register is involved,
4757 this probably loses. */
4758 else if (REG_P (target) && TREE_STATIC (exp)
4759 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4760 {
4761 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4762 cleared = 1;
4763 }
4764
4765 /* If the constructor has fewer fields than the structure or
4766 if we are initializing the structure to mostly zeros, clear
4767 the whole structure first. Don't do this if TARGET is a
4768 register whose mode size isn't equal to SIZE since
4769 clear_storage can't handle this case. */
4770 else if (size > 0
4771 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4772 != fields_length (type))
4773 || mostly_zeros_p (exp))
4774 && (!REG_P (target)
4775 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4776 == size)))
4777 {
4778 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4779 cleared = 1;
4780 }
4781
4782 if (! cleared)
4783 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4784
4785 /* Store each element of the constructor into the
4786 corresponding field of TARGET. */
4787 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4788 {
4789 enum machine_mode mode;
4790 HOST_WIDE_INT bitsize;
4791 HOST_WIDE_INT bitpos = 0;
4792 tree offset;
4793 rtx to_rtx = target;
4794
4795 /* Just ignore missing fields. We cleared the whole
4796 structure, above, if any fields are missing. */
4797 if (field == 0)
4798 continue;
4799
4800 if (cleared && initializer_zerop (value))
4801 continue;
4802
4803 if (host_integerp (DECL_SIZE (field), 1))
4804 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4805 else
4806 bitsize = -1;
4807
4808 mode = DECL_MODE (field);
4809 if (DECL_BIT_FIELD (field))
4810 mode = VOIDmode;
4811
4812 offset = DECL_FIELD_OFFSET (field);
4813 if (host_integerp (offset, 0)
4814 && host_integerp (bit_position (field), 0))
4815 {
4816 bitpos = int_bit_position (field);
4817 offset = 0;
4818 }
4819 else
4820 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4821
4822 if (offset)
4823 {
4824 rtx offset_rtx;
4825
4826 offset
4827 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4828 make_tree (TREE_TYPE (exp),
4829 target));
4830
4831 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4832 gcc_assert (MEM_P (to_rtx));
4833
4834 #ifdef POINTERS_EXTEND_UNSIGNED
4835 if (GET_MODE (offset_rtx) != Pmode)
4836 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4837 #else
4838 if (GET_MODE (offset_rtx) != ptr_mode)
4839 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4840 #endif
4841
4842 to_rtx = offset_address (to_rtx, offset_rtx,
4843 highest_pow2_factor (offset));
4844 }
4845
4846 #ifdef WORD_REGISTER_OPERATIONS
4847 /* If this initializes a field that is smaller than a
4848 word, at the start of a word, try to widen it to a full
4849 word. This special case allows us to output C++ member
4850 function initializations in a form that the optimizers
4851 can understand. */
4852 if (REG_P (target)
4853 && bitsize < BITS_PER_WORD
4854 && bitpos % BITS_PER_WORD == 0
4855 && GET_MODE_CLASS (mode) == MODE_INT
4856 && TREE_CODE (value) == INTEGER_CST
4857 && exp_size >= 0
4858 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4859 {
4860 tree type = TREE_TYPE (value);
4861
4862 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4863 {
4864 type = lang_hooks.types.type_for_size
4865 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4866 value = convert (type, value);
4867 }
4868
4869 if (BYTES_BIG_ENDIAN)
4870 value
4871 = fold_build2 (LSHIFT_EXPR, type, value,
4872 build_int_cst (NULL_TREE,
4873 BITS_PER_WORD - bitsize));
4874 bitsize = BITS_PER_WORD;
4875 mode = word_mode;
4876 }
4877 #endif
4878
4879 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4880 && DECL_NONADDRESSABLE_P (field))
4881 {
4882 to_rtx = copy_rtx (to_rtx);
4883 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4884 }
4885
4886 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4887 value, type, cleared,
4888 get_alias_set (TREE_TYPE (field)));
4889 }
4890 break;
4891 }
4892 case ARRAY_TYPE:
4893 {
4894 tree value, index;
4895 unsigned HOST_WIDE_INT i;
4896 int need_to_clear;
4897 tree domain;
4898 tree elttype = TREE_TYPE (type);
4899 int const_bounds_p;
4900 HOST_WIDE_INT minelt = 0;
4901 HOST_WIDE_INT maxelt = 0;
4902
4903 domain = TYPE_DOMAIN (type);
4904 const_bounds_p = (TYPE_MIN_VALUE (domain)
4905 && TYPE_MAX_VALUE (domain)
4906 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4907 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4908
4909 /* If we have constant bounds for the range of the type, get them. */
4910 if (const_bounds_p)
4911 {
4912 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4913 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4914 }
4915
4916 /* If the constructor has fewer elements than the array, clear
4917 the whole array first.  Similarly if this is a static
4918 constructor of a non-BLKmode object. */
4919 if (cleared)
4920 need_to_clear = 0;
4921 else if (REG_P (target) && TREE_STATIC (exp))
4922 need_to_clear = 1;
4923 else
4924 {
4925 unsigned HOST_WIDE_INT idx;
4926 tree index, value;
4927 HOST_WIDE_INT count = 0, zero_count = 0;
4928 need_to_clear = ! const_bounds_p;
4929
4930 /* This loop is a more accurate version of the loop in
4931 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4932 is also needed to check for missing elements. */
4933 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
4934 {
4935 HOST_WIDE_INT this_node_count;
4936
4937 if (need_to_clear)
4938 break;
4939
4940 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4941 {
4942 tree lo_index = TREE_OPERAND (index, 0);
4943 tree hi_index = TREE_OPERAND (index, 1);
4944
4945 if (! host_integerp (lo_index, 1)
4946 || ! host_integerp (hi_index, 1))
4947 {
4948 need_to_clear = 1;
4949 break;
4950 }
4951
4952 this_node_count = (tree_low_cst (hi_index, 1)
4953 - tree_low_cst (lo_index, 1) + 1);
4954 }
4955 else
4956 this_node_count = 1;
4957
4958 count += this_node_count;
4959 if (mostly_zeros_p (value))
4960 zero_count += this_node_count;
4961 }
4962
4963 /* Clear the entire array first if there are any missing
4964 elements, or if the incidence of zero elements is >=
4965 75%. */
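	    /* E.g. 12 zero elements among 16 qualify:
	       4 * 12 >= 3 * 16.  */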
4966 if (! need_to_clear
4967 && (count < maxelt - minelt + 1
4968 || 4 * zero_count >= 3 * count))
4969 need_to_clear = 1;
4970 }
4971
4972 if (need_to_clear && size > 0)
4973 {
4974 if (REG_P (target))
4975 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4976 else
4977 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4978 cleared = 1;
4979 }
4980
4981 if (!cleared && REG_P (target))
4982 /* Inform later passes that the old value is dead. */
4983 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4984
4985 /* Store each element of the constructor into the
4986 corresponding element of TARGET, determined by counting the
4987 elements. */
4988 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
4989 {
4990 enum machine_mode mode;
4991 HOST_WIDE_INT bitsize;
4992 HOST_WIDE_INT bitpos;
4993 int unsignedp;
4994 rtx xtarget = target;
4995
4996 if (cleared && initializer_zerop (value))
4997 continue;
4998
4999 unsignedp = TYPE_UNSIGNED (elttype);
5000 mode = TYPE_MODE (elttype);
5001 if (mode == BLKmode)
5002 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5003 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5004 : -1);
5005 else
5006 bitsize = GET_MODE_BITSIZE (mode);
5007
5008 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5009 {
5010 tree lo_index = TREE_OPERAND (index, 0);
5011 tree hi_index = TREE_OPERAND (index, 1);
5012 rtx index_r, pos_rtx;
5013 HOST_WIDE_INT lo, hi, count;
5014 tree position;
5015
5016 /* If the range is constant and "small", unroll the loop. */
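		/* For a MEM target, "small" means at most two elements or
		   at most 40 bytes of element data in total; register
		   targets are always unrolled when the bounds are
		   constant.  */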
5017 if (const_bounds_p
5018 && host_integerp (lo_index, 0)
5019 && host_integerp (hi_index, 0)
5020 && (lo = tree_low_cst (lo_index, 0),
5021 hi = tree_low_cst (hi_index, 0),
5022 count = hi - lo + 1,
5023 (!MEM_P (target)
5024 || count <= 2
5025 || (host_integerp (TYPE_SIZE (elttype), 1)
5026 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5027 <= 40 * 8)))))
5028 {
5029 lo -= minelt; hi -= minelt;
5030 for (; lo <= hi; lo++)
5031 {
5032 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5033
5034 if (MEM_P (target)
5035 && !MEM_KEEP_ALIAS_SET_P (target)
5036 && TREE_CODE (type) == ARRAY_TYPE
5037 && TYPE_NONALIASED_COMPONENT (type))
5038 {
5039 target = copy_rtx (target);
5040 MEM_KEEP_ALIAS_SET_P (target) = 1;
5041 }
5042
5043 store_constructor_field
5044 (target, bitsize, bitpos, mode, value, type, cleared,
5045 get_alias_set (elttype));
5046 }
5047 }
5048 else
5049 {
5050 rtx loop_start = gen_label_rtx ();
5051 rtx loop_end = gen_label_rtx ();
5052 tree exit_cond;
5053
5054 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5055 unsignedp = TYPE_UNSIGNED (domain);
5056
5057 index = build_decl (VAR_DECL, NULL_TREE, domain);
5058
5059 index_r
5060 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5061 &unsignedp, 0));
5062 SET_DECL_RTL (index, index_r);
5063 store_expr (lo_index, index_r, 0);
5064
5065 /* Build the head of the loop. */
5066 do_pending_stack_adjust ();
5067 emit_label (loop_start);
5068
5069 /* Assign value to element index. */
5070 position
5071 = convert (ssizetype,
5072 fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5073 index, TYPE_MIN_VALUE (domain)));
5074 position = size_binop (MULT_EXPR, position,
5075 convert (ssizetype,
5076 TYPE_SIZE_UNIT (elttype)));
5077
5078 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5079 xtarget = offset_address (target, pos_rtx,
5080 highest_pow2_factor (position));
5081 xtarget = adjust_address (xtarget, mode, 0);
5082 if (TREE_CODE (value) == CONSTRUCTOR)
5083 store_constructor (value, xtarget, cleared,
5084 bitsize / BITS_PER_UNIT);
5085 else
5086 store_expr (value, xtarget, 0);
5087
5088 /* Generate a conditional jump to exit the loop. */
5089 exit_cond = build2 (LT_EXPR, integer_type_node,
5090 index, hi_index);
5091 jumpif (exit_cond, loop_end);
5092
5093 /* Update the loop counter, and jump to the head of
5094 the loop. */
5095 expand_assignment (index,
5096 build2 (PLUS_EXPR, TREE_TYPE (index),
5097 index, integer_one_node));
5098
5099 emit_jump (loop_start);
5100
5101 /* Build the end of the loop. */
5102 emit_label (loop_end);
5103 }
5104 }
5105 else if ((index != 0 && ! host_integerp (index, 0))
5106 || ! host_integerp (TYPE_SIZE (elttype), 1))
5107 {
5108 tree position;
5109
5110 if (index == 0)
5111 index = ssize_int (1);
5112
5113 if (minelt)
5114 index = fold_convert (ssizetype,
5115 fold_build2 (MINUS_EXPR,
5116 TREE_TYPE (index),
5117 index,
5118 TYPE_MIN_VALUE (domain)));
5119
5120 position = size_binop (MULT_EXPR, index,
5121 convert (ssizetype,
5122 TYPE_SIZE_UNIT (elttype)));
5123 xtarget = offset_address (target,
5124 expand_expr (position, 0, VOIDmode, 0),
5125 highest_pow2_factor (position));
5126 xtarget = adjust_address (xtarget, mode, 0);
5127 store_expr (value, xtarget, 0);
5128 }
5129 else
5130 {
5131 if (index != 0)
5132 bitpos = ((tree_low_cst (index, 0) - minelt)
5133 * tree_low_cst (TYPE_SIZE (elttype), 1));
5134 else
5135 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5136
5137 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5138 && TREE_CODE (type) == ARRAY_TYPE
5139 && TYPE_NONALIASED_COMPONENT (type))
5140 {
5141 target = copy_rtx (target);
5142 MEM_KEEP_ALIAS_SET_P (target) = 1;
5143 }
5144 store_constructor_field (target, bitsize, bitpos, mode, value,
5145 type, cleared, get_alias_set (elttype));
5146 }
5147 }
5148 break;
5149 }
5150
5151 case VECTOR_TYPE:
5152 {
5153 unsigned HOST_WIDE_INT idx;
5154 constructor_elt *ce;
5155 int i;
5156 int need_to_clear;
5157 int icode = 0;
5158 tree elttype = TREE_TYPE (type);
5159 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5160 enum machine_mode eltmode = TYPE_MODE (elttype);
5161 HOST_WIDE_INT bitsize;
5162 HOST_WIDE_INT bitpos;
5163 rtvec vector = NULL;
5164 unsigned n_elts;
5165
5166 gcc_assert (eltmode != BLKmode);
5167
5168 n_elts = TYPE_VECTOR_SUBPARTS (type);
5169 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5170 {
5171 enum machine_mode mode = GET_MODE (target);
5172
5173 icode = (int) vec_init_optab->handlers[mode].insn_code;
5174 if (icode != CODE_FOR_nothing)
5175 {
5176 unsigned int i;
5177
5178 vector = rtvec_alloc (n_elts);
5179 for (i = 0; i < n_elts; i++)
5180 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5181 }
5182 }
5183
5184 /* If the constructor has fewer elements than the vector,
5185 clear the whole vector first.  Similarly if this is a static
5186 constructor of a non-BLKmode object. */
5187 if (cleared)
5188 need_to_clear = 0;
5189 else if (REG_P (target) && TREE_STATIC (exp))
5190 need_to_clear = 1;
5191 else
5192 {
5193 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5194 tree value;
5195
5196 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5197 {
5198 int n_elts_here = tree_low_cst
5199 (int_const_binop (TRUNC_DIV_EXPR,
5200 TYPE_SIZE (TREE_TYPE (value)),
5201 TYPE_SIZE (elttype), 0), 1);
5202
5203 count += n_elts_here;
5204 if (mostly_zeros_p (value))
5205 zero_count += n_elts_here;
5206 }
5207
5208 /* Clear the entire vector first if there are any missing elements,
5209 or if the incidence of zero elements is >= 75%. */
5210 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5211 }
5212
5213 if (need_to_clear && size > 0 && !vector)
5214 {
5215 if (REG_P (target))
5216 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5217 else
5218 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5219 cleared = 1;
5220 }
5221
5222 /* Inform later passes that the old value is dead. */
5223 if (!cleared && REG_P (target))
5224 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5225
5226 /* Store each element of the constructor into the corresponding
5227 element of TARGET, determined by counting the elements. */
5228 for (idx = 0, i = 0;
5229 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5230 idx++, i += bitsize / elt_size)
5231 {
5232 HOST_WIDE_INT eltpos;
5233 tree value = ce->value;
5234
5235 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5236 if (cleared && initializer_zerop (value))
5237 continue;
5238
5239 if (ce->index)
5240 eltpos = tree_low_cst (ce->index, 1);
5241 else
5242 eltpos = i;
5243
5244 if (vector)
5245 {
5246 /* Vector CONSTRUCTORs should only be built from smaller
5247 vectors in the case of BLKmode vectors. */
5248 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5249 RTVEC_ELT (vector, eltpos)
5250 = expand_expr (value, NULL_RTX, VOIDmode, 0);
5251 }
5252 else
5253 {
5254 enum machine_mode value_mode =
5255 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5256 ? TYPE_MODE (TREE_TYPE (value))
5257 : eltmode;
5258 bitpos = eltpos * elt_size;
5259 store_constructor_field (target, bitsize, bitpos,
5260 value_mode, value, type,
5261 cleared, get_alias_set (elttype));
5262 }
5263 }
5264
5265 if (vector)
5266 emit_insn (GEN_FCN (icode)
5267 (target,
5268 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5269 break;
5270 }
5271
5272 default:
5273 gcc_unreachable ();
5274 }
5275 }
5276
5277 /* Store the value of EXP (an expression tree)
5278 into a subfield of TARGET which has mode MODE and occupies
5279 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5280 If MODE is VOIDmode, it means that we are storing into a bit-field.
5281
5282 Always return const0_rtx unless we have something particular to
5283 return.
5284
5285 TYPE is the type of the underlying object,
5286
5287 ALIAS_SET is the alias set for the destination. This value will
5288 (in general) be different from that for TARGET, since TARGET is a
5289 reference to the containing structure. */
5290
5291 static rtx
5292 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5293 enum machine_mode mode, tree exp, tree type, int alias_set)
5294 {
5295 HOST_WIDE_INT width_mask = 0;
5296
5297 if (TREE_CODE (exp) == ERROR_MARK)
5298 return const0_rtx;
5299
5300 /* If we have nothing to store, do nothing unless the expression has
5301 side-effects. */
5302 if (bitsize == 0)
5303 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5304 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5305 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5306
5307 /* If we are storing into an unaligned field of an aligned union that is
5308 in a register, we may have the mode of TARGET being an integer mode but
5309 MODE == BLKmode. In that case, get an aligned object whose size and
5310 alignment are the same as TARGET and store TARGET into it (we can avoid
5311 the store if the field being stored is the entire width of TARGET). Then
5312 call ourselves recursively to store the field into a BLKmode version of
5313 that object. Finally, load from the object into TARGET. This is not
5314 very efficient in general, but should only be slightly more expensive
5315 than the otherwise-required unaligned accesses. Perhaps this can be
5316 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5317 twice, once with emit_move_insn and once via store_field. */
5318
5319 if (mode == BLKmode
5320 && (REG_P (target) || GET_CODE (target) == SUBREG))
5321 {
5322 rtx object = assign_temp (type, 0, 1, 1);
5323 rtx blk_object = adjust_address (object, BLKmode, 0);
5324
5325 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5326 emit_move_insn (object, target);
5327
5328 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5329
5330 emit_move_insn (target, object);
5331
5332 /* We want to return the BLKmode version of the data. */
5333 return blk_object;
5334 }
5335
5336 if (GET_CODE (target) == CONCAT)
5337 {
5338 /* We're storing into a struct containing a single __complex. */
5339
5340 gcc_assert (!bitpos);
5341 return store_expr (exp, target, 0);
5342 }
5343
5344 /* If the structure is in a register or if the component
5345 is a bit field, we cannot use addressing to access it.
5346 Use bit-field techniques or SUBREG to store in it. */
5347
5348 if (mode == VOIDmode
5349 || (mode != BLKmode && ! direct_store[(int) mode]
5350 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5351 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5352 || REG_P (target)
5353 || GET_CODE (target) == SUBREG
5354 /* If the field isn't aligned enough to store as an ordinary memref,
5355 store it as a bit field. */
5356 || (mode != BLKmode
5357 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5358 || bitpos % GET_MODE_ALIGNMENT (mode))
5359 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5360 || (bitpos % BITS_PER_UNIT != 0)))
5361 /* If the RHS and field are a constant size and the size of the
5362 RHS isn't the same size as the bitfield, we must use bitfield
5363 operations. */
5364 || (bitsize >= 0
5365 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5366 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5367 {
5368 rtx temp;
5369
5370 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5371 implies a mask operation. If the precision is the same size as
5372 the field we're storing into, that mask is redundant. This is
5373 particularly common with bit field assignments generated by the
5374 C front end. */
5375 if (TREE_CODE (exp) == NOP_EXPR)
5376 {
5377 tree type = TREE_TYPE (exp);
5378 if (INTEGRAL_TYPE_P (type)
5379 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5380 && bitsize == TYPE_PRECISION (type))
5381 {
5382 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5383 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5384 exp = TREE_OPERAND (exp, 0);
5385 }
5386 }
5387
5388 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5389
5390 /* If BITSIZE is narrower than the size of the type of EXP
5391 we will be narrowing TEMP. Normally, what's wanted are the
5392 low-order bits. However, if EXP's type is a record and this is
5393 	 a big-endian machine, we want the upper BITSIZE bits.  */
5394 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5395 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5396 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5397 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5398 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5399 - bitsize),
5400 NULL_RTX, 1);
5401
5402 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5403 MODE. */
5404 if (mode != VOIDmode && mode != BLKmode
5405 && mode != TYPE_MODE (TREE_TYPE (exp)))
5406 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5407
5408 /* If the modes of TARGET and TEMP are both BLKmode, both
5409 must be in memory and BITPOS must be aligned on a byte
5410 boundary. If so, we simply do a block copy. */
5411 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5412 {
5413 gcc_assert (MEM_P (target) && MEM_P (temp)
5414 && !(bitpos % BITS_PER_UNIT));
5415
5416 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5417 emit_block_move (target, temp,
5418 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5419 / BITS_PER_UNIT),
5420 BLOCK_OP_NORMAL);
5421
5422 return const0_rtx;
5423 }
5424
5425 /* Store the value in the bitfield. */
5426 store_bit_field (target, bitsize, bitpos, mode, temp);
5427
5428 return const0_rtx;
5429 }
5430 else
5431 {
5432 /* Now build a reference to just the desired component. */
5433 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5434
5435 if (to_rtx == target)
5436 to_rtx = copy_rtx (to_rtx);
5437
5438 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5439 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5440 set_mem_alias_set (to_rtx, alias_set);
5441
5442 return store_expr (exp, to_rtx, 0);
5443 }
5444 }
5445 \f
5446 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5447 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5448 codes and find the ultimate containing object, which we return.
5449
5450 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5451 bit position, and *PUNSIGNEDP to the signedness of the field.
5452 If the position of the field is variable, we store a tree
5453 giving the variable offset (in units) in *POFFSET.
5454 This offset is in addition to the bit position.
5455 If the position is not variable, we store 0 in *POFFSET.
5456
5457 If any of the extraction expressions is volatile,
5458 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5459
5460 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5461 is a mode that can be used to access the field. In that case, *PBITSIZE
5462 is redundant.
5463
5464 If the field describes a variable-sized object, *PMODE is set to
5465 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5466 this case, but the address of the object can be found.
5467
5468 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5469 look through nodes that serve as markers of a greater alignment than
5470 the one that can be deduced from the expression. These nodes make it
5471 possible for front-ends to prevent temporaries from being created by
5472 the middle-end on alignment considerations. For that purpose, the
5473 normal operating mode at high-level is to always pass FALSE so that
5474 the ultimate containing object is really returned; moreover, the
5475 associated predicate handled_component_p will always return TRUE
5476 on these nodes, thus indicating that they are essentially handled
5477 by get_inner_reference. TRUE should only be passed when the caller
5478 is scanning the expression in order to build another representation
5479 and specifically knows how to handle these nodes; as such, this is
5480 the normal operating mode in the RTL expanders. */
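/* An illustrative example (assumed layout): for the reference s.y[i]
   with

     struct S { int x; int y[10]; } s;

   this function returns S itself as the ultimate containing object,
   sets *PBITSIZE to the width of int, stores the variable byte offset
   (Y's position plus i * sizeof (int)) in *POFFSET, and leaves any
   remaining constant bit offset in *PBITPOS.  */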
5481
5482 tree
5483 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5484 HOST_WIDE_INT *pbitpos, tree *poffset,
5485 enum machine_mode *pmode, int *punsignedp,
5486 int *pvolatilep, bool keep_aligning)
5487 {
5488 tree size_tree = 0;
5489 enum machine_mode mode = VOIDmode;
5490 tree offset = size_zero_node;
5491 tree bit_offset = bitsize_zero_node;
5492 tree tem;
5493
5494 /* First get the mode, signedness, and size. We do this from just the
5495 outermost expression. */
5496 if (TREE_CODE (exp) == COMPONENT_REF)
5497 {
5498 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5499 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5500 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5501
5502 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5503 }
5504 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5505 {
5506 size_tree = TREE_OPERAND (exp, 1);
5507 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5508 }
5509 else
5510 {
5511 mode = TYPE_MODE (TREE_TYPE (exp));
5512 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5513
5514 if (mode == BLKmode)
5515 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5516 else
5517 *pbitsize = GET_MODE_BITSIZE (mode);
5518 }
5519
5520 if (size_tree != 0)
5521 {
5522 if (! host_integerp (size_tree, 1))
5523 mode = BLKmode, *pbitsize = -1;
5524 else
5525 *pbitsize = tree_low_cst (size_tree, 1);
5526 }
5527
5528 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5529 and find the ultimate containing object. */
5530 while (1)
5531 {
5532 switch (TREE_CODE (exp))
5533 {
5534 case BIT_FIELD_REF:
5535 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5536 TREE_OPERAND (exp, 2));
5537 break;
5538
5539 case COMPONENT_REF:
5540 {
5541 tree field = TREE_OPERAND (exp, 1);
5542 tree this_offset = component_ref_field_offset (exp);
5543
5544 /* If this field hasn't been filled in yet, don't go past it.
5545 This should only happen when folding expressions made during
5546 type construction. */
5547 if (this_offset == 0)
5548 break;
5549
5550 offset = size_binop (PLUS_EXPR, offset, this_offset);
5551 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5552 DECL_FIELD_BIT_OFFSET (field));
5553
5554 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5555 }
5556 break;
5557
5558 case ARRAY_REF:
5559 case ARRAY_RANGE_REF:
5560 {
5561 tree index = TREE_OPERAND (exp, 1);
5562 tree low_bound = array_ref_low_bound (exp);
5563 tree unit_size = array_ref_element_size (exp);
5564
5565 /* We assume all arrays have sizes that are a multiple of a byte.
5566 First subtract the lower bound, if any, in the type of the
5567 index, then convert to sizetype and multiply by the size of
5568 the array element. */
5569 if (! integer_zerop (low_bound))
5570 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5571 index, low_bound);
5572
5573 offset = size_binop (PLUS_EXPR, offset,
5574 size_binop (MULT_EXPR,
5575 convert (sizetype, index),
5576 unit_size));
5577 }
5578 break;
5579
5580 case REALPART_EXPR:
5581 break;
5582
5583 case IMAGPART_EXPR:
5584 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5585 bitsize_int (*pbitsize));
5586 break;
5587
5588 case VIEW_CONVERT_EXPR:
5589 if (keep_aligning && STRICT_ALIGNMENT
5590 && (TYPE_ALIGN (TREE_TYPE (exp))
5591 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5592 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5593 < BIGGEST_ALIGNMENT)
5594 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5595 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5596 goto done;
5597 break;
5598
5599 default:
5600 goto done;
5601 }
5602
5603 /* If any reference in the chain is volatile, the effect is volatile. */
5604 if (TREE_THIS_VOLATILE (exp))
5605 *pvolatilep = 1;
5606
5607 exp = TREE_OPERAND (exp, 0);
5608 }
5609 done:
5610
5611 /* If OFFSET is constant, see if we can return the whole thing as a
5612 constant bit position. Otherwise, split it up. */
5613 if (host_integerp (offset, 0)
5614 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5615 bitsize_unit_node))
5616 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5617 && host_integerp (tem, 0))
5618 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5619 else
5620 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5621
5622 *pmode = mode;
5623 return exp;
5624 }
5625
5626 /* Return a tree of sizetype representing the size, in bytes, of the element
5627 of EXP, an ARRAY_REF. */
5628
5629 tree
5630 array_ref_element_size (tree exp)
5631 {
5632 tree aligned_size = TREE_OPERAND (exp, 3);
5633 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5634
5635 /* If a size was specified in the ARRAY_REF, it's the size measured
5636 in alignment units of the element type. So multiply by that value. */
5637 if (aligned_size)
5638 {
5639 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5640 sizetype from another type of the same width and signedness. */
5641 if (TREE_TYPE (aligned_size) != sizetype)
5642 aligned_size = fold_convert (sizetype, aligned_size);
5643 return size_binop (MULT_EXPR, aligned_size,
5644 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5645 }
5646
5647 /* Otherwise, take the size from that of the element type. Substitute
5648 any PLACEHOLDER_EXPR that we have. */
5649 else
5650 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5651 }
5652
5653 /* Return a tree representing the lower bound of the array mentioned in
5654 EXP, an ARRAY_REF. */
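/* Illustrative examples only: for a C array the lower bound is always
   zero, so the final case below applies; for an Ada array declared
   with bounds (5 .. 10), the domain type's TYPE_MIN_VALUE is 5 and
   that node is what gets returned.  */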
5655
5656 tree
5657 array_ref_low_bound (tree exp)
5658 {
5659 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5660
5661 /* If a lower bound is specified in EXP, use it. */
5662 if (TREE_OPERAND (exp, 2))
5663 return TREE_OPERAND (exp, 2);
5664
5665 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5666 substituting for a PLACEHOLDER_EXPR as needed. */
5667 if (domain_type && TYPE_MIN_VALUE (domain_type))
5668 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5669
5670 /* Otherwise, return a zero of the appropriate type. */
5671 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5672 }
5673
5674 /* Return a tree representing the upper bound of the array mentioned in
5675 EXP, an ARRAY_REF. */
5676
5677 tree
5678 array_ref_up_bound (tree exp)
5679 {
5680 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5681
5682 /* If there is a domain type and it has an upper bound, use it, substituting
5683 for a PLACEHOLDER_EXPR as needed. */
5684 if (domain_type && TYPE_MAX_VALUE (domain_type))
5685 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5686
5687 /* Otherwise fail. */
5688 return NULL_TREE;
5689 }
5690
5691 /* Return a tree representing the offset, in bytes, of the field referenced
5692 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
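/* Illustrative note: with DECL_OFFSET_ALIGN of 64, an aligned offset
   of 2 recorded in the COMPONENT_REF denotes 2 * (64 / BITS_PER_UNIT)
   == 16 bytes.  */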
5693
5694 tree
5695 component_ref_field_offset (tree exp)
5696 {
5697 tree aligned_offset = TREE_OPERAND (exp, 2);
5698 tree field = TREE_OPERAND (exp, 1);
5699
5700 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5701 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5702 value. */
5703 if (aligned_offset)
5704 {
5705 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5706 sizetype from another type of the same width and signedness. */
5707 if (TREE_TYPE (aligned_offset) != sizetype)
5708 aligned_offset = fold_convert (sizetype, aligned_offset);
5709 return size_binop (MULT_EXPR, aligned_offset,
5710 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5711 }
5712
5713 /* Otherwise, take the offset from that of the field. Substitute
5714 any PLACEHOLDER_EXPR that we have. */
5715 else
5716 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5717 }
5718
5719 /* Return 1 if T is an expression that get_inner_reference handles. */
5720
5721 int
5722 handled_component_p (tree t)
5723 {
5724 switch (TREE_CODE (t))
5725 {
5726 case BIT_FIELD_REF:
5727 case COMPONENT_REF:
5728 case ARRAY_REF:
5729 case ARRAY_RANGE_REF:
5730 case VIEW_CONVERT_EXPR:
5731 case REALPART_EXPR:
5732 case IMAGPART_EXPR:
5733 return 1;
5734
5735 default:
5736 return 0;
5737 }
5738 }
5739 \f
5740 /* Given an rtx VALUE that may contain additions and multiplications, return
5741 an equivalent value that just refers to a register, memory, or constant.
5742 This is done by generating instructions to perform the arithmetic and
5743 returning a pseudo-register containing the value.
5744
5745 The returned value may be a REG, SUBREG, MEM or constant. */
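/* An illustrative sketch: given VALUE == (plus:SI (reg:SI 100)
   (const_int 8)), force_operand emits an add insn computing the sum
   into a pseudo and returns that pseudo, so the caller ends up with a
   plain register operand instead of an arithmetic expression.  */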
5746
5747 rtx
5748 force_operand (rtx value, rtx target)
5749 {
5750 rtx op1, op2;
5751 /* Use subtarget as the target for operand 0 of a binary operation. */
5752 rtx subtarget = get_subtarget (target);
5753 enum rtx_code code = GET_CODE (value);
5754
5755 /* Check for subreg applied to an expression produced by loop optimizer. */
5756 if (code == SUBREG
5757 && !REG_P (SUBREG_REG (value))
5758 && !MEM_P (SUBREG_REG (value)))
5759 {
5760 value = simplify_gen_subreg (GET_MODE (value),
5761 force_reg (GET_MODE (SUBREG_REG (value)),
5762 force_operand (SUBREG_REG (value),
5763 NULL_RTX)),
5764 GET_MODE (SUBREG_REG (value)),
5765 SUBREG_BYTE (value));
5766 code = GET_CODE (value);
5767 }
5768
5769 /* Check for a PIC address load. */
5770 if ((code == PLUS || code == MINUS)
5771 && XEXP (value, 0) == pic_offset_table_rtx
5772 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5773 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5774 || GET_CODE (XEXP (value, 1)) == CONST))
5775 {
5776 if (!subtarget)
5777 subtarget = gen_reg_rtx (GET_MODE (value));
5778 emit_move_insn (subtarget, value);
5779 return subtarget;
5780 }
5781
5782 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5783 {
5784 if (!target)
5785 target = gen_reg_rtx (GET_MODE (value));
5786 convert_move (target, force_operand (XEXP (value, 0), NULL),
5787 code == ZERO_EXTEND);
5788 return target;
5789 }
5790
5791 if (ARITHMETIC_P (value))
5792 {
5793 op2 = XEXP (value, 1);
5794 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5795 subtarget = 0;
5796 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5797 {
5798 code = PLUS;
5799 op2 = negate_rtx (GET_MODE (value), op2);
5800 }
5801
5802 /* Check for an addition with OP2 a constant integer and our first
5803 operand a PLUS of a virtual register and something else. In that
5804 case, we want to emit the sum of the virtual register and the
5805 constant first and then add the other value. This allows virtual
5806 register instantiation to simply modify the constant rather than
5807 creating another one around this addition. */
5808 if (code == PLUS && GET_CODE (op2) == CONST_INT
5809 && GET_CODE (XEXP (value, 0)) == PLUS
5810 && REG_P (XEXP (XEXP (value, 0), 0))
5811 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5812 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5813 {
5814 rtx temp = expand_simple_binop (GET_MODE (value), code,
5815 XEXP (XEXP (value, 0), 0), op2,
5816 subtarget, 0, OPTAB_LIB_WIDEN);
5817 return expand_simple_binop (GET_MODE (value), code, temp,
5818 force_operand (XEXP (XEXP (value,
5819 0), 1), 0),
5820 target, 0, OPTAB_LIB_WIDEN);
5821 }
5822
5823 op1 = force_operand (XEXP (value, 0), subtarget);
5824 op2 = force_operand (op2, NULL_RTX);
5825 switch (code)
5826 {
5827 case MULT:
5828 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5829 case DIV:
5830 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5831 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5832 target, 1, OPTAB_LIB_WIDEN);
5833 else
5834 return expand_divmod (0,
5835 FLOAT_MODE_P (GET_MODE (value))
5836 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5837 GET_MODE (value), op1, op2, target, 0);
5838 break;
5839 case MOD:
5840 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5841 target, 0);
5842 break;
5843 case UDIV:
5844 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5845 target, 1);
5846 break;
5847 case UMOD:
5848 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5849 target, 1);
5850 break;
5851 case ASHIFTRT:
5852 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5853 target, 0, OPTAB_LIB_WIDEN);
5854 break;
5855 default:
5856 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5857 target, 1, OPTAB_LIB_WIDEN);
5858 }
5859 }
5860 if (UNARY_P (value))
5861 {
5862 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5863 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5864 }
5865
5866 #ifdef INSN_SCHEDULING
5867 	/* On machines that have insn scheduling, we want all memory references to be
5868 explicit, so we need to deal with such paradoxical SUBREGs. */
5869 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5870 && (GET_MODE_SIZE (GET_MODE (value))
5871 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5872 value
5873 = simplify_gen_subreg (GET_MODE (value),
5874 force_reg (GET_MODE (SUBREG_REG (value)),
5875 force_operand (SUBREG_REG (value),
5876 NULL_RTX)),
5877 GET_MODE (SUBREG_REG (value)),
5878 SUBREG_BYTE (value));
5879 #endif
5880
5881 return value;
5882 }
5883 \f
5884 /* Subroutine of expand_expr: return nonzero iff there is no way that
5885 EXP can reference X, which is being modified. TOP_P is nonzero if this
5886 call is going to be used to determine whether we need a temporary
5887 for EXP, as opposed to a recursive call to this function.
5888
5889 It is always safe for this routine to return zero since it merely
5890 searches for optimization opportunities. */
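/* For example (illustrative): when expanding A = B + C with the rtx
   for A as a candidate scratch register, the caller first checks
   safe_from_p on C; if C might read the storage that A occupies, we
   return 0 and the caller picks a fresh temporary rather than
   clobbering an input.  */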
5891
5892 int
5893 safe_from_p (rtx x, tree exp, int top_p)
5894 {
5895 rtx exp_rtl = 0;
5896 int i, nops;
5897
5898 if (x == 0
5899 /* If EXP has varying size, we MUST use a target since we currently
5900 have no way of allocating temporaries of variable size
5901 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5902 So we assume here that something at a higher level has prevented a
5903 clash. This is somewhat bogus, but the best we can do. Only
5904 do this when X is BLKmode and when we are at the top level. */
5905 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5906 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5907 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5908 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5909 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5910 != INTEGER_CST)
5911 && GET_MODE (x) == BLKmode)
5912 /* If X is in the outgoing argument area, it is always safe. */
5913 || (MEM_P (x)
5914 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5915 || (GET_CODE (XEXP (x, 0)) == PLUS
5916 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5917 return 1;
5918
5919 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5920 find the underlying pseudo. */
5921 if (GET_CODE (x) == SUBREG)
5922 {
5923 x = SUBREG_REG (x);
5924 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5925 return 0;
5926 }
5927
5928 /* Now look at our tree code and possibly recurse. */
5929 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5930 {
5931 case tcc_declaration:
5932 exp_rtl = DECL_RTL_IF_SET (exp);
5933 break;
5934
5935 case tcc_constant:
5936 return 1;
5937
5938 case tcc_exceptional:
5939 if (TREE_CODE (exp) == TREE_LIST)
5940 {
5941 while (1)
5942 {
5943 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5944 return 0;
5945 exp = TREE_CHAIN (exp);
5946 if (!exp)
5947 return 1;
5948 if (TREE_CODE (exp) != TREE_LIST)
5949 return safe_from_p (x, exp, 0);
5950 }
5951 }
5952 else if (TREE_CODE (exp) == ERROR_MARK)
5953 return 1; /* An already-visited SAVE_EXPR? */
5954 else
5955 return 0;
5956
5957 case tcc_statement:
5958 /* The only case we look at here is the DECL_INITIAL inside a
5959 DECL_EXPR. */
5960 return (TREE_CODE (exp) != DECL_EXPR
5961 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5962 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5963 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5964
5965 case tcc_binary:
5966 case tcc_comparison:
5967 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5968 return 0;
5969 /* Fall through. */
5970
5971 case tcc_unary:
5972 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5973
5974 case tcc_expression:
5975 case tcc_reference:
5976 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5977 the expression. If it is set, we conflict iff we are that rtx or
5978 both are in memory. Otherwise, we check all operands of the
5979 expression recursively. */
5980
5981 switch (TREE_CODE (exp))
5982 {
5983 case ADDR_EXPR:
5984 /* If the operand is static or we are static, we can't conflict.
5985 Likewise if we don't conflict with the operand at all. */
5986 if (staticp (TREE_OPERAND (exp, 0))
5987 || TREE_STATIC (exp)
5988 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5989 return 1;
5990
5991 /* Otherwise, the only way this can conflict is if we are taking
5992 	       the address of a DECL whose address is part of X, which is
5993 very rare. */
5994 exp = TREE_OPERAND (exp, 0);
5995 if (DECL_P (exp))
5996 {
5997 if (!DECL_RTL_SET_P (exp)
5998 || !MEM_P (DECL_RTL (exp)))
5999 return 0;
6000 else
6001 exp_rtl = XEXP (DECL_RTL (exp), 0);
6002 }
6003 break;
6004
6005 case MISALIGNED_INDIRECT_REF:
6006 case ALIGN_INDIRECT_REF:
6007 case INDIRECT_REF:
6008 if (MEM_P (x)
6009 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6010 get_alias_set (exp)))
6011 return 0;
6012 break;
6013
6014 case CALL_EXPR:
6015 /* Assume that the call will clobber all hard registers and
6016 all of memory. */
6017 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6018 || MEM_P (x))
6019 return 0;
6020 break;
6021
6022 case WITH_CLEANUP_EXPR:
6023 case CLEANUP_POINT_EXPR:
6024 /* Lowered by gimplify.c. */
6025 gcc_unreachable ();
6026
6027 case SAVE_EXPR:
6028 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6029
6030 default:
6031 break;
6032 }
6033
6034 /* If we have an rtx, we do not need to scan our operands. */
6035 if (exp_rtl)
6036 break;
6037
6038 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6039 for (i = 0; i < nops; i++)
6040 if (TREE_OPERAND (exp, i) != 0
6041 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6042 return 0;
6043
6044 /* If this is a language-specific tree code, it may require
6045 special handling. */
6046 if ((unsigned int) TREE_CODE (exp)
6047 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6048 && !lang_hooks.safe_from_p (x, exp))
6049 return 0;
6050 break;
6051
6052 case tcc_type:
6053 /* Should never get a type here. */
6054 gcc_unreachable ();
6055 }
6056
6057 /* If we have an rtl, find any enclosed object. Then see if we conflict
6058 with it. */
6059 if (exp_rtl)
6060 {
6061 if (GET_CODE (exp_rtl) == SUBREG)
6062 {
6063 exp_rtl = SUBREG_REG (exp_rtl);
6064 if (REG_P (exp_rtl)
6065 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6066 return 0;
6067 }
6068
6069 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6070 are memory and they conflict. */
6071 return ! (rtx_equal_p (x, exp_rtl)
6072 || (MEM_P (x) && MEM_P (exp_rtl)
6073 && true_dependence (exp_rtl, VOIDmode, x,
6074 rtx_addr_varies_p)));
6075 }
6076
6077 /* If we reach here, it is safe. */
6078 return 1;
6079 }
6080
6081 \f
6082 /* Return the highest power of two that EXP is known to be a multiple of.
6083 This is used in updating alignment of MEMs in array references. */
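/* Illustrative examples: the INTEGER_CST 12 yields 4 (its lowest set
   bit); i * 8 yields 8, since the unknown factor contributes 1 and
   the constant contributes 8; a COND_EXPR yields the MIN of its two
   arms.  */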
6084
6085 unsigned HOST_WIDE_INT
6086 highest_pow2_factor (tree exp)
6087 {
6088 unsigned HOST_WIDE_INT c0, c1;
6089
6090 switch (TREE_CODE (exp))
6091 {
6092 case INTEGER_CST:
6093 /* We can find the lowest bit that's a one. If the low
6094 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6095 We need to handle this case since we can find it in a COND_EXPR,
6096 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6097 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6098 later ICE. */
6099 if (TREE_CONSTANT_OVERFLOW (exp))
6100 return BIGGEST_ALIGNMENT;
6101 else
6102 {
6103 /* Note: tree_low_cst is intentionally not used here,
6104 we don't care about the upper bits. */
6105 c0 = TREE_INT_CST_LOW (exp);
6106 c0 &= -c0;
6107 return c0 ? c0 : BIGGEST_ALIGNMENT;
6108 }
6109 break;
6110
6111 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6112 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6113 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6114 return MIN (c0, c1);
6115
6116 case MULT_EXPR:
6117 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6118 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6119 return c0 * c1;
6120
6121 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6122 case CEIL_DIV_EXPR:
6123 if (integer_pow2p (TREE_OPERAND (exp, 1))
6124 && host_integerp (TREE_OPERAND (exp, 1), 1))
6125 {
6126 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6127 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6128 return MAX (1, c0 / c1);
6129 }
6130 break;
6131
6132 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6133 case SAVE_EXPR:
6134 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6135
6136 case COMPOUND_EXPR:
6137 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6138
6139 case COND_EXPR:
6140 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6141 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6142 return MIN (c0, c1);
6143
6144 default:
6145 break;
6146 }
6147
6148 return 1;
6149 }
6150
6151 /* Similar, except that the alignment requirements of TARGET are
6152 taken into account. Assume it is at least as aligned as its
6153 type, unless it is a COMPONENT_REF in which case the layout of
6154 the structure gives the alignment. */
6155
6156 static unsigned HOST_WIDE_INT
6157 highest_pow2_factor_for_target (tree target, tree exp)
6158 {
6159 unsigned HOST_WIDE_INT target_align, factor;
6160
6161 factor = highest_pow2_factor (exp);
6162 if (TREE_CODE (target) == COMPONENT_REF)
6163 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6164 else
6165 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6166 return MAX (factor, target_align);
6167 }
6168 \f
6169 /* Expands variable VAR. */
6170
6171 void
6172 expand_var (tree var)
6173 {
6174 if (DECL_EXTERNAL (var))
6175 return;
6176
6177 if (TREE_STATIC (var))
6178 /* If this is an inlined copy of a static local variable,
6179 look up the original decl. */
6180 var = DECL_ORIGIN (var);
6181
6182 if (TREE_STATIC (var)
6183 ? !TREE_ASM_WRITTEN (var)
6184 : !DECL_RTL_SET_P (var))
6185 {
6186 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6187 /* Should be ignored. */;
6188 else if (lang_hooks.expand_decl (var))
6189 /* OK. */;
6190 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6191 expand_decl (var);
6192 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6193 rest_of_decl_compilation (var, 0, 0);
6194 else
6195 /* No expansion needed. */
6196 gcc_assert (TREE_CODE (var) == TYPE_DECL
6197 || TREE_CODE (var) == CONST_DECL
6198 || TREE_CODE (var) == FUNCTION_DECL
6199 || TREE_CODE (var) == LABEL_DECL);
6200 }
6201 }
6202
6203 /* Subroutine of expand_expr. Expand the two operands of a binary
6204 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6205 The value may be stored in TARGET if TARGET is nonzero. The
6206 MODIFIER argument is as documented by expand_expr. */
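/* Illustrative note: if EXP0 and EXP1 are equal trees, as in A + A,
   the operand is expanded only once and the second rtx is merely a
   copy of the first, avoiding redundant RTL.  */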
6207
6208 static void
6209 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6210 enum expand_modifier modifier)
6211 {
6212 if (! safe_from_p (target, exp1, 1))
6213 target = 0;
6214 if (operand_equal_p (exp0, exp1, 0))
6215 {
6216 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6217 *op1 = copy_rtx (*op0);
6218 }
6219 else
6220 {
6221 /* If we need to preserve evaluation order, copy exp0 into its own
6222 temporary variable so that it can't be clobbered by exp1. */
6223 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6224 exp0 = save_expr (exp0);
6225 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6226 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6227 }
6228 }
6229
6230 \f
6231 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6232 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
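/* Illustrative example: for &s.f, where F sits 4 bytes into S, the
   default case below decomposes the reference with
   get_inner_reference, we recurse to take the address of S, and the
   constant BITPOS of 32 bits is added back near the end as
   plus_constant (result, 4).  */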
6233
6234 static rtx
6235 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6236 enum expand_modifier modifier)
6237 {
6238 rtx result, subtarget;
6239 tree inner, offset;
6240 HOST_WIDE_INT bitsize, bitpos;
6241 int volatilep, unsignedp;
6242 enum machine_mode mode1;
6243
6244 /* If we are taking the address of a constant and are at the top level,
6245 we have to use output_constant_def since we can't call force_const_mem
6246 at top level. */
6247 /* ??? This should be considered a front-end bug. We should not be
6248 generating ADDR_EXPR of something that isn't an LVALUE. The only
6249 exception here is STRING_CST. */
6250 if (TREE_CODE (exp) == CONSTRUCTOR
6251 || CONSTANT_CLASS_P (exp))
6252 return XEXP (output_constant_def (exp, 0), 0);
6253
6254 /* Everything must be something allowed by is_gimple_addressable. */
6255 switch (TREE_CODE (exp))
6256 {
6257 case INDIRECT_REF:
6258 /* This case will happen via recursion for &a->b. */
6259 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
6260
6261 case CONST_DECL:
6262 /* Recurse and make the output_constant_def clause above handle this. */
6263 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6264 tmode, modifier);
6265
6266 case REALPART_EXPR:
6267 	/* The real part of the complex number is always first; therefore
6268 the address is the same as the address of the parent object. */
6269 offset = 0;
6270 bitpos = 0;
6271 inner = TREE_OPERAND (exp, 0);
6272 break;
6273
6274 case IMAGPART_EXPR:
6275 /* The imaginary part of the complex number is always second.
6276 The expression is therefore always offset by the size of the
6277 scalar type. */
6278 offset = 0;
6279 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6280 inner = TREE_OPERAND (exp, 0);
6281 break;
6282
6283 default:
6284 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6285 	 expand_expr, as that can have various side effects; LABEL_DECLs, for
6286 example, may not have their DECL_RTL set yet. Assume language
6287 specific tree nodes can be expanded in some interesting way. */
6288 if (DECL_P (exp)
6289 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6290 {
6291 result = expand_expr (exp, target, tmode,
6292 modifier == EXPAND_INITIALIZER
6293 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6294
6295 /* If the DECL isn't in memory, then the DECL wasn't properly
6296 marked TREE_ADDRESSABLE, which will be either a front-end
6297 or a tree optimizer bug. */
6298 gcc_assert (MEM_P (result));
6299 result = XEXP (result, 0);
6300
6301 /* ??? Is this needed anymore? */
6302 	  if (DECL_P (exp) && !TREE_USED (exp))
6303 {
6304 assemble_external (exp);
6305 TREE_USED (exp) = 1;
6306 }
6307
6308 if (modifier != EXPAND_INITIALIZER
6309 && modifier != EXPAND_CONST_ADDRESS)
6310 result = force_operand (result, target);
6311 return result;
6312 }
6313
6314 /* Pass FALSE as the last argument to get_inner_reference although
6315 we are expanding to RTL. The rationale is that we know how to
6316 handle "aligning nodes" here: we can just bypass them because
6317 they won't change the final object whose address will be returned
6318 (they actually exist only for that purpose). */
6319 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6320 &mode1, &unsignedp, &volatilep, false);
6321 break;
6322 }
6323
6324 /* We must have made progress. */
6325 gcc_assert (inner != exp);
6326
6327 subtarget = offset || bitpos ? NULL_RTX : target;
6328 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6329
6330 if (offset)
6331 {
6332 rtx tmp;
6333
6334 if (modifier != EXPAND_NORMAL)
6335 result = force_operand (result, NULL);
6336 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6337
6338 result = convert_memory_address (tmode, result);
6339 tmp = convert_memory_address (tmode, tmp);
6340
6341 if (modifier == EXPAND_SUM)
6342 result = gen_rtx_PLUS (tmode, result, tmp);
6343 else
6344 {
6345 subtarget = bitpos ? NULL_RTX : target;
6346 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6347 1, OPTAB_LIB_WIDEN);
6348 }
6349 }
6350
6351 if (bitpos)
6352 {
6353 /* Someone beforehand should have rejected taking the address
6354 of such an object. */
6355 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6356
6357 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6358 if (modifier < EXPAND_SUM)
6359 result = force_operand (result, target);
6360 }
6361
6362 return result;
6363 }
6364
6365 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6366 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6367
6368 static rtx
6369 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6370 enum expand_modifier modifier)
6371 {
6372 enum machine_mode rmode;
6373 rtx result;
6374
6375 /* Target mode of VOIDmode says "whatever's natural". */
6376 if (tmode == VOIDmode)
6377 tmode = TYPE_MODE (TREE_TYPE (exp));
6378
6379 /* We can get called with some Weird Things if the user does silliness
6380 like "(short) &a". In that case, convert_memory_address won't do
6381 the right thing, so ignore the given target mode. */
6382 if (tmode != Pmode && tmode != ptr_mode)
6383 tmode = Pmode;
6384
6385 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6386 tmode, modifier);
6387
6388 	  /* Despite expand_expr's claim that TMODE is ignored when not
6389 	     strictly convenient, things break if we don't honor it.  Note
6390 that combined with the above, we only do this for pointer modes. */
6391 rmode = GET_MODE (result);
6392 if (rmode == VOIDmode)
6393 rmode = tmode;
6394 if (rmode != tmode)
6395 result = convert_memory_address (tmode, result);
6396
6397 return result;
6398 }
6399
6400
6401 /* expand_expr: generate code for computing expression EXP.
6402 An rtx for the computed value is returned. The value is never null.
6403 In the case of a void EXP, const0_rtx is returned.
6404
6405 The value may be stored in TARGET if TARGET is nonzero.
6406 TARGET is just a suggestion; callers must assume that
6407 the rtx returned may not be the same as TARGET.
6408
6409 If TARGET is CONST0_RTX, it means that the value will be ignored.
6410
6411 If TMODE is not VOIDmode, it suggests generating the
6412 result in mode TMODE. But this is done only when convenient.
6413 	   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6414 TMODE is just a suggestion; callers must assume that
6415 the rtx returned may not have mode TMODE.
6416
6417 Note that TARGET may have neither TMODE nor MODE. In that case, it
6418 probably will not be used.
6419
6420 If MODIFIER is EXPAND_SUM then when EXP is an addition
6421 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6422 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6423 products as above, or REG or MEM, or constant.
6424 Ordinarily in such cases we would output mul or add instructions
6425 and then return a pseudo reg containing the sum.
6426
6427 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6428 it also marks a label as absolutely required (it can't be dead).
6429 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6430 This is used for outputting expressions used in initializers.
6431
6432 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6433 with a constant address even if that address is not normally legitimate.
6434 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6435
6436 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6437 a call parameter. Such targets require special care as we haven't yet
6438 marked TARGET so that it's safe from being trashed by libcalls. We
6439 don't want to use TARGET for anything but the final result;
6440 	   intermediate values must go elsewhere.  Additionally, calls to
6441 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6442
6443 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6444 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6445 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6446 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6447 recursively. */
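/* An illustrative example of the EXPAND_SUM contract: expanding p + 4
   under EXPAND_SUM may simply return (plus:P (reg) (const_int 4))
   without emitting an add, leaving it to the caller (typically
   memory_address) to fold the sum into an addressing mode or force it
   into a register.  */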
6448
6449 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6450 enum expand_modifier, rtx *);
6451
6452 rtx
6453 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6454 enum expand_modifier modifier, rtx *alt_rtl)
6455 {
6456 int rn = -1;
6457 rtx ret, last = NULL;
6458
6459 /* Handle ERROR_MARK before anybody tries to access its type. */
6460 if (TREE_CODE (exp) == ERROR_MARK
6461 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6462 {
6463 ret = CONST0_RTX (tmode);
6464 return ret ? ret : const0_rtx;
6465 }
6466
6467 if (flag_non_call_exceptions)
6468 {
6469 rn = lookup_stmt_eh_region (exp);
6470 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6471 if (rn >= 0)
6472 last = get_last_insn ();
6473 }
6474
6475 /* If this is an expression of some kind and it has an associated line
6476 number, then emit the line number before expanding the expression.
6477
6478 We need to save and restore the file and line information so that
6479 errors discovered during expansion are emitted with the right
6480 	     information.  It would be better if the diagnostic routines
6481 used the file/line information embedded in the tree nodes rather
6482 than globals. */
6483 if (cfun && EXPR_HAS_LOCATION (exp))
6484 {
6485 location_t saved_location = input_location;
6486 input_location = EXPR_LOCATION (exp);
6487 emit_line_note (input_location);
6488
6489 /* Record where the insns produced belong. */
6490 record_block_change (TREE_BLOCK (exp));
6491
6492 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6493
6494 input_location = saved_location;
6495 }
6496 else
6497 {
6498 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6499 }
6500
6501 /* If using non-call exceptions, mark all insns that may trap.
6502 expand_call() will mark CALL_INSNs before we get to this code,
6503 but it doesn't handle libcalls, and these may trap. */
6504 if (rn >= 0)
6505 {
6506 rtx insn;
6507 for (insn = next_real_insn (last); insn;
6508 insn = next_real_insn (insn))
6509 {
6510 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6511 /* If we want exceptions for non-call insns, any
6512 may_trap_p instruction may throw. */
6513 && GET_CODE (PATTERN (insn)) != CLOBBER
6514 && GET_CODE (PATTERN (insn)) != USE
6515 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6516 {
6517 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6518 REG_NOTES (insn));
6519 }
6520 }
6521 }
6522
6523 return ret;
6524 }
6525
6526 static rtx
6527 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6528 enum expand_modifier modifier, rtx *alt_rtl)
6529 {
6530 rtx op0, op1, temp;
6531 tree type = TREE_TYPE (exp);
6532 int unsignedp;
6533 enum machine_mode mode;
6534 enum tree_code code = TREE_CODE (exp);
6535 optab this_optab;
6536 rtx subtarget, original_target;
6537 int ignore;
6538 tree context;
6539 bool reduce_bit_field = false;
6540 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6541 ? reduce_to_bit_field_precision ((expr), \
6542 target, \
6543 type) \
6544 : (expr))
6545
6546 mode = TYPE_MODE (type);
6547 unsignedp = TYPE_UNSIGNED (type);
6548 if (lang_hooks.reduce_bit_field_operations
6549 && TREE_CODE (type) == INTEGER_TYPE
6550 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6551 {
6552 /* An operation in what may be a bit-field type needs the
6553 result to be reduced to the precision of the bit-field type,
6554 which is narrower than that of the type's mode. */
6555 reduce_bit_field = true;
6556 if (modifier == EXPAND_STACK_PARM)
6557 target = 0;
6558 }
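  /* Illustrative case: arithmetic on a 5-bit bit-field type is
     carried out in the type's machine mode, which is at least 8 bits
     wide, so the result must be truncated back to 5 bits; the
     REDUCE_BIT_FIELD wrapper applied to results below performs that
     truncation.  */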
6559
6560 /* Use subtarget as the target for operand 0 of a binary operation. */
6561 subtarget = get_subtarget (target);
6562 original_target = target;
6563 ignore = (target == const0_rtx
6564 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6565 || code == CONVERT_EXPR || code == COND_EXPR
6566 || code == VIEW_CONVERT_EXPR)
6567 && TREE_CODE (type) == VOID_TYPE));
6568
6569 /* If we are going to ignore this result, we need only do something
6570 if there is a side-effect somewhere in the expression. If there
6571 is, short-circuit the most common cases here. Note that we must
6572 not call expand_expr with anything but const0_rtx in case this
6573 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6574
6575 if (ignore)
6576 {
6577 if (! TREE_SIDE_EFFECTS (exp))
6578 return const0_rtx;
6579
6580 /* Ensure we reference a volatile object even if value is ignored, but
6581 don't do this if all we are doing is taking its address. */
6582 if (TREE_THIS_VOLATILE (exp)
6583 && TREE_CODE (exp) != FUNCTION_DECL
6584 && mode != VOIDmode && mode != BLKmode
6585 && modifier != EXPAND_CONST_ADDRESS)
6586 {
6587 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6588 if (MEM_P (temp))
6589 temp = copy_to_reg (temp);
6590 return const0_rtx;
6591 }
6592
6593 if (TREE_CODE_CLASS (code) == tcc_unary
6594 || code == COMPONENT_REF || code == INDIRECT_REF)
6595 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6596 modifier);
6597
6598 else if (TREE_CODE_CLASS (code) == tcc_binary
6599 || TREE_CODE_CLASS (code) == tcc_comparison
6600 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6601 {
6602 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6603 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6604 return const0_rtx;
6605 }
6606 else if (code == BIT_FIELD_REF)
6607 {
6608 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6609 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6610 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6611 return const0_rtx;
6612 }
6613
6614 target = 0;
6615 }
6616
6617
6618 switch (code)
6619 {
6620 case LABEL_DECL:
6621 {
6622 tree function = decl_function_context (exp);
6623
6624 temp = label_rtx (exp);
6625 temp = gen_rtx_LABEL_REF (Pmode, temp);
6626
6627 if (function != current_function_decl
6628 && function != 0)
6629 LABEL_REF_NONLOCAL_P (temp) = 1;
6630
6631 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6632 return temp;
6633 }
6634
6635 case SSA_NAME:
6636 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6637 NULL);
6638
6639 case PARM_DECL:
6640 case VAR_DECL:
6641 /* If a static var's type was incomplete when the decl was written,
6642 but the type is complete now, lay out the decl now. */
6643 if (DECL_SIZE (exp) == 0
6644 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6645 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6646 layout_decl (exp, 0);
6647
6648 /* ... fall through ... */
6649
6650 case FUNCTION_DECL:
6651 case RESULT_DECL:
6652 gcc_assert (DECL_RTL (exp));
6653
6654 	      /* Ensure the variable is marked as used even if it doesn't go through
6655 	         a parser.  If it hasn't been used yet, write out an external
6656 definition. */
6657 if (! TREE_USED (exp))
6658 {
6659 assemble_external (exp);
6660 TREE_USED (exp) = 1;
6661 }
6662
6663 /* Show we haven't gotten RTL for this yet. */
6664 temp = 0;
6665
6666 /* Variables inherited from containing functions should have
6667 been lowered by this point. */
6668 context = decl_function_context (exp);
6669 gcc_assert (!context
6670 || context == current_function_decl
6671 || TREE_STATIC (exp)
6672 /* ??? C++ creates functions that are not TREE_STATIC. */
6673 || TREE_CODE (exp) == FUNCTION_DECL);
6674
6675 /* This is the case of an array whose size is to be determined
6676 from its initializer, while the initializer is still being parsed.
6677 See expand_decl. */
6678
6679 if (MEM_P (DECL_RTL (exp))
6680 && REG_P (XEXP (DECL_RTL (exp), 0)))
6681 temp = validize_mem (DECL_RTL (exp));
6682
6683 /* If DECL_RTL is memory, we are in the normal case and either
6684 the address is not valid or it is not a register and -fforce-addr
6685 is specified, get the address into a register. */
6686
6687 else if (MEM_P (DECL_RTL (exp))
6688 && modifier != EXPAND_CONST_ADDRESS
6689 && modifier != EXPAND_SUM
6690 && modifier != EXPAND_INITIALIZER
6691 && (! memory_address_p (DECL_MODE (exp),
6692 XEXP (DECL_RTL (exp), 0))
6693 || (flag_force_addr
6694 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6695 {
6696 if (alt_rtl)
6697 *alt_rtl = DECL_RTL (exp);
6698 temp = replace_equiv_address (DECL_RTL (exp),
6699 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6700 }
6701
6702 /* If we got something, return it. But first, set the alignment
6703 if the address is a register. */
6704 if (temp != 0)
6705 {
6706 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6707 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6708
6709 return temp;
6710 }
6711
6712 /* If the mode of DECL_RTL does not match that of the decl, it
6713 must be a promoted value. We return a SUBREG of the wanted mode,
6714 but mark it so that we know that it was already extended. */
6715
6716 if (REG_P (DECL_RTL (exp))
6717 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6718 {
6719 enum machine_mode pmode;
6720
6721 /* Get the signedness used for this variable. Ensure we get the
6722 same mode we got when the variable was declared. */
6723 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6724 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6725 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6726
6727 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6728 SUBREG_PROMOTED_VAR_P (temp) = 1;
6729 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6730 return temp;
6731 }
6732
6733 return DECL_RTL (exp);
6734
6735 case INTEGER_CST:
6736 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6737 TREE_INT_CST_HIGH (exp), mode);
6738
6739 /* ??? If overflow is set, fold will have done an incomplete job,
6740 which can result in (plus xx (const_int 0)), which can get
6741 simplified by validate_replace_rtx during virtual register
6742 instantiation, which can result in unrecognizable insns.
6743 Avoid this by forcing all overflows into registers. */
6744 if (TREE_CONSTANT_OVERFLOW (exp)
6745 && modifier != EXPAND_INITIALIZER)
6746 temp = force_reg (mode, temp);
6747
6748 return temp;
6749
6750 case VECTOR_CST:
6751 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6752 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6753 return const_vector_from_tree (exp);
6754 else
6755 return expand_expr (build_constructor_from_list
6756 (TREE_TYPE (exp),
6757 TREE_VECTOR_CST_ELTS (exp)),
6758 ignore ? const0_rtx : target, tmode, modifier);
6759
6760 case CONST_DECL:
6761 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6762
6763 case REAL_CST:
6764 /* If optimized, generate immediate CONST_DOUBLE
6765 which will be turned into memory by reload if necessary.
6766
6767 We used to force a register so that loop.c could see it. But
6768 this does not allow gen_* patterns to perform optimizations with
6769 the constants. It also produces two insns in cases like "x = 1.0;".
6770 On most machines, floating-point constants are not permitted in
6771 many insns, so we'd end up copying it to a register in any case.
6772
6773 Now, we do the copying in expand_binop, if appropriate. */
6774 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6775 TYPE_MODE (TREE_TYPE (exp)));
6776
6777 case COMPLEX_CST:
6778 /* Handle evaluating a complex constant in a CONCAT target. */
6779 if (original_target && GET_CODE (original_target) == CONCAT)
6780 {
6781 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6782 rtx rtarg, itarg;
6783
6784 rtarg = XEXP (original_target, 0);
6785 itarg = XEXP (original_target, 1);
6786
6787 /* Move the real and imaginary parts separately. */
6788 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6789 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6790
6791 if (op0 != rtarg)
6792 emit_move_insn (rtarg, op0);
6793 if (op1 != itarg)
6794 emit_move_insn (itarg, op1);
6795
6796 return original_target;
6797 }
6798
6799 /* ... fall through ... */
6800
6801 case STRING_CST:
6802 temp = output_constant_def (exp, 1);
6803
6804 /* temp contains a constant address.
6805 On RISC machines where a constant address isn't valid,
6806 make some insns to get that address into a register. */
6807 if (modifier != EXPAND_CONST_ADDRESS
6808 && modifier != EXPAND_INITIALIZER
6809 && modifier != EXPAND_SUM
6810 && (! memory_address_p (mode, XEXP (temp, 0))
6811 || flag_force_addr))
6812 return replace_equiv_address (temp,
6813 copy_rtx (XEXP (temp, 0)));
6814 return temp;
6815
6816 case SAVE_EXPR:
6817 {
6818 tree val = TREE_OPERAND (exp, 0);
6819 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6820
6821 if (!SAVE_EXPR_RESOLVED_P (exp))
6822 {
6823 /* We can indeed still hit this case, typically via builtin
6824 expanders calling save_expr immediately before expanding
6825 something. Assume this means that we only have to deal
6826 with non-BLKmode values. */
6827 gcc_assert (GET_MODE (ret) != BLKmode);
6828
6829 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6830 DECL_ARTIFICIAL (val) = 1;
6831 DECL_IGNORED_P (val) = 1;
6832 TREE_OPERAND (exp, 0) = val;
6833 SAVE_EXPR_RESOLVED_P (exp) = 1;
6834
6835 if (!CONSTANT_P (ret))
6836 ret = copy_to_reg (ret);
6837 SET_DECL_RTL (val, ret);
6838 }
6839
6840 return ret;
6841 }
6842
6843 case GOTO_EXPR:
6844 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6845 expand_goto (TREE_OPERAND (exp, 0));
6846 else
6847 expand_computed_goto (TREE_OPERAND (exp, 0));
6848 return const0_rtx;
6849
6850 case CONSTRUCTOR:
6851 /* If we don't need the result, just ensure we evaluate any
6852 subexpressions. */
6853 if (ignore)
6854 {
6855 unsigned HOST_WIDE_INT idx;
6856 tree value;
6857
6858 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6859 expand_expr (value, const0_rtx, VOIDmode, 0);
6860
6861 return const0_rtx;
6862 }
6863
6864 /* Try to avoid creating a temporary at all. This is possible
6865 if all of the initializer is zero.
6866 	   FIXME: try to handle with memset all the [0..255] initializers
6867 	   we can.  */
6868 else if (TREE_STATIC (exp)
6869 && !TREE_ADDRESSABLE (exp)
6870 && target != 0 && mode == BLKmode
6871 && all_zeros_p (exp))
6872 {
6873 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6874 return target;
6875 }
6876
6877 /* All elts simple constants => refer to a constant in memory. But
6878 if this is a non-BLKmode mode, let it store a field at a time
6879 since that should make a CONST_INT or CONST_DOUBLE when we
6880 fold. Likewise, if we have a target we can use, it is best to
6881 store directly into the target unless the type is large enough
6882 that memcpy will be used. If we are making an initializer and
6883 all operands are constant, put it in memory as well.
6884
6885 	   FIXME: Avoid trying to fill vector constructors piecemeal.
6886 Output them with output_constant_def below unless we're sure
6887 they're zeros. This should go away when vector initializers
6888 are treated like VECTOR_CST instead of arrays.
6889 */
6890 else if ((TREE_STATIC (exp)
6891 && ((mode == BLKmode
6892 && ! (target != 0 && safe_from_p (target, exp, 1)))
6893 || TREE_ADDRESSABLE (exp)
6894 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6895 && (! MOVE_BY_PIECES_P
6896 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6897 TYPE_ALIGN (type)))
6898 && ! mostly_zeros_p (exp))))
6899 || ((modifier == EXPAND_INITIALIZER
6900 || modifier == EXPAND_CONST_ADDRESS)
6901 && TREE_CONSTANT (exp)))
6902 {
6903 rtx constructor = output_constant_def (exp, 1);
6904
6905 if (modifier != EXPAND_CONST_ADDRESS
6906 && modifier != EXPAND_INITIALIZER
6907 && modifier != EXPAND_SUM)
6908 constructor = validize_mem (constructor);
6909
6910 return constructor;
6911 }
6912 else
6913 {
6914 /* Handle calls that pass values in multiple non-contiguous
6915 locations. The Irix 6 ABI has examples of this. */
6916 if (target == 0 || ! safe_from_p (target, exp, 1)
6917 || GET_CODE (target) == PARALLEL
6918 || modifier == EXPAND_STACK_PARM)
6919 target
6920 = assign_temp (build_qualified_type (type,
6921 (TYPE_QUALS (type)
6922 | (TREE_READONLY (exp)
6923 * TYPE_QUAL_CONST))),
6924 0, TREE_ADDRESSABLE (exp), 1);
6925
6926 store_constructor (exp, target, 0, int_expr_size (exp));
6927 return target;
6928 }
6929
6930 case MISALIGNED_INDIRECT_REF:
6931 case ALIGN_INDIRECT_REF:
6932 case INDIRECT_REF:
6933 {
6934 tree exp1 = TREE_OPERAND (exp, 0);
6935
6936 if (modifier != EXPAND_WRITE)
6937 {
6938 tree t;
6939
6940 t = fold_read_from_constant_string (exp);
6941 if (t)
6942 return expand_expr (t, target, tmode, modifier);
6943 }
6944
6945 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6946 op0 = memory_address (mode, op0);
6947
6948 if (code == ALIGN_INDIRECT_REF)
6949 {
6950 int align = TYPE_ALIGN_UNIT (type);
6951 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6952 op0 = memory_address (mode, op0);
6953 }
6954
6955 temp = gen_rtx_MEM (mode, op0);
6956
6957 set_mem_attributes (temp, exp, 0);
6958
6959 /* Resolve the misalignment now, so that we don't have to remember
6960 to resolve it later. Of course, this only works for reads. */
6961 /* ??? When we get around to supporting writes, we'll have to handle
6962 this in store_expr directly. The vectorizer isn't generating
6963 those yet, however. */
6964 if (code == MISALIGNED_INDIRECT_REF)
6965 {
6966 int icode;
6967 rtx reg, insn;
6968
6969 gcc_assert (modifier == EXPAND_NORMAL);
6970
6971 /* The vectorizer should have already checked the mode. */
6972 icode = movmisalign_optab->handlers[mode].insn_code;
6973 gcc_assert (icode != CODE_FOR_nothing);
6974
6975 /* We've already validated the memory, and we're creating a
6976 new pseudo destination. The predicates really can't fail. */
6977 reg = gen_reg_rtx (mode);
6978
6979 /* Nor can the insn generator. */
6980 insn = GEN_FCN (icode) (reg, temp);
6981 emit_insn (insn);
6982
6983 return reg;
6984 }
6985
6986 return temp;
6987 }
6988
6989 case TARGET_MEM_REF:
6990 {
6991 struct mem_address addr;
6992
6993 get_address_description (exp, &addr);
6994 op0 = addr_for_mem_ref (&addr, true);
6995 op0 = memory_address (mode, op0);
6996 temp = gen_rtx_MEM (mode, op0);
6997 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
6998 }
6999 return temp;
7000
7001 case ARRAY_REF:
7002
7003 {
7004 tree array = TREE_OPERAND (exp, 0);
7005 tree index = TREE_OPERAND (exp, 1);
7006
7007 /* Fold an expression like: "foo"[2].
7008 This is not done in fold so it won't happen inside &.
7009 Don't fold if this is for wide characters since it's too
7010 difficult to do correctly and this is a very rare case. */
7011
7012 if (modifier != EXPAND_CONST_ADDRESS
7013 && modifier != EXPAND_INITIALIZER
7014 && modifier != EXPAND_MEMORY)
7015 {
7016 tree t = fold_read_from_constant_string (exp);
7017
7018 if (t)
7019 return expand_expr (t, target, tmode, modifier);
7020 }
7021
7022 /* If this is a constant index into a constant array,
7023 just get the value from the array. Handle both the cases when
7024 we have an explicit constructor and when our operand is a variable
7025 that was declared const. */
7026
7027 if (modifier != EXPAND_CONST_ADDRESS
7028 && modifier != EXPAND_INITIALIZER
7029 && modifier != EXPAND_MEMORY
7030 && TREE_CODE (array) == CONSTRUCTOR
7031 && ! TREE_SIDE_EFFECTS (array)
7032 && TREE_CODE (index) == INTEGER_CST)
7033 {
7034 unsigned HOST_WIDE_INT ix;
7035 tree field, value;
7036
7037 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7038 field, value)
7039 if (tree_int_cst_equal (field, index))
7040 {
7041 if (!TREE_SIDE_EFFECTS (value))
7042 return expand_expr (fold (value), target, tmode, modifier);
7043 break;
7044 }
7045 }
7046
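	    /* Similarly, a constant index into a read-only array whose
	       DECL_INITIAL is known, e.g. (hypothetical)
		 static const int t[3] = { 1, 2, 3 };  ... t[1] ...
	       folds to the element value (here 2) at expand time.  */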
7047 else if (optimize >= 1
7048 && modifier != EXPAND_CONST_ADDRESS
7049 && modifier != EXPAND_INITIALIZER
7050 && modifier != EXPAND_MEMORY
7051 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7052 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7053 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7054 && targetm.binds_local_p (array))
7055 {
7056 if (TREE_CODE (index) == INTEGER_CST)
7057 {
7058 tree init = DECL_INITIAL (array);
7059
7060 if (TREE_CODE (init) == CONSTRUCTOR)
7061 {
7062 unsigned HOST_WIDE_INT ix;
7063 tree field, value;
7064
7065 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7066 field, value)
7067 if (tree_int_cst_equal (field, index))
7068 {
7069 if (!TREE_SIDE_EFFECTS (value))
7070 return expand_expr (fold (value), target, tmode,
7071 modifier);
7072 break;
7073 }
7074 }
7075 else if (TREE_CODE (init) == STRING_CST
7076 && 0 > compare_tree_int (index,
7077 TREE_STRING_LENGTH (init)))
7078 {
7079 tree type = TREE_TYPE (TREE_TYPE (init));
7080 enum machine_mode mode = TYPE_MODE (type);
7081
7082 if (GET_MODE_CLASS (mode) == MODE_INT
7083 && GET_MODE_SIZE (mode) == 1)
7084 return gen_int_mode (TREE_STRING_POINTER (init)
7085 [TREE_INT_CST_LOW (index)], mode);
7086 }
7087 }
7088 }
7089 }
7090 goto normal_inner_ref;
7091
7092 case COMPONENT_REF:
7093 /* If the operand is a CONSTRUCTOR, we can just extract the
7094 appropriate field if it is present. */
7095 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7096 {
7097 unsigned HOST_WIDE_INT idx;
7098 tree field, value;
7099
7100 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7101 idx, field, value)
7102 if (field == TREE_OPERAND (exp, 1)
7103 /* We can normally use the value of the field in the
7104 CONSTRUCTOR. However, if this is a bitfield in
7105 an integral mode that we can fit in a HOST_WIDE_INT,
7106 we must mask only the number of bits in the bitfield,
7107 since this is done implicitly by the constructor. If
7108 the bitfield does not meet either of those conditions,
7109 we can't do this optimization. */
7110 && (! DECL_BIT_FIELD (field)
7111 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7112 && (GET_MODE_BITSIZE (DECL_MODE (field))
7113 <= HOST_BITS_PER_WIDE_INT))))
7114 {
7115 if (DECL_BIT_FIELD (field)
7116 && modifier == EXPAND_STACK_PARM)
7117 target = 0;
7118 op0 = expand_expr (value, target, tmode, modifier);
7119 if (DECL_BIT_FIELD (field))
7120 {
7121 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7122 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7123
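		      /* Reduce the value to the field's width.  E.g. for a
			 3-bit field (illustrative), unsigned means AND with
			 (1 << 3) - 1 == 7; signed means shift left and then
			 arithmetically right by GET_MODE_BITSIZE (imode) - 3,
			 sign-extending bit 2 through the upper bits.  */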
7124 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7125 {
7126 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7127 op0 = expand_and (imode, op0, op1, target);
7128 }
7129 else
7130 {
7131 tree count
7132 = build_int_cst (NULL_TREE,
7133 GET_MODE_BITSIZE (imode) - bitsize);
7134
7135 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7136 target, 0);
7137 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7138 target, 0);
7139 }
7140 }
7141
7142 return op0;
7143 }
7144 }
7145 goto normal_inner_ref;
7146
7147 case BIT_FIELD_REF:
7148 case ARRAY_RANGE_REF:
7149 normal_inner_ref:
7150 {
7151 enum machine_mode mode1;
7152 HOST_WIDE_INT bitsize, bitpos;
7153 tree offset;
7154 int volatilep = 0;
7155 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7156 &mode1, &unsignedp, &volatilep, true);
7157 rtx orig_op0;
7158
7159 /* If we got back the original object, something is wrong. Perhaps
7160 we are evaluating an expression too early. In any event, don't
7161 infinitely recurse. */
7162 gcc_assert (tem != exp);
7163
7164 /* If TEM's type is a union of variable size, pass TARGET to the inner
7165 computation, since it will need a temporary and TARGET is known
7166 to suffice. This occurs in unchecked conversion in Ada. */
7167
7168 orig_op0 = op0
7169 = expand_expr (tem,
7170 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7171 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7172 != INTEGER_CST)
7173 && modifier != EXPAND_STACK_PARM
7174 ? target : NULL_RTX),
7175 VOIDmode,
7176 (modifier == EXPAND_INITIALIZER
7177 || modifier == EXPAND_CONST_ADDRESS
7178 || modifier == EXPAND_STACK_PARM)
7179 ? modifier : EXPAND_NORMAL);
7180
7181 /* If this is a constant, put it into a register if it is a
7182 legitimate constant and OFFSET is 0; otherwise put it into memory. */
7183 if (CONSTANT_P (op0))
7184 {
7185 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7186 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7187 && offset == 0)
7188 op0 = force_reg (mode, op0);
7189 else
7190 op0 = validize_mem (force_const_mem (mode, op0));
7191 }
7192
7193 /* Otherwise, if this object is not in memory and we either have an
7194 offset or a BLKmode result, put it there. This case can't occur in
7195 C, but can in Ada if we have unchecked conversion of an expression
7196 from a scalar type to an array or record type or for an
7197 ARRAY_RANGE_REF whose type is BLKmode. */
7198 else if (!MEM_P (op0)
7199 && (offset != 0
7200 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7201 {
7202 tree nt = build_qualified_type (TREE_TYPE (tem),
7203 (TYPE_QUALS (TREE_TYPE (tem))
7204 | TYPE_QUAL_CONST));
7205 rtx memloc = assign_temp (nt, 1, 1, 1);
7206
7207 emit_move_insn (memloc, op0);
7208 op0 = memloc;
7209 }
7210
7211 if (offset != 0)
7212 {
7213 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7214 EXPAND_SUM);
7215
7216 gcc_assert (MEM_P (op0));
7217
7218 #ifdef POINTERS_EXTEND_UNSIGNED
7219 if (GET_MODE (offset_rtx) != Pmode)
7220 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7221 #else
7222 if (GET_MODE (offset_rtx) != ptr_mode)
7223 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7224 #endif
7225
7226 if (GET_MODE (op0) == BLKmode
7227 /* A constant address in OP0 can have VOIDmode, we must
7228 not try to call force_reg in that case. */
7229 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7230 && bitsize != 0
7231 && (bitpos % bitsize) == 0
7232 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7233 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7234 {
7235 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7236 bitpos = 0;
7237 }
7238
7239 op0 = offset_address (op0, offset_rtx,
7240 highest_pow2_factor (offset));
7241 }
7242
7243 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7244 record its alignment as BIGGEST_ALIGNMENT. */
7245 if (MEM_P (op0) && bitpos == 0 && offset != 0
7246 && is_aligning_offset (offset, tem))
7247 set_mem_align (op0, BIGGEST_ALIGNMENT);
7248
7249 /* Don't forget about volatility even if this is a bitfield. */
7250 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7251 {
7252 if (op0 == orig_op0)
7253 op0 = copy_rtx (op0);
7254
7255 MEM_VOLATILE_P (op0) = 1;
7256 }
7257
7258 /* The following code doesn't handle CONCAT.
7259 Assume only bitpos == 0 can be used for CONCAT, due to
7260 one-element arrays having the same mode as their element. */
7261 if (GET_CODE (op0) == CONCAT)
7262 {
7263 gcc_assert (bitpos == 0
7264 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7265 return op0;
7266 }
7267
7268 /* In cases where an aligned union has an unaligned object
7269 as a field, we might be extracting a BLKmode value from
7270 an integer-mode (e.g., SImode) object. Handle this case
7271 by doing the extract into an object as wide as the field
7272 (which we know to be the width of a basic mode), then
7273 storing into memory, and changing the mode to BLKmode. */
7274 if (mode1 == VOIDmode
7275 || REG_P (op0) || GET_CODE (op0) == SUBREG
7276 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7277 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7278 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7279 && modifier != EXPAND_CONST_ADDRESS
7280 && modifier != EXPAND_INITIALIZER)
7281 /* If the field isn't aligned enough to fetch as a memref,
7282 fetch it as a bit field. */
7283 || (mode1 != BLKmode
7284 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7285 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7286 || (MEM_P (op0)
7287 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7288 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7289 && ((modifier == EXPAND_CONST_ADDRESS
7290 || modifier == EXPAND_INITIALIZER)
7291 ? STRICT_ALIGNMENT
7292 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7293 || (bitpos % BITS_PER_UNIT != 0)))
7294 /* If the type and the field are a constant size and the
7295 size of the type isn't the same size as the bitfield,
7296 we must use bitfield operations. */
7297 || (bitsize >= 0
7298 && TYPE_SIZE (TREE_TYPE (exp))
7299 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7300 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7301 bitsize)))
7302 {
7303 enum machine_mode ext_mode = mode;
7304
7305 if (ext_mode == BLKmode
7306 && ! (target != 0 && MEM_P (op0)
7307 && MEM_P (target)
7308 && bitpos % BITS_PER_UNIT == 0))
7309 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7310
7311 if (ext_mode == BLKmode)
7312 {
7313 if (target == 0)
7314 target = assign_temp (type, 0, 1, 1);
7315
7316 if (bitsize == 0)
7317 return target;
7318
7319 /* In this case, BITPOS must start at a byte boundary and
7320 TARGET, if specified, must be a MEM. */
7321 gcc_assert (MEM_P (op0)
7322 && (!target || MEM_P (target))
7323 && !(bitpos % BITS_PER_UNIT));
7324
7325 emit_block_move (target,
7326 adjust_address (op0, VOIDmode,
7327 bitpos / BITS_PER_UNIT),
7328 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7329 / BITS_PER_UNIT),
7330 (modifier == EXPAND_STACK_PARM
7331 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7332
7333 return target;
7334 }
7335
7336 op0 = validize_mem (op0);
7337
7338 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7339 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7340
7341 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7342 (modifier == EXPAND_STACK_PARM
7343 ? NULL_RTX : target),
7344 ext_mode, ext_mode);
7345
7346 /* If the result is a record type and BITSIZE is narrower than
7347 the mode of OP0, an integral mode, and this is a big endian
7348 machine, we must put the field into the high-order bits. */
7349 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7350 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7351 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7352 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7353 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7354 - bitsize),
7355 op0, 1);
7356
7357 /* If the result type is BLKmode, store the data into a temporary
7358 of the appropriate type, but with the mode corresponding to the
7359 mode for the data we have (op0's mode). It's tempting to make
7360 this a constant type, since we know it's only being stored once,
7361 but that can cause problems if we are taking the address of this
7362 COMPONENT_REF because the MEM of any reference via that address
7363 will have flags corresponding to the type, which will not
7364 necessarily be constant. */
7365 if (mode == BLKmode)
7366 {
7367 rtx new
7368 = assign_stack_temp_for_type
7369 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7370
7371 emit_move_insn (new, op0);
7372 op0 = copy_rtx (new);
7373 PUT_MODE (op0, BLKmode);
7374 set_mem_attributes (op0, exp, 1);
7375 }
7376
7377 return op0;
7378 }
7379
7380 /* If the result is BLKmode, use that to access the object
7381 now as well. */
7382 if (mode == BLKmode)
7383 mode1 = BLKmode;
7384
7385 /* Get a reference to just this component. */
7386 if (modifier == EXPAND_CONST_ADDRESS
7387 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7388 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7389 else
7390 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7391
7392 if (op0 == orig_op0)
7393 op0 = copy_rtx (op0);
7394
7395 set_mem_attributes (op0, exp, 0);
7396 if (REG_P (XEXP (op0, 0)))
7397 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7398
7399 MEM_VOLATILE_P (op0) |= volatilep;
7400 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7401 || modifier == EXPAND_CONST_ADDRESS
7402 || modifier == EXPAND_INITIALIZER)
7403 return op0;
7404 else if (target == 0)
7405 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7406
7407 convert_move (target, op0, unsignedp);
7408 return target;
7409 }
7410
7411 case OBJ_TYPE_REF:
7412 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7413
7414 case CALL_EXPR:
7415 /* Check for a built-in function. */
7416 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7417 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7418 == FUNCTION_DECL)
7419 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7420 {
7421 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7422 == BUILT_IN_FRONTEND)
7423 return lang_hooks.expand_expr (exp, original_target,
7424 tmode, modifier,
7425 alt_rtl);
7426 else
7427 return expand_builtin (exp, target, subtarget, tmode, ignore);
7428 }
7429
7430 return expand_call (exp, target, ignore);
7431
7432 case NON_LVALUE_EXPR:
7433 case NOP_EXPR:
7434 case CONVERT_EXPR:
7435 if (TREE_OPERAND (exp, 0) == error_mark_node)
7436 return const0_rtx;
7437
7438 if (TREE_CODE (type) == UNION_TYPE)
7439 {
7440 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7441
7442 /* If both input and output are BLKmode, this conversion isn't doing
7443 anything except possibly changing memory attribute. */
7444 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7445 {
7446 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7447 modifier);
7448
7449 result = copy_rtx (result);
7450 set_mem_attributes (result, exp, 0);
7451 return result;
7452 }
7453
7454 if (target == 0)
7455 {
7456 if (TYPE_MODE (type) != BLKmode)
7457 target = gen_reg_rtx (TYPE_MODE (type));
7458 else
7459 target = assign_temp (type, 0, 1, 1);
7460 }
7461
7462 if (MEM_P (target))
7463 /* Store data into beginning of memory target. */
7464 store_expr (TREE_OPERAND (exp, 0),
7465 adjust_address (target, TYPE_MODE (valtype), 0),
7466 modifier == EXPAND_STACK_PARM);
7467
7468 else
7469 {
7470 gcc_assert (REG_P (target));
7471
7472 /* Store this field into a union of the proper type. */
7473 store_field (target,
7474 MIN ((int_size_in_bytes (TREE_TYPE
7475 (TREE_OPERAND (exp, 0)))
7476 * BITS_PER_UNIT),
7477 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7478 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7479 type, 0);
7480 }
7481
7482 /* Return the entire union. */
7483 return target;
7484 }
7485
7486 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7487 {
7488 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7489 modifier);
7490
7491 /* If the signedness of the conversion differs and OP0 is
7492 a promoted SUBREG, clear that indication since we now
7493 have to do the proper extension. */
7494 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7495 && GET_CODE (op0) == SUBREG)
7496 SUBREG_PROMOTED_VAR_P (op0) = 0;
7497
7498 return REDUCE_BIT_FIELD (op0);
7499 }
7500
7501 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7502 if (GET_MODE (op0) == mode)
7503 ;
7504
7505 /* If OP0 is a constant, just convert it into the proper mode. */
7506 else if (CONSTANT_P (op0))
7507 {
7508 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7509 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7510
7511 if (modifier == EXPAND_INITIALIZER)
7512 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7513 subreg_lowpart_offset (mode,
7514 inner_mode));
7515 else
7516 op0 = convert_modes (mode, inner_mode, op0,
7517 TYPE_UNSIGNED (inner_type));
7518 }
7519
7520 else if (modifier == EXPAND_INITIALIZER)
7521 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7522
7523 else if (target == 0)
7524 op0 = convert_to_mode (mode, op0,
7525 TYPE_UNSIGNED (TREE_TYPE
7526 (TREE_OPERAND (exp, 0))));
7527 else
7528 {
7529 convert_move (target, op0,
7530 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7531 op0 = target;
7532 }
7533
7534 return REDUCE_BIT_FIELD (op0);
7535
7536 case VIEW_CONVERT_EXPR:
7537 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7538
7539 /* If the input and output modes are both the same, we are done. */
7540 if (TYPE_MODE (type) == GET_MODE (op0))
7541 ;
7542 /* If neither mode is BLKmode, and both modes are the same size
7543 then we can use gen_lowpart. */
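      /* E.g. (illustrative) VIEW_CONVERT_EXPR from float to int on a
	 32-bit target: SFmode and SImode have the same size, so a
	 lowpart subreg simply reinterprets the bits.  */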
7544 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7545 && GET_MODE_SIZE (TYPE_MODE (type))
7546 == GET_MODE_SIZE (GET_MODE (op0)))
7547 {
7548 if (GET_CODE (op0) == SUBREG)
7549 op0 = force_reg (GET_MODE (op0), op0);
7550 op0 = gen_lowpart (TYPE_MODE (type), op0);
7551 }
7552 /* If both modes are integral, then we can convert from one to the
7553 other. */
7554 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7555 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7556 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7557 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7558 /* As a last resort, spill op0 to memory, and reload it in a
7559 different mode. */
7560 else if (!MEM_P (op0))
7561 {
7562 /* If the operand is not a MEM, force it into memory. Since we
7563 are going to be changing the mode of the MEM, don't call
7564 force_const_mem for constants because we don't allow pool
7565 constants to change mode. */
7566 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7567
7568 gcc_assert (!TREE_ADDRESSABLE (exp));
7569
7570 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7571 target
7572 = assign_stack_temp_for_type
7573 (TYPE_MODE (inner_type),
7574 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7575
7576 emit_move_insn (target, op0);
7577 op0 = target;
7578 }
7579
7580 /* At this point, OP0 is in the correct mode. If the output type is such
7581 that the operand is known to be aligned, indicate that it is.
7582 Otherwise, we need only be concerned about alignment for non-BLKmode
7583 results. */
7584 if (MEM_P (op0))
7585 {
7586 op0 = copy_rtx (op0);
7587
7588 if (TYPE_ALIGN_OK (type))
7589 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7590 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7591 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7592 {
7593 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7594 HOST_WIDE_INT temp_size
7595 = MAX (int_size_in_bytes (inner_type),
7596 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7597 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7598 temp_size, 0, type);
7599 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7600
7601 gcc_assert (!TREE_ADDRESSABLE (exp));
7602
7603 if (GET_MODE (op0) == BLKmode)
7604 emit_block_move (new_with_op0_mode, op0,
7605 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7606 (modifier == EXPAND_STACK_PARM
7607 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7608 else
7609 emit_move_insn (new_with_op0_mode, op0);
7610
7611 op0 = new;
7612 }
7613
7614 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7615 }
7616
7617 return op0;
7618
7619 case PLUS_EXPR:
7620 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7621 something else, make sure we add the register to the constant and
7622 then to the other thing. This case can occur during strength
7623 reduction and doing it this way will produce better code if the
7624 frame pointer or argument pointer is eliminated.
7625
7626 fold-const.c will ensure that the constant is always in the inner
7627 PLUS_EXPR, so the only case we need to do anything about is if
7628 sp, ap, or fp is our second argument, in which case we must swap
7629 the innermost first argument and our second argument. */
7630
7631 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7632 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7633 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7634 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7635 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7636 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7637 {
7638 tree t = TREE_OPERAND (exp, 1);
7639
7640 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7641 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7642 }
7643
7644 /* If the result is to be ptr_mode and we are adding an integer to
7645 something, we might be forming a constant. So try to use
7646 plus_constant. If it produces a sum and we can't accept it,
7647 use force_operand. This allows P = &ARR[const] to generate
7648 efficient code on machines where a SYMBOL_REF is not a valid
7649 address.
7650
7651 If this is an EXPAND_SUM call, always return the sum. */
7652 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7653 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7654 {
7655 if (modifier == EXPAND_STACK_PARM)
7656 target = 0;
7657 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7658 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7659 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7660 {
7661 rtx constant_part;
7662
7663 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7664 EXPAND_SUM);
7665 /* Use immed_double_const to ensure that the constant is
7666 truncated according to the mode of OP1, then sign extended
7667 to a HOST_WIDE_INT. Using the constant directly can result
7668 in non-canonical RTL in a 64x32 cross compile. */
7669 constant_part
7670 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7671 (HOST_WIDE_INT) 0,
7672 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7673 op1 = plus_constant (op1, INTVAL (constant_part));
7674 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7675 op1 = force_operand (op1, target);
7676 return REDUCE_BIT_FIELD (op1);
7677 }
7678
7679 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7680 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7681 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7682 {
7683 rtx constant_part;
7684
7685 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7686 (modifier == EXPAND_INITIALIZER
7687 ? EXPAND_INITIALIZER : EXPAND_SUM));
7688 if (! CONSTANT_P (op0))
7689 {
7690 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7691 VOIDmode, modifier);
7692 /* Return a PLUS if modifier says it's OK. */
7693 if (modifier == EXPAND_SUM
7694 || modifier == EXPAND_INITIALIZER)
7695 return simplify_gen_binary (PLUS, mode, op0, op1);
7696 goto binop2;
7697 }
7698 /* Use immed_double_const to ensure that the constant is
7699 truncated according to the mode of OP1, then sign extended
7700 to a HOST_WIDE_INT. Using the constant directly can result
7701 in non-canonical RTL in a 64x32 cross compile. */
7702 constant_part
7703 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7704 (HOST_WIDE_INT) 0,
7705 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7706 op0 = plus_constant (op0, INTVAL (constant_part));
7707 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7708 op0 = force_operand (op0, target);
7709 return REDUCE_BIT_FIELD (op0);
7710 }
7711 }
7712
7713 /* No sense saving up arithmetic to be done
7714 if it's all in the wrong mode to form part of an address.
7715 And force_operand won't know whether to sign-extend or
7716 zero-extend. */
7717 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7718 || mode != ptr_mode)
7719 {
7720 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7721 subtarget, &op0, &op1, 0);
7722 if (op0 == const0_rtx)
7723 return op1;
7724 if (op1 == const0_rtx)
7725 return op0;
7726 goto binop2;
7727 }
7728
7729 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7730 subtarget, &op0, &op1, modifier);
7731 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7732
7733 case MINUS_EXPR:
7734 /* For initializers, we are allowed to return a MINUS of two
7735 symbolic constants. Here we handle all cases when both operands
7736 are constant. */
7737 /* Handle difference of two symbolic constants,
7738 for the sake of an initializer. */
7739 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7740 && really_constant_p (TREE_OPERAND (exp, 0))
7741 && really_constant_p (TREE_OPERAND (exp, 1)))
7742 {
7743 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7744 NULL_RTX, &op0, &op1, modifier);
7745
7746 /* If the last operand is a CONST_INT, use plus_constant of
7747 the negated constant. Else make the MINUS. */
7748 if (GET_CODE (op1) == CONST_INT)
7749 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7750 else
7751 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7752 }
7753
7754 /* No sense saving up arithmetic to be done
7755 if it's all in the wrong mode to form part of an address.
7756 And force_operand won't know whether to sign-extend or
7757 zero-extend. */
7758 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7759 || mode != ptr_mode)
7760 goto binop;
7761
7762 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7763 subtarget, &op0, &op1, modifier);
7764
7765 /* Convert A - const to A + (-const). */
7766 if (GET_CODE (op1) == CONST_INT)
7767 {
7768 op1 = negate_rtx (mode, op1);
7769 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7770 }
7771
7772 goto binop2;
7773
7774 case MULT_EXPR:
7775 /* If first operand is constant, swap them.
7776 Thus the following special case checks need only
7777 check the second operand. */
7778 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7779 {
7780 tree t1 = TREE_OPERAND (exp, 0);
7781 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7782 TREE_OPERAND (exp, 1) = t1;
7783 }
7784
7785 /* Attempt to return something suitable for generating an
7786 indexed address, for machines that support that. */
7787
7788 if (modifier == EXPAND_SUM && mode == ptr_mode
7789 && host_integerp (TREE_OPERAND (exp, 1), 0))
7790 {
7791 tree exp1 = TREE_OPERAND (exp, 1);
7792
7793 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7794 EXPAND_SUM);
7795
7796 if (!REG_P (op0))
7797 op0 = force_operand (op0, NULL_RTX);
7798 if (!REG_P (op0))
7799 op0 = copy_to_mode_reg (mode, op0);
7800
7801 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7802 gen_int_mode (tree_low_cst (exp1, 0),
7803 TYPE_MODE (TREE_TYPE (exp1)))));
7804 }
7805
7806 if (modifier == EXPAND_STACK_PARM)
7807 target = 0;
7808
7809 /* Check for multiplying things that have been extended
7810 from a narrower type. If this machine supports multiplying
7811 in that narrower type with a result in the desired type,
7812 do it that way, and avoid the explicit type-conversion. */
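      /* E.g. (illustrative) with 16-bit shorts a and b, (int) a * (int) b
	 can use a HImode x HImode -> SImode widening multiply pattern when
	 the target provides one, instead of sign-extending both operands
	 to SImode first.  */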
7813 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7814 && TREE_CODE (type) == INTEGER_TYPE
7815 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7816 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7817 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7818 && int_fits_type_p (TREE_OPERAND (exp, 1),
7819 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7820 /* Don't use a widening multiply if a shift will do. */
7821 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7822 > HOST_BITS_PER_WIDE_INT)
7823 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7824 ||
7825 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7826 && (TYPE_PRECISION (TREE_TYPE
7827 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7828 == TYPE_PRECISION (TREE_TYPE
7829 (TREE_OPERAND
7830 (TREE_OPERAND (exp, 0), 0))))
7831 /* If both operands are extended, they must either both
7832 be zero-extended or both be sign-extended. */
7833 && (TYPE_UNSIGNED (TREE_TYPE
7834 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7835 == TYPE_UNSIGNED (TREE_TYPE
7836 (TREE_OPERAND
7837 (TREE_OPERAND (exp, 0), 0)))))))
7838 {
7839 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7840 enum machine_mode innermode = TYPE_MODE (op0type);
7841 bool zextend_p = TYPE_UNSIGNED (op0type);
7842 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7843 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7844
7845 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7846 {
7847 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7848 {
7849 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7850 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7851 TREE_OPERAND (exp, 1),
7852 NULL_RTX, &op0, &op1, 0);
7853 else
7854 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7855 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7856 NULL_RTX, &op0, &op1, 0);
7857 goto binop3;
7858 }
7859 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7860 && innermode == word_mode)
7861 {
7862 rtx htem, hipart;
7863 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7864 NULL_RTX, VOIDmode, 0);
7865 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7866 op1 = convert_modes (innermode, mode,
7867 expand_expr (TREE_OPERAND (exp, 1),
7868 NULL_RTX, VOIDmode, 0),
7869 unsignedp);
7870 else
7871 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7872 NULL_RTX, VOIDmode, 0);
7873 temp = expand_binop (mode, other_optab, op0, op1, target,
7874 unsignedp, OPTAB_LIB_WIDEN);
7875 hipart = gen_highpart (innermode, temp);
7876 htem = expand_mult_highpart_adjust (innermode, hipart,
7877 op0, op1, hipart,
7878 zextend_p);
7879 if (htem != hipart)
7880 emit_move_insn (hipart, htem);
7881 return REDUCE_BIT_FIELD (temp);
7882 }
7883 }
7884 }
7885 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7886 subtarget, &op0, &op1, 0);
7887 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7888
7889 case TRUNC_DIV_EXPR:
7890 case FLOOR_DIV_EXPR:
7891 case CEIL_DIV_EXPR:
7892 case ROUND_DIV_EXPR:
7893 case EXACT_DIV_EXPR:
7894 if (modifier == EXPAND_STACK_PARM)
7895 target = 0;
7896 /* Possible optimization: compute the dividend with EXPAND_SUM;
7897 then, if the divisor is constant, we can optimize the case
7898 where some terms of the dividend have coefficients divisible by it. */
7899 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7900 subtarget, &op0, &op1, 0);
7901 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7902
7903 case RDIV_EXPR:
7904 goto binop;
7905
7906 case TRUNC_MOD_EXPR:
7907 case FLOOR_MOD_EXPR:
7908 case CEIL_MOD_EXPR:
7909 case ROUND_MOD_EXPR:
7910 if (modifier == EXPAND_STACK_PARM)
7911 target = 0;
7912 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7913 subtarget, &op0, &op1, 0);
7914 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7915
7916 case FIX_ROUND_EXPR:
7917 case FIX_FLOOR_EXPR:
7918 case FIX_CEIL_EXPR:
7919 gcc_unreachable (); /* Not used for C. */
7920
7921 case FIX_TRUNC_EXPR:
7922 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7923 if (target == 0 || modifier == EXPAND_STACK_PARM)
7924 target = gen_reg_rtx (mode);
7925 expand_fix (target, op0, unsignedp);
7926 return target;
7927
7928 case FLOAT_EXPR:
7929 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7930 if (target == 0 || modifier == EXPAND_STACK_PARM)
7931 target = gen_reg_rtx (mode);
7932 /* expand_float can't figure out what to do if FROM has VOIDmode.
7933 So give it the correct mode. With -O, cse will optimize this. */
7934 if (GET_MODE (op0) == VOIDmode)
7935 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7936 op0);
7937 expand_float (target, op0,
7938 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7939 return target;
7940
7941 case NEGATE_EXPR:
7942 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7943 if (modifier == EXPAND_STACK_PARM)
7944 target = 0;
7945 temp = expand_unop (mode,
7946 optab_for_tree_code (NEGATE_EXPR, type),
7947 op0, target, 0);
7948 gcc_assert (temp);
7949 return REDUCE_BIT_FIELD (temp);
7950
7951 case ABS_EXPR:
7952 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7953 if (modifier == EXPAND_STACK_PARM)
7954 target = 0;
7955
7956 /* ABS_EXPR is not valid for complex arguments. */
7957 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7958 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7959
7960 /* Unsigned abs is simply the operand. Testing here means we don't
7961 risk generating incorrect code below. */
7962 if (TYPE_UNSIGNED (type))
7963 return op0;
7964
7965 return expand_abs (mode, op0, target, unsignedp,
7966 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7967
7968 case MAX_EXPR:
7969 case MIN_EXPR:
7970 target = original_target;
7971 if (target == 0
7972 || modifier == EXPAND_STACK_PARM
7973 || (MEM_P (target) && MEM_VOLATILE_P (target))
7974 || GET_MODE (target) != mode
7975 || (REG_P (target)
7976 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7977 target = gen_reg_rtx (mode);
7978 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7979 target, &op0, &op1, 0);
7980
7981 /* First try to do it with a special MIN or MAX instruction.
7982 If that does not win, use a conditional jump to select the proper
7983 value. */
7984 this_optab = optab_for_tree_code (code, type);
7985 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7986 OPTAB_WIDEN);
7987 if (temp != 0)
7988 return temp;
7989
7990 /* At this point, a MEM target is no longer useful; we will get better
7991 code without it. */
7992
7993 if (! REG_P (target))
7994 target = gen_reg_rtx (mode);
7995
7996 /* If op1 was placed in target, swap op0 and op1. */
7997 if (target != op0 && target == op1)
7998 {
7999 temp = op0;
8000 op0 = op1;
8001 op1 = temp;
8002 }
8003
8004 /* We generate better code and avoid problems with op1 mentioning
8005 target by forcing op1 into a pseudo if it isn't a constant. */
8006 if (! CONSTANT_P (op1))
8007 op1 = force_reg (mode, op1);
8008
8009 #ifdef HAVE_conditional_move
8010 /* Use a conditional move if possible. */
8011 if (can_conditionally_move_p (mode))
8012 {
8013 enum rtx_code comparison_code;
8014 rtx insn;
8015
8016 if (code == MAX_EXPR)
8017 comparison_code = unsignedp ? GEU : GE;
8018 else
8019 comparison_code = unsignedp ? LEU : LE;
8020
8021 /* ??? Same problem as in expmed.c: emit_conditional_move
8022 forces a stack adjustment via compare_from_rtx, and we
8023 lose the stack adjustment if the sequence we are about
8024 to create is discarded. */
8025 do_pending_stack_adjust ();
8026
8027 start_sequence ();
8028
8029 /* Try to emit the conditional move. */
8030 insn = emit_conditional_move (target, comparison_code,
8031 op0, op1, mode,
8032 op0, op1, mode,
8033 unsignedp);
8034
8035 /* If we could do the conditional move, emit the sequence,
8036 and return. */
8037 if (insn)
8038 {
8039 rtx seq = get_insns ();
8040 end_sequence ();
8041 emit_insn (seq);
8042 return target;
8043 }
8044
8045 /* Otherwise discard the sequence and fall back to code with
8046 branches. */
8047 end_sequence ();
8048 }
8049 #endif
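    /* Fall back to a compare-and-branch select.  For MAX_EXPR this emits,
       in effect:  target = op0;  if (target >= op1) goto done;
       target = op1;  done:  (MIN_EXPR uses <= instead).  */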
8050 if (target != op0)
8051 emit_move_insn (target, op0);
8052
8053 temp = gen_label_rtx ();
8054
8055 /* If this mode is an integer too wide to compare properly,
8056 compare word by word. Rely on cse to optimize constant cases. */
8057 if (GET_MODE_CLASS (mode) == MODE_INT
8058 && ! can_compare_p (GE, mode, ccp_jump))
8059 {
8060 if (code == MAX_EXPR)
8061 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8062 NULL_RTX, temp);
8063 else
8064 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8065 NULL_RTX, temp);
8066 }
8067 else
8068 {
8069 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8070 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8071 }
8072 emit_move_insn (target, op1);
8073 emit_label (temp);
8074 return target;
8075
8076 case BIT_NOT_EXPR:
8077 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8078 if (modifier == EXPAND_STACK_PARM)
8079 target = 0;
8080 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8081 gcc_assert (temp);
8082 return temp;
8083
8084 /* ??? Can optimize bitwise operations with one arg constant.
8085 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8086 and (a bitwise1 b) bitwise2 b (etc)
8087 but that is probably not worthwhile. */
8088
8089 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8090 boolean values when we want in all cases to compute both of them. In
8091 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8092 as actual zero-or-1 values and then bitwise anding. In cases where
8093 there cannot be any side effects, better code would be made by
8094 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8095 how to recognize those cases. */
8096
8097 case TRUTH_AND_EXPR:
8098 code = BIT_AND_EXPR;
8099 case BIT_AND_EXPR:
8100 goto binop;
8101
8102 case TRUTH_OR_EXPR:
8103 code = BIT_IOR_EXPR;
8104 case BIT_IOR_EXPR:
8105 goto binop;
8106
8107 case TRUTH_XOR_EXPR:
8108 code = BIT_XOR_EXPR;
8109 case BIT_XOR_EXPR:
8110 goto binop;
8111
8112 case LSHIFT_EXPR:
8113 case RSHIFT_EXPR:
8114 case LROTATE_EXPR:
8115 case RROTATE_EXPR:
8116 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8117 subtarget = 0;
8118 if (modifier == EXPAND_STACK_PARM)
8119 target = 0;
8120 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8121 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8122 unsignedp);
8123
8124 /* Could determine the answer when only additive constants differ. Also,
8125 the addition of one can be handled by changing the condition. */
8126 case LT_EXPR:
8127 case LE_EXPR:
8128 case GT_EXPR:
8129 case GE_EXPR:
8130 case EQ_EXPR:
8131 case NE_EXPR:
8132 case UNORDERED_EXPR:
8133 case ORDERED_EXPR:
8134 case UNLT_EXPR:
8135 case UNLE_EXPR:
8136 case UNGT_EXPR:
8137 case UNGE_EXPR:
8138 case UNEQ_EXPR:
8139 case LTGT_EXPR:
8140 temp = do_store_flag (exp,
8141 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8142 tmode != VOIDmode ? tmode : mode, 0);
8143 if (temp != 0)
8144 return temp;
8145
8146 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8147 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8148 && original_target
8149 && REG_P (original_target)
8150 && (GET_MODE (original_target)
8151 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8152 {
8153 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8154 VOIDmode, 0);
8155
8156 /* If temp is constant, we can just compute the result. */
8157 if (GET_CODE (temp) == CONST_INT)
8158 {
8159 if (INTVAL (temp) != 0)
8160 emit_move_insn (target, const1_rtx);
8161 else
8162 emit_move_insn (target, const0_rtx);
8163
8164 return target;
8165 }
8166
8167 if (temp != original_target)
8168 {
8169 enum machine_mode mode1 = GET_MODE (temp);
8170 if (mode1 == VOIDmode)
8171 mode1 = tmode != VOIDmode ? tmode : mode;
8172
8173 temp = copy_to_mode_reg (mode1, temp);
8174 }
8175
8176 op1 = gen_label_rtx ();
8177 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8178 GET_MODE (temp), unsignedp, op1);
8179 emit_move_insn (temp, const1_rtx);
8180 emit_label (op1);
8181 return temp;
8182 }
8183
8184 /* If no set-flag instruction, must generate a conditional store
8185 into a temporary variable. Drop through and handle this
8186 like && and ||. */
8187
8188 if (! ignore
8189 && (target == 0
8190 || modifier == EXPAND_STACK_PARM
8191 || ! safe_from_p (target, exp, 1)
8192 /* Make sure we don't have a hard reg (such as the function's
8193 return value) live across basic blocks, if not optimizing. */
8194 || (!optimize && REG_P (target)
8195 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8196 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8197
8198 if (target)
8199 emit_move_insn (target, const0_rtx);
8200
8201 op1 = gen_label_rtx ();
8202 jumpifnot (exp, op1);
8203
8204 if (target)
8205 emit_move_insn (target, const1_rtx);
8206
8207 emit_label (op1);
8208 return ignore ? const0_rtx : target;
8209
8210 case TRUTH_NOT_EXPR:
8211 if (modifier == EXPAND_STACK_PARM)
8212 target = 0;
8213 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8214 /* The parser is careful to generate TRUTH_NOT_EXPR
8215 only with operands that are always zero or one. */
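      /* XOR with 1 flips such a value: 0 ^ 1 == 1 and 1 ^ 1 == 0.  */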
8216 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8217 target, 1, OPTAB_LIB_WIDEN);
8218 gcc_assert (temp);
8219 return temp;
8220
8221 case STATEMENT_LIST:
8222 {
8223 tree_stmt_iterator iter;
8224
8225 gcc_assert (ignore);
8226
8227 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8228 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8229 }
8230 return const0_rtx;
8231
8232 case COND_EXPR:
8233 /* A COND_EXPR with its type being VOID_TYPE represents a
8234 conditional jump and is handled in
8235 expand_gimple_cond_expr. */
8236 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8237
8238 /* Note that COND_EXPRs whose type is a structure or union
8239 are required to be constructed to contain assignments of
8240 a temporary variable, so that we can evaluate them here
8241 for side effect only. If type is void, we must do likewise. */
8242
8243 gcc_assert (!TREE_ADDRESSABLE (type)
8244 && !ignore
8245 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8246 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8247
8248 /* If we are not to produce a result, we have no target. Otherwise,
8249 if a target was specified use it; it will not be used as an
8250 intermediate target unless it is safe. If no target, use a
8251 temporary. */
8252
8253 if (modifier != EXPAND_STACK_PARM
8254 && original_target
8255 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8256 && GET_MODE (original_target) == mode
8257 #ifdef HAVE_conditional_move
8258 && (! can_conditionally_move_p (mode)
8259 || REG_P (original_target))
8260 #endif
8261 && !MEM_P (original_target))
8262 temp = original_target;
8263 else
8264 temp = assign_temp (type, 0, 0, 1);
8265
8266 do_pending_stack_adjust ();
8267 NO_DEFER_POP;
8268 op0 = gen_label_rtx ();
8269 op1 = gen_label_rtx ();
8270 jumpifnot (TREE_OPERAND (exp, 0), op0);
8271 store_expr (TREE_OPERAND (exp, 1), temp,
8272 modifier == EXPAND_STACK_PARM);
8273
8274 emit_jump_insn (gen_jump (op1));
8275 emit_barrier ();
8276 emit_label (op0);
8277 store_expr (TREE_OPERAND (exp, 2), temp,
8278 modifier == EXPAND_STACK_PARM);
8279
8280 emit_label (op1);
8281 OK_DEFER_POP;
8282 return temp;
8283
8284 case VEC_COND_EXPR:
8285 target = expand_vec_cond_expr (exp, target);
8286 return target;
8287
8288 case MODIFY_EXPR:
8289 {
8290 tree lhs = TREE_OPERAND (exp, 0);
8291 tree rhs = TREE_OPERAND (exp, 1);
8292
8293 gcc_assert (ignore);
8294
8295 /* Check for |= or &= of a bitfield of size one into another bitfield
8296 of size 1. In this case, (unless we need the result of the
8297 assignment) we can do this more efficiently with a
8298 test followed by an assignment, if necessary.
8299
8300 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8301 things change so we do, this code should be enhanced to
8302 support it. */
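	/* E.g. (hypothetical) with one-bit bitfields a and b:
	     s.a |= s.b;   becomes   if (s.b) s.a = 1;
	     s.a &= s.b;   becomes   if (!s.b) s.a = 0;  */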
8303 if (TREE_CODE (lhs) == COMPONENT_REF
8304 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8305 || TREE_CODE (rhs) == BIT_AND_EXPR)
8306 && TREE_OPERAND (rhs, 0) == lhs
8307 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8308 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8309 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8310 {
8311 rtx label = gen_label_rtx ();
8312
8313 do_jump (TREE_OPERAND (rhs, 1),
8314 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8315 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8316 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8317 (TREE_CODE (rhs) == BIT_IOR_EXPR
8318 ? integer_one_node
8319 : integer_zero_node)));
8320 do_pending_stack_adjust ();
8321 emit_label (label);
8322 return const0_rtx;
8323 }
8324
8325 expand_assignment (lhs, rhs);
8326
8327 return const0_rtx;
8328 }
8329
8330 case RETURN_EXPR:
8331 if (!TREE_OPERAND (exp, 0))
8332 expand_null_return ();
8333 else
8334 expand_return (TREE_OPERAND (exp, 0));
8335 return const0_rtx;
8336
8337 case ADDR_EXPR:
8338 return expand_expr_addr_expr (exp, target, tmode, modifier);
8339
8340 case COMPLEX_EXPR:
8341 /* Get the rtx code of the operands. */
8342 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8343 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8344
8345 if (!target)
8346 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8347
8348 /* Move the real (op0) and imaginary (op1) parts to their location. */
8349 write_complex_part (target, op0, false);
8350 write_complex_part (target, op1, true);
8351
8352 return target;
8353
8354 case REALPART_EXPR:
8355 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8356 return read_complex_part (op0, false);
8357
8358 case IMAGPART_EXPR:
8359 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8360 return read_complex_part (op0, true);
8361
8362 case RESX_EXPR:
8363 expand_resx_expr (exp);
8364 return const0_rtx;
8365
8366 case TRY_CATCH_EXPR:
8367 case CATCH_EXPR:
8368 case EH_FILTER_EXPR:
8369 case TRY_FINALLY_EXPR:
8370 /* Lowered by tree-eh.c. */
8371 gcc_unreachable ();
8372
8373 case WITH_CLEANUP_EXPR:
8374 case CLEANUP_POINT_EXPR:
8375 case TARGET_EXPR:
8376 case CASE_LABEL_EXPR:
8377 case VA_ARG_EXPR:
8378 case BIND_EXPR:
8379 case INIT_EXPR:
8380 case CONJ_EXPR:
8381 case COMPOUND_EXPR:
8382 case PREINCREMENT_EXPR:
8383 case PREDECREMENT_EXPR:
8384 case POSTINCREMENT_EXPR:
8385 case POSTDECREMENT_EXPR:
8386 case LOOP_EXPR:
8387 case EXIT_EXPR:
8388 case TRUTH_ANDIF_EXPR:
8389 case TRUTH_ORIF_EXPR:
8390 /* Lowered by gimplify.c. */
8391 gcc_unreachable ();
8392
8393 case EXC_PTR_EXPR:
8394 return get_exception_pointer (cfun);
8395
8396 case FILTER_EXPR:
8397 return get_exception_filter (cfun);
8398
8399 case FDESC_EXPR:
8400 /* Function descriptors are not valid except for as
8401 initialization constants, and should not be expanded. */
8402 gcc_unreachable ();
8403
8404 case SWITCH_EXPR:
8405 expand_case (exp);
8406 return const0_rtx;
8407
8408 case LABEL_EXPR:
8409 expand_label (TREE_OPERAND (exp, 0));
8410 return const0_rtx;
8411
8412 case ASM_EXPR:
8413 expand_asm_expr (exp);
8414 return const0_rtx;
8415
8416 case WITH_SIZE_EXPR:
8417 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8418 have pulled out the size to use in whatever context it needed. */
8419 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8420 modifier, alt_rtl);
8421
8422 case REALIGN_LOAD_EXPR:
8423 {
8424 tree oprnd0 = TREE_OPERAND (exp, 0);
8425 tree oprnd1 = TREE_OPERAND (exp, 1);
8426 tree oprnd2 = TREE_OPERAND (exp, 2);
8427 rtx op2;
8428
8429 this_optab = optab_for_tree_code (code, type);
8430 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8431 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8432 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8433 target, unsignedp);
8434 gcc_assert (temp);
8435 return temp;
8436 }
8437
8438 case REDUC_MAX_EXPR:
8439 case REDUC_MIN_EXPR:
8440 case REDUC_PLUS_EXPR:
8441 {
8442 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8443 this_optab = optab_for_tree_code (code, type);
8444 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8445 gcc_assert (temp);
8446 return temp;
8447 }
8448
8449 case VEC_LSHIFT_EXPR:
8450 case VEC_RSHIFT_EXPR:
8451 {
8452 target = expand_vec_shift_expr (exp, target);
8453 return target;
8454 }
8455
8456 default:
8457 return lang_hooks.expand_expr (exp, original_target, tmode,
8458 modifier, alt_rtl);
8459 }
8460
8461 /* Here to do an ordinary binary operator. */
8462 binop:
8463 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8464 subtarget, &op0, &op1, 0);
8465 binop2:
8466 this_optab = optab_for_tree_code (code, type);
8467 binop3:
8468 if (modifier == EXPAND_STACK_PARM)
8469 target = 0;
8470 temp = expand_binop (mode, this_optab, op0, op1, target,
8471 unsignedp, OPTAB_LIB_WIDEN);
8472 gcc_assert (temp);
8473 return REDUCE_BIT_FIELD (temp);
8474 }
8475 #undef REDUCE_BIT_FIELD
8476 \f
8477 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8478 signedness of TYPE), possibly returning the result in TARGET. */
8479 static rtx
8480 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8481 {
8482 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8483 if (target && GET_MODE (target) != GET_MODE (exp))
8484 target = 0;
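  /* E.g. (illustrative) reducing to a 5-bit precision in SImode:
     unsigned types are ANDed with 0x1f; signed types are shifted left
     and then arithmetically right by 32 - 5 == 27, which sign-extends
     bit 4 through the upper bits.  */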
8485 if (TYPE_UNSIGNED (type))
8486 {
8487 rtx mask;
8488 if (prec < HOST_BITS_PER_WIDE_INT)
8489 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8490 GET_MODE (exp));
8491 else
8492 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8493 ((unsigned HOST_WIDE_INT) 1
8494 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8495 GET_MODE (exp));
8496 return expand_and (GET_MODE (exp), exp, mask, target);
8497 }
8498 else
8499 {
8500 tree count = build_int_cst (NULL_TREE,
8501 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8502 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8503 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8504 }
8505 }
8506 \f
8507 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8508 when applied to the address of EXP produces an address known to be
8509 aligned more than BIGGEST_ALIGNMENT. */
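/* In source terms the recognized pattern is roughly (hypothetical):
     offset = -(intptr_t) &exp & (N - 1);
   where N is a power of 2 and N - 1 exceeds BIGGEST_ALIGNMENT in bytes,
   so that &exp + offset is aligned to a multiple of N.  */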
8510
8511 static int
8512 is_aligning_offset (tree offset, tree exp)
8513 {
8514 /* Strip off any conversions. */
8515 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8516 || TREE_CODE (offset) == NOP_EXPR
8517 || TREE_CODE (offset) == CONVERT_EXPR)
8518 offset = TREE_OPERAND (offset, 0);
8519
8520 /* We must now have a BIT_AND_EXPR with a constant that is one less
8521 than a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8522 if (TREE_CODE (offset) != BIT_AND_EXPR
8523 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8524 || compare_tree_int (TREE_OPERAND (offset, 1),
8525 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8526 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0
8527 return 0;
8528
8529 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8530 It must be NEGATE_EXPR. Then strip any more conversions. */
8531 offset = TREE_OPERAND (offset, 0);
8532 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8533 || TREE_CODE (offset) == NOP_EXPR
8534 || TREE_CODE (offset) == CONVERT_EXPR)
8535 offset = TREE_OPERAND (offset, 0);
8536
8537 if (TREE_CODE (offset) != NEGATE_EXPR)
8538 return 0;
8539
8540 offset = TREE_OPERAND (offset, 0);
8541 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8542 || TREE_CODE (offset) == NOP_EXPR
8543 || TREE_CODE (offset) == CONVERT_EXPR)
8544 offset = TREE_OPERAND (offset, 0);
8545
8546 /* This must now be the address of EXP. */
8547 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8548 }
8549 \f
8550 /* Return the tree node if ARG corresponds to a string constant, or zero
8551 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8552 in bytes within the string that ARG is accessing. The type of the
8553 offset will be `sizetype'. */
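/* For example (hypothetical), for ARG == "abcdef" + 2 this returns the
   STRING_CST "abcdef" and sets *PTR_OFFSET to 2.  */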
8554
8555 tree
8556 string_constant (tree arg, tree *ptr_offset)
8557 {
8558 tree array, offset;
8559 STRIP_NOPS (arg);
8560
8561 if (TREE_CODE (arg) == ADDR_EXPR)
8562 {
8563 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8564 {
8565 *ptr_offset = size_zero_node;
8566 return TREE_OPERAND (arg, 0);
8567 }
8568 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8569 {
8570 array = TREE_OPERAND (arg, 0);
8571 offset = size_zero_node;
8572 }
8573 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8574 {
8575 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8576 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8577 if (TREE_CODE (array) != STRING_CST
8578 && TREE_CODE (array) != VAR_DECL)
8579 return 0;
8580 }
8581 else
8582 return 0;
8583 }
8584 else if (TREE_CODE (arg) == PLUS_EXPR)
8585 {
8586 tree arg0 = TREE_OPERAND (arg, 0);
8587 tree arg1 = TREE_OPERAND (arg, 1);
8588
8589 STRIP_NOPS (arg0);
8590 STRIP_NOPS (arg1);
8591
8592 if (TREE_CODE (arg0) == ADDR_EXPR
8593 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8594 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8595 {
8596 array = TREE_OPERAND (arg0, 0);
8597 offset = arg1;
8598 }
8599 else if (TREE_CODE (arg1) == ADDR_EXPR
8600 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8601 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8602 {
8603 array = TREE_OPERAND (arg1, 0);
8604 offset = arg0;
8605 }
8606 else
8607 return 0;
8608 }
8609 else
8610 return 0;
8611
8612 if (TREE_CODE (array) == STRING_CST)
8613 {
8614 *ptr_offset = convert (sizetype, offset);
8615 return array;
8616 }
8617 else if (TREE_CODE (array) == VAR_DECL)
8618 {
8619 int length;
8620
8621 /* Variables initialized to string literals can be handled too. */
8622 if (DECL_INITIAL (array) == NULL_TREE
8623 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8624 return 0;
8625
8626 /* Only handle arrays that are read-only, non-volatile and bind locally. */
8627 if (! TREE_READONLY (array)
8628 || TREE_SIDE_EFFECTS (array)
8629 || ! targetm.binds_local_p (array))
8630 return 0;
8631
8632 /* Avoid const char foo[4] = "abcde"; */
8633 if (DECL_SIZE_UNIT (array) == NULL_TREE
8634 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8635 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8636 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8637 return 0;
8638
8639 /* If the variable is bigger than the string literal, OFFSET must be
8640 constant and within the bounds of the string literal. */
8641 offset = convert (sizetype, offset);
8642 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8643 && (! host_integerp (offset, 1)
8644 || compare_tree_int (offset, length) >= 0))
8645 return 0;
8646
8647 *ptr_offset = offset;
8648 return DECL_INITIAL (array);
8649 }
8650
8651 return 0;
8652 }
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }
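
  /* For example (illustrative note, not in the original source): the
     rewrites above turn the signed test x < 1 into x <= 0 and x <= -1
     into x < 0, so the code that follows only ever needs to recognize
     comparisons against zero.  */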

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
                                                arg0, arg1, type),
                          target, VOIDmode, EXPAND_NORMAL);
    }
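
  /* Worked example (illustrative): the test (x & 8) == 0 is folded into
     ((x >> 3) & 1) ^ 1 -- bit 3 is shifted down to the low-order
     position, masked with the constant 1, and xor'd with 1 because the
     condition was EQ rather than NE.  No scc instruction is needed.  */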

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
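  /* Illustrative sketch of that fallback sequence (assuming INVERT is
     zero):

         target <- 1
         compare op0, op1
         branch to LABEL if the comparison holds
         target <- 0
       LABEL:

     so TARGET keeps the value 1 exactly when the comparison is true.  */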
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
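
/* (Illustrative note.)  Under the default above, a switch with fewer
   case values than the threshold -- four on a casesi target, five
   otherwise -- is expanded as a tree of comparisons rather than a
   dispatch table; a port can override the cutoff by defining
   CASE_VALUES_THRESHOLD itself.  */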

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
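
      /* E.g. (illustrative) with a DImode index on a 32-bit target:
         the subtraction above is done in DImode, the single unsigned
         DImode comparison routes every out-of-bounds value to
         DEFAULT_LABEL, and only then is the index truncated to SImode,
         which is safe because all surviving values fit.  */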
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (lang_hooks.types.type_for_size
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
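
/* (Illustrative summary.)  Each casesi operand above follows the same
   recipe: expand the tree, convert_modes the result to the mode the
   insn pattern declares for that operand, and copy_to_mode_reg
   whenever the pattern's predicate rejects the rtx as it stands.  */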

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);
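
  /* Worked example (illustrative): for a switch over the case values
     3..10 the caller has already computed INDEX = orig - 3 and passes
     RANGE = 7.  An original value of 2 wraps around to a huge unsigned
     number and 11 becomes 8; both compare GTU against 7, so the single
     unsigned comparison above routes every out-of-range value to
     DEFAULT_LABEL.  */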

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we do have V2DI; such a case is probably very unlikely,
     though.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate it with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}
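
/* For instance (illustrative note): on a target whose vector unit
   lacks V2DImode but whose scalar unit handles DImode, the function
   above still returns 1 for V2DImode, since such a value can be
   emulated with a pair of DIs.  */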

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}
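
/* Example (illustrative): for a V4SImode VECTOR_CST whose element
   chain holds only two elements, say {1, 2}, the first loop above
   fills two rtvec slots via immed_double_const and the second loop
   pads the remaining two with CONST0_RTX (SImode), so a short
   initializer chain is implicitly zero-padded.  */
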
#include "gt-expr.h"