1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "reload.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
48 #include "tree-iterator.h"
49 #include "tree-pass.h"
50 #include "tree-flow.h"
51 #include "target.h"
52 #include "common/common-target.h"
53 #include "timevar.h"
54 #include "df.h"
55 #include "diagnostic.h"
56 #include "ssaexpand.h"
57 #include "target-globals.h"
58
59 /* Decide whether a function's arguments should be processed
60 from first to last or from last to first.
61
62 They should be processed from last to first if the stack and args grow
63 in opposite directions, but only if we have push insns. */
64
65 #ifdef PUSH_ROUNDING
66
67 #ifndef PUSH_ARGS_REVERSED
68 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
69 #define PUSH_ARGS_REVERSED /* If it's last to first. */
70 #endif
71 #endif
72
73 #endif
74
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
82
83
84 /* If this is nonzero, we do not bother generating VOLATILE
85 around volatile memory references, and we are willing to
86 output indirect addresses. If cse is to follow, we reject
87 indirect addresses so a useful potential cse is generated;
88 if it is used only once, instruction combination will produce
89 the same indirect address eventually. */
90 int cse_not_expected;
91
92 /* This structure is used by move_by_pieces to describe the move to
93 be performed. */
94 struct move_by_pieces_d
95 {
96 rtx to;
97 rtx to_addr;
98 int autinc_to;
99 int explicit_inc_to;
100 rtx from;
101 rtx from_addr;
102 int autinc_from;
103 int explicit_inc_from;
104 unsigned HOST_WIDE_INT len;
105 HOST_WIDE_INT offset;
106 int reverse;
107 };
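/* In the structure above: TO and FROM are the destination and source MEMs
   (TO is null when pushing to the stack); TO_ADDR and FROM_ADDR are the
   addresses currently in use; AUTINC_TO and AUTINC_FROM are nonzero when
   those addresses are auto-increment forms; EXPLICIT_INC_TO and
   EXPLICIT_INC_FROM are -1 or +1 when explicit add insns adjust the
   addresses before or after each piece; LEN is the number of bytes still
   to move; OFFSET is the current byte offset; REVERSE is nonzero to work
   from the end of the block toward the start.  */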
108
109 /* This structure is used by store_by_pieces to describe the clear to
110 be performed. */
111
112 struct store_by_pieces_d
113 {
114 rtx to;
115 rtx to_addr;
116 int autinc_to;
117 int explicit_inc_to;
118 unsigned HOST_WIDE_INT len;
119 HOST_WIDE_INT offset;
120 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
121 void *constfundata;
122 int reverse;
123 };
124
125 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
126 unsigned int,
127 unsigned int);
128 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
129 struct move_by_pieces_d *);
130 static bool block_move_libcall_safe_for_call_parm (void);
131 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
132 static tree emit_block_move_libcall_fn (int);
133 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
134 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
135 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
136 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
137 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
138 struct store_by_pieces_d *);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, alias_set_type);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, tree, alias_set_type, bool);
148
149 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
150
151 static int is_aligning_offset (const_tree, const_tree);
152 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 enum expand_modifier);
154 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155 static rtx do_store_flag (sepops, rtx, enum machine_mode);
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn (enum machine_mode, rtx, tree);
158 #endif
159 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160 static rtx const_vector_from_tree (tree);
161 static void write_complex_part (rtx, rtx, bool);
162
163 /* This macro is used to determine whether move_by_pieces should be called
164 to perform a structure copy. */
165 #ifndef MOVE_BY_PIECES_P
166 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
167 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
168 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
169 #endif
170
171 /* This macro is used to determine whether clear_by_pieces should be
172 called to clear storage. */
173 #ifndef CLEAR_BY_PIECES_P
174 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
175 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
176 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
177 #endif
178
179 /* This macro is used to determine whether store_by_pieces should be
180 called to "memset" storage with byte values other than zero. */
181 #ifndef SET_BY_PIECES_P
182 #define SET_BY_PIECES_P(SIZE, ALIGN) \
183 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
184 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
185 #endif
186
187 /* This macro is used to determine whether store_by_pieces should be
188 called to "memcpy" storage when the source is a constant string. */
189 #ifndef STORE_BY_PIECES_P
190 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
191 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
192 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
193 #endif
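/* All four predicates above have the same shape: count how many scalar
   moves move_by_pieces_ninsns would need and compare that count against a
   target cost ratio.  As a purely illustrative example, assuming a 64-bit
   target with MOVE_MAX_PIECES == 8 and a MOVE_RATIO of 4,
   MOVE_BY_PIECES_P (16, 64) computes move_by_pieces_ninsns (16, 64, 9) == 2
   (two DImode moves), and 2 < 4, so a 16-byte copy would be expanded inline
   by pieces.  */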
194
195 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
196
197 #ifndef SLOW_UNALIGNED_ACCESS
198 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
199 #endif
200 \f
201 /* This is run to set up which modes can be used
202 directly in memory and to initialize the block move optab. It is run
203 at the beginning of compilation and when the target is reinitialized. */
204
205 void
206 init_expr_target (void)
207 {
208 rtx insn, pat;
209 enum machine_mode mode;
210 int num_clobbers;
211 rtx mem, mem1;
212 rtx reg;
213
214 /* Try indexing by frame ptr and try by stack ptr.
215 It is known that on the Convex the stack ptr isn't a valid index.
216 With luck, one or the other is valid on any machine. */
217 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
218 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
219
220 /* A scratch register we can modify in-place below to avoid
221 useless RTL allocations. */
222 reg = gen_rtx_REG (VOIDmode, -1);
223
224 insn = rtx_alloc (INSN);
225 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
226 PATTERN (insn) = pat;
227
228 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
229 mode = (enum machine_mode) ((int) mode + 1))
230 {
231 int regno;
232
233 direct_load[(int) mode] = direct_store[(int) mode] = 0;
234 PUT_MODE (mem, mode);
235 PUT_MODE (mem1, mode);
236 PUT_MODE (reg, mode);
237
238 /* See if there is some register that can be used in this mode and
239 directly loaded or stored from memory. */
240
241 if (mode != VOIDmode && mode != BLKmode)
242 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
243 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
244 regno++)
245 {
246 if (! HARD_REGNO_MODE_OK (regno, mode))
247 continue;
248
249 SET_REGNO (reg, regno);
250
251 SET_SRC (pat) = mem;
252 SET_DEST (pat) = reg;
253 if (recog (pat, insn, &num_clobbers) >= 0)
254 direct_load[(int) mode] = 1;
255
256 SET_SRC (pat) = mem1;
257 SET_DEST (pat) = reg;
258 if (recog (pat, insn, &num_clobbers) >= 0)
259 direct_load[(int) mode] = 1;
260
261 SET_SRC (pat) = reg;
262 SET_DEST (pat) = mem;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_store[(int) mode] = 1;
265
266 SET_SRC (pat) = reg;
267 SET_DEST (pat) = mem1;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_store[(int) mode] = 1;
270 }
271 }
272
273 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
274
275 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
276 mode = GET_MODE_WIDER_MODE (mode))
277 {
278 enum machine_mode srcmode;
279 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
280 srcmode = GET_MODE_WIDER_MODE (srcmode))
281 {
282 enum insn_code ic;
283
284 ic = can_extend_p (mode, srcmode, 0);
285 if (ic == CODE_FOR_nothing)
286 continue;
287
288 PUT_MODE (mem, srcmode);
289
290 if (insn_operand_matches (ic, 1, mem))
291 float_extend_from_mem[mode][srcmode] = true;
292 }
293 }
294 }
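/* The direct_load[] and direct_store[] tables filled in above are consulted
   later (for example in convert_move and convert_modes) to decide whether a
   MEM can be referenced directly in a narrower mode or must first be copied
   into a register; float_extend_from_mem[][] records which float extensions
   can take their operand straight from memory.  */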
295
296 /* This is run at the start of compiling a function. */
297
298 void
299 init_expr (void)
300 {
301 memset (&crtl->expr, 0, sizeof (crtl->expr));
302 }
303 \f
304 /* Copy data from FROM to TO, where the machine modes are not the same.
305 Both modes may be integer, or both may be floating, or both may be
306 fixed-point.
307 UNSIGNEDP should be nonzero if FROM is an unsigned type.
308 This causes zero-extension instead of sign-extension. */
309
310 void
311 convert_move (rtx to, rtx from, int unsignedp)
312 {
313 enum machine_mode to_mode = GET_MODE (to);
314 enum machine_mode from_mode = GET_MODE (from);
315 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
316 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
317 enum insn_code code;
318 rtx libcall;
319
320 /* rtx code for making an equivalent value. */
321 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
322 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
323
324
325 gcc_assert (to_real == from_real);
326 gcc_assert (to_mode != BLKmode);
327 gcc_assert (from_mode != BLKmode);
328
329 /* If the source and destination are already the same, then there's
330 nothing to do. */
331 if (to == from)
332 return;
333
334 /* If FROM is a SUBREG that indicates that we have already done at least
335 the required extension, strip it. We don't handle such SUBREGs as
336 TO here. */
337
338 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
339 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
340 >= GET_MODE_SIZE (to_mode))
341 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
342 from = gen_lowpart (to_mode, from), from_mode = to_mode;
343
344 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
345
346 if (to_mode == from_mode
347 || (from_mode == VOIDmode && CONSTANT_P (from)))
348 {
349 emit_move_insn (to, from);
350 return;
351 }
352
353 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
354 {
355 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
356
357 if (VECTOR_MODE_P (to_mode))
358 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
359 else
360 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
361
362 emit_move_insn (to, from);
363 return;
364 }
365
366 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
367 {
368 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
369 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
370 return;
371 }
372
373 if (to_real)
374 {
375 rtx value, insns;
376 convert_optab tab;
377
378 gcc_assert ((GET_MODE_PRECISION (from_mode)
379 != GET_MODE_PRECISION (to_mode))
380 || (DECIMAL_FLOAT_MODE_P (from_mode)
381 != DECIMAL_FLOAT_MODE_P (to_mode)));
382
383 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
384 /* Conversion between decimal float and binary float, same size. */
385 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
386 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
387 tab = sext_optab;
388 else
389 tab = trunc_optab;
390
391 /* Try converting directly if the insn is supported. */
392
393 code = convert_optab_handler (tab, to_mode, from_mode);
394 if (code != CODE_FOR_nothing)
395 {
396 emit_unop_insn (code, to, from,
397 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
398 return;
399 }
400
401 /* Otherwise use a libcall. */
402 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
403
404 /* Is this conversion implemented yet? */
405 gcc_assert (libcall);
406
407 start_sequence ();
408 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
409 1, from, from_mode);
410 insns = get_insns ();
411 end_sequence ();
412 emit_libcall_block (insns, to, value,
413 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
414 from)
415 : gen_rtx_FLOAT_EXTEND (to_mode, from));
416 return;
417 }
418
419 /* Handle pointer conversion. */ /* SPEE 900220. */
420 /* Targets are expected to provide conversion insns between PxImode and
421 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
422 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
423 {
424 enum machine_mode full_mode
425 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
426
427 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
428 != CODE_FOR_nothing);
429
430 if (full_mode != from_mode)
431 from = convert_to_mode (full_mode, from, unsignedp);
432 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
433 to, from, UNKNOWN);
434 return;
435 }
436 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
437 {
438 rtx new_from;
439 enum machine_mode full_mode
440 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
441
442 gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)
443 != CODE_FOR_nothing);
444
445 if (to_mode == full_mode)
446 {
447 emit_unop_insn (convert_optab_handler (sext_optab, full_mode,
448 from_mode),
449 to, from, UNKNOWN);
450 return;
451 }
452
453 new_from = gen_reg_rtx (full_mode);
454 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode),
455 new_from, from, UNKNOWN);
456
457 /* else proceed to integer conversions below. */
458 from_mode = full_mode;
459 from = new_from;
460 }
461
462 /* Make sure both are fixed-point modes or both are not. */
463 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
464 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
465 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
466 {
467 /* If we widen from_mode to to_mode and they are in the same class,
468 we won't saturate the result.
469 Otherwise, always saturate the result to play it safe. */
470 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
471 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
472 expand_fixed_convert (to, from, 0, 0);
473 else
474 expand_fixed_convert (to, from, 0, 1);
475 return;
476 }
477
478 /* Now both modes are integers. */
479
480 /* Handle expanding beyond a word. */
481 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
482 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
483 {
484 rtx insns;
485 rtx lowpart;
486 rtx fill_value;
487 rtx lowfrom;
488 int i;
489 enum machine_mode lowpart_mode;
490 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
491
492 /* Try converting directly if the insn is supported. */
493 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
494 != CODE_FOR_nothing)
495 {
496 /* If FROM is a SUBREG, put it into a register. Do this
497 so that we always generate the same set of insns for
498 better cse'ing; if an intermediate assignment occurred,
499 we won't be doing the operation directly on the SUBREG. */
500 if (optimize > 0 && GET_CODE (from) == SUBREG)
501 from = force_reg (from_mode, from);
502 emit_unop_insn (code, to, from, equiv_code);
503 return;
504 }
505 /* Next, try converting via full word. */
506 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
507 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
508 != CODE_FOR_nothing))
509 {
510 rtx word_to = gen_reg_rtx (word_mode);
511 if (REG_P (to))
512 {
513 if (reg_overlap_mentioned_p (to, from))
514 from = force_reg (from_mode, from);
515 emit_clobber (to);
516 }
517 convert_move (word_to, from, unsignedp);
518 emit_unop_insn (code, to, word_to, equiv_code);
519 return;
520 }
521
522 /* No special multiword conversion insn; do it by hand. */
523 start_sequence ();
524
525 /* Since we will turn this into a no conflict block, we must ensure
526 that the source does not overlap the target. */
527
528 if (reg_overlap_mentioned_p (to, from))
529 from = force_reg (from_mode, from);
530
531 /* Get a copy of FROM widened to a word, if necessary. */
532 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
533 lowpart_mode = word_mode;
534 else
535 lowpart_mode = from_mode;
536
537 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
538
539 lowpart = gen_lowpart (lowpart_mode, to);
540 emit_move_insn (lowpart, lowfrom);
541
542 /* Compute the value to put in each remaining word. */
543 if (unsignedp)
544 fill_value = const0_rtx;
545 else
546 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
547 LT, lowfrom, const0_rtx,
548 VOIDmode, 0, -1);
549
550 /* Fill the remaining words. */
551 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
552 {
553 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
554 rtx subword = operand_subword (to, index, 1, to_mode);
555
556 gcc_assert (subword);
557
558 if (fill_value != subword)
559 emit_move_insn (subword, fill_value);
560 }
561
562 insns = get_insns ();
563 end_sequence ();
564
565 emit_insn (insns);
566 return;
567 }
568
569 /* Truncating multi-word to a word or less. */
570 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
571 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
572 {
573 if (!((MEM_P (from)
574 && ! MEM_VOLATILE_P (from)
575 && direct_load[(int) to_mode]
576 && ! mode_dependent_address_p (XEXP (from, 0)))
577 || REG_P (from)
578 || GET_CODE (from) == SUBREG))
579 from = force_reg (from_mode, from);
580 convert_move (to, gen_lowpart (word_mode, from), 0);
581 return;
582 }
583
584 /* Now follow all the conversions between integers
585 no more than a word long. */
586
587 /* For truncation, usually we can just refer to FROM in a narrower mode. */
588 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
589 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
590 GET_MODE_BITSIZE (from_mode)))
591 {
592 if (!((MEM_P (from)
593 && ! MEM_VOLATILE_P (from)
594 && direct_load[(int) to_mode]
595 && ! mode_dependent_address_p (XEXP (from, 0)))
596 || REG_P (from)
597 || GET_CODE (from) == SUBREG))
598 from = force_reg (from_mode, from);
599 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
600 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
601 from = copy_to_reg (from);
602 emit_move_insn (to, gen_lowpart (to_mode, from));
603 return;
604 }
605
606 /* Handle extension. */
607 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
608 {
609 /* Convert directly if that works. */
610 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
611 != CODE_FOR_nothing)
612 {
613 emit_unop_insn (code, to, from, equiv_code);
614 return;
615 }
616 else
617 {
618 enum machine_mode intermediate;
619 rtx tmp;
620 int shift_amount;
621
622 /* Search for a mode to convert via. */
623 for (intermediate = from_mode; intermediate != VOIDmode;
624 intermediate = GET_MODE_WIDER_MODE (intermediate))
625 if (((can_extend_p (to_mode, intermediate, unsignedp)
626 != CODE_FOR_nothing)
627 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
628 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
629 GET_MODE_BITSIZE (intermediate))))
630 && (can_extend_p (intermediate, from_mode, unsignedp)
631 != CODE_FOR_nothing))
632 {
633 convert_move (to, convert_to_mode (intermediate, from,
634 unsignedp), unsignedp);
635 return;
636 }
637
638 /* No suitable intermediate mode.
639 Generate what we need with shifts. */
640 shift_amount = (GET_MODE_BITSIZE (to_mode)
641 - GET_MODE_BITSIZE (from_mode));
642 from = gen_lowpart (to_mode, force_reg (from_mode, from));
643 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
644 to, unsignedp);
645 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
646 to, unsignedp);
647 if (tmp != to)
648 emit_move_insn (to, tmp);
649 return;
650 }
651 }
652
653 /* Support special truncate insns for certain modes. */
654 if (convert_optab_handler (trunc_optab, to_mode,
655 from_mode) != CODE_FOR_nothing)
656 {
657 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
658 to, from, UNKNOWN);
659 return;
660 }
661
662 /* Handle truncation of volatile memrefs, and so on;
663 the things that couldn't be truncated directly,
664 and for which there was no special instruction.
665
666 ??? Code above formerly short-circuited this, for most integer
667 mode pairs, with a force_reg in from_mode followed by a recursive
668 call to this routine. Appears always to have been wrong. */
669 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
670 {
671 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
672 emit_move_insn (to, temp);
673 return;
674 }
675
676 /* Mode combination is not recognized. */
677 gcc_unreachable ();
678 }
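/* A small worked example of the multiword path above, assuming a 32-bit
   target (BITS_PER_WORD == 32) with no direct SImode -> DImode extension
   pattern: converting a signed SImode register to a DImode destination
   falls through to the "do it by hand" sequence, which copies the value
   into the low word of TO and then stores fill_value into the remaining
   word, where emit_store_flag makes fill_value all ones if the low part is
   negative and zero otherwise; that is, an open-coded sign extension.
   With UNSIGNEDP nonzero the fill value is simply const0_rtx.  */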
679
680 /* Return an rtx for a value that would result
681 from converting X to mode MODE.
682 Both X and MODE may be floating, or both integer.
683 UNSIGNEDP is nonzero if X is an unsigned value.
684 This can be done by referring to a part of X in place
685 or by copying to a new temporary with conversion. */
686
687 rtx
688 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
689 {
690 return convert_modes (mode, VOIDmode, x, unsignedp);
691 }
692
693 /* Return an rtx for a value that would result
694 from converting X from mode OLDMODE to mode MODE.
695 Both modes may be floating, or both integer.
696 UNSIGNEDP is nonzero if X is an unsigned value.
697
698 This can be done by referring to a part of X in place
699 or by copying to a new temporary with conversion.
700
701 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
702
703 rtx
704 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
705 {
706 rtx temp;
707
708 /* If FROM is a SUBREG that indicates that we have already done at least
709 the required extension, strip it. */
710
711 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
712 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
713 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
714 x = gen_lowpart (mode, x);
715
716 if (GET_MODE (x) != VOIDmode)
717 oldmode = GET_MODE (x);
718
719 if (mode == oldmode)
720 return x;
721
722 /* There is one case that we must handle specially: If we are converting
723 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
724 we are to interpret the constant as unsigned, gen_lowpart will do
725 the wrong thing if the constant appears negative. What we want to do is
726 make the high-order word of the constant zero, not all ones. */
727
728 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
729 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
730 && CONST_INT_P (x) && INTVAL (x) < 0)
731 {
732 double_int val = uhwi_to_double_int (INTVAL (x));
733
734 /* We need to zero extend VAL. */
735 if (oldmode != VOIDmode)
736 val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));
737
738 return immed_double_int_const (val, mode);
739 }
740
741 /* We can do this with a gen_lowpart if both desired and current modes
742 are integer, and this is either a constant integer, a register, or a
743 non-volatile MEM. Except for the constant case where MODE is no
744 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
745
746 if ((CONST_INT_P (x)
747 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
748 || (GET_MODE_CLASS (mode) == MODE_INT
749 && GET_MODE_CLASS (oldmode) == MODE_INT
750 && (GET_CODE (x) == CONST_DOUBLE
751 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
752 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
753 && direct_load[(int) mode])
754 || (REG_P (x)
755 && (! HARD_REGISTER_P (x)
756 || HARD_REGNO_MODE_OK (REGNO (x), mode))
757 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
758 GET_MODE_BITSIZE (GET_MODE (x)))))))))
759 {
760 /* ?? If we don't know OLDMODE, we have to assume here that
761 X does not need sign- or zero-extension. This may not be
762 the case, but it's the best we can do. */
763 if (CONST_INT_P (x) && oldmode != VOIDmode
764 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
765 {
766 HOST_WIDE_INT val = INTVAL (x);
767 int width = GET_MODE_BITSIZE (oldmode);
768
769 /* We must sign or zero-extend in this case. Start by
770 zero-extending, then sign extend if we need to. */
771 val &= ((HOST_WIDE_INT) 1 << width) - 1;
772 if (! unsignedp
773 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
774 val |= (HOST_WIDE_INT) (-1) << width;
775
776 return gen_int_mode (val, mode);
777 }
778
779 return gen_lowpart (mode, x);
780 }
781
782 /* Converting an integer constant into a vector mode is always equivalent to a
783 subreg operation. */
784 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
785 {
786 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
787 return simplify_gen_subreg (mode, x, oldmode, 0);
788 }
789
790 temp = gen_reg_rtx (mode);
791 convert_move (temp, x, unsignedp);
792 return temp;
793 }
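/* Illustration of the CONST_INT handling above: converting (const_int -1)
   from QImode to a wider integer mode with UNSIGNEDP set first masks the
   value to the old width (val &= 0xff, giving 255) and skips the
   sign-extension step, so the result is (const_int 255) in the new mode;
   with UNSIGNEDP clear the high bits are propagated and the constant
   stays -1.  */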
794 \f
795 /* Return the largest alignment we can use for doing a move (or store)
796 of MAX_PIECES. ALIGN is the largest alignment we could use. */
797
798 static unsigned int
799 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
800 {
801 enum machine_mode tmode;
802
803 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
804 if (align >= GET_MODE_ALIGNMENT (tmode))
805 align = GET_MODE_ALIGNMENT (tmode);
806 else
807 {
808 enum machine_mode tmode, xmode;
809
810 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
811 tmode != VOIDmode;
812 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
813 if (GET_MODE_SIZE (tmode) > max_pieces
814 || SLOW_UNALIGNED_ACCESS (tmode, align))
815 break;
816
817 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
818 }
819
820 return align;
821 }
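/* For example (target dependent): with MAX_PIECES == 8 and a claimed
   256-bit alignment, the result is capped at the alignment of the 8-byte
   integer mode, since no piecewise access is ever wider than that; a
   smaller ALIGN is only raised to the alignment of a wider mode when that
   mode is still fast to access at the given alignment (the
   SLOW_UNALIGNED_ACCESS check in the else branch).  */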
822
823 /* Return the widest integer mode strictly narrower than SIZE (an exclusive
824 upper bound, in bytes). If no such mode can be found, return VOIDmode. */
825
826 static enum machine_mode
827 widest_int_mode_for_size (unsigned int size)
828 {
829 enum machine_mode tmode, mode = VOIDmode;
830
831 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
832 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
833 if (GET_MODE_SIZE (tmode) < size)
834 mode = tmode;
835
836 return mode;
837 }
838
839 /* STORE_MAX_PIECES is the number of bytes at a time that we can
840 store efficiently. Due to internal GCC limitations, this is
841 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
842 for an immediate constant. */
843
844 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
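/* For instance, on a host where HOST_WIDE_INT is 64 bits this evaluates to
   MIN (MOVE_MAX_PIECES, 16).  */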
845
846 /* Determine whether the LEN bytes can be moved by using several move
847 instructions. Return nonzero if a call to move_by_pieces should
848 succeed. */
849
850 int
851 can_move_by_pieces (unsigned HOST_WIDE_INT len,
852 unsigned int align ATTRIBUTE_UNUSED)
853 {
854 return MOVE_BY_PIECES_P (len, align);
855 }
856
857 /* Generate several move instructions to copy LEN bytes from block FROM to
858 block TO. (These are MEM rtx's with BLKmode).
859
860 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
861 used to push FROM to the stack.
862
863 ALIGN is maximum stack alignment we can assume.
864
865 If ENDP is 0 return TO; if ENDP is 1 return memory at the end, a la
866 mempcpy; and if ENDP is 2 return memory at the end minus one byte, a la
867 stpcpy. */
868
869 rtx
870 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
871 unsigned int align, int endp)
872 {
873 struct move_by_pieces_d data;
874 enum machine_mode to_addr_mode, from_addr_mode
875 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
876 rtx to_addr, from_addr = XEXP (from, 0);
877 unsigned int max_size = MOVE_MAX_PIECES + 1;
878 enum insn_code icode;
879
880 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
881
882 data.offset = 0;
883 data.from_addr = from_addr;
884 if (to)
885 {
886 to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
887 to_addr = XEXP (to, 0);
888 data.to = to;
889 data.autinc_to
890 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
891 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
892 data.reverse
893 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
894 }
895 else
896 {
897 to_addr_mode = VOIDmode;
898 to_addr = NULL_RTX;
899 data.to = NULL_RTX;
900 data.autinc_to = 1;
901 #ifdef STACK_GROWS_DOWNWARD
902 data.reverse = 1;
903 #else
904 data.reverse = 0;
905 #endif
906 }
907 data.to_addr = to_addr;
908 data.from = from;
909 data.autinc_from
910 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
911 || GET_CODE (from_addr) == POST_INC
912 || GET_CODE (from_addr) == POST_DEC);
913
914 data.explicit_inc_from = 0;
915 data.explicit_inc_to = 0;
916 if (data.reverse) data.offset = len;
917 data.len = len;
918
919 /* If copying requires more than two move insns,
920 copy addresses to registers (to make displacements shorter)
921 and use post-increment if available. */
922 if (!(data.autinc_from && data.autinc_to)
923 && move_by_pieces_ninsns (len, align, max_size) > 2)
924 {
925 /* Find the mode of the largest move...
926 MODE might not be used depending on the definitions of the
927 USE_* macros below. */
928 enum machine_mode mode ATTRIBUTE_UNUSED
929 = widest_int_mode_for_size (max_size);
930
931 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
932 {
933 data.from_addr = copy_to_mode_reg (from_addr_mode,
934 plus_constant (from_addr, len));
935 data.autinc_from = 1;
936 data.explicit_inc_from = -1;
937 }
938 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
939 {
940 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
941 data.autinc_from = 1;
942 data.explicit_inc_from = 1;
943 }
944 if (!data.autinc_from && CONSTANT_P (from_addr))
945 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
946 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
947 {
948 data.to_addr = copy_to_mode_reg (to_addr_mode,
949 plus_constant (to_addr, len));
950 data.autinc_to = 1;
951 data.explicit_inc_to = -1;
952 }
953 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
954 {
955 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
956 data.autinc_to = 1;
957 data.explicit_inc_to = 1;
958 }
959 if (!data.autinc_to && CONSTANT_P (to_addr))
960 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
961 }
962
963 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
964
965 /* First move what we can in the largest integer mode, then go to
966 successively smaller modes. */
967
968 while (max_size > 1)
969 {
970 enum machine_mode mode = widest_int_mode_for_size (max_size);
971
972 if (mode == VOIDmode)
973 break;
974
975 icode = optab_handler (mov_optab, mode);
976 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
977 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
978
979 max_size = GET_MODE_SIZE (mode);
980 }
981
982 /* The code above should have handled everything. */
983 gcc_assert (!data.len);
984
985 if (endp)
986 {
987 rtx to1;
988
989 gcc_assert (!data.reverse);
990 if (data.autinc_to)
991 {
992 if (endp == 2)
993 {
994 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
995 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
996 else
997 data.to_addr = copy_to_mode_reg (to_addr_mode,
998 plus_constant (data.to_addr,
999 -1));
1000 }
1001 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1002 data.offset);
1003 }
1004 else
1005 {
1006 if (endp == 2)
1007 --data.offset;
1008 to1 = adjust_address (data.to, QImode, data.offset);
1009 }
1010 return to1;
1011 }
1012 else
1013 return data.to;
1014 }
1015
1016 /* Return number of insns required to move L bytes by pieces.
1017 ALIGN (in bits) is maximum alignment we can assume. */
1018
1019 static unsigned HOST_WIDE_INT
1020 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1021 unsigned int max_size)
1022 {
1023 unsigned HOST_WIDE_INT n_insns = 0;
1024
1025 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1026
1027 while (max_size > 1)
1028 {
1029 enum machine_mode mode;
1030 enum insn_code icode;
1031
1032 mode = widest_int_mode_for_size (max_size);
1033
1034 if (mode == VOIDmode)
1035 break;
1036
1037 icode = optab_handler (mov_optab, mode);
1038 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1039 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1040
1041 max_size = GET_MODE_SIZE (mode);
1042 }
1043
1044 gcc_assert (!l);
1045 return n_insns;
1046 }
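/* Worked example (hypothetical target with QImode through DImode move
   patterns): called with l == 10, a 64-bit ALIGN and max_size ==
   MOVE_MAX_PIECES + 1 == 9, the loop counts one DImode move (10 / 8) with
   two bytes left over, then one HImode move, so n_insns == 2.  The final
   gcc_assert (!l) checks that the chosen modes covered every byte.  */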
1047
1048 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1049 with move instructions for mode MODE. GENFUN is the gen_... function
1050 to make a move insn for that mode. DATA has all the other info. */
1051
1052 static void
1053 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1054 struct move_by_pieces_d *data)
1055 {
1056 unsigned int size = GET_MODE_SIZE (mode);
1057 rtx to1 = NULL_RTX, from1;
1058
1059 while (data->len >= size)
1060 {
1061 if (data->reverse)
1062 data->offset -= size;
1063
1064 if (data->to)
1065 {
1066 if (data->autinc_to)
1067 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1068 data->offset);
1069 else
1070 to1 = adjust_address (data->to, mode, data->offset);
1071 }
1072
1073 if (data->autinc_from)
1074 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1075 data->offset);
1076 else
1077 from1 = adjust_address (data->from, mode, data->offset);
1078
1079 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1080 emit_insn (gen_add2_insn (data->to_addr,
1081 GEN_INT (-(HOST_WIDE_INT)size)));
1082 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1083 emit_insn (gen_add2_insn (data->from_addr,
1084 GEN_INT (-(HOST_WIDE_INT)size)));
1085
1086 if (data->to)
1087 emit_insn ((*genfun) (to1, from1));
1088 else
1089 {
1090 #ifdef PUSH_ROUNDING
1091 emit_single_push_insn (mode, from1, NULL);
1092 #else
1093 gcc_unreachable ();
1094 #endif
1095 }
1096
1097 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1098 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1099 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1100 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1101
1102 if (! data->reverse)
1103 data->offset += size;
1104
1105 data->len -= size;
1106 }
1107 }
1108 \f
1109 /* Emit code to move a block Y to a block X. This may be done with
1110 string-move instructions, with multiple scalar move instructions,
1111 or with a library call.
1112
1113 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1114 SIZE is an rtx that says how long they are.
1115 ALIGN is the maximum alignment we can assume they have.
1116 METHOD describes what kind of copy this is, and what mechanisms may be used.
1117
1118 Return the address of the new block, if memcpy is called and returns it,
1119 0 otherwise. */
1120
1121 rtx
1122 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1123 unsigned int expected_align, HOST_WIDE_INT expected_size)
1124 {
1125 bool may_use_call;
1126 rtx retval = 0;
1127 unsigned int align;
1128
1129 gcc_assert (size);
1130 if (CONST_INT_P (size)
1131 && INTVAL (size) == 0)
1132 return 0;
1133
1134 switch (method)
1135 {
1136 case BLOCK_OP_NORMAL:
1137 case BLOCK_OP_TAILCALL:
1138 may_use_call = true;
1139 break;
1140
1141 case BLOCK_OP_CALL_PARM:
1142 may_use_call = block_move_libcall_safe_for_call_parm ();
1143
1144 /* Make inhibit_defer_pop nonzero around the library call
1145 to force it to pop the arguments right away. */
1146 NO_DEFER_POP;
1147 break;
1148
1149 case BLOCK_OP_NO_LIBCALL:
1150 may_use_call = false;
1151 break;
1152
1153 default:
1154 gcc_unreachable ();
1155 }
1156
1157 gcc_assert (MEM_P (x) && MEM_P (y));
1158 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1159 gcc_assert (align >= BITS_PER_UNIT);
1160
1161 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1162 block copy is more efficient for other large modes, e.g. DCmode. */
1163 x = adjust_address (x, BLKmode, 0);
1164 y = adjust_address (y, BLKmode, 0);
1165
1166 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1167 can be incorrect is coming from __builtin_memcpy. */
1168 if (CONST_INT_P (size))
1169 {
1170 x = shallow_copy_rtx (x);
1171 y = shallow_copy_rtx (y);
1172 set_mem_size (x, size);
1173 set_mem_size (y, size);
1174 }
1175
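/* Strategy: prefer an inline move-by-pieces expansion for small constant
   sizes, then a target movmem pattern, then a call to memcpy when libcalls
   are allowed and both blocks are in the generic address space, and
   finally fall back to a byte-copy loop.  */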
1176 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1177 move_by_pieces (x, y, INTVAL (size), align, 0);
1178 else if (emit_block_move_via_movmem (x, y, size, align,
1179 expected_align, expected_size))
1180 ;
1181 else if (may_use_call
1182 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1183 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1184 {
1185 /* Since x and y are passed to a libcall, mark the corresponding
1186 tree EXPR as addressable. */
1187 tree y_expr = MEM_EXPR (y);
1188 tree x_expr = MEM_EXPR (x);
1189 if (y_expr)
1190 mark_addressable (y_expr);
1191 if (x_expr)
1192 mark_addressable (x_expr);
1193 retval = emit_block_move_via_libcall (x, y, size,
1194 method == BLOCK_OP_TAILCALL);
1195 }
1196
1197 else
1198 emit_block_move_via_loop (x, y, size, align);
1199
1200 if (method == BLOCK_OP_CALL_PARM)
1201 OK_DEFER_POP;
1202
1203 return retval;
1204 }
1205
1206 rtx
1207 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1208 {
1209 return emit_block_move_hints (x, y, size, method, 0, -1);
1210 }
1211
1212 /* A subroutine of emit_block_move. Returns true if calling the
1213 block move libcall will not clobber any parameters which may have
1214 already been placed on the stack. */
1215
1216 static bool
1217 block_move_libcall_safe_for_call_parm (void)
1218 {
1219 #if defined (REG_PARM_STACK_SPACE)
1220 tree fn;
1221 #endif
1222
1223 /* If arguments are pushed on the stack, then they're safe. */
1224 if (PUSH_ARGS)
1225 return true;
1226
1227 /* If registers go on the stack anyway, any argument is sure to clobber
1228 an outgoing argument. */
1229 #if defined (REG_PARM_STACK_SPACE)
1230 fn = emit_block_move_libcall_fn (false);
1231 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1232 depend on its argument. */
1233 (void) fn;
1234 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1235 && REG_PARM_STACK_SPACE (fn) != 0)
1236 return false;
1237 #endif
1238
1239 /* If any argument goes in memory, then it might clobber an outgoing
1240 argument. */
1241 {
1242 CUMULATIVE_ARGS args_so_far_v;
1243 cumulative_args_t args_so_far;
1244 tree fn, arg;
1245
1246 fn = emit_block_move_libcall_fn (false);
1247 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1248 args_so_far = pack_cumulative_args (&args_so_far_v);
1249
1250 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1251 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1252 {
1253 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1254 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1255 NULL_TREE, true);
1256 if (!tmp || !REG_P (tmp))
1257 return false;
1258 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1259 return false;
1260 targetm.calls.function_arg_advance (args_so_far, mode,
1261 NULL_TREE, true);
1262 }
1263 }
1264 return true;
1265 }
1266
1267 /* A subroutine of emit_block_move. Expand a movmem pattern;
1268 return true if successful. */
1269
1270 static bool
1271 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1272 unsigned int expected_align, HOST_WIDE_INT expected_size)
1273 {
1274 int save_volatile_ok = volatile_ok;
1275 enum machine_mode mode;
1276
1277 if (expected_align < align)
1278 expected_align = align;
1279
1280 /* Since this is a move insn, we don't care about volatility. */
1281 volatile_ok = 1;
1282
1283 /* Try the most limited insn first, because there's no point
1284 including more than one in the machine description unless
1285 the more limited one has some advantage. */
1286
1287 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1288 mode = GET_MODE_WIDER_MODE (mode))
1289 {
1290 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1291
1292 if (code != CODE_FOR_nothing
1293 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1294 here because if SIZE is less than the mode mask, as it is
1295 returned by the macro, it will definitely be less than the
1296 actual mode mask. */
1297 && ((CONST_INT_P (size)
1298 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1299 <= (GET_MODE_MASK (mode) >> 1)))
1300 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
1301 {
1302 struct expand_operand ops[6];
1303 unsigned int nops;
1304
1305 /* ??? When called via emit_block_move_for_call, it'd be
1306 nice if there were some way to inform the backend, so
1307 that it doesn't fail the expansion because it thinks
1308 emitting the libcall would be more efficient. */
1309 nops = insn_data[(int) code].n_generator_args;
1310 gcc_assert (nops == 4 || nops == 6);
1311
1312 create_fixed_operand (&ops[0], x);
1313 create_fixed_operand (&ops[1], y);
1314 /* The check above guarantees that this size conversion is valid. */
1315 create_convert_operand_to (&ops[2], size, mode, true);
1316 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1317 if (nops == 6)
1318 {
1319 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1320 create_integer_operand (&ops[5], expected_size);
1321 }
1322 if (maybe_expand_insn (code, nops, ops))
1323 {
1324 volatile_ok = save_volatile_ok;
1325 return true;
1326 }
1327 }
1328 }
1329
1330 volatile_ok = save_volatile_ok;
1331 return false;
1332 }
1333
1334 /* A subroutine of emit_block_move. Expand a call to memcpy.
1335 Return the return value from memcpy, 0 otherwise. */
1336
1337 rtx
1338 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1339 {
1340 rtx dst_addr, src_addr;
1341 tree call_expr, fn, src_tree, dst_tree, size_tree;
1342 enum machine_mode size_mode;
1343 rtx retval;
1344
1345 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1346 pseudos. We can then place those new pseudos into a VAR_DECL and
1347 use them later. */
1348
1349 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1350 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1351
1352 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1353 src_addr = convert_memory_address (ptr_mode, src_addr);
1354
1355 dst_tree = make_tree (ptr_type_node, dst_addr);
1356 src_tree = make_tree (ptr_type_node, src_addr);
1357
1358 size_mode = TYPE_MODE (sizetype);
1359
1360 size = convert_to_mode (size_mode, size, 1);
1361 size = copy_to_mode_reg (size_mode, size);
1362
1363 /* It is incorrect to use the libcall calling conventions to call
1364 memcpy in this context. This could be a user call to memcpy and
1365 the user may wish to examine the return value from memcpy. For
1366 targets where libcalls and normal calls have different conventions
1367 for returning pointers, we could end up generating incorrect code. */
1368
1369 size_tree = make_tree (sizetype, size);
1370
1371 fn = emit_block_move_libcall_fn (true);
1372 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1373 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1374
1375 retval = expand_normal (call_expr);
1376
1377 return retval;
1378 }
1379
1380 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1381 for the function we use for block copies. The first time FOR_CALL
1382 is true, we call assemble_external. */
1383
1384 static GTY(()) tree block_move_fn;
1385
1386 void
1387 init_block_move_fn (const char *asmspec)
1388 {
1389 if (!block_move_fn)
1390 {
1391 tree args, fn;
1392
1393 fn = get_identifier ("memcpy");
1394 args = build_function_type_list (ptr_type_node, ptr_type_node,
1395 const_ptr_type_node, sizetype,
1396 NULL_TREE);
1397
1398 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1399 DECL_EXTERNAL (fn) = 1;
1400 TREE_PUBLIC (fn) = 1;
1401 DECL_ARTIFICIAL (fn) = 1;
1402 TREE_NOTHROW (fn) = 1;
1403 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1404 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1405
1406 block_move_fn = fn;
1407 }
1408
1409 if (asmspec)
1410 set_user_assembler_name (block_move_fn, asmspec);
1411 }
1412
1413 static tree
1414 emit_block_move_libcall_fn (int for_call)
1415 {
1416 static bool emitted_extern;
1417
1418 if (!block_move_fn)
1419 init_block_move_fn (NULL);
1420
1421 if (for_call && !emitted_extern)
1422 {
1423 emitted_extern = true;
1424 make_decl_rtl (block_move_fn);
1425 assemble_external (block_move_fn);
1426 }
1427
1428 return block_move_fn;
1429 }
1430
1431 /* A subroutine of emit_block_move. Copy the data via an explicit
1432 loop. This is used only when libcalls are forbidden. */
1433 /* ??? It'd be nice to copy in hunks larger than QImode. */
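/* The generated RTL copies one QImode byte per iteration and is equivalent
   to:

     iter = 0;
     goto cmp;
   top:
     *(x + iter) = *(y + iter);
     iter = iter + 1;
   cmp:
     if (iter < size) goto top;  */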
1434
1435 static void
1436 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1437 unsigned int align ATTRIBUTE_UNUSED)
1438 {
1439 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1440 enum machine_mode x_addr_mode
1441 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
1442 enum machine_mode y_addr_mode
1443 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
1444 enum machine_mode iter_mode;
1445
1446 iter_mode = GET_MODE (size);
1447 if (iter_mode == VOIDmode)
1448 iter_mode = word_mode;
1449
1450 top_label = gen_label_rtx ();
1451 cmp_label = gen_label_rtx ();
1452 iter = gen_reg_rtx (iter_mode);
1453
1454 emit_move_insn (iter, const0_rtx);
1455
1456 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1457 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1458 do_pending_stack_adjust ();
1459
1460 emit_jump (cmp_label);
1461 emit_label (top_label);
1462
1463 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1464 x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);
1465
1466 if (x_addr_mode != y_addr_mode)
1467 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1468 y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);
1469
1470 x = change_address (x, QImode, x_addr);
1471 y = change_address (y, QImode, y_addr);
1472
1473 emit_move_insn (x, y);
1474
1475 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1476 true, OPTAB_LIB_WIDEN);
1477 if (tmp != iter)
1478 emit_move_insn (iter, tmp);
1479
1480 emit_label (cmp_label);
1481
1482 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1483 true, top_label);
1484 }
1485 \f
1486 /* Copy all or part of a value X into registers starting at REGNO.
1487 The number of registers to be filled is NREGS. */
1488
1489 void
1490 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1491 {
1492 int i;
1493 #ifdef HAVE_load_multiple
1494 rtx pat;
1495 rtx last;
1496 #endif
1497
1498 if (nregs == 0)
1499 return;
1500
1501 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1502 x = validize_mem (force_const_mem (mode, x));
1503
1504 /* See if the machine can do this with a load multiple insn. */
1505 #ifdef HAVE_load_multiple
1506 if (HAVE_load_multiple)
1507 {
1508 last = get_last_insn ();
1509 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1510 GEN_INT (nregs));
1511 if (pat)
1512 {
1513 emit_insn (pat);
1514 return;
1515 }
1516 else
1517 delete_insns_since (last);
1518 }
1519 #endif
1520
1521 for (i = 0; i < nregs; i++)
1522 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1523 operand_subword_force (x, i, mode));
1524 }
1525
1526 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1527 The number of registers to be filled is NREGS. */
1528
1529 void
1530 move_block_from_reg (int regno, rtx x, int nregs)
1531 {
1532 int i;
1533
1534 if (nregs == 0)
1535 return;
1536
1537 /* See if the machine can do this with a store multiple insn. */
1538 #ifdef HAVE_store_multiple
1539 if (HAVE_store_multiple)
1540 {
1541 rtx last = get_last_insn ();
1542 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1543 GEN_INT (nregs));
1544 if (pat)
1545 {
1546 emit_insn (pat);
1547 return;
1548 }
1549 else
1550 delete_insns_since (last);
1551 }
1552 #endif
1553
1554 for (i = 0; i < nregs; i++)
1555 {
1556 rtx tem = operand_subword (x, i, 1, BLKmode);
1557
1558 gcc_assert (tem);
1559
1560 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1561 }
1562 }
1563
1564 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1565 ORIG, where ORIG is a non-consecutive group of registers represented by
1566 a PARALLEL. The clone is identical to the original except in that the
1567 original set of registers is replaced by a new set of pseudo registers.
1568 The new set has the same modes as the original set. */
1569
1570 rtx
1571 gen_group_rtx (rtx orig)
1572 {
1573 int i, length;
1574 rtx *tmps;
1575
1576 gcc_assert (GET_CODE (orig) == PARALLEL);
1577
1578 length = XVECLEN (orig, 0);
1579 tmps = XALLOCAVEC (rtx, length);
1580
1581 /* Skip a NULL entry in first slot. */
1582 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1583
1584 if (i)
1585 tmps[0] = 0;
1586
1587 for (; i < length; i++)
1588 {
1589 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1590 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1591
1592 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1593 }
1594
1595 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1596 }
1597
1598 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1599 except that values are placed in TMPS[i], and must later be moved
1600 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1601
1602 static void
1603 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1604 {
1605 rtx src;
1606 int start, i;
1607 enum machine_mode m = GET_MODE (orig_src);
1608
1609 gcc_assert (GET_CODE (dst) == PARALLEL);
1610
1611 if (m != VOIDmode
1612 && !SCALAR_INT_MODE_P (m)
1613 && !MEM_P (orig_src)
1614 && GET_CODE (orig_src) != CONCAT)
1615 {
1616 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1617 if (imode == BLKmode)
1618 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1619 else
1620 src = gen_reg_rtx (imode);
1621 if (imode != BLKmode)
1622 src = gen_lowpart (GET_MODE (orig_src), src);
1623 emit_move_insn (src, orig_src);
1624 /* ...and back again. */
1625 if (imode != BLKmode)
1626 src = gen_lowpart (imode, src);
1627 emit_group_load_1 (tmps, dst, src, type, ssize);
1628 return;
1629 }
1630
1631 /* Check for a NULL entry, used to indicate that the parameter goes
1632 both on the stack and in registers. */
1633 if (XEXP (XVECEXP (dst, 0, 0), 0))
1634 start = 0;
1635 else
1636 start = 1;
1637
1638 /* Process the pieces. */
1639 for (i = start; i < XVECLEN (dst, 0); i++)
1640 {
1641 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1642 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1643 unsigned int bytelen = GET_MODE_SIZE (mode);
1644 int shift = 0;
1645
1646 /* Handle trailing fragments that run over the size of the struct. */
1647 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1648 {
1649 /* Arrange to shift the fragment to where it belongs.
1650 extract_bit_field loads to the lsb of the reg. */
1651 if (
1652 #ifdef BLOCK_REG_PADDING
1653 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1654 == (BYTES_BIG_ENDIAN ? upward : downward)
1655 #else
1656 BYTES_BIG_ENDIAN
1657 #endif
1658 )
1659 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1660 bytelen = ssize - bytepos;
1661 gcc_assert (bytelen > 0);
1662 }
1663
1664 /* If we won't be loading directly from memory, protect the real source
1665 from strange tricks we might play; but make sure that the source can
1666 be loaded directly into the destination. */
1667 src = orig_src;
1668 if (!MEM_P (orig_src)
1669 && (!CONSTANT_P (orig_src)
1670 || (GET_MODE (orig_src) != mode
1671 && GET_MODE (orig_src) != VOIDmode)))
1672 {
1673 if (GET_MODE (orig_src) == VOIDmode)
1674 src = gen_reg_rtx (mode);
1675 else
1676 src = gen_reg_rtx (GET_MODE (orig_src));
1677
1678 emit_move_insn (src, orig_src);
1679 }
1680
1681 /* Optimize the access just a bit. */
1682 if (MEM_P (src)
1683 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1684 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1685 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1686 && bytelen == GET_MODE_SIZE (mode))
1687 {
1688 tmps[i] = gen_reg_rtx (mode);
1689 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1690 }
1691 else if (COMPLEX_MODE_P (mode)
1692 && GET_MODE (src) == mode
1693 && bytelen == GET_MODE_SIZE (mode))
1694 /* Let emit_move_complex do the bulk of the work. */
1695 tmps[i] = src;
1696 else if (GET_CODE (src) == CONCAT)
1697 {
1698 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1699 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1700
1701 if ((bytepos == 0 && bytelen == slen0)
1702 || (bytepos != 0 && bytepos + bytelen <= slen))
1703 {
1704 /* The following assumes that the concatenated objects all
1705 have the same size. In this case, a simple calculation
1706 can be used to determine the object and the bit field
1707 to be extracted. */
1708 tmps[i] = XEXP (src, bytepos / slen0);
1709 if (! CONSTANT_P (tmps[i])
1710 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1711 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1712 (bytepos % slen0) * BITS_PER_UNIT,
1713 1, false, NULL_RTX, mode, mode);
1714 }
1715 else
1716 {
1717 rtx mem;
1718
1719 gcc_assert (!bytepos);
1720 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1721 emit_move_insn (mem, src);
1722 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1723 0, 1, false, NULL_RTX, mode, mode);
1724 }
1725 }
1726 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1727 SIMD register, which is currently broken. Until we get GCC
1728 to emit proper RTL for these cases, let's dump to memory. */
1729 else if (VECTOR_MODE_P (GET_MODE (dst))
1730 && REG_P (src))
1731 {
1732 int slen = GET_MODE_SIZE (GET_MODE (src));
1733 rtx mem;
1734
1735 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1736 emit_move_insn (mem, src);
1737 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1738 }
1739 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1740 && XVECLEN (dst, 0) > 1)
1741 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1742 else if (CONSTANT_P (src))
1743 {
1744 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1745
1746 if (len == ssize)
1747 tmps[i] = src;
1748 else
1749 {
1750 rtx first, second;
1751
1752 gcc_assert (2 * len == ssize);
1753 split_double (src, &first, &second);
1754 if (i)
1755 tmps[i] = second;
1756 else
1757 tmps[i] = first;
1758 }
1759 }
1760 else if (REG_P (src) && GET_MODE (src) == mode)
1761 tmps[i] = src;
1762 else
1763 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1764 bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
1765 mode, mode);
1766
1767 if (shift)
1768 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1769 shift, tmps[i], 0);
1770 }
1771 }
1772
1773 /* Emit code to move a block SRC of type TYPE to a block DST,
1774 where DST is non-consecutive registers represented by a PARALLEL.
1775 SSIZE represents the total size of block SRC in bytes, or -1
1776 if not known. */
1777
1778 void
1779 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1780 {
1781 rtx *tmps;
1782 int i;
1783
1784 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1785 emit_group_load_1 (tmps, dst, src, type, ssize);
1786
1787 /* Copy the extracted pieces into the proper (probable) hard regs. */
1788 for (i = 0; i < XVECLEN (dst, 0); i++)
1789 {
1790 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1791 if (d == NULL)
1792 continue;
1793 emit_move_insn (d, tmps[i]);
1794 }
1795 }
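
/* A minimal usage sketch, for illustration only: load the first 16
   bytes of a BLKmode MEM into two DImode registers described by a
   PARALLEL.  Real callers would normally use the target's argument or
   return hard registers here; the pseudos and the function name are
   hypothetical, chosen just to keep the sketch target-independent.  */

static void
example_group_load (rtx src_mem, tree type)
{
  rtx r0 = gen_reg_rtx (DImode);
  rtx r1 = gen_reg_rtx (DImode);
  rtx par
    = gen_rtx_PARALLEL (BLKmode,
			gen_rtvec (2,
				   gen_rtx_EXPR_LIST (VOIDmode, r0,
						      GEN_INT (0)),
				   gen_rtx_EXPR_LIST (VOIDmode, r1,
						      GEN_INT (8))));

  /* Each EXPR_LIST pairs a destination register with its byte offset
     within SRC_MEM; the last argument is the total size loaded.  */
  emit_group_load (par, src_mem, type, 16);
}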
1796
1797 /* Similar, but load SRC into new pseudos in a format that looks like
1798 PARALLEL. This can later be fed to emit_group_move to get things
1799 in the right place. */
1800
1801 rtx
1802 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1803 {
1804 rtvec vec;
1805 int i;
1806
1807 vec = rtvec_alloc (XVECLEN (parallel, 0));
1808 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1809
1810 /* Convert the vector to look just like the original PARALLEL, except
1811 with the computed values. */
1812 for (i = 0; i < XVECLEN (parallel, 0); i++)
1813 {
1814 rtx e = XVECEXP (parallel, 0, i);
1815 rtx d = XEXP (e, 0);
1816
1817 if (d)
1818 {
1819 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1820 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1821 }
1822 RTVEC_ELT (vec, i) = e;
1823 }
1824
1825 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1826 }
1827
1828 /* Emit code to move a block SRC to block DST, where SRC and DST are
1829 non-consecutive groups of registers, each represented by a PARALLEL. */
1830
1831 void
1832 emit_group_move (rtx dst, rtx src)
1833 {
1834 int i;
1835
1836 gcc_assert (GET_CODE (src) == PARALLEL
1837 && GET_CODE (dst) == PARALLEL
1838 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1839
1840 /* Skip first entry if NULL. */
1841 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1842 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1843 XEXP (XVECEXP (src, 0, i), 0));
1844 }
1845
1846 /* Move a group of registers represented by a PARALLEL into pseudos. */
1847
1848 rtx
1849 emit_group_move_into_temps (rtx src)
1850 {
1851 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1852 int i;
1853
1854 for (i = 0; i < XVECLEN (src, 0); i++)
1855 {
1856 rtx e = XVECEXP (src, 0, i);
1857 rtx d = XEXP (e, 0);
1858
1859 if (d)
1860 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1861 RTVEC_ELT (vec, i) = e;
1862 }
1863
1864 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1865 }
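
/* A sketch of the usual two-phase pattern (the function name and
   parameters are placeholders): when the final destination registers
   must stay untouched until every value is ready, as for sibling
   calls, first load into temporaries laid out like the PARALLEL and
   only then move the whole group at once.  */

static void
example_group_load_via_temps (rtx reg_parallel, rtx value, tree type,
			      int size)
{
  rtx temps = emit_group_load_into_temps (reg_parallel, value, type, size);

  /* Other setup that might clobber the destination registers could be
     emitted here.  */

  emit_group_move (reg_parallel, temps);
}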
1866
1867 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1868 where SRC is non-consecutive registers represented by a PARALLEL.
1869 SSIZE represents the total size of block ORIG_DST, or -1 if not
1870 known. */
1871
1872 void
1873 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1874 {
1875 rtx *tmps, dst;
1876 int start, finish, i;
1877 enum machine_mode m = GET_MODE (orig_dst);
1878
1879 gcc_assert (GET_CODE (src) == PARALLEL);
1880
1881 if (!SCALAR_INT_MODE_P (m)
1882 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1883 {
1884 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1885 if (imode == BLKmode)
1886 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1887 else
1888 dst = gen_reg_rtx (imode);
1889 emit_group_store (dst, src, type, ssize);
1890 if (imode != BLKmode)
1891 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1892 emit_move_insn (orig_dst, dst);
1893 return;
1894 }
1895
1896 /* Check for a NULL entry, used to indicate that the parameter goes
1897 both on the stack and in registers. */
1898 if (XEXP (XVECEXP (src, 0, 0), 0))
1899 start = 0;
1900 else
1901 start = 1;
1902 finish = XVECLEN (src, 0);
1903
1904 tmps = XALLOCAVEC (rtx, finish);
1905
1906 /* Copy the (probable) hard regs into pseudos. */
1907 for (i = start; i < finish; i++)
1908 {
1909 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1910 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1911 {
1912 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1913 emit_move_insn (tmps[i], reg);
1914 }
1915 else
1916 tmps[i] = reg;
1917 }
1918
1919 /* If we won't be storing directly into memory, protect the real destination
1920 from strange tricks we might play. */
1921 dst = orig_dst;
1922 if (GET_CODE (dst) == PARALLEL)
1923 {
1924 rtx temp;
1925
1926 /* We can get a PARALLEL dst if there is a conditional expression in
1927 a return statement. In that case, the dst and src are the same,
1928 so no action is necessary. */
1929 if (rtx_equal_p (dst, src))
1930 return;
1931
1932 /* It is unclear if we can ever reach here, but we may as well handle
1933 it. Allocate a temporary, and split this into a store/load to/from
1934 the temporary. */
1935
1936 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1937 emit_group_store (temp, src, type, ssize);
1938 emit_group_load (dst, temp, type, ssize);
1939 return;
1940 }
1941 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1942 {
1943 enum machine_mode outer = GET_MODE (dst);
1944 enum machine_mode inner;
1945 HOST_WIDE_INT bytepos;
1946 bool done = false;
1947 rtx temp;
1948
1949 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1950 dst = gen_reg_rtx (outer);
1951
1952 /* Make life a bit easier for combine. */
1953 /* If the first element of the vector is the low part
1954 of the destination mode, use a paradoxical subreg to
1955 initialize the destination. */
1956 if (start < finish)
1957 {
1958 inner = GET_MODE (tmps[start]);
1959 bytepos = subreg_lowpart_offset (inner, outer);
1960 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1961 {
1962 temp = simplify_gen_subreg (outer, tmps[start],
1963 inner, 0);
1964 if (temp)
1965 {
1966 emit_move_insn (dst, temp);
1967 done = true;
1968 start++;
1969 }
1970 }
1971 }
1972
1973 /* If the first element wasn't the low part, try the last. */
1974 if (!done
1975 && start < finish - 1)
1976 {
1977 inner = GET_MODE (tmps[finish - 1]);
1978 bytepos = subreg_lowpart_offset (inner, outer);
1979 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1980 {
1981 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1982 inner, 0);
1983 if (temp)
1984 {
1985 emit_move_insn (dst, temp);
1986 done = true;
1987 finish--;
1988 }
1989 }
1990 }
1991
1992 /* Otherwise, simply initialize the result to zero. */
1993 if (!done)
1994 emit_move_insn (dst, CONST0_RTX (outer));
1995 }
1996
1997 /* Process the pieces. */
1998 for (i = start; i < finish; i++)
1999 {
2000 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2001 enum machine_mode mode = GET_MODE (tmps[i]);
2002 unsigned int bytelen = GET_MODE_SIZE (mode);
2003 unsigned int adj_bytelen = bytelen;
2004 rtx dest = dst;
2005
2006 /* Handle trailing fragments that run over the size of the struct. */
2007 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2008 adj_bytelen = ssize - bytepos;
2009
2010 if (GET_CODE (dst) == CONCAT)
2011 {
2012 if (bytepos + adj_bytelen
2013 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2014 dest = XEXP (dst, 0);
2015 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2016 {
2017 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2018 dest = XEXP (dst, 1);
2019 }
2020 else
2021 {
2022 enum machine_mode dest_mode = GET_MODE (dest);
2023 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2024
2025 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2026
2027 if (GET_MODE_ALIGNMENT (dest_mode)
2028 >= GET_MODE_ALIGNMENT (tmp_mode))
2029 {
2030 dest = assign_stack_temp (dest_mode,
2031 GET_MODE_SIZE (dest_mode),
2032 0);
2033 emit_move_insn (adjust_address (dest,
2034 tmp_mode,
2035 bytepos),
2036 tmps[i]);
2037 dst = dest;
2038 }
2039 else
2040 {
2041 dest = assign_stack_temp (tmp_mode,
2042 GET_MODE_SIZE (tmp_mode),
2043 0);
2044 emit_move_insn (dest, tmps[i]);
2045 dst = adjust_address (dest, dest_mode, bytepos);
2046 }
2047 break;
2048 }
2049 }
2050
2051 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2052 {
2053 /* store_bit_field always takes its value from the lsb.
2054 Move the fragment to the lsb if it's not already there. */
2055 if (
2056 #ifdef BLOCK_REG_PADDING
2057 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2058 == (BYTES_BIG_ENDIAN ? upward : downward)
2059 #else
2060 BYTES_BIG_ENDIAN
2061 #endif
2062 )
2063 {
2064 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2065 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2066 shift, tmps[i], 0);
2067 }
2068 bytelen = adj_bytelen;
2069 }
2070
2071 /* Optimize the access just a bit. */
2072 if (MEM_P (dest)
2073 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2074 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2075 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2076 && bytelen == GET_MODE_SIZE (mode))
2077 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2078 else
2079 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2080 mode, tmps[i]);
2081 }
2082
2083 /* Copy from the pseudo into the (probable) hard reg. */
2084 if (orig_dst != dst)
2085 emit_move_insn (orig_dst, dst);
2086 }
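
/* A sketch (illustrative name only) of spilling a value that lives in
   a PARALLEL back into a stack temporary, assuming SRC_PARALLEL was
   built the same way as for emit_group_load and covers SIZE bytes.  */

static rtx
example_group_spill (rtx src_parallel, tree type, int size)
{
  rtx mem = assign_stack_temp (BLKmode, size, 0);

  set_mem_attributes (mem, type, 1);
  emit_group_store (mem, src_parallel, type, size);
  return mem;
}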
2087
2088 /* Generate code to copy a BLKmode object of TYPE out of a
2089 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2090 is null, a stack temporary is created. TGTBLK is returned.
2091
2092 The purpose of this routine is to handle functions that return
2093 BLKmode structures in registers. Some machines (the PA for example)
2094 want to return all small structures in registers regardless of the
2095 structure's alignment. */
2096
2097 rtx
2098 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2099 {
2100 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2101 rtx src = NULL, dst = NULL;
2102 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2103 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2104 enum machine_mode copy_mode;
2105
2106 if (tgtblk == 0)
2107 {
2108 tgtblk = assign_temp (build_qualified_type (type,
2109 (TYPE_QUALS (type)
2110 | TYPE_QUAL_CONST)),
2111 0, 1, 1);
2112 preserve_temp_slots (tgtblk);
2113 }
2114
2115 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2116 into a new pseudo which is a full word. */
2117
2118 if (GET_MODE (srcreg) != BLKmode
2119 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2120 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2121
2122 /* If the structure doesn't take up a whole number of words, see whether
2123 SRCREG is padded on the left or on the right. If it's on the left,
2124 set PADDING_CORRECTION to the number of bits to skip.
2125
2126 In most ABIs, the structure will be returned at the least significant
2127 end of the register, which translates to right padding on little-endian
2128 targets and left padding on big-endian targets. The opposite
2129 holds if the structure is returned at the most significant
2130 end of the register. */
2131 if (bytes % UNITS_PER_WORD != 0
2132 && (targetm.calls.return_in_msb (type)
2133 ? !BYTES_BIG_ENDIAN
2134 : BYTES_BIG_ENDIAN))
2135 padding_correction
2136 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2137
2138 /* Copy the structure BITSIZE bits at a time. If the target lives in
2139 memory, take care of not reading/writing past its end by selecting
2140 a copy mode suited to BITSIZE. This should always be possible given
2141 how it is computed.
2142
2143 We could probably emit more efficient code for machines which do not use
2144 strict alignment, but it doesn't seem worth the effort at the current
2145 time. */
2146
2147 copy_mode = word_mode;
2148 if (MEM_P (tgtblk))
2149 {
2150 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2151 if (mem_mode != BLKmode)
2152 copy_mode = mem_mode;
2153 }
2154
2155 for (bitpos = 0, xbitpos = padding_correction;
2156 bitpos < bytes * BITS_PER_UNIT;
2157 bitpos += bitsize, xbitpos += bitsize)
2158 {
2159 /* We need a new source operand each time xbitpos is on a
2160 word boundary and when xbitpos == padding_correction
2161 (the first time through). */
2162 if (xbitpos % BITS_PER_WORD == 0
2163 || xbitpos == padding_correction)
2164 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2165 GET_MODE (srcreg));
2166
2167 /* We need a new destination operand each time bitpos is on
2168 a word boundary. */
2169 if (bitpos % BITS_PER_WORD == 0)
2170 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2171
2172 /* Use xbitpos for the source extraction (right justified) and
2173 bitpos for the destination store (left justified). */
2174 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2175 extract_bit_field (src, bitsize,
2176 xbitpos % BITS_PER_WORD, 1, false,
2177 NULL_RTX, copy_mode, copy_mode));
2178 }
2179
2180 return tgtblk;
2181 }
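
/* A sketch of a typical call site (the name and the explicit temporary
   are illustrative; passing a null TGTBLK would let
   copy_blkmode_from_reg create the temporary itself): unpack a BLKmode
   value returned in RESULT_REG into addressable memory.  */

static rtx
example_copy_struct_return (rtx result_reg, tree type)
{
  HOST_WIDE_INT size = int_size_in_bytes (type);
  rtx target = assign_stack_temp (BLKmode, size, 0);

  set_mem_attributes (target, type, 1);
  return copy_blkmode_from_reg (target, result_reg, type);
}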
2182
2183 /* Add a USE expression for REG to the (possibly empty) list pointed
2184 to by CALL_FUSAGE. REG must denote a hard register. */
2185
2186 void
2187 use_reg (rtx *call_fusage, rtx reg)
2188 {
2189 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2190
2191 *call_fusage
2192 = gen_rtx_EXPR_LIST (VOIDmode,
2193 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2194 }
2195
2196 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2197 starting at REGNO. All of these registers must be hard registers. */
2198
2199 void
2200 use_regs (rtx *call_fusage, int regno, int nregs)
2201 {
2202 int i;
2203
2204 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2205
2206 for (i = 0; i < nregs; i++)
2207 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2208 }
2209
2210 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2211 PARALLEL REGS. This is for calls that pass values in multiple
2212 non-contiguous locations. The Irix 6 ABI has examples of this. */
2213
2214 void
2215 use_group_regs (rtx *call_fusage, rtx regs)
2216 {
2217 int i;
2218
2219 for (i = 0; i < XVECLEN (regs, 0); i++)
2220 {
2221 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2222
2223 /* A NULL entry means the parameter goes both on the stack and in
2224 registers. This can also be a MEM for targets that pass values
2225 partially on the stack and partially in registers. */
2226 if (reg != 0 && REG_P (reg))
2227 use_reg (call_fusage, reg);
2228 }
2229 }
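
/* A sketch of how a call expander records register uses.  The register
   numbers 0 and 1 are purely hypothetical; a real caller takes them
   from the target's calling conventions.  */

static rtx
example_build_call_fusage (void)
{
  rtx call_fusage = NULL_RTX;

  /* Note that the call reads two consecutive hard registers.  The
     resulting list would normally be attached to the call insn via
     CALL_INSN_FUNCTION_USAGE.  */
  use_regs (&call_fusage, 0, 2);

  return call_fusage;
}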
2230
2231 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2232 assignment and the code of the expression on the RHS is CODE. Return
2233 NULL otherwise. */
2234
2235 static gimple
2236 get_def_for_expr (tree name, enum tree_code code)
2237 {
2238 gimple def_stmt;
2239
2240 if (TREE_CODE (name) != SSA_NAME)
2241 return NULL;
2242
2243 def_stmt = get_gimple_for_ssa_name (name);
2244 if (!def_stmt
2245 || gimple_assign_rhs_code (def_stmt) != code)
2246 return NULL;
2247
2248 return def_stmt;
2249 }
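
/* A small sketch of the typical use (hypothetical function name):
   test whether the SSA name feeding some operation was itself defined
   by a multiplication, as one would when looking for multiply-add
   opportunities.  */

static bool
example_fed_by_mult_p (tree op)
{
  gimple def = get_def_for_expr (op, MULT_EXPR);
  return def != NULL;
}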
2250 \f
2251
2252 /* Determine whether the LEN bytes generated by CONSTFUN can be
2253 stored to memory using several move instructions. CONSTFUNDATA is
2254 a pointer which will be passed as argument in every CONSTFUN call.
2255 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2256 a memset operation and false if it's a copy of a constant string.
2257 Return nonzero if a call to store_by_pieces should succeed. */
2258
2259 int
2260 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2261 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2262 void *constfundata, unsigned int align, bool memsetp)
2263 {
2264 unsigned HOST_WIDE_INT l;
2265 unsigned int max_size;
2266 HOST_WIDE_INT offset = 0;
2267 enum machine_mode mode;
2268 enum insn_code icode;
2269 int reverse;
2270 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2271 rtx cst ATTRIBUTE_UNUSED;
2272
2273 if (len == 0)
2274 return 1;
2275
2276 if (! (memsetp
2277 ? SET_BY_PIECES_P (len, align)
2278 : STORE_BY_PIECES_P (len, align)))
2279 return 0;
2280
2281 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2282
2283 /* We would first store what we can in the largest integer mode, then go to
2284 successively smaller modes. */
2285
2286 for (reverse = 0;
2287 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2288 reverse++)
2289 {
2290 l = len;
2291 max_size = STORE_MAX_PIECES + 1;
2292 while (max_size > 1)
2293 {
2294 mode = widest_int_mode_for_size (max_size);
2295
2296 if (mode == VOIDmode)
2297 break;
2298
2299 icode = optab_handler (mov_optab, mode);
2300 if (icode != CODE_FOR_nothing
2301 && align >= GET_MODE_ALIGNMENT (mode))
2302 {
2303 unsigned int size = GET_MODE_SIZE (mode);
2304
2305 while (l >= size)
2306 {
2307 if (reverse)
2308 offset -= size;
2309
2310 cst = (*constfun) (constfundata, offset, mode);
2311 if (!targetm.legitimate_constant_p (mode, cst))
2312 return 0;
2313
2314 if (!reverse)
2315 offset += size;
2316
2317 l -= size;
2318 }
2319 }
2320
2321 max_size = GET_MODE_SIZE (mode);
2322 }
2323
2324 /* The code above should have handled everything. */
2325 gcc_assert (!l);
2326 }
2327
2328 return 1;
2329 }
2330
2331 /* Generate several move instructions to store LEN bytes generated by
2332 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2333 pointer which will be passed as argument in every CONSTFUN call.
2334 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2335 a memset operation and false if it's a copy of a constant string.
2336 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2337 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2338 stpcpy. */
2339
2340 rtx
2341 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2342 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2343 void *constfundata, unsigned int align, bool memsetp, int endp)
2344 {
2345 enum machine_mode to_addr_mode
2346 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2347 struct store_by_pieces_d data;
2348
2349 if (len == 0)
2350 {
2351 gcc_assert (endp != 2);
2352 return to;
2353 }
2354
2355 gcc_assert (memsetp
2356 ? SET_BY_PIECES_P (len, align)
2357 : STORE_BY_PIECES_P (len, align));
2358 data.constfun = constfun;
2359 data.constfundata = constfundata;
2360 data.len = len;
2361 data.to = to;
2362 store_by_pieces_1 (&data, align);
2363 if (endp)
2364 {
2365 rtx to1;
2366
2367 gcc_assert (!data.reverse);
2368 if (data.autinc_to)
2369 {
2370 if (endp == 2)
2371 {
2372 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2373 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2374 else
2375 data.to_addr = copy_to_mode_reg (to_addr_mode,
2376 plus_constant (data.to_addr,
2377 -1));
2378 }
2379 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2380 data.offset);
2381 }
2382 else
2383 {
2384 if (endp == 2)
2385 --data.offset;
2386 to1 = adjust_address (data.to, QImode, data.offset);
2387 }
2388 return to1;
2389 }
2390 else
2391 return data.to;
2392 }
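
/* An illustrative CONSTFUN and caller (both names hypothetical),
   modeled on clear_by_pieces_1 below: fill LEN bytes of DEST with
   all-one bits, returning false if the target cannot do it by
   pieces.  */

static rtx
example_all_ones_cst (void *data ATTRIBUTE_UNUSED,
		      HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		      enum machine_mode mode)
{
  /* The same value is returned for every piece, so this behaves like
     a memset.  */
  return gen_int_mode (-1, mode);
}

static bool
example_fill_with_ones (rtx dest, unsigned HOST_WIDE_INT len)
{
  unsigned int align = MEM_ALIGN (dest);

  if (!can_store_by_pieces (len, example_all_ones_cst, NULL, align, true))
    return false;

  store_by_pieces (dest, len, example_all_ones_cst, NULL, align, true, 0);
  return true;
}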
2393
2394 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2395 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2396
2397 static void
2398 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2399 {
2400 struct store_by_pieces_d data;
2401
2402 if (len == 0)
2403 return;
2404
2405 data.constfun = clear_by_pieces_1;
2406 data.constfundata = NULL;
2407 data.len = len;
2408 data.to = to;
2409 store_by_pieces_1 (&data, align);
2410 }
2411
2412 /* Callback routine for clear_by_pieces.
2413 Return const0_rtx unconditionally. */
2414
2415 static rtx
2416 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2417 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2418 enum machine_mode mode ATTRIBUTE_UNUSED)
2419 {
2420 return const0_rtx;
2421 }
2422
2423 /* Subroutine of clear_by_pieces and store_by_pieces.
2424 Generate several move instructions to store LEN bytes of block TO. (A MEM
2425 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2426
2427 static void
2428 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2429 unsigned int align ATTRIBUTE_UNUSED)
2430 {
2431 enum machine_mode to_addr_mode
2432 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2433 rtx to_addr = XEXP (data->to, 0);
2434 unsigned int max_size = STORE_MAX_PIECES + 1;
2435 enum insn_code icode;
2436
2437 data->offset = 0;
2438 data->to_addr = to_addr;
2439 data->autinc_to
2440 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2441 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2442
2443 data->explicit_inc_to = 0;
2444 data->reverse
2445 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2446 if (data->reverse)
2447 data->offset = data->len;
2448
2449 /* If storing requires more than two move insns,
2450 copy addresses to registers (to make displacements shorter)
2451 and use post-increment if available. */
2452 if (!data->autinc_to
2453 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2454 {
2455 /* Determine the main mode we'll be using.
2456 MODE might not be used depending on the definitions of the
2457 USE_* macros below. */
2458 enum machine_mode mode ATTRIBUTE_UNUSED
2459 = widest_int_mode_for_size (max_size);
2460
2461 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2462 {
2463 data->to_addr = copy_to_mode_reg (to_addr_mode,
2464 plus_constant (to_addr, data->len));
2465 data->autinc_to = 1;
2466 data->explicit_inc_to = -1;
2467 }
2468
2469 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2470 && ! data->autinc_to)
2471 {
2472 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2473 data->autinc_to = 1;
2474 data->explicit_inc_to = 1;
2475 }
2476
2477 if ( !data->autinc_to && CONSTANT_P (to_addr))
2478 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2479 }
2480
2481 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2482
2483 /* First store what we can in the largest integer mode, then go to
2484 successively smaller modes. */
2485
2486 while (max_size > 1)
2487 {
2488 enum machine_mode mode = widest_int_mode_for_size (max_size);
2489
2490 if (mode == VOIDmode)
2491 break;
2492
2493 icode = optab_handler (mov_optab, mode);
2494 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2495 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2496
2497 max_size = GET_MODE_SIZE (mode);
2498 }
2499
2500 /* The code above should have handled everything. */
2501 gcc_assert (!data->len);
2502 }
2503
2504 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2505 with move instructions for mode MODE. GENFUN is the gen_... function
2506 to make a move insn for that mode. DATA has all the other info. */
2507
2508 static void
2509 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2510 struct store_by_pieces_d *data)
2511 {
2512 unsigned int size = GET_MODE_SIZE (mode);
2513 rtx to1, cst;
2514
2515 while (data->len >= size)
2516 {
2517 if (data->reverse)
2518 data->offset -= size;
2519
2520 if (data->autinc_to)
2521 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2522 data->offset);
2523 else
2524 to1 = adjust_address (data->to, mode, data->offset);
2525
2526 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2527 emit_insn (gen_add2_insn (data->to_addr,
2528 GEN_INT (-(HOST_WIDE_INT) size)));
2529
2530 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2531 emit_insn ((*genfun) (to1, cst));
2532
2533 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2534 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2535
2536 if (! data->reverse)
2537 data->offset += size;
2538
2539 data->len -= size;
2540 }
2541 }
2542 \f
2543 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2544 its length in bytes. */
2545
2546 rtx
2547 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2548 unsigned int expected_align, HOST_WIDE_INT expected_size)
2549 {
2550 enum machine_mode mode = GET_MODE (object);
2551 unsigned int align;
2552
2553 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2554
2555 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2556 just move a zero. Otherwise, do this a piece at a time. */
2557 if (mode != BLKmode
2558 && CONST_INT_P (size)
2559 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2560 {
2561 rtx zero = CONST0_RTX (mode);
2562 if (zero != NULL)
2563 {
2564 emit_move_insn (object, zero);
2565 return NULL;
2566 }
2567
2568 if (COMPLEX_MODE_P (mode))
2569 {
2570 zero = CONST0_RTX (GET_MODE_INNER (mode));
2571 if (zero != NULL)
2572 {
2573 write_complex_part (object, zero, 0);
2574 write_complex_part (object, zero, 1);
2575 return NULL;
2576 }
2577 }
2578 }
2579
2580 if (size == const0_rtx)
2581 return NULL;
2582
2583 align = MEM_ALIGN (object);
2584
2585 if (CONST_INT_P (size)
2586 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2587 clear_by_pieces (object, INTVAL (size), align);
2588 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2589 expected_align, expected_size))
2590 ;
2591 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2592 return set_storage_via_libcall (object, size, const0_rtx,
2593 method == BLOCK_OP_TAILCALL);
2594 else
2595 gcc_unreachable ();
2596
2597 return NULL;
2598 }
2599
2600 rtx
2601 clear_storage (rtx object, rtx size, enum block_op_methods method)
2602 {
2603 return clear_storage_hints (object, size, method, 0, -1);
2604 }
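
/* A sketch (hypothetical name): zero a SIZE-byte stack temporary.
   Whether this becomes clear_by_pieces, a setmem pattern or a memset
   libcall is decided inside clear_storage_hints.  */

static rtx
example_zero_temp (HOST_WIDE_INT size)
{
  rtx mem = assign_stack_temp (BLKmode, size, 0);

  clear_storage (mem, GEN_INT (size), BLOCK_OP_NORMAL);
  return mem;
}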
2605
2606
2607 /* A subroutine of clear_storage. Expand a call to memset.
2608 Return the return value of memset, 0 otherwise. */
2609
2610 rtx
2611 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2612 {
2613 tree call_expr, fn, object_tree, size_tree, val_tree;
2614 enum machine_mode size_mode;
2615 rtx retval;
2616
2617 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2618 place those pseudos into a VAR_DECL and use them later. */
2619
2620 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2621
2622 size_mode = TYPE_MODE (sizetype);
2623 size = convert_to_mode (size_mode, size, 1);
2624 size = copy_to_mode_reg (size_mode, size);
2625
2626 /* It is incorrect to use the libcall calling conventions to call
2627 memset in this context. This could be a user call to memset and
2628 the user may wish to examine the return value from memset. For
2629 targets where libcalls and normal calls have different conventions
2630 for returning pointers, we could end up generating incorrect code. */
2631
2632 object_tree = make_tree (ptr_type_node, object);
2633 if (!CONST_INT_P (val))
2634 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2635 size_tree = make_tree (sizetype, size);
2636 val_tree = make_tree (integer_type_node, val);
2637
2638 fn = clear_storage_libcall_fn (true);
2639 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2640 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2641
2642 retval = expand_normal (call_expr);
2643
2644 return retval;
2645 }
2646
2647 /* A subroutine of set_storage_via_libcall. Create the tree node
2648 for the function we use for block clears. The first time FOR_CALL
2649 is true, we call assemble_external. */
2650
2651 tree block_clear_fn;
2652
2653 void
2654 init_block_clear_fn (const char *asmspec)
2655 {
2656 if (!block_clear_fn)
2657 {
2658 tree fn, args;
2659
2660 fn = get_identifier ("memset");
2661 args = build_function_type_list (ptr_type_node, ptr_type_node,
2662 integer_type_node, sizetype,
2663 NULL_TREE);
2664
2665 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2666 DECL_EXTERNAL (fn) = 1;
2667 TREE_PUBLIC (fn) = 1;
2668 DECL_ARTIFICIAL (fn) = 1;
2669 TREE_NOTHROW (fn) = 1;
2670 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2671 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2672
2673 block_clear_fn = fn;
2674 }
2675
2676 if (asmspec)
2677 set_user_assembler_name (block_clear_fn, asmspec);
2678 }
2679
2680 static tree
2681 clear_storage_libcall_fn (int for_call)
2682 {
2683 static bool emitted_extern;
2684
2685 if (!block_clear_fn)
2686 init_block_clear_fn (NULL);
2687
2688 if (for_call && !emitted_extern)
2689 {
2690 emitted_extern = true;
2691 make_decl_rtl (block_clear_fn);
2692 assemble_external (block_clear_fn);
2693 }
2694
2695 return block_clear_fn;
2696 }
2697 \f
2698 /* Expand a setmem pattern; return true if successful. */
2699
2700 bool
2701 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2702 unsigned int expected_align, HOST_WIDE_INT expected_size)
2703 {
2704 /* Try the most limited insn first, because there's no point
2705 including more than one in the machine description unless
2706 the more limited one has some advantage. */
2707
2708 enum machine_mode mode;
2709
2710 if (expected_align < align)
2711 expected_align = align;
2712
2713 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2714 mode = GET_MODE_WIDER_MODE (mode))
2715 {
2716 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2717
2718 if (code != CODE_FOR_nothing
2719 /* We don't need MODE to be narrower than
2720 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2721 the mode mask, as it is returned by the macro, it will
2722 definitely be less than the actual mode mask. */
2723 && ((CONST_INT_P (size)
2724 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2725 <= (GET_MODE_MASK (mode) >> 1)))
2726 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
2727 {
2728 struct expand_operand ops[6];
2729 unsigned int nops;
2730
2731 nops = insn_data[(int) code].n_generator_args;
2732 gcc_assert (nops == 4 || nops == 6);
2733
2734 create_fixed_operand (&ops[0], object);
2735 /* The check above guarantees that this size conversion is valid. */
2736 create_convert_operand_to (&ops[1], size, mode, true);
2737 create_convert_operand_from (&ops[2], val, byte_mode, true);
2738 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2739 if (nops == 6)
2740 {
2741 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2742 create_integer_operand (&ops[5], expected_size);
2743 }
2744 if (maybe_expand_insn (code, nops, ops))
2745 return true;
2746 }
2747 }
2748
2749 return false;
2750 }
2751
2752 \f
2753 /* Write to one of the components of the complex value CPLX. Write VAL to
2754 the real part if IMAG_P is false, and the imaginary part if it's true. */
2755
2756 static void
2757 write_complex_part (rtx cplx, rtx val, bool imag_p)
2758 {
2759 enum machine_mode cmode;
2760 enum machine_mode imode;
2761 unsigned ibitsize;
2762
2763 if (GET_CODE (cplx) == CONCAT)
2764 {
2765 emit_move_insn (XEXP (cplx, imag_p), val);
2766 return;
2767 }
2768
2769 cmode = GET_MODE (cplx);
2770 imode = GET_MODE_INNER (cmode);
2771 ibitsize = GET_MODE_BITSIZE (imode);
2772
2773 /* For MEMs simplify_gen_subreg may generate an invalid new address
2774 because, e.g., the original address is considered mode-dependent
2775 by the target, which restricts simplify_subreg from invoking
2776 adjust_address_nv. Instead of preparing fallback support for an
2777 invalid address, we call adjust_address_nv directly. */
2778 if (MEM_P (cplx))
2779 {
2780 emit_move_insn (adjust_address_nv (cplx, imode,
2781 imag_p ? GET_MODE_SIZE (imode) : 0),
2782 val);
2783 return;
2784 }
2785
2786 /* If the sub-object is at least word sized, then we know that subregging
2787 will work. This special case is important, since store_bit_field
2788 wants to operate on integer modes, and there's rarely an OImode to
2789 correspond to TCmode. */
2790 if (ibitsize >= BITS_PER_WORD
2791 /* For hard regs we have exact predicates. Assume we can split
2792 the original object if it spans an even number of hard regs.
2793 This special case is important for SCmode on 64-bit platforms
2794 where the natural size of floating-point regs is 32-bit. */
2795 || (REG_P (cplx)
2796 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2797 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2798 {
2799 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2800 imag_p ? GET_MODE_SIZE (imode) : 0);
2801 if (part)
2802 {
2803 emit_move_insn (part, val);
2804 return;
2805 }
2806 else
2807 /* simplify_gen_subreg may fail for sub-word MEMs. */
2808 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2809 }
2810
2811 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2812 }
2813
2814 /* Extract one of the components of the complex value CPLX. Extract the
2815 real part if IMAG_P is false, and the imaginary part if it's true. */
2816
2817 static rtx
2818 read_complex_part (rtx cplx, bool imag_p)
2819 {
2820 enum machine_mode cmode, imode;
2821 unsigned ibitsize;
2822
2823 if (GET_CODE (cplx) == CONCAT)
2824 return XEXP (cplx, imag_p);
2825
2826 cmode = GET_MODE (cplx);
2827 imode = GET_MODE_INNER (cmode);
2828 ibitsize = GET_MODE_BITSIZE (imode);
2829
2830 /* Special case reads from complex constants that got spilled to memory. */
2831 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2832 {
2833 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2834 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2835 {
2836 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2837 if (CONSTANT_CLASS_P (part))
2838 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2839 }
2840 }
2841
2842 /* For MEMs simplify_gen_subreg may generate an invalid new address
2843 because, e.g., the original address is considered mode-dependent
2844 by the target, which restricts simplify_subreg from invoking
2845 adjust_address_nv. Instead of preparing fallback support for an
2846 invalid address, we call adjust_address_nv directly. */
2847 if (MEM_P (cplx))
2848 return adjust_address_nv (cplx, imode,
2849 imag_p ? GET_MODE_SIZE (imode) : 0);
2850
2851 /* If the sub-object is at least word sized, then we know that subregging
2852 will work. This special case is important, since extract_bit_field
2853 wants to operate on integer modes, and there's rarely an OImode to
2854 correspond to TCmode. */
2855 if (ibitsize >= BITS_PER_WORD
2856 /* For hard regs we have exact predicates. Assume we can split
2857 the original object if it spans an even number of hard regs.
2858 This special case is important for SCmode on 64-bit platforms
2859 where the natural size of floating-point regs is 32-bit. */
2860 || (REG_P (cplx)
2861 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2862 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2863 {
2864 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2865 imag_p ? GET_MODE_SIZE (imode) : 0);
2866 if (ret)
2867 return ret;
2868 else
2869 /* simplify_gen_subreg may fail for sub-word MEMs. */
2870 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2871 }
2872
2873 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2874 true, false, NULL_RTX, imode, imode);
2875 }
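
/* A sketch (hypothetical name) combining the two helpers: swap the
   real and imaginary parts of CPLX in place.  Both parts are copied
   into pseudos first so that writing one part cannot clobber the
   other before it has been read.  */

static void
example_swap_complex_parts (rtx cplx)
{
  rtx re = copy_to_reg (read_complex_part (cplx, false));
  rtx im = copy_to_reg (read_complex_part (cplx, true));

  write_complex_part (cplx, im, false);
  write_complex_part (cplx, re, true);
}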
2876 \f
2877 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2878 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2879 represented in NEW_MODE. If FORCE is true, this will never happen, as
2880 we'll force-create a SUBREG if needed. */
2881
2882 static rtx
2883 emit_move_change_mode (enum machine_mode new_mode,
2884 enum machine_mode old_mode, rtx x, bool force)
2885 {
2886 rtx ret;
2887
2888 if (push_operand (x, GET_MODE (x)))
2889 {
2890 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2891 MEM_COPY_ATTRIBUTES (ret, x);
2892 }
2893 else if (MEM_P (x))
2894 {
2895 /* We don't have to worry about changing the address since the
2896 size in bytes is supposed to be the same. */
2897 if (reload_in_progress)
2898 {
2899 /* Copy the MEM to change the mode and move any
2900 substitutions from the old MEM to the new one. */
2901 ret = adjust_address_nv (x, new_mode, 0);
2902 copy_replacements (x, ret);
2903 }
2904 else
2905 ret = adjust_address (x, new_mode, 0);
2906 }
2907 else
2908 {
2909 /* Note that we do want simplify_subreg's behavior of validating
2910 that the new mode is ok for a hard register. If we were to use
2911 simplify_gen_subreg, we would create the subreg, but would
2912 probably run into the target not being able to implement it. */
2913 /* Except, of course, when FORCE is true, when this is exactly what
2914 we want. Which is needed for CCmodes on some targets. */
2915 if (force)
2916 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2917 else
2918 ret = simplify_subreg (new_mode, x, old_mode, 0);
2919 }
2920
2921 return ret;
2922 }
2923
2924 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2925 an integer mode of the same size as MODE. Returns the instruction
2926 emitted, or NULL if such a move could not be generated. */
2927
2928 static rtx
2929 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2930 {
2931 enum machine_mode imode;
2932 enum insn_code code;
2933
2934 /* There must exist a mode of the exact size we require. */
2935 imode = int_mode_for_mode (mode);
2936 if (imode == BLKmode)
2937 return NULL_RTX;
2938
2939 /* The target must support moves in this mode. */
2940 code = optab_handler (mov_optab, imode);
2941 if (code == CODE_FOR_nothing)
2942 return NULL_RTX;
2943
2944 x = emit_move_change_mode (imode, mode, x, force);
2945 if (x == NULL_RTX)
2946 return NULL_RTX;
2947 y = emit_move_change_mode (imode, mode, y, force);
2948 if (y == NULL_RTX)
2949 return NULL_RTX;
2950 return emit_insn (GEN_FCN (code) (x, y));
2951 }
2952
2953 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2954 Return an equivalent MEM that does not use an auto-increment. */
2955
2956 static rtx
2957 emit_move_resolve_push (enum machine_mode mode, rtx x)
2958 {
2959 enum rtx_code code = GET_CODE (XEXP (x, 0));
2960 HOST_WIDE_INT adjust;
2961 rtx temp;
2962
2963 adjust = GET_MODE_SIZE (mode);
2964 #ifdef PUSH_ROUNDING
2965 adjust = PUSH_ROUNDING (adjust);
2966 #endif
2967 if (code == PRE_DEC || code == POST_DEC)
2968 adjust = -adjust;
2969 else if (code == PRE_MODIFY || code == POST_MODIFY)
2970 {
2971 rtx expr = XEXP (XEXP (x, 0), 1);
2972 HOST_WIDE_INT val;
2973
2974 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2975 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
2976 val = INTVAL (XEXP (expr, 1));
2977 if (GET_CODE (expr) == MINUS)
2978 val = -val;
2979 gcc_assert (adjust == val || adjust == -val);
2980 adjust = val;
2981 }
2982
2983 /* Do not use anti_adjust_stack, since we don't want to update
2984 stack_pointer_delta. */
2985 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2986 GEN_INT (adjust), stack_pointer_rtx,
2987 0, OPTAB_LIB_WIDEN);
2988 if (temp != stack_pointer_rtx)
2989 emit_move_insn (stack_pointer_rtx, temp);
2990
2991 switch (code)
2992 {
2993 case PRE_INC:
2994 case PRE_DEC:
2995 case PRE_MODIFY:
2996 temp = stack_pointer_rtx;
2997 break;
2998 case POST_INC:
2999 case POST_DEC:
3000 case POST_MODIFY:
3001 temp = plus_constant (stack_pointer_rtx, -adjust);
3002 break;
3003 default:
3004 gcc_unreachable ();
3005 }
3006
3007 return replace_equiv_address (x, temp);
3008 }
3009
3010 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3011 X is known to satisfy push_operand, and MODE is known to be complex.
3012 Returns the last instruction emitted. */
3013
3014 rtx
3015 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3016 {
3017 enum machine_mode submode = GET_MODE_INNER (mode);
3018 bool imag_first;
3019
3020 #ifdef PUSH_ROUNDING
3021 unsigned int submodesize = GET_MODE_SIZE (submode);
3022
3023 /* If we are pushing to the stack but the size is smaller than what the
3024 machine can push exactly, we need to use move instructions. */
3025 if (PUSH_ROUNDING (submodesize) != submodesize)
3026 {
3027 x = emit_move_resolve_push (mode, x);
3028 return emit_move_insn (x, y);
3029 }
3030 #endif
3031
3032 /* Note that the real part always precedes the imag part in memory
3033 regardless of machine's endianness. */
3034 switch (GET_CODE (XEXP (x, 0)))
3035 {
3036 case PRE_DEC:
3037 case POST_DEC:
3038 imag_first = true;
3039 break;
3040 case PRE_INC:
3041 case POST_INC:
3042 imag_first = false;
3043 break;
3044 default:
3045 gcc_unreachable ();
3046 }
3047
3048 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3049 read_complex_part (y, imag_first));
3050 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3051 read_complex_part (y, !imag_first));
3052 }
3053
3054 /* A subroutine of emit_move_complex. Perform the move from Y to X
3055 via two moves of the parts. Returns the last instruction emitted. */
3056
3057 rtx
3058 emit_move_complex_parts (rtx x, rtx y)
3059 {
3060 /* Show the output dies here. This is necessary for SUBREGs
3061 of pseudos since we cannot track their lifetimes correctly;
3062 hard regs shouldn't appear here except as return values. */
3063 if (!reload_completed && !reload_in_progress
3064 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3065 emit_clobber (x);
3066
3067 write_complex_part (x, read_complex_part (y, false), false);
3068 write_complex_part (x, read_complex_part (y, true), true);
3069
3070 return get_last_insn ();
3071 }
3072
3073 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3074 MODE is known to be complex. Returns the last instruction emitted. */
3075
3076 static rtx
3077 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3078 {
3079 bool try_int;
3080
3081 /* Need to take special care for pushes, to maintain proper ordering
3082 of the data, and possibly extra padding. */
3083 if (push_operand (x, mode))
3084 return emit_move_complex_push (mode, x, y);
3085
3086 /* See if we can coerce the target into moving both values at once. */
3087
3088 /* Move floating point as parts. */
3089 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3090 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3091 try_int = false;
3092 /* Not possible if the values are inherently not adjacent. */
3093 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3094 try_int = false;
3095 /* Is possible if both are registers (or subregs of registers). */
3096 else if (register_operand (x, mode) && register_operand (y, mode))
3097 try_int = true;
3098 /* If one of the operands is a memory, and alignment constraints
3099 are friendly enough, we may be able to do combined memory operations.
3100 We do not attempt this if Y is a constant because that combination is
3101 usually better with the by-parts thing below. */
3102 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3103 && (!STRICT_ALIGNMENT
3104 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3105 try_int = true;
3106 else
3107 try_int = false;
3108
3109 if (try_int)
3110 {
3111 rtx ret;
3112
3113 /* For memory to memory moves, optimal behavior can be had with the
3114 existing block move logic. */
3115 if (MEM_P (x) && MEM_P (y))
3116 {
3117 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3118 BLOCK_OP_NO_LIBCALL);
3119 return get_last_insn ();
3120 }
3121
3122 ret = emit_move_via_integer (mode, x, y, true);
3123 if (ret)
3124 return ret;
3125 }
3126
3127 return emit_move_complex_parts (x, y);
3128 }
3129
3130 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3131 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3132
3133 static rtx
3134 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3135 {
3136 rtx ret;
3137
3138 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3139 if (mode != CCmode)
3140 {
3141 enum insn_code code = optab_handler (mov_optab, CCmode);
3142 if (code != CODE_FOR_nothing)
3143 {
3144 x = emit_move_change_mode (CCmode, mode, x, true);
3145 y = emit_move_change_mode (CCmode, mode, y, true);
3146 return emit_insn (GEN_FCN (code) (x, y));
3147 }
3148 }
3149
3150 /* Otherwise, find the MODE_INT mode of the same width. */
3151 ret = emit_move_via_integer (mode, x, y, false);
3152 gcc_assert (ret != NULL);
3153 return ret;
3154 }
3155
3156 /* Return true if word I of OP lies entirely in the
3157 undefined bits of a paradoxical subreg. */
3158
3159 static bool
3160 undefined_operand_subword_p (const_rtx op, int i)
3161 {
3162 enum machine_mode innermode, innermostmode;
3163 int offset;
3164 if (GET_CODE (op) != SUBREG)
3165 return false;
3166 innermode = GET_MODE (op);
3167 innermostmode = GET_MODE (SUBREG_REG (op));
3168 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3169 /* The SUBREG_BYTE represents offset, as if the value were stored in
3170 memory, except for a paradoxical subreg where we define
3171 SUBREG_BYTE to be 0; undo this exception as in
3172 simplify_subreg. */
3173 if (SUBREG_BYTE (op) == 0
3174 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3175 {
3176 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3177 if (WORDS_BIG_ENDIAN)
3178 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3179 if (BYTES_BIG_ENDIAN)
3180 offset += difference % UNITS_PER_WORD;
3181 }
3182 if (offset >= GET_MODE_SIZE (innermostmode)
3183 || offset <= -GET_MODE_SIZE (word_mode))
3184 return true;
3185 return false;
3186 }
3187
3188 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3189 MODE is any multi-word or full-word mode that lacks a move_insn
3190 pattern. Note that you will get better code if you define such
3191 patterns, even if they must turn into multiple assembler instructions. */
3192
3193 static rtx
3194 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3195 {
3196 rtx last_insn = 0;
3197 rtx seq, inner;
3198 bool need_clobber;
3199 int i;
3200
3201 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3202
3203 /* If X is a push on the stack, do the push now and replace
3204 X with a reference to the stack pointer. */
3205 if (push_operand (x, mode))
3206 x = emit_move_resolve_push (mode, x);
3207
3208 /* If we are in reload, see if either operand is a MEM whose address
3209 is scheduled for replacement. */
3210 if (reload_in_progress && MEM_P (x)
3211 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3212 x = replace_equiv_address_nv (x, inner);
3213 if (reload_in_progress && MEM_P (y)
3214 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3215 y = replace_equiv_address_nv (y, inner);
3216
3217 start_sequence ();
3218
3219 need_clobber = false;
3220 for (i = 0;
3221 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3222 i++)
3223 {
3224 rtx xpart = operand_subword (x, i, 1, mode);
3225 rtx ypart;
3226
3227 /* Do not generate code for a move if it would come entirely
3228 from the undefined bits of a paradoxical subreg. */
3229 if (undefined_operand_subword_p (y, i))
3230 continue;
3231
3232 ypart = operand_subword (y, i, 1, mode);
3233
3234 /* If we can't get a part of Y, put Y into memory if it is a
3235 constant. Otherwise, force it into a register. Then we must
3236 be able to get a part of Y. */
3237 if (ypart == 0 && CONSTANT_P (y))
3238 {
3239 y = use_anchored_address (force_const_mem (mode, y));
3240 ypart = operand_subword (y, i, 1, mode);
3241 }
3242 else if (ypart == 0)
3243 ypart = operand_subword_force (y, i, mode);
3244
3245 gcc_assert (xpart && ypart);
3246
3247 need_clobber |= (GET_CODE (xpart) == SUBREG);
3248
3249 last_insn = emit_move_insn (xpart, ypart);
3250 }
3251
3252 seq = get_insns ();
3253 end_sequence ();
3254
3255 /* Show the output dies here. This is necessary for SUBREGs
3256 of pseudos since we cannot track their lifetimes correctly;
3257 hard regs shouldn't appear here except as return values.
3258 We never want to emit such a clobber after reload. */
3259 if (x != y
3260 && ! (reload_in_progress || reload_completed)
3261 && need_clobber != 0)
3262 emit_clobber (x);
3263
3264 emit_insn (seq);
3265
3266 return last_insn;
3267 }
3268
3269 /* Low level part of emit_move_insn.
3270 Called just like emit_move_insn, but assumes X and Y
3271 are basically valid. */
3272
3273 rtx
3274 emit_move_insn_1 (rtx x, rtx y)
3275 {
3276 enum machine_mode mode = GET_MODE (x);
3277 enum insn_code code;
3278
3279 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3280
3281 code = optab_handler (mov_optab, mode);
3282 if (code != CODE_FOR_nothing)
3283 return emit_insn (GEN_FCN (code) (x, y));
3284
3285 /* Expand complex moves by moving real part and imag part. */
3286 if (COMPLEX_MODE_P (mode))
3287 return emit_move_complex (mode, x, y);
3288
3289 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3290 || ALL_FIXED_POINT_MODE_P (mode))
3291 {
3292 rtx result = emit_move_via_integer (mode, x, y, true);
3293
3294 /* If we can't find an integer mode, use multi words. */
3295 if (result)
3296 return result;
3297 else
3298 return emit_move_multi_word (mode, x, y);
3299 }
3300
3301 if (GET_MODE_CLASS (mode) == MODE_CC)
3302 return emit_move_ccmode (mode, x, y);
3303
3304 /* Try using a move pattern for the corresponding integer mode. This is
3305 only safe when simplify_subreg can convert MODE constants into integer
3306 constants. At present, it can only do this reliably if the value
3307 fits within a HOST_WIDE_INT. */
3308 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3309 {
3310 rtx ret = emit_move_via_integer (mode, x, y, false);
3311 if (ret)
3312 return ret;
3313 }
3314
3315 return emit_move_multi_word (mode, x, y);
3316 }
3317
3318 /* Generate code to copy Y into X.
3319 Both Y and X must have the same mode, except that
3320 Y can be a constant with VOIDmode.
3321 This mode cannot be BLKmode; use emit_block_move for that.
3322
3323 Return the last instruction emitted. */
3324
3325 rtx
3326 emit_move_insn (rtx x, rtx y)
3327 {
3328 enum machine_mode mode = GET_MODE (x);
3329 rtx y_cst = NULL_RTX;
3330 rtx last_insn, set;
3331
3332 gcc_assert (mode != BLKmode
3333 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3334
3335 if (CONSTANT_P (y))
3336 {
3337 if (optimize
3338 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3339 && (last_insn = compress_float_constant (x, y)))
3340 return last_insn;
3341
3342 y_cst = y;
3343
3344 if (!targetm.legitimate_constant_p (mode, y))
3345 {
3346 y = force_const_mem (mode, y);
3347
3348 /* If the target's cannot_force_const_mem prevented the spill,
3349 assume that the target's move expanders will also take care
3350 of the non-legitimate constant. */
3351 if (!y)
3352 y = y_cst;
3353 else
3354 y = use_anchored_address (y);
3355 }
3356 }
3357
3358 /* If X or Y are memory references, verify that their addresses are valid
3359 for the machine. */
3360 if (MEM_P (x)
3361 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3362 MEM_ADDR_SPACE (x))
3363 && ! push_operand (x, GET_MODE (x))))
3364 x = validize_mem (x);
3365
3366 if (MEM_P (y)
3367 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3368 MEM_ADDR_SPACE (y)))
3369 y = validize_mem (y);
3370
3371 gcc_assert (mode != BLKmode);
3372
3373 last_insn = emit_move_insn_1 (x, y);
3374
3375 if (y_cst && REG_P (x)
3376 && (set = single_set (last_insn)) != NULL_RTX
3377 && SET_DEST (set) == x
3378 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3379 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3380
3381 return last_insn;
3382 }
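
/* A sketch of the most common pattern (hypothetical name, and MODE is
   assumed to be a scalar integer mode so that gen_int_mode applies):
   the constant carries VOIDmode, so the destination supplies the mode,
   and emit_move_insn takes care of forcing illegitimate constants to
   memory or splitting multi-word moves.  */

static rtx
example_load_constant (enum machine_mode mode, HOST_WIDE_INT value)
{
  rtx reg = gen_reg_rtx (mode);

  emit_move_insn (reg, gen_int_mode (value, mode));
  return reg;
}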
3383
3384 /* If Y is representable exactly in a narrower mode, and the target can
3385 perform the extension directly from constant or memory, then emit the
3386 move as an extension. */
3387
3388 static rtx
3389 compress_float_constant (rtx x, rtx y)
3390 {
3391 enum machine_mode dstmode = GET_MODE (x);
3392 enum machine_mode orig_srcmode = GET_MODE (y);
3393 enum machine_mode srcmode;
3394 REAL_VALUE_TYPE r;
3395 int oldcost, newcost;
3396 bool speed = optimize_insn_for_speed_p ();
3397
3398 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3399
3400 if (targetm.legitimate_constant_p (dstmode, y))
3401 oldcost = rtx_cost (y, SET, speed);
3402 else
3403 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3404
3405 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3406 srcmode != orig_srcmode;
3407 srcmode = GET_MODE_WIDER_MODE (srcmode))
3408 {
3409 enum insn_code ic;
3410 rtx trunc_y, last_insn;
3411
3412 /* Skip if the target can't extend this way. */
3413 ic = can_extend_p (dstmode, srcmode, 0);
3414 if (ic == CODE_FOR_nothing)
3415 continue;
3416
3417 /* Skip if the narrowed value isn't exact. */
3418 if (! exact_real_truncate (srcmode, &r))
3419 continue;
3420
3421 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3422
3423 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3424 {
3425 /* Skip if the target needs extra instructions to perform
3426 the extension. */
3427 if (!insn_operand_matches (ic, 1, trunc_y))
3428 continue;
3429 /* This is valid, but may not be cheaper than the original. */
3430 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3431 if (oldcost < newcost)
3432 continue;
3433 }
3434 else if (float_extend_from_mem[dstmode][srcmode])
3435 {
3436 trunc_y = force_const_mem (srcmode, trunc_y);
3437 /* This is valid, but may not be cheaper than the original. */
3438 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3439 if (oldcost < newcost)
3440 continue;
3441 trunc_y = validize_mem (trunc_y);
3442 }
3443 else
3444 continue;
3445
3446 /* For CSE's benefit, force the compressed constant pool entry
3447 into a new pseudo. This constant may be used in different modes,
3448 and if not, combine will put things back together for us. */
3449 trunc_y = force_reg (srcmode, trunc_y);
3450 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3451 last_insn = get_last_insn ();
3452
3453 if (REG_P (x))
3454 set_unique_reg_note (last_insn, REG_EQUAL, y);
3455
3456 return last_insn;
3457 }
3458
3459 return NULL_RTX;
3460 }
3461 \f
3462 /* Pushing data onto the stack. */
3463
3464 /* Push a block of length SIZE (perhaps variable)
3465 and return an rtx to address the beginning of the block.
3466 The value may be virtual_outgoing_args_rtx.
3467
3468 EXTRA is the number of bytes of padding to push in addition to SIZE.
3469 BELOW nonzero means this padding comes at low addresses;
3470 otherwise, the padding comes at high addresses. */
3471
3472 rtx
3473 push_block (rtx size, int extra, int below)
3474 {
3475 rtx temp;
3476
3477 size = convert_modes (Pmode, ptr_mode, size, 1);
3478 if (CONSTANT_P (size))
3479 anti_adjust_stack (plus_constant (size, extra));
3480 else if (REG_P (size) && extra == 0)
3481 anti_adjust_stack (size);
3482 else
3483 {
3484 temp = copy_to_mode_reg (Pmode, size);
3485 if (extra != 0)
3486 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3487 temp, 0, OPTAB_LIB_WIDEN);
3488 anti_adjust_stack (temp);
3489 }
3490
3491 #ifndef STACK_GROWS_DOWNWARD
3492 if (0)
3493 #else
3494 if (1)
3495 #endif
3496 {
3497 temp = virtual_outgoing_args_rtx;
3498 if (extra != 0 && below)
3499 temp = plus_constant (temp, extra);
3500 }
3501 else
3502 {
3503 if (CONST_INT_P (size))
3504 temp = plus_constant (virtual_outgoing_args_rtx,
3505 -INTVAL (size) - (below ? 0 : extra));
3506 else if (extra != 0 && !below)
3507 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3508 negate_rtx (Pmode, plus_constant (size, extra)));
3509 else
3510 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3511 negate_rtx (Pmode, size));
3512 }
3513
3514 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3515 }
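
/* A sketch (hypothetical name) of using push_block: allocate SIZE
   bytes of outgoing argument space and copy a BLKmode argument into
   it.  The conservative byte alignment is an assumption; a real
   caller would know the actual alignment of the block.  */

static void
example_push_blk_arg (rtx src_mem, HOST_WIDE_INT size)
{
  rtx addr = push_block (GEN_INT (size), 0, 0);
  rtx dst = gen_rtx_MEM (BLKmode, addr);

  set_mem_align (dst, BITS_PER_UNIT);
  emit_block_move (dst, src_mem, GEN_INT (size), BLOCK_OP_NORMAL);
}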
3516
3517 #ifdef PUSH_ROUNDING
3518
3519 /* Emit single push insn. */
3520
3521 static void
3522 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3523 {
3524 rtx dest_addr;
3525 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3526 rtx dest;
3527 enum insn_code icode;
3528
3529 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3530 /* If there is a push pattern, use it. Otherwise try the old way of
3531 throwing a MEM representing the push operation at the move expander. */
3532 icode = optab_handler (push_optab, mode);
3533 if (icode != CODE_FOR_nothing)
3534 {
3535 struct expand_operand ops[1];
3536
3537 create_input_operand (&ops[0], x, mode);
3538 if (maybe_expand_insn (icode, 1, ops))
3539 return;
3540 }
3541 if (GET_MODE_SIZE (mode) == rounded_size)
3542 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3543 /* If we are to pad downward, adjust the stack pointer first and
3544 then store X into the stack location using an offset. This is
3545 because emit_move_insn does not know how to pad; it does not have
3546 access to type. */
3547 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3548 {
3549 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3550 HOST_WIDE_INT offset;
3551
3552 emit_move_insn (stack_pointer_rtx,
3553 expand_binop (Pmode,
3554 #ifdef STACK_GROWS_DOWNWARD
3555 sub_optab,
3556 #else
3557 add_optab,
3558 #endif
3559 stack_pointer_rtx,
3560 GEN_INT (rounded_size),
3561 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3562
3563 offset = (HOST_WIDE_INT) padding_size;
3564 #ifdef STACK_GROWS_DOWNWARD
3565 if (STACK_PUSH_CODE == POST_DEC)
3566 /* We have already decremented the stack pointer, so get the
3567 previous value. */
3568 offset += (HOST_WIDE_INT) rounded_size;
3569 #else
3570 if (STACK_PUSH_CODE == POST_INC)
3571 /* We have already incremented the stack pointer, so get the
3572 previous value. */
3573 offset -= (HOST_WIDE_INT) rounded_size;
3574 #endif
3575 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3576 }
3577 else
3578 {
3579 #ifdef STACK_GROWS_DOWNWARD
3580 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3581 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3582 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3583 #else
3584 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3585 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3586 GEN_INT (rounded_size));
3587 #endif
3588 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3589 }
3590
3591 dest = gen_rtx_MEM (mode, dest_addr);
3592
3593 if (type != 0)
3594 {
3595 set_mem_attributes (dest, type, 1);
3596
3597 if (flag_optimize_sibling_calls)
3598 /* Function incoming arguments may overlap with sibling call
3599 outgoing arguments and we cannot allow reordering of reads
3600 from function arguments with stores to outgoing arguments
3601 of sibling calls. */
3602 set_mem_alias_set (dest, 0);
3603 }
3604 emit_move_insn (dest, x);
3605 }
3606 #endif
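
/* As an illustration (hypothetical values): with 4-byte PUSH_ROUNDING on
a STACK_GROWS_DOWNWARD target whose STACK_PUSH_CODE is PRE_DEC, pushing
an HImode value gives rounded_size == 4 while GET_MODE_SIZE (HImode) == 2.
If the argument pads downward, emit_single_push_insn first subtracts 4
from the stack pointer and then stores the 2-byte value at
stack_pointer_rtx + 2, leaving the padding at the low end of the slot.  */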
3607
3608 /* Generate code to push X onto the stack, assuming it has mode MODE and
3609 type TYPE.
3610 MODE is redundant except when X is a CONST_INT (since they don't
3611 carry mode info).
3612 SIZE is an rtx for the size of data to be copied (in bytes),
3613 needed only if X is BLKmode.
3614
3615 ALIGN (in bits) is the maximum alignment we can assume.
3616
3617 If PARTIAL and REG are both nonzero, then copy that many of the first
3618 bytes of X into registers starting with REG, and push the rest of X.
3619 The amount of space pushed is decreased by PARTIAL bytes.
3620 REG must be a hard register in this case.
3621 If REG is zero but PARTIAL is not, take all other actions for an
3622 argument partially in registers, but do not actually load any
3623 registers.
3624
3625 EXTRA is the amount in bytes of extra space to leave next to this arg.
3626 This is ignored if an argument block has already been allocated.
3627
3628 On a machine that lacks real push insns, ARGS_ADDR is the address of
3629 the bottom of the argument block for this call. We use indexing off there
3630 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3631 argument block has not been preallocated.
3632
3633 ARGS_SO_FAR is the size of args previously pushed for this call.
3634
3635 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3636 for arguments passed in registers. If nonzero, it will be the number
3637 of bytes required. */
3638
3639 void
3640 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3641 unsigned int align, int partial, rtx reg, int extra,
3642 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3643 rtx alignment_pad)
3644 {
3645 rtx xinner;
3646 enum direction stack_direction
3647 #ifdef STACK_GROWS_DOWNWARD
3648 = downward;
3649 #else
3650 = upward;
3651 #endif
3652
3653 /* Decide where to pad the argument: `downward' for below,
3654 `upward' for above, or `none' for no padding.
3655 Default is below for small data on big-endian machines; else above. */
3656 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3657
3658 /* Invert direction if stack is post-decrement.
3659 FIXME: why? */
3660 if (STACK_PUSH_CODE == POST_DEC)
3661 if (where_pad != none)
3662 where_pad = (where_pad == downward ? upward : downward);
3663
3664 xinner = x;
3665
3666 if (mode == BLKmode
3667 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3668 {
3669 /* Copy a block into the stack, entirely or partially. */
3670
3671 rtx temp;
3672 int used;
3673 int offset;
3674 int skip;
3675
3676 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3677 used = partial - offset;
3678
3679 if (mode != BLKmode)
3680 {
3681 /* A value is to be stored in an insufficiently aligned
3682 stack slot; copy via a suitably aligned slot if
3683 necessary. */
3684 size = GEN_INT (GET_MODE_SIZE (mode));
3685 if (!MEM_P (xinner))
3686 {
3687 temp = assign_temp (type, 0, 1, 1);
3688 emit_move_insn (temp, xinner);
3689 xinner = temp;
3690 }
3691 }
3692
3693 gcc_assert (size);
3694
3695 /* USED is now the # of bytes we need not copy to the stack
3696 because registers will take care of them. */
3697
3698 if (partial != 0)
3699 xinner = adjust_address (xinner, BLKmode, used);
3700
3701 /* If the partial register-part of the arg counts in its stack size,
3702 skip the part of stack space corresponding to the registers.
3703 Otherwise, start copying to the beginning of the stack space,
3704 by setting SKIP to 0. */
3705 skip = (reg_parm_stack_space == 0) ? 0 : used;
3706
3707 #ifdef PUSH_ROUNDING
3708 /* Do it with several push insns if that doesn't take lots of insns
3709 and if there is no difficulty with push insns that skip bytes
3710 on the stack for alignment purposes. */
3711 if (args_addr == 0
3712 && PUSH_ARGS
3713 && CONST_INT_P (size)
3714 && skip == 0
3715 && MEM_ALIGN (xinner) >= align
3716 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3717 /* Here we avoid the case of a structure whose weak alignment
3718 would force many pushes of small amounts of data, because
3719 the rounding done by such small pushes causes trouble. */
3720 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3721 || align >= BIGGEST_ALIGNMENT
3722 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3723 == (align / BITS_PER_UNIT)))
3724 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3725 {
3726 /* Push padding now if padding above and stack grows down,
3727 or if padding below and stack grows up.
3728 But if space already allocated, this has already been done. */
3729 if (extra && args_addr == 0
3730 && where_pad != none && where_pad != stack_direction)
3731 anti_adjust_stack (GEN_INT (extra));
3732
3733 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3734 }
3735 else
3736 #endif /* PUSH_ROUNDING */
3737 {
3738 rtx target;
3739
3740 /* Otherwise make space on the stack and copy the data
3741 to the address of that space. */
3742
3743 /* Deduct words put into registers from the size we must copy. */
3744 if (partial != 0)
3745 {
3746 if (CONST_INT_P (size))
3747 size = GEN_INT (INTVAL (size) - used);
3748 else
3749 size = expand_binop (GET_MODE (size), sub_optab, size,
3750 GEN_INT (used), NULL_RTX, 0,
3751 OPTAB_LIB_WIDEN);
3752 }
3753
3754 /* Get the address of the stack space.
3755 In this case, we do not deal with EXTRA separately.
3756 A single stack adjust will do. */
3757 if (! args_addr)
3758 {
3759 temp = push_block (size, extra, where_pad == downward);
3760 extra = 0;
3761 }
3762 else if (CONST_INT_P (args_so_far))
3763 temp = memory_address (BLKmode,
3764 plus_constant (args_addr,
3765 skip + INTVAL (args_so_far)));
3766 else
3767 temp = memory_address (BLKmode,
3768 plus_constant (gen_rtx_PLUS (Pmode,
3769 args_addr,
3770 args_so_far),
3771 skip));
3772
3773 if (!ACCUMULATE_OUTGOING_ARGS)
3774 {
3775 /* If the source is referenced relative to the stack pointer,
3776 copy it to another register to stabilize it. We do not need
3777 to do this if we know that we won't be changing sp. */
3778
3779 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3780 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3781 temp = copy_to_reg (temp);
3782 }
3783
3784 target = gen_rtx_MEM (BLKmode, temp);
3785
3786 /* We do *not* set_mem_attributes here, because incoming arguments
3787 may overlap with sibling call outgoing arguments and we cannot
3788 allow reordering of reads from function arguments with stores
3789 to outgoing arguments of sibling calls. We do, however, want
3790 to record the alignment of the stack slot. */
3791 /* ALIGN may well be better aligned than TYPE, e.g. due to
3792 PARM_BOUNDARY. Assume the caller isn't lying. */
3793 set_mem_align (target, align);
3794
3795 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3796 }
3797 }
3798 else if (partial > 0)
3799 {
3800 /* Scalar partly in registers. */
3801
3802 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3803 int i;
3804 int not_stack;
3805 /* # bytes of start of argument
3806 that we must make space for but need not store. */
3807 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3808 int args_offset = INTVAL (args_so_far);
3809 int skip;
3810
3811 /* Push padding now if padding above and stack grows down,
3812 or if padding below and stack grows up.
3813 But if space already allocated, this has already been done. */
3814 if (extra && args_addr == 0
3815 && where_pad != none && where_pad != stack_direction)
3816 anti_adjust_stack (GEN_INT (extra));
3817
3818 /* If we make space by pushing it, we might as well push
3819 the real data. Otherwise, we can leave OFFSET nonzero
3820 and leave the space uninitialized. */
3821 if (args_addr == 0)
3822 offset = 0;
3823
3824 /* Now NOT_STACK gets the number of words that we don't need to
3825 allocate on the stack. Convert OFFSET to words too. */
3826 not_stack = (partial - offset) / UNITS_PER_WORD;
3827 offset /= UNITS_PER_WORD;
3828
3829 /* If the partial register-part of the arg counts in its stack size,
3830 skip the part of stack space corresponding to the registers.
3831 Otherwise, start copying to the beginning of the stack space,
3832 by setting SKIP to 0. */
3833 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3834
3835 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
3836 x = validize_mem (force_const_mem (mode, x));
3837
3838 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3839 SUBREGs of such registers are not allowed. */
3840 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3841 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3842 x = copy_to_reg (x);
3843
3844 /* Loop over all the words allocated on the stack for this arg. */
3845 /* We can do it by words, because any scalar bigger than a word
3846 has a size that is a multiple of a word. */
3847 #ifndef PUSH_ARGS_REVERSED
3848 for (i = not_stack; i < size; i++)
3849 #else
3850 for (i = size - 1; i >= not_stack; i--)
3851 #endif
3852 if (i >= not_stack + offset)
3853 emit_push_insn (operand_subword_force (x, i, mode),
3854 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3855 0, args_addr,
3856 GEN_INT (args_offset + ((i - not_stack + skip)
3857 * UNITS_PER_WORD)),
3858 reg_parm_stack_space, alignment_pad);
3859 }
3860 else
3861 {
3862 rtx addr;
3863 rtx dest;
3864
3865 /* Push padding now if padding above and stack grows down,
3866 or if padding below and stack grows up.
3867 But if space already allocated, this has already been done. */
3868 if (extra && args_addr == 0
3869 && where_pad != none && where_pad != stack_direction)
3870 anti_adjust_stack (GEN_INT (extra));
3871
3872 #ifdef PUSH_ROUNDING
3873 if (args_addr == 0 && PUSH_ARGS)
3874 emit_single_push_insn (mode, x, type);
3875 else
3876 #endif
3877 {
3878 if (CONST_INT_P (args_so_far))
3879 addr
3880 = memory_address (mode,
3881 plus_constant (args_addr,
3882 INTVAL (args_so_far)));
3883 else
3884 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3885 args_so_far));
3886 dest = gen_rtx_MEM (mode, addr);
3887
3888 /* We do *not* set_mem_attributes here, because incoming arguments
3889 may overlap with sibling call outgoing arguments and we cannot
3890 allow reordering of reads from function arguments with stores
3891 to outgoing arguments of sibling calls. We do, however, want
3892 to record the alignment of the stack slot. */
3893 /* ALIGN may well be better aligned than TYPE, e.g. due to
3894 PARM_BOUNDARY. Assume the caller isn't lying. */
3895 set_mem_align (dest, align);
3896
3897 emit_move_insn (dest, x);
3898 }
3899 }
3900
3901 /* If part should go in registers, copy that part
3902 into the appropriate registers. Do this now, at the end,
3903 since mem-to-mem copies above may do function calls. */
3904 if (partial > 0 && reg != 0)
3905 {
3906 /* Handle calls that pass values in multiple non-contiguous locations.
3907 The Irix 6 ABI has examples of this. */
3908 if (GET_CODE (reg) == PARALLEL)
3909 emit_group_load (reg, x, type, -1);
3910 else
3911 {
3912 gcc_assert (partial % UNITS_PER_WORD == 0);
3913 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3914 }
3915 }
3916
3917 if (extra && args_addr == 0 && where_pad == stack_direction)
3918 anti_adjust_stack (GEN_INT (extra));
3919
3920 if (alignment_pad && args_addr == 0)
3921 anti_adjust_stack (alignment_pad);
3922 }
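
/* As an illustration (hypothetical values): for a DImode argument on a
32-bit target with PARTIAL == 4, REG set and a one-word PARM_BOUNDARY,
the "scalar partly in registers" case above pushes only the second word
of X; the final move_block_to_reg call then copies the first
PARTIAL / UNITS_PER_WORD == 1 word into REG.  */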
3923 \f
3924 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3925 operations. */
3926
3927 static rtx
3928 get_subtarget (rtx x)
3929 {
3930 return (optimize
3931 || x == 0
3932 /* Only registers can be subtargets. */
3933 || !REG_P (x)
3934 /* Don't use hard regs to avoid extending their life. */
3935 || REGNO (x) < FIRST_PSEUDO_REGISTER
3936 ? 0 : x);
3937 }
3938
3939 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3940 FIELD is a bitfield. Returns true if the optimization was successful,
3941 and there's nothing else to do. */
3942
3943 static bool
3944 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3945 unsigned HOST_WIDE_INT bitpos,
3946 enum machine_mode mode1, rtx str_rtx,
3947 tree to, tree src)
3948 {
3949 enum machine_mode str_mode = GET_MODE (str_rtx);
3950 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3951 tree op0, op1;
3952 rtx value, result;
3953 optab binop;
3954 gimple srcstmt;
3955 enum tree_code code;
3956
3957 if (mode1 != VOIDmode
3958 || bitsize >= BITS_PER_WORD
3959 || str_bitsize > BITS_PER_WORD
3960 || TREE_SIDE_EFFECTS (to)
3961 || TREE_THIS_VOLATILE (to))
3962 return false;
3963
3964 STRIP_NOPS (src);
3965 if (TREE_CODE (src) != SSA_NAME)
3966 return false;
3967 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3968 return false;
3969
3970 srcstmt = get_gimple_for_ssa_name (src);
3971 if (!srcstmt
3972 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
3973 return false;
3974
3975 code = gimple_assign_rhs_code (srcstmt);
3976
3977 op0 = gimple_assign_rhs1 (srcstmt);
3978
3979 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
3980 to find its initialization. Hopefully the initialization will
3981 be from a bitfield load. */
3982 if (TREE_CODE (op0) == SSA_NAME)
3983 {
3984 gimple op0stmt = get_gimple_for_ssa_name (op0);
3985
3986 /* We want to eventually have OP0 be the same as TO, which
3987 should be a bitfield. */
3988 if (!op0stmt
3989 || !is_gimple_assign (op0stmt)
3990 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
3991 return false;
3992 op0 = gimple_assign_rhs1 (op0stmt);
3993 }
3994
3995 op1 = gimple_assign_rhs2 (srcstmt);
3996
3997 if (!operand_equal_p (to, op0, 0))
3998 return false;
3999
4000 if (MEM_P (str_rtx))
4001 {
4002 unsigned HOST_WIDE_INT offset1;
4003
4004 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4005 str_mode = word_mode;
4006 str_mode = get_best_mode (bitsize, bitpos,
4007 MEM_ALIGN (str_rtx), str_mode, 0);
4008 if (str_mode == VOIDmode)
4009 return false;
4010 str_bitsize = GET_MODE_BITSIZE (str_mode);
4011
4012 offset1 = bitpos;
4013 bitpos %= str_bitsize;
4014 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4015 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4016 }
4017 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4018 return false;
4019
4020 /* If the bit field covers the whole REG/MEM, store_field
4021 will likely generate better code. */
4022 if (bitsize >= str_bitsize)
4023 return false;
4024
4025 /* We can't handle fields split across multiple entities. */
4026 if (bitpos + bitsize > str_bitsize)
4027 return false;
4028
4029 if (BYTES_BIG_ENDIAN)
4030 bitpos = str_bitsize - bitpos - bitsize;
4031
4032 switch (code)
4033 {
4034 case PLUS_EXPR:
4035 case MINUS_EXPR:
4036 /* For now, just optimize the case of the topmost bitfield,
4037 where we don't need to do any masking, and the case of
4038 1-bit bitfields, where xor can be used.
4039 We might win by one instruction for the other bitfields
4040 too if insv/extv instructions aren't used, so that
4041 can be added later. */
4042 if (bitpos + bitsize != str_bitsize
4043 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4044 break;
4045
4046 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4047 value = convert_modes (str_mode,
4048 TYPE_MODE (TREE_TYPE (op1)), value,
4049 TYPE_UNSIGNED (TREE_TYPE (op1)));
4050
4051 /* We may be accessing data outside the field, which means
4052 we can alias adjacent data. */
4053 if (MEM_P (str_rtx))
4054 {
4055 str_rtx = shallow_copy_rtx (str_rtx);
4056 set_mem_alias_set (str_rtx, 0);
4057 set_mem_expr (str_rtx, 0);
4058 }
4059
4060 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4061 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4062 {
4063 value = expand_and (str_mode, value, const1_rtx, NULL);
4064 binop = xor_optab;
4065 }
4066 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4067 bitpos, NULL_RTX, 1);
4068 result = expand_binop (str_mode, binop, str_rtx,
4069 value, str_rtx, 1, OPTAB_WIDEN);
4070 if (result != str_rtx)
4071 emit_move_insn (str_rtx, result);
4072 return true;
4073
4074 case BIT_IOR_EXPR:
4075 case BIT_XOR_EXPR:
4076 if (TREE_CODE (op1) != INTEGER_CST)
4077 break;
4078 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4079 value = convert_modes (GET_MODE (str_rtx),
4080 TYPE_MODE (TREE_TYPE (op1)), value,
4081 TYPE_UNSIGNED (TREE_TYPE (op1)));
4082
4083 /* We may be accessing data outside the field, which means
4084 we can alias adjacent data. */
4085 if (MEM_P (str_rtx))
4086 {
4087 str_rtx = shallow_copy_rtx (str_rtx);
4088 set_mem_alias_set (str_rtx, 0);
4089 set_mem_expr (str_rtx, 0);
4090 }
4091
4092 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4093 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4094 {
4095 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4096 - 1);
4097 value = expand_and (GET_MODE (str_rtx), value, mask,
4098 NULL_RTX);
4099 }
4100 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4101 bitpos, NULL_RTX, 1);
4102 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4103 value, str_rtx, 1, OPTAB_WIDEN);
4104 if (result != str_rtx)
4105 emit_move_insn (str_rtx, result);
4106 return true;
4107
4108 default:
4109 break;
4110 }
4111
4112 return false;
4113 }
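
/* As an illustration (hypothetical types): for

struct { unsigned lo : 9; unsigned hi : 23; } s;
s.hi += 1;

on a typical little-endian layout S.HI occupies the top 23 bits of a
32-bit word, so the PLUS_EXPR case above simply adds 1 << 9 to the
containing word with no extract/insert sequence; a 1-bit field would
instead be flipped with xor.  */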
4114
4115
4116 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4117 is true, try generating a nontemporal store. */
4118
4119 void
4120 expand_assignment (tree to, tree from, bool nontemporal)
4121 {
4122 rtx to_rtx = 0;
4123 rtx result;
4124 enum machine_mode mode;
4125 int align;
4126 enum insn_code icode;
4127
4128 /* Don't crash if the lhs of the assignment was erroneous. */
4129 if (TREE_CODE (to) == ERROR_MARK)
4130 {
4131 expand_normal (from);
4132 return;
4133 }
4134
4135 /* Optimize away no-op moves without side-effects. */
4136 if (operand_equal_p (to, from, 0))
4137 return;
4138
4139 mode = TYPE_MODE (TREE_TYPE (to));
4140 if ((TREE_CODE (to) == MEM_REF
4141 || TREE_CODE (to) == TARGET_MEM_REF)
4142 && mode != BLKmode
4143 && ((align = MAX (TYPE_ALIGN (TREE_TYPE (to)),
4144 get_object_alignment (to, BIGGEST_ALIGNMENT)))
4145 < (signed) GET_MODE_ALIGNMENT (mode))
4146 && ((icode = optab_handler (movmisalign_optab, mode))
4147 != CODE_FOR_nothing))
4148 {
4149 struct expand_operand ops[2];
4150 enum machine_mode address_mode;
4151 rtx reg, op0, mem;
4152
4153 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4154 reg = force_not_mem (reg);
4155
4156 if (TREE_CODE (to) == MEM_REF)
4157 {
4158 addr_space_t as
4159 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 1))));
4160 tree base = TREE_OPERAND (to, 0);
4161 address_mode = targetm.addr_space.address_mode (as);
4162 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4163 op0 = convert_memory_address_addr_space (address_mode, op0, as);
4164 if (!integer_zerop (TREE_OPERAND (to, 1)))
4165 {
4166 rtx off
4167 = immed_double_int_const (mem_ref_offset (to), address_mode);
4168 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
4169 }
4170 op0 = memory_address_addr_space (mode, op0, as);
4171 mem = gen_rtx_MEM (mode, op0);
4172 set_mem_attributes (mem, to, 0);
4173 set_mem_addr_space (mem, as);
4174 }
4175 else if (TREE_CODE (to) == TARGET_MEM_REF)
4176 {
4177 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (to));
4178 struct mem_address addr;
4179
4180 get_address_description (to, &addr);
4181 op0 = addr_for_mem_ref (&addr, as, true);
4182 op0 = memory_address_addr_space (mode, op0, as);
4183 mem = gen_rtx_MEM (mode, op0);
4184 set_mem_attributes (mem, to, 0);
4185 set_mem_addr_space (mem, as);
4186 }
4187 else
4188 gcc_unreachable ();
4189 if (TREE_THIS_VOLATILE (to))
4190 MEM_VOLATILE_P (mem) = 1;
4191
4192 create_fixed_operand (&ops[0], mem);
4193 create_input_operand (&ops[1], reg, mode);
4194 /* The movmisalign<mode> pattern cannot fail, else the assignment would
4195 silently be omitted. */
4196 expand_insn (icode, 2, ops);
4197 return;
4198 }
4199
4200 /* Assignment of a structure component needs special treatment
4201 if the structure component's rtx is not simply a MEM.
4202 Assignment of an array element at a constant index, and assignment of
4203 an array element in an unaligned packed structure field, have the same
4204 problem. */
4205 if (handled_component_p (to)
4206 /* ??? We only need to handle MEM_REF here if the access is not
4207 a full access of the base object. */
4208 || (TREE_CODE (to) == MEM_REF
4209 && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
4210 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4211 {
4212 enum machine_mode mode1;
4213 HOST_WIDE_INT bitsize, bitpos;
4214 tree offset;
4215 int unsignedp;
4216 int volatilep = 0;
4217 tree tem;
4218
4219 push_temp_slots ();
4220 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4221 &unsignedp, &volatilep, true);
4222
4223 /* If we are going to use store_bit_field and extract_bit_field,
4224 make sure to_rtx will be safe for multiple use. */
4225
4226 to_rtx = expand_normal (tem);
4227
4228 /* If the bitfield is volatile, we want to access it in the
4229 field's mode, not the computed mode.
4230 If a MEM has VOIDmode (external with incomplete type),
4231 use BLKmode for it instead. */
4232 if (MEM_P (to_rtx))
4233 {
4234 if (volatilep && flag_strict_volatile_bitfields > 0)
4235 to_rtx = adjust_address (to_rtx, mode1, 0);
4236 else if (GET_MODE (to_rtx) == VOIDmode)
4237 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4238 }
4239
4240 if (offset != 0)
4241 {
4242 enum machine_mode address_mode;
4243 rtx offset_rtx;
4244
4245 if (!MEM_P (to_rtx))
4246 {
4247 /* We can get constant negative offsets into arrays with broken
4248 user code. Translate this to a trap instead of ICEing. */
4249 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4250 expand_builtin_trap ();
4251 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4252 }
4253
4254 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4255 address_mode
4256 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4257 if (GET_MODE (offset_rtx) != address_mode)
4258 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4259
4260 /* A constant address in TO_RTX can have VOIDmode; we must not
4261 try to call force_reg in that case. */
4262 if (MEM_P (to_rtx)
4263 && GET_MODE (to_rtx) == BLKmode
4264 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4265 && bitsize > 0
4266 && (bitpos % bitsize) == 0
4267 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4268 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4269 {
4270 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4271 bitpos = 0;
4272 }
4273
4274 to_rtx = offset_address (to_rtx, offset_rtx,
4275 highest_pow2_factor_for_target (to,
4276 offset));
4277 }
4278
4279 /* No action is needed if the target is not a memory and the field
4280 lies completely outside that target. This can occur if the source
4281 code contains an out-of-bounds access to a small array. */
4282 if (!MEM_P (to_rtx)
4283 && GET_MODE (to_rtx) != BLKmode
4284 && (unsigned HOST_WIDE_INT) bitpos
4285 >= GET_MODE_BITSIZE (GET_MODE (to_rtx)))
4286 {
4287 expand_normal (from);
4288 result = NULL;
4289 }
4290 /* Handle expand_expr of a complex value returning a CONCAT. */
4291 else if (GET_CODE (to_rtx) == CONCAT)
4292 {
4293 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4294 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4295 && bitpos == 0
4296 && bitsize == mode_bitsize)
4297 result = store_expr (from, to_rtx, false, nontemporal);
4298 else if (bitsize == mode_bitsize / 2
4299 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4300 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4301 nontemporal);
4302 else if (bitpos + bitsize <= mode_bitsize / 2)
4303 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4304 mode1, from, TREE_TYPE (tem),
4305 get_alias_set (to), nontemporal);
4306 else if (bitpos >= mode_bitsize / 2)
4307 result = store_field (XEXP (to_rtx, 1), bitsize,
4308 bitpos - mode_bitsize / 2, mode1, from,
4309 TREE_TYPE (tem), get_alias_set (to),
4310 nontemporal);
4311 else if (bitpos == 0 && bitsize == mode_bitsize)
4312 {
4313 rtx from_rtx;
4314 result = expand_normal (from);
4315 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4316 TYPE_MODE (TREE_TYPE (from)), 0);
4317 emit_move_insn (XEXP (to_rtx, 0),
4318 read_complex_part (from_rtx, false));
4319 emit_move_insn (XEXP (to_rtx, 1),
4320 read_complex_part (from_rtx, true));
4321 }
4322 else
4323 {
4324 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4325 GET_MODE_SIZE (GET_MODE (to_rtx)),
4326 0);
4327 write_complex_part (temp, XEXP (to_rtx, 0), false);
4328 write_complex_part (temp, XEXP (to_rtx, 1), true);
4329 result = store_field (temp, bitsize, bitpos, mode1, from,
4330 TREE_TYPE (tem), get_alias_set (to),
4331 nontemporal);
4332 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4333 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4334 }
4335 }
4336 else
4337 {
4338 if (MEM_P (to_rtx))
4339 {
4340 /* If the field is at offset zero, we could have been given the
4341 DECL_RTX of the parent struct. Don't munge it. */
4342 to_rtx = shallow_copy_rtx (to_rtx);
4343
4344 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4345
4346 /* Deal with volatile and readonly fields. The former is only
4347 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4348 if (volatilep)
4349 MEM_VOLATILE_P (to_rtx) = 1;
4350 if (component_uses_parent_alias_set (to))
4351 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4352 }
4353
4354 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4355 to_rtx, to, from))
4356 result = NULL;
4357 else
4358 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4359 TREE_TYPE (tem), get_alias_set (to),
4360 nontemporal);
4361 }
4362
4363 if (result)
4364 preserve_temp_slots (result);
4365 free_temp_slots ();
4366 pop_temp_slots ();
4367 return;
4368 }
4369
4370 /* If the rhs is a function call and its value is not an aggregate,
4371 call the function before we start to compute the lhs.
4372 This is needed for correct code for cases such as
4373 val = setjmp (buf) on machines where reference to val
4374 requires loading up part of an address in a separate insn.
4375
4376 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4377 since it might be a promoted variable where the zero- or sign- extension
4378 needs to be done. Handling this in the normal way is safe because no
4379 computation is done before the call. The same is true for SSA names. */
4380 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4381 && COMPLETE_TYPE_P (TREE_TYPE (from))
4382 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4383 && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4384 && REG_P (DECL_RTL (to)))
4385 || TREE_CODE (to) == SSA_NAME))
4386 {
4387 rtx value;
4388
4389 push_temp_slots ();
4390 value = expand_normal (from);
4391 if (to_rtx == 0)
4392 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4393
4394 /* Handle calls that return values in multiple non-contiguous locations.
4395 The Irix 6 ABI has examples of this. */
4396 if (GET_CODE (to_rtx) == PARALLEL)
4397 emit_group_load (to_rtx, value, TREE_TYPE (from),
4398 int_size_in_bytes (TREE_TYPE (from)));
4399 else if (GET_MODE (to_rtx) == BLKmode)
4400 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4401 else
4402 {
4403 if (POINTER_TYPE_P (TREE_TYPE (to)))
4404 value = convert_memory_address_addr_space
4405 (GET_MODE (to_rtx), value,
4406 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4407
4408 emit_move_insn (to_rtx, value);
4409 }
4410 preserve_temp_slots (to_rtx);
4411 free_temp_slots ();
4412 pop_temp_slots ();
4413 return;
4414 }
4415
4416 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4417 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4418
4419 if (to_rtx == 0)
4420 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4421
4422 /* Don't move directly into a return register. */
4423 if (TREE_CODE (to) == RESULT_DECL
4424 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4425 {
4426 rtx temp;
4427
4428 push_temp_slots ();
4429 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4430
4431 if (GET_CODE (to_rtx) == PARALLEL)
4432 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4433 int_size_in_bytes (TREE_TYPE (from)));
4434 else
4435 emit_move_insn (to_rtx, temp);
4436
4437 preserve_temp_slots (to_rtx);
4438 free_temp_slots ();
4439 pop_temp_slots ();
4440 return;
4441 }
4442
4443 /* In case we are returning the contents of an object which overlaps
4444 the place the value is being stored, use a safe function when copying
4445 a value through a pointer into a structure value return block. */
4446 if (TREE_CODE (to) == RESULT_DECL
4447 && TREE_CODE (from) == INDIRECT_REF
4448 && ADDR_SPACE_GENERIC_P
4449 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4450 && refs_may_alias_p (to, from)
4451 && cfun->returns_struct
4452 && !cfun->returns_pcc_struct)
4453 {
4454 rtx from_rtx, size;
4455
4456 push_temp_slots ();
4457 size = expr_size (from);
4458 from_rtx = expand_normal (from);
4459
4460 emit_library_call (memmove_libfunc, LCT_NORMAL,
4461 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4462 XEXP (from_rtx, 0), Pmode,
4463 convert_to_mode (TYPE_MODE (sizetype),
4464 size, TYPE_UNSIGNED (sizetype)),
4465 TYPE_MODE (sizetype));
4466
4467 preserve_temp_slots (to_rtx);
4468 free_temp_slots ();
4469 pop_temp_slots ();
4470 return;
4471 }
4472
4473 /* Compute FROM and store the value in the rtx we got. */
4474
4475 push_temp_slots ();
4476 result = store_expr (from, to_rtx, 0, nontemporal);
4477 preserve_temp_slots (result);
4478 free_temp_slots ();
4479 pop_temp_slots ();
4480 return;
4481 }
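
/* As an illustration (hypothetical types): for an assignment such as

struct S { int a : 3; int b : 29; } *p;
p->a = v;

the handled_component_p path above calls get_inner_reference to obtain
the bit position and BITSIZE == 3 of A, expands *P to a MEM, and then
lets optimize_bitfield_assignment_op or store_field emit the
read-modify-write of the word containing the field.  */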
4482
4483 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4484 succeeded, false otherwise. */
4485
4486 bool
4487 emit_storent_insn (rtx to, rtx from)
4488 {
4489 struct expand_operand ops[2];
4490 enum machine_mode mode = GET_MODE (to);
4491 enum insn_code code = optab_handler (storent_optab, mode);
4492
4493 if (code == CODE_FOR_nothing)
4494 return false;
4495
4496 create_fixed_operand (&ops[0], to);
4497 create_input_operand (&ops[1], from, mode);
4498 return maybe_expand_insn (code, 2, ops);
4499 }
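
/* Note that emit_storent_insn only succeeds on targets that provide a
storent<mode> pattern for MODE; callers such as store_expr below fall
back to an ordinary move when it returns false.  */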
4500
4501 /* Generate code for computing expression EXP,
4502 and storing the value into TARGET.
4503
4504 If the mode is BLKmode then we may return TARGET itself.
4505 It turns out that in BLKmode this doesn't cause a problem,
4506 because C has no operators that could combine two different
4507 assignments into the same BLKmode object with different values
4508 with no sequence point. Will other languages need this to
4509 be more thorough?
4510
4511 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4512 stack, and block moves may need to be treated specially.
4513
4514 If NONTEMPORAL is true, try using a nontemporal store instruction. */
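
/* As an illustration (hypothetical example): expanding the initialization
of a local char array from a shorter string literal, e.g.
char buf[64] = "abc", can take the STRING_CST path below, which writes
the string bytes with store_by_pieces and clears the remaining tail of
the array with clear_storage.  */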
4515
4516 rtx
4517 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4518 {
4519 rtx temp;
4520 rtx alt_rtl = NULL_RTX;
4521 location_t loc = EXPR_LOCATION (exp);
4522
4523 if (VOID_TYPE_P (TREE_TYPE (exp)))
4524 {
4525 /* C++ can generate ?: expressions with a throw expression in one
4526 branch and an rvalue in the other. Here, we resolve attempts to
4527 store the throw expression's nonexistent result. */
4528 gcc_assert (!call_param_p);
4529 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4530 return NULL_RTX;
4531 }
4532 if (TREE_CODE (exp) == COMPOUND_EXPR)
4533 {
4534 /* Perform first part of compound expression, then assign from second
4535 part. */
4536 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4537 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4538 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4539 nontemporal);
4540 }
4541 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4542 {
4543 /* For conditional expression, get safe form of the target. Then
4544 test the condition, doing the appropriate assignment on either
4545 side. This avoids the creation of unnecessary temporaries.
4546 For non-BLKmode, it is more efficient not to do this. */
4547
4548 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4549
4550 do_pending_stack_adjust ();
4551 NO_DEFER_POP;
4552 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
4553 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4554 nontemporal);
4555 emit_jump_insn (gen_jump (lab2));
4556 emit_barrier ();
4557 emit_label (lab1);
4558 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4559 nontemporal);
4560 emit_label (lab2);
4561 OK_DEFER_POP;
4562
4563 return NULL_RTX;
4564 }
4565 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4566 /* If this is a scalar in a register that is stored in a wider mode
4567 than the declared mode, compute the result into its declared mode
4568 and then convert to the wider mode. Our value is the computed
4569 expression. */
4570 {
4571 rtx inner_target = 0;
4572
4573 /* We can do the conversion inside EXP, which will often result
4574 in some optimizations. Do the conversion in two steps: first
4575 change the signedness, if needed, then do the extension. But don't
4576 do this if the type of EXP is a subtype of something else
4577 since then the conversion might involve more than just
4578 converting modes. */
4579 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4580 && TREE_TYPE (TREE_TYPE (exp)) == 0
4581 && GET_MODE_PRECISION (GET_MODE (target))
4582 == TYPE_PRECISION (TREE_TYPE (exp)))
4583 {
4584 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4585 != SUBREG_PROMOTED_UNSIGNED_P (target))
4586 {
4587 /* Some types, e.g. Fortran's logical*4, won't have a signed
4588 version, so use the mode instead. */
4589 tree ntype
4590 = (signed_or_unsigned_type_for
4591 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4592 if (ntype == NULL)
4593 ntype = lang_hooks.types.type_for_mode
4594 (TYPE_MODE (TREE_TYPE (exp)),
4595 SUBREG_PROMOTED_UNSIGNED_P (target));
4596
4597 exp = fold_convert_loc (loc, ntype, exp);
4598 }
4599
4600 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
4601 (GET_MODE (SUBREG_REG (target)),
4602 SUBREG_PROMOTED_UNSIGNED_P (target)),
4603 exp);
4604
4605 inner_target = SUBREG_REG (target);
4606 }
4607
4608 temp = expand_expr (exp, inner_target, VOIDmode,
4609 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4610
4611 /* If TEMP is a VOIDmode constant, use convert_modes to make
4612 sure that we properly convert it. */
4613 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4614 {
4615 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4616 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4617 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4618 GET_MODE (target), temp,
4619 SUBREG_PROMOTED_UNSIGNED_P (target));
4620 }
4621
4622 convert_move (SUBREG_REG (target), temp,
4623 SUBREG_PROMOTED_UNSIGNED_P (target));
4624
4625 return NULL_RTX;
4626 }
4627 else if ((TREE_CODE (exp) == STRING_CST
4628 || (TREE_CODE (exp) == MEM_REF
4629 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4630 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4631 == STRING_CST
4632 && integer_zerop (TREE_OPERAND (exp, 1))))
4633 && !nontemporal && !call_param_p
4634 && MEM_P (target))
4635 {
4636 /* Optimize initialization of an array with a STRING_CST. */
4637 HOST_WIDE_INT exp_len, str_copy_len;
4638 rtx dest_mem;
4639 tree str = TREE_CODE (exp) == STRING_CST
4640 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4641
4642 exp_len = int_expr_size (exp);
4643 if (exp_len <= 0)
4644 goto normal_expr;
4645
4646 if (TREE_STRING_LENGTH (str) <= 0)
4647 goto normal_expr;
4648
4649 str_copy_len = strlen (TREE_STRING_POINTER (str));
4650 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
4651 goto normal_expr;
4652
4653 str_copy_len = TREE_STRING_LENGTH (str);
4654 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
4655 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
4656 {
4657 str_copy_len += STORE_MAX_PIECES - 1;
4658 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4659 }
4660 str_copy_len = MIN (str_copy_len, exp_len);
4661 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4662 CONST_CAST (char *, TREE_STRING_POINTER (str)),
4663 MEM_ALIGN (target), false))
4664 goto normal_expr;
4665
4666 dest_mem = target;
4667
4668 dest_mem = store_by_pieces (dest_mem,
4669 str_copy_len, builtin_strncpy_read_str,
4670 CONST_CAST (char *,
4671 TREE_STRING_POINTER (str)),
4672 MEM_ALIGN (target), false,
4673 exp_len > str_copy_len ? 1 : 0);
4674 if (exp_len > str_copy_len)
4675 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4676 GEN_INT (exp_len - str_copy_len),
4677 BLOCK_OP_NORMAL);
4678 return NULL_RTX;
4679 }
4680 else
4681 {
4682 rtx tmp_target;
4683
4684 normal_expr:
4685 /* If we want to use a nontemporal store, force the value to
4686 register first. */
4687 tmp_target = nontemporal ? NULL_RTX : target;
4688 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4689 (call_param_p
4690 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4691 &alt_rtl);
4692 }
4693
4694 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4695 the same as that of TARGET, adjust the constant. This is needed, for
4696 example, in case it is a CONST_DOUBLE and we want only a word-sized
4697 value. */
4698 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4699 && TREE_CODE (exp) != ERROR_MARK
4700 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4701 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4702 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4703
4704 /* If value was not generated in the target, store it there.
4705 Convert the value to TARGET's type first if necessary and emit the
4706 pending increments that were queued when expanding EXP.
4707 Note that we cannot emit the whole queue blindly because this will
4708 effectively disable the POST_INC optimization later.
4709
4710 If TEMP and TARGET compare equal according to rtx_equal_p, but
4711 one or both of them are volatile memory refs, we have to distinguish
4712 two cases:
4713 - expand_expr has used TARGET. In this case, we must not generate
4714 another copy. This can be detected by TEMP and TARGET being equal
4715 according to ==.
4716 - expand_expr has not used TARGET - that means that the source just
4717 happens to have the same RTX form. Since TEMP will have been created
4718 by expand_expr, it will compare unequal according to ==.
4719 We must generate a copy in this case, to reach the correct number
4720 of volatile memory references. */
4721
4722 if ((! rtx_equal_p (temp, target)
4723 || (temp != target && (side_effects_p (temp)
4724 || side_effects_p (target))))
4725 && TREE_CODE (exp) != ERROR_MARK
4726 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4727 but TARGET is not valid memory reference, TEMP will differ
4728 from TARGET although it is really the same location. */
4729 && !(alt_rtl
4730 && rtx_equal_p (alt_rtl, target)
4731 && !side_effects_p (alt_rtl)
4732 && !side_effects_p (target))
4733 /* If there's nothing to copy, don't bother. Don't call
4734 expr_size unless necessary, because for some front ends (C++)
4735 the expr_size hook must not be given objects that are not
4736 supposed to be bit-copied or bit-initialized. */
4737 && expr_size (exp) != const0_rtx)
4738 {
4739 if (GET_MODE (temp) != GET_MODE (target)
4740 && GET_MODE (temp) != VOIDmode)
4741 {
4742 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4743 if (GET_MODE (target) == BLKmode
4744 && GET_MODE (temp) == BLKmode)
4745 emit_block_move (target, temp, expr_size (exp),
4746 (call_param_p
4747 ? BLOCK_OP_CALL_PARM
4748 : BLOCK_OP_NORMAL));
4749 else if (GET_MODE (target) == BLKmode)
4750 store_bit_field (target, INTVAL (expr_size (exp)) * BITS_PER_UNIT,
4751 0, GET_MODE (temp), temp);
4752 else
4753 convert_move (target, temp, unsignedp);
4754 }
4755
4756 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4757 {
4758 /* Handle copying a string constant into an array. The string
4759 constant may be shorter than the array. So copy just the string's
4760 actual length, and clear the rest. First get the size of the data
4761 type of the string, which is actually the size of the target. */
4762 rtx size = expr_size (exp);
4763
4764 if (CONST_INT_P (size)
4765 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4766 emit_block_move (target, temp, size,
4767 (call_param_p
4768 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4769 else
4770 {
4771 enum machine_mode pointer_mode
4772 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
4773 enum machine_mode address_mode
4774 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
4775
4776 /* Compute the size of the data to copy from the string. */
4777 tree copy_size
4778 = size_binop_loc (loc, MIN_EXPR,
4779 make_tree (sizetype, size),
4780 size_int (TREE_STRING_LENGTH (exp)));
4781 rtx copy_size_rtx
4782 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4783 (call_param_p
4784 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4785 rtx label = 0;
4786
4787 /* Copy that much. */
4788 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
4789 TYPE_UNSIGNED (sizetype));
4790 emit_block_move (target, temp, copy_size_rtx,
4791 (call_param_p
4792 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4793
4794 /* Figure out how much is left in TARGET that we have to clear.
4795 Do all calculations in pointer_mode. */
4796 if (CONST_INT_P (copy_size_rtx))
4797 {
4798 size = plus_constant (size, -INTVAL (copy_size_rtx));
4799 target = adjust_address (target, BLKmode,
4800 INTVAL (copy_size_rtx));
4801 }
4802 else
4803 {
4804 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4805 copy_size_rtx, NULL_RTX, 0,
4806 OPTAB_LIB_WIDEN);
4807
4808 if (GET_MODE (copy_size_rtx) != address_mode)
4809 copy_size_rtx = convert_to_mode (address_mode,
4810 copy_size_rtx,
4811 TYPE_UNSIGNED (sizetype));
4812
4813 target = offset_address (target, copy_size_rtx,
4814 highest_pow2_factor (copy_size));
4815 label = gen_label_rtx ();
4816 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4817 GET_MODE (size), 0, label);
4818 }
4819
4820 if (size != const0_rtx)
4821 clear_storage (target, size, BLOCK_OP_NORMAL);
4822
4823 if (label)
4824 emit_label (label);
4825 }
4826 }
4827 /* Handle calls that return values in multiple non-contiguous locations.
4828 The Irix 6 ABI has examples of this. */
4829 else if (GET_CODE (target) == PARALLEL)
4830 emit_group_load (target, temp, TREE_TYPE (exp),
4831 int_size_in_bytes (TREE_TYPE (exp)));
4832 else if (GET_MODE (temp) == BLKmode)
4833 emit_block_move (target, temp, expr_size (exp),
4834 (call_param_p
4835 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4836 else if (nontemporal
4837 && emit_storent_insn (target, temp))
4838 /* If we managed to emit a nontemporal store, there is nothing else to
4839 do. */
4840 ;
4841 else
4842 {
4843 temp = force_operand (temp, target);
4844 if (temp != target)
4845 emit_move_insn (target, temp);
4846 }
4847 }
4848
4849 return NULL_RTX;
4850 }
4851 \f
4852 /* Helper for categorize_ctor_elements. Identical interface. */
4853
4854 static bool
4855 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4856 HOST_WIDE_INT *p_elt_count,
4857 bool *p_must_clear)
4858 {
4859 unsigned HOST_WIDE_INT idx;
4860 HOST_WIDE_INT nz_elts, elt_count;
4861 tree value, purpose;
4862
4863 /* Whether CTOR is a valid constant initializer, in accordance with what
4864 initializer_constant_valid_p does. If inferred from the constructor
4865 elements, true until proven otherwise. */
4866 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4867 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4868
4869 nz_elts = 0;
4870 elt_count = 0;
4871
4872 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4873 {
4874 HOST_WIDE_INT mult = 1;
4875
4876 if (TREE_CODE (purpose) == RANGE_EXPR)
4877 {
4878 tree lo_index = TREE_OPERAND (purpose, 0);
4879 tree hi_index = TREE_OPERAND (purpose, 1);
4880
4881 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4882 mult = (tree_low_cst (hi_index, 1)
4883 - tree_low_cst (lo_index, 1) + 1);
4884 }
4885
4886 switch (TREE_CODE (value))
4887 {
4888 case CONSTRUCTOR:
4889 {
4890 HOST_WIDE_INT nz = 0, ic = 0;
4891
4892 bool const_elt_p
4893 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4894
4895 nz_elts += mult * nz;
4896 elt_count += mult * ic;
4897
4898 if (const_from_elts_p && const_p)
4899 const_p = const_elt_p;
4900 }
4901 break;
4902
4903 case INTEGER_CST:
4904 case REAL_CST:
4905 case FIXED_CST:
4906 if (!initializer_zerop (value))
4907 nz_elts += mult;
4908 elt_count += mult;
4909 break;
4910
4911 case STRING_CST:
4912 nz_elts += mult * TREE_STRING_LENGTH (value);
4913 elt_count += mult * TREE_STRING_LENGTH (value);
4914 break;
4915
4916 case COMPLEX_CST:
4917 if (!initializer_zerop (TREE_REALPART (value)))
4918 nz_elts += mult;
4919 if (!initializer_zerop (TREE_IMAGPART (value)))
4920 nz_elts += mult;
4921 elt_count += mult;
4922 break;
4923
4924 case VECTOR_CST:
4925 {
4926 tree v;
4927 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4928 {
4929 if (!initializer_zerop (TREE_VALUE (v)))
4930 nz_elts += mult;
4931 elt_count += mult;
4932 }
4933 }
4934 break;
4935
4936 default:
4937 {
4938 HOST_WIDE_INT tc = count_type_elements (TREE_TYPE (value), true);
4939 if (tc < 1)
4940 tc = 1;
4941 nz_elts += mult * tc;
4942 elt_count += mult * tc;
4943
4944 if (const_from_elts_p && const_p)
4945 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4946 != NULL_TREE;
4947 }
4948 break;
4949 }
4950 }
4951
4952 if (!*p_must_clear
4953 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4954 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4955 {
4956 tree init_sub_type;
4957 bool clear_this = true;
4958
4959 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4960 {
4961 /* We don't expect more than one element of the union to be
4962 initialized. Not sure what we should do otherwise... */
4963 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4964 == 1);
4965
4966 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4967 CONSTRUCTOR_ELTS (ctor),
4968 0)->value);
4969
4970 /* ??? We could look at each element of the union, and find the
4971 largest element. Which would avoid comparing the size of the
4972 initialized element against any tail padding in the union.
4973 Doesn't seem worth the effort... */
4974 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4975 TYPE_SIZE (init_sub_type)) == 1)
4976 {
4977 /* And now we have to find out if the element itself is fully
4978 constructed. E.g. for union { struct { int a, b; } s; } u
4979 = { .s = { .a = 1 } }. */
4980 if (elt_count == count_type_elements (init_sub_type, false))
4981 clear_this = false;
4982 }
4983 }
4984
4985 *p_must_clear = clear_this;
4986 }
4987
4988 *p_nz_elts += nz_elts;
4989 *p_elt_count += elt_count;
4990
4991 return const_p;
4992 }
4993
4994 /* Examine CTOR to discover:
4995 * how many scalar fields are set to nonzero values,
4996 and place that count in *P_NZ_ELTS;
4997 * how many scalar fields in total are in CTOR,
4998 and place that count in *P_ELT_COUNT;
4999 * if the type is a union, and the initializer from the constructor
5000 is not the largest element in the union, then set *P_MUST_CLEAR.
5001
5002 Return whether or not CTOR is a valid static constant initializer, the same
5003 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5004
5005 bool
5006 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5007 HOST_WIDE_INT *p_elt_count,
5008 bool *p_must_clear)
5009 {
5010 *p_nz_elts = 0;
5011 *p_elt_count = 0;
5012 *p_must_clear = false;
5013
5014 return
5015 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
5016 }
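
/* As an illustration (hypothetical values): for an array constructor
{ 0, 7, 0, 9 } in which all four elements appear explicitly,
categorize_ctor_elements sets *P_NZ_ELTS to 2 and *P_ELT_COUNT to 4.
Front ends may omit zero elements, in which case both counts reflect
only the elements that are actually present.  */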
5017
5018 /* Count the number of scalars in TYPE. Return -1 on overflow or if
5019 TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
5020 flexible array member at the end of the structure. */
5021
5022 HOST_WIDE_INT
5023 count_type_elements (const_tree type, bool allow_flexarr)
5024 {
5025 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
5026 switch (TREE_CODE (type))
5027 {
5028 case ARRAY_TYPE:
5029 {
5030 tree telts = array_type_nelts (type);
5031 if (telts && host_integerp (telts, 1))
5032 {
5033 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
5034 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
5035 if (n == 0)
5036 return 0;
5037 else if (max / n > m)
5038 return n * m;
5039 }
5040 return -1;
5041 }
5042
5043 case RECORD_TYPE:
5044 {
5045 HOST_WIDE_INT n = 0, t;
5046 tree f;
5047
5048 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5049 if (TREE_CODE (f) == FIELD_DECL)
5050 {
5051 t = count_type_elements (TREE_TYPE (f), false);
5052 if (t < 0)
5053 {
5054 /* Check for structures with flexible array member. */
5055 tree tf = TREE_TYPE (f);
5056 if (allow_flexarr
5057 && DECL_CHAIN (f) == NULL
5058 && TREE_CODE (tf) == ARRAY_TYPE
5059 && TYPE_DOMAIN (tf)
5060 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5061 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5062 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5063 && int_size_in_bytes (type) >= 0)
5064 break;
5065
5066 return -1;
5067 }
5068 n += t;
5069 }
5070
5071 return n;
5072 }
5073
5074 case UNION_TYPE:
5075 case QUAL_UNION_TYPE:
5076 return -1;
5077
5078 case COMPLEX_TYPE:
5079 return 2;
5080
5081 case VECTOR_TYPE:
5082 return TYPE_VECTOR_SUBPARTS (type);
5083
5084 case INTEGER_TYPE:
5085 case REAL_TYPE:
5086 case FIXED_POINT_TYPE:
5087 case ENUMERAL_TYPE:
5088 case BOOLEAN_TYPE:
5089 case POINTER_TYPE:
5090 case OFFSET_TYPE:
5091 case REFERENCE_TYPE:
5092 return 1;
5093
5094 case ERROR_MARK:
5095 return 0;
5096
5097 case VOID_TYPE:
5098 case METHOD_TYPE:
5099 case FUNCTION_TYPE:
5100 case LANG_TYPE:
5101 default:
5102 gcc_unreachable ();
5103 }
5104 }
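
/* As an illustration (hypothetical types): count_type_elements counts
struct { int a; double b[3]; } as 1 + 3 == 4 scalars, returns 2 for a
COMPLEX_TYPE, and returns -1 for unions and for variable-length
arrays.  */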
5105
5106 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
5107
5108 static int
5109 mostly_zeros_p (const_tree exp)
5110 {
5111 if (TREE_CODE (exp) == CONSTRUCTOR)
5112
5113 {
5114 HOST_WIDE_INT nz_elts, count, elts;
5115 bool must_clear;
5116
5117 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5118 if (must_clear)
5119 return 1;
5120
5121 elts = count_type_elements (TREE_TYPE (exp), false);
5122
5123 return nz_elts < elts / 4;
5124 }
5125
5126 return initializer_zerop (exp);
5127 }
5128
5129 /* Return 1 if EXP contains all zeros. */
5130
5131 static int
5132 all_zeros_p (const_tree exp)
5133 {
5134 if (TREE_CODE (exp) == CONSTRUCTOR)
5135
5136 {
5137 HOST_WIDE_INT nz_elts, count;
5138 bool must_clear;
5139
5140 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5141 return nz_elts == 0;
5142 }
5143
5144 return initializer_zerop (exp);
5145 }
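
/* As an illustration (hypothetical values): a constructor for a
16-element integer array with 3 explicit nonzero entries satisfies
mostly_zeros_p (3 < 16 / 4), while one with 4 nonzero entries does
not; all_zeros_p additionally requires every entry to be zero.  */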
5146 \f
5147 /* Helper function for store_constructor.
5148 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5149 TYPE is the type of the CONSTRUCTOR, not the element type.
5150 CLEARED is as for store_constructor.
5151 ALIAS_SET is the alias set to use for any stores.
5152
5153 This provides a recursive shortcut back to store_constructor when it isn't
5154 necessary to go through store_field. This is so that we can pass through
5155 the cleared field to let store_constructor know that we may not have to
5156 clear a substructure if the outer structure has already been cleared. */
5157
5158 static void
5159 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5160 HOST_WIDE_INT bitpos, enum machine_mode mode,
5161 tree exp, tree type, int cleared,
5162 alias_set_type alias_set)
5163 {
5164 if (TREE_CODE (exp) == CONSTRUCTOR
5165 /* We can only call store_constructor recursively if the size and
5166 bit position are on a byte boundary. */
5167 && bitpos % BITS_PER_UNIT == 0
5168 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5169 /* If we have a nonzero bitpos for a register target, then we just
5170 let store_field do the bitfield handling. This is unlikely to
5171 generate unnecessary clear instructions anyway. */
5172 && (bitpos == 0 || MEM_P (target)))
5173 {
5174 if (MEM_P (target))
5175 target
5176 = adjust_address (target,
5177 GET_MODE (target) == BLKmode
5178 || 0 != (bitpos
5179 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5180 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5181
5182
5183 /* Update the alias set, if required. */
5184 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5185 && MEM_ALIAS_SET (target) != 0)
5186 {
5187 target = copy_rtx (target);
5188 set_mem_alias_set (target, alias_set);
5189 }
5190
5191 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5192 }
5193 else
5194 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5195 }
5196
5197 /* Store the value of constructor EXP into the rtx TARGET.
5198 TARGET is either a REG or a MEM; we know it cannot conflict, since
5199 safe_from_p has been called.
5200 CLEARED is true if TARGET is known to have been zero'd.
5201 SIZE is the number of bytes of TARGET we are allowed to modify: this
5202 may not be the same as the size of EXP if we are assigning to a field
5203 which has been packed to exclude padding bits. */
5204
5205 static void
5206 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5207 {
5208 tree type = TREE_TYPE (exp);
5209 #ifdef WORD_REGISTER_OPERATIONS
5210 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5211 #endif
5212
5213 switch (TREE_CODE (type))
5214 {
5215 case RECORD_TYPE:
5216 case UNION_TYPE:
5217 case QUAL_UNION_TYPE:
5218 {
5219 unsigned HOST_WIDE_INT idx;
5220 tree field, value;
5221
5222 /* If size is zero or the target is already cleared, do nothing. */
5223 if (size == 0 || cleared)
5224 cleared = 1;
5225 /* We either clear the aggregate or indicate the value is dead. */
5226 else if ((TREE_CODE (type) == UNION_TYPE
5227 || TREE_CODE (type) == QUAL_UNION_TYPE)
5228 && ! CONSTRUCTOR_ELTS (exp))
5229 /* If the constructor is empty, clear the union. */
5230 {
5231 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5232 cleared = 1;
5233 }
5234
5235 /* If we are building a static constructor into a register,
5236 set the initial value as zero so we can fold the value into
5237 a constant. But if more than one register is involved,
5238 this probably loses. */
5239 else if (REG_P (target) && TREE_STATIC (exp)
5240 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5241 {
5242 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5243 cleared = 1;
5244 }
5245
5246 /* If the constructor has fewer fields than the structure or
5247 if we are initializing the structure to mostly zeros, clear
5248 the whole structure first. Don't do this if TARGET is a
5249 register whose mode size isn't equal to SIZE since
5250 clear_storage can't handle this case. */
5251 else if (size > 0
5252 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5253 != fields_length (type))
5254 || mostly_zeros_p (exp))
5255 && (!REG_P (target)
5256 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5257 == size)))
5258 {
5259 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5260 cleared = 1;
5261 }
5262
5263 if (REG_P (target) && !cleared)
5264 emit_clobber (target);
5265
5266 /* Store each element of the constructor into the
5267 corresponding field of TARGET. */
5268 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5269 {
5270 enum machine_mode mode;
5271 HOST_WIDE_INT bitsize;
5272 HOST_WIDE_INT bitpos = 0;
5273 tree offset;
5274 rtx to_rtx = target;
5275
5276 /* Just ignore missing fields. We cleared the whole
5277 structure, above, if any fields are missing. */
5278 if (field == 0)
5279 continue;
5280
5281 if (cleared && initializer_zerop (value))
5282 continue;
5283
5284 if (host_integerp (DECL_SIZE (field), 1))
5285 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5286 else
5287 bitsize = -1;
5288
5289 mode = DECL_MODE (field);
5290 if (DECL_BIT_FIELD (field))
5291 mode = VOIDmode;
5292
5293 offset = DECL_FIELD_OFFSET (field);
5294 if (host_integerp (offset, 0)
5295 && host_integerp (bit_position (field), 0))
5296 {
5297 bitpos = int_bit_position (field);
5298 offset = 0;
5299 }
5300 else
5301 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5302
5303 if (offset)
5304 {
5305 enum machine_mode address_mode;
5306 rtx offset_rtx;
5307
5308 offset
5309 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5310 make_tree (TREE_TYPE (exp),
5311 target));
5312
5313 offset_rtx = expand_normal (offset);
5314 gcc_assert (MEM_P (to_rtx));
5315
5316 address_mode
5317 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5318 if (GET_MODE (offset_rtx) != address_mode)
5319 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5320
5321 to_rtx = offset_address (to_rtx, offset_rtx,
5322 highest_pow2_factor (offset));
5323 }
5324
5325 #ifdef WORD_REGISTER_OPERATIONS
5326 /* If this initializes a field that is smaller than a
5327 word, at the start of a word, try to widen it to a full
5328 word. This special case allows us to output C++ member
5329 function initializations in a form that the optimizers
5330 can understand. */
5331 if (REG_P (target)
5332 && bitsize < BITS_PER_WORD
5333 && bitpos % BITS_PER_WORD == 0
5334 && GET_MODE_CLASS (mode) == MODE_INT
5335 && TREE_CODE (value) == INTEGER_CST
5336 && exp_size >= 0
5337 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5338 {
5339 tree type = TREE_TYPE (value);
5340
5341 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5342 {
5343 type = lang_hooks.types.type_for_size
5344 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5345 value = fold_convert (type, value);
5346 }
5347
5348 if (BYTES_BIG_ENDIAN)
5349 value
5350 = fold_build2 (LSHIFT_EXPR, type, value,
5351 build_int_cst (type,
5352 BITS_PER_WORD - bitsize));
5353 bitsize = BITS_PER_WORD;
5354 mode = word_mode;
5355 }
5356 #endif
5357
5358 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5359 && DECL_NONADDRESSABLE_P (field))
5360 {
5361 to_rtx = copy_rtx (to_rtx);
5362 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5363 }
5364
5365 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5366 value, type, cleared,
5367 get_alias_set (TREE_TYPE (field)));
5368 }
5369 break;
5370 }
5371 case ARRAY_TYPE:
5372 {
5373 tree value, index;
5374 unsigned HOST_WIDE_INT i;
5375 int need_to_clear;
5376 tree domain;
5377 tree elttype = TREE_TYPE (type);
5378 int const_bounds_p;
5379 HOST_WIDE_INT minelt = 0;
5380 HOST_WIDE_INT maxelt = 0;
5381
5382 domain = TYPE_DOMAIN (type);
5383 const_bounds_p = (TYPE_MIN_VALUE (domain)
5384 && TYPE_MAX_VALUE (domain)
5385 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5386 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5387
5388 /* If we have constant bounds for the range of the type, get them. */
5389 if (const_bounds_p)
5390 {
5391 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5392 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5393 }
5394
5395 /* If the constructor has fewer elements than the array, clear
5396 	   the whole array first.  Similarly if this is a static
5397 constructor of a non-BLKmode object. */
5398 if (cleared)
5399 need_to_clear = 0;
5400 else if (REG_P (target) && TREE_STATIC (exp))
5401 need_to_clear = 1;
5402 else
5403 {
5404 unsigned HOST_WIDE_INT idx;
5405 tree index, value;
5406 HOST_WIDE_INT count = 0, zero_count = 0;
5407 need_to_clear = ! const_bounds_p;
5408
5409 /* This loop is a more accurate version of the loop in
5410 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5411 is also needed to check for missing elements. */
5412 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5413 {
5414 HOST_WIDE_INT this_node_count;
5415
5416 if (need_to_clear)
5417 break;
5418
5419 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5420 {
5421 tree lo_index = TREE_OPERAND (index, 0);
5422 tree hi_index = TREE_OPERAND (index, 1);
5423
5424 if (! host_integerp (lo_index, 1)
5425 || ! host_integerp (hi_index, 1))
5426 {
5427 need_to_clear = 1;
5428 break;
5429 }
5430
5431 this_node_count = (tree_low_cst (hi_index, 1)
5432 - tree_low_cst (lo_index, 1) + 1);
5433 }
5434 else
5435 this_node_count = 1;
5436
5437 count += this_node_count;
5438 if (mostly_zeros_p (value))
5439 zero_count += this_node_count;
5440 }
5441
5442 /* Clear the entire array first if there are any missing
5443 elements, or if the incidence of zero elements is >=
5444 75%. */
5445 if (! need_to_clear
5446 && (count < maxelt - minelt + 1
5447 || 4 * zero_count >= 3 * count))
5448 need_to_clear = 1;
5449 }
5450
5451 if (need_to_clear && size > 0)
5452 {
5453 if (REG_P (target))
5454 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5455 else
5456 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5457 cleared = 1;
5458 }
5459
5460 if (!cleared && REG_P (target))
5461 /* Inform later passes that the old value is dead. */
5462 emit_clobber (target);
5463
5464 /* Store each element of the constructor into the
5465 corresponding element of TARGET, determined by counting the
5466 elements. */
5467 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5468 {
5469 enum machine_mode mode;
5470 HOST_WIDE_INT bitsize;
5471 HOST_WIDE_INT bitpos;
5472 rtx xtarget = target;
5473
5474 if (cleared && initializer_zerop (value))
5475 continue;
5476
5477 mode = TYPE_MODE (elttype);
5478 if (mode == BLKmode)
5479 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5480 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5481 : -1);
5482 else
5483 bitsize = GET_MODE_BITSIZE (mode);
5484
5485 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5486 {
5487 tree lo_index = TREE_OPERAND (index, 0);
5488 tree hi_index = TREE_OPERAND (index, 1);
5489 rtx index_r, pos_rtx;
5490 HOST_WIDE_INT lo, hi, count;
5491 tree position;
5492
5493 /* If the range is constant and "small", unroll the loop. */
5494 if (const_bounds_p
5495 && host_integerp (lo_index, 0)
5496 && host_integerp (hi_index, 0)
5497 && (lo = tree_low_cst (lo_index, 0),
5498 hi = tree_low_cst (hi_index, 0),
5499 count = hi - lo + 1,
5500 (!MEM_P (target)
5501 || count <= 2
5502 || (host_integerp (TYPE_SIZE (elttype), 1)
5503 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5504 <= 40 * 8)))))
5505 {
5506 lo -= minelt; hi -= minelt;
5507 for (; lo <= hi; lo++)
5508 {
5509 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5510
5511 if (MEM_P (target)
5512 && !MEM_KEEP_ALIAS_SET_P (target)
5513 && TREE_CODE (type) == ARRAY_TYPE
5514 && TYPE_NONALIASED_COMPONENT (type))
5515 {
5516 target = copy_rtx (target);
5517 MEM_KEEP_ALIAS_SET_P (target) = 1;
5518 }
5519
5520 store_constructor_field
5521 (target, bitsize, bitpos, mode, value, type, cleared,
5522 get_alias_set (elttype));
5523 }
5524 }
5525 else
5526 {
5527 rtx loop_start = gen_label_rtx ();
5528 rtx loop_end = gen_label_rtx ();
5529 tree exit_cond;
5530
5531 expand_normal (hi_index);
5532
5533 index = build_decl (EXPR_LOCATION (exp),
5534 VAR_DECL, NULL_TREE, domain);
5535 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
5536 SET_DECL_RTL (index, index_r);
5537 store_expr (lo_index, index_r, 0, false);
5538
5539 /* Build the head of the loop. */
5540 do_pending_stack_adjust ();
5541 emit_label (loop_start);
5542
5543 /* Assign value to element index. */
5544 position =
5545 fold_convert (ssizetype,
5546 fold_build2 (MINUS_EXPR,
5547 TREE_TYPE (index),
5548 index,
5549 TYPE_MIN_VALUE (domain)));
5550
5551 position =
5552 size_binop (MULT_EXPR, position,
5553 fold_convert (ssizetype,
5554 TYPE_SIZE_UNIT (elttype)));
5555
5556 pos_rtx = expand_normal (position);
5557 xtarget = offset_address (target, pos_rtx,
5558 highest_pow2_factor (position));
5559 xtarget = adjust_address (xtarget, mode, 0);
5560 if (TREE_CODE (value) == CONSTRUCTOR)
5561 store_constructor (value, xtarget, cleared,
5562 bitsize / BITS_PER_UNIT);
5563 else
5564 store_expr (value, xtarget, 0, false);
5565
5566 /* Generate a conditional jump to exit the loop. */
5567 exit_cond = build2 (LT_EXPR, integer_type_node,
5568 index, hi_index);
5569 jumpif (exit_cond, loop_end, -1);
5570
5571 /* Update the loop counter, and jump to the head of
5572 the loop. */
5573 expand_assignment (index,
5574 build2 (PLUS_EXPR, TREE_TYPE (index),
5575 index, integer_one_node),
5576 false);
5577
5578 emit_jump (loop_start);
5579
5580 /* Build the end of the loop. */
5581 emit_label (loop_end);
5582 }
5583 }
5584 else if ((index != 0 && ! host_integerp (index, 0))
5585 || ! host_integerp (TYPE_SIZE (elttype), 1))
5586 {
5587 tree position;
5588
5589 if (index == 0)
5590 index = ssize_int (1);
5591
5592 if (minelt)
5593 index = fold_convert (ssizetype,
5594 fold_build2 (MINUS_EXPR,
5595 TREE_TYPE (index),
5596 index,
5597 TYPE_MIN_VALUE (domain)));
5598
5599 position =
5600 size_binop (MULT_EXPR, index,
5601 fold_convert (ssizetype,
5602 TYPE_SIZE_UNIT (elttype)));
5603 xtarget = offset_address (target,
5604 expand_normal (position),
5605 highest_pow2_factor (position));
5606 xtarget = adjust_address (xtarget, mode, 0);
5607 store_expr (value, xtarget, 0, false);
5608 }
5609 else
5610 {
5611 if (index != 0)
5612 bitpos = ((tree_low_cst (index, 0) - minelt)
5613 * tree_low_cst (TYPE_SIZE (elttype), 1));
5614 else
5615 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5616
5617 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5618 && TREE_CODE (type) == ARRAY_TYPE
5619 && TYPE_NONALIASED_COMPONENT (type))
5620 {
5621 target = copy_rtx (target);
5622 MEM_KEEP_ALIAS_SET_P (target) = 1;
5623 }
5624 store_constructor_field (target, bitsize, bitpos, mode, value,
5625 type, cleared, get_alias_set (elttype));
5626 }
5627 }
5628 break;
5629 }
5630
5631 case VECTOR_TYPE:
5632 {
5633 unsigned HOST_WIDE_INT idx;
5634 constructor_elt *ce;
5635 int i;
5636 int need_to_clear;
5637 int icode = 0;
5638 tree elttype = TREE_TYPE (type);
5639 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5640 enum machine_mode eltmode = TYPE_MODE (elttype);
5641 HOST_WIDE_INT bitsize;
5642 HOST_WIDE_INT bitpos;
5643 rtvec vector = NULL;
5644 unsigned n_elts;
5645 alias_set_type alias;
5646
5647 gcc_assert (eltmode != BLKmode);
5648
5649 n_elts = TYPE_VECTOR_SUBPARTS (type);
5650 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5651 {
5652 enum machine_mode mode = GET_MODE (target);
5653
5654 icode = (int) optab_handler (vec_init_optab, mode);
5655 if (icode != CODE_FOR_nothing)
5656 {
5657 unsigned int i;
5658
5659 vector = rtvec_alloc (n_elts);
5660 for (i = 0; i < n_elts; i++)
5661 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5662 }
5663 }
5664
5665 /* If the constructor has fewer elements than the vector,
5666 	   clear the whole vector first.  Similarly if this is a static
5667 constructor of a non-BLKmode object. */
5668 if (cleared)
5669 need_to_clear = 0;
5670 else if (REG_P (target) && TREE_STATIC (exp))
5671 need_to_clear = 1;
5672 else
5673 {
5674 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5675 tree value;
5676
5677 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5678 {
5679 int n_elts_here = tree_low_cst
5680 (int_const_binop (TRUNC_DIV_EXPR,
5681 TYPE_SIZE (TREE_TYPE (value)),
5682 TYPE_SIZE (elttype)), 1);
5683
5684 count += n_elts_here;
5685 if (mostly_zeros_p (value))
5686 zero_count += n_elts_here;
5687 }
5688
5689 /* Clear the entire vector first if there are any missing elements,
5690 or if the incidence of zero elements is >= 75%. */
5691 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5692 }
5693
5694 if (need_to_clear && size > 0 && !vector)
5695 {
5696 if (REG_P (target))
5697 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5698 else
5699 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5700 cleared = 1;
5701 }
5702
5703 /* Inform later passes that the old value is dead. */
5704 if (!cleared && !vector && REG_P (target))
5705 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5706
5707 if (MEM_P (target))
5708 alias = MEM_ALIAS_SET (target);
5709 else
5710 alias = get_alias_set (elttype);
5711
5712 /* Store each element of the constructor into the corresponding
5713 element of TARGET, determined by counting the elements. */
5714 for (idx = 0, i = 0;
5715 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5716 idx++, i += bitsize / elt_size)
5717 {
5718 HOST_WIDE_INT eltpos;
5719 tree value = ce->value;
5720
5721 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5722 if (cleared && initializer_zerop (value))
5723 continue;
5724
5725 if (ce->index)
5726 eltpos = tree_low_cst (ce->index, 1);
5727 else
5728 eltpos = i;
5729
5730 if (vector)
5731 {
5732 /* Vector CONSTRUCTORs should only be built from smaller
5733 vectors in the case of BLKmode vectors. */
5734 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5735 RTVEC_ELT (vector, eltpos)
5736 = expand_normal (value);
5737 }
5738 else
5739 {
5740 enum machine_mode value_mode =
5741 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5742 ? TYPE_MODE (TREE_TYPE (value))
5743 : eltmode;
5744 bitpos = eltpos * elt_size;
5745 store_constructor_field (target, bitsize, bitpos,
5746 value_mode, value, type,
5747 cleared, alias);
5748 }
5749 }
5750
5751 if (vector)
5752 emit_insn (GEN_FCN (icode)
5753 (target,
5754 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5755 break;
5756 }
5757
5758 default:
5759 gcc_unreachable ();
5760 }
5761 }
5762
5763 /* Store the value of EXP (an expression tree)
5764 into a subfield of TARGET which has mode MODE and occupies
5765 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5766 If MODE is VOIDmode, it means that we are storing into a bit-field.
5767
5768 Always return const0_rtx unless we have something particular to
5769 return.
5770
5771 TYPE is the type of the underlying object,
5772
5773 ALIAS_SET is the alias set for the destination. This value will
5774 (in general) be different from that for TARGET, since TARGET is a
5775 reference to the containing structure.
5776
5777 If NONTEMPORAL is true, try generating a nontemporal store. */
5778
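/* As a hypothetical fragment (not taken from the sources), a bit-field
   assignment such as

     struct flags { unsigned a : 3; unsigned b : 5; } f;
     f.b = 7;

   typically arrives here with MODE == VOIDmode and BITSIZE/BITPOS
   describing the 5-bit field, so the value is stored with
   store_bit_field rather than through an ordinary memory reference.  */
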
5779 static rtx
5780 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5781 enum machine_mode mode, tree exp, tree type,
5782 alias_set_type alias_set, bool nontemporal)
5783 {
5784 if (TREE_CODE (exp) == ERROR_MARK)
5785 return const0_rtx;
5786
5787 /* If we have nothing to store, do nothing unless the expression has
5788 side-effects. */
5789 if (bitsize == 0)
5790 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5791
5792 /* If we are storing into an unaligned field of an aligned union that is
5793 in a register, we may have the mode of TARGET being an integer mode but
5794 MODE == BLKmode. In that case, get an aligned object whose size and
5795 alignment are the same as TARGET and store TARGET into it (we can avoid
5796 the store if the field being stored is the entire width of TARGET). Then
5797 call ourselves recursively to store the field into a BLKmode version of
5798 that object. Finally, load from the object into TARGET. This is not
5799 very efficient in general, but should only be slightly more expensive
5800 than the otherwise-required unaligned accesses. Perhaps this can be
5801 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5802 twice, once with emit_move_insn and once via store_field. */
5803
5804 if (mode == BLKmode
5805 && (REG_P (target) || GET_CODE (target) == SUBREG))
5806 {
5807 rtx object = assign_temp (type, 0, 1, 1);
5808 rtx blk_object = adjust_address (object, BLKmode, 0);
5809
5810 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5811 emit_move_insn (object, target);
5812
5813 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5814 nontemporal);
5815
5816 emit_move_insn (target, object);
5817
5818 /* We want to return the BLKmode version of the data. */
5819 return blk_object;
5820 }
5821
5822 if (GET_CODE (target) == CONCAT)
5823 {
5824 /* We're storing into a struct containing a single __complex. */
5825
5826 gcc_assert (!bitpos);
5827 return store_expr (exp, target, 0, nontemporal);
5828 }
5829
5830 /* If the structure is in a register or if the component
5831 is a bit field, we cannot use addressing to access it.
5832 Use bit-field techniques or SUBREG to store in it. */
5833
5834 if (mode == VOIDmode
5835 || (mode != BLKmode && ! direct_store[(int) mode]
5836 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5837 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5838 || REG_P (target)
5839 || GET_CODE (target) == SUBREG
5840 /* If the field isn't aligned enough to store as an ordinary memref,
5841 store it as a bit field. */
5842 || (mode != BLKmode
5843 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5844 || bitpos % GET_MODE_ALIGNMENT (mode))
5845 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5846 || (bitpos % BITS_PER_UNIT != 0)))
5847 /* If the RHS and field are a constant size and the size of the
5848 RHS isn't the same size as the bitfield, we must use bitfield
5849 operations. */
5850 || (bitsize >= 0
5851 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5852 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
5853 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
5854 decl we must use bitfield operations. */
5855 || (bitsize >= 0
5856 && TREE_CODE (exp) == MEM_REF
5857 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5858 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5859 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
5860 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
5861 {
5862 rtx temp;
5863 gimple nop_def;
5864
5865 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5866 implies a mask operation. If the precision is the same size as
5867 the field we're storing into, that mask is redundant. This is
5868 particularly common with bit field assignments generated by the
5869 C front end. */
5870 nop_def = get_def_for_expr (exp, NOP_EXPR);
5871 if (nop_def)
5872 {
5873 tree type = TREE_TYPE (exp);
5874 if (INTEGRAL_TYPE_P (type)
5875 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5876 && bitsize == TYPE_PRECISION (type))
5877 {
5878 tree op = gimple_assign_rhs1 (nop_def);
5879 type = TREE_TYPE (op);
5880 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5881 exp = op;
5882 }
5883 }
5884
5885 temp = expand_normal (exp);
5886
5887 /* If BITSIZE is narrower than the size of the type of EXP
5888 we will be narrowing TEMP. Normally, what's wanted are the
5889 low-order bits. However, if EXP's type is a record and this is
5890 	     a big-endian machine, we want the upper BITSIZE bits.  */
5891 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5892 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5893 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5894 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5895 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
5896 NULL_RTX, 1);
5897
5898 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5899 MODE. */
5900 if (mode != VOIDmode && mode != BLKmode
5901 && mode != TYPE_MODE (TREE_TYPE (exp)))
5902 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5903
5904 /* If the modes of TEMP and TARGET are both BLKmode, both
5905 must be in memory and BITPOS must be aligned on a byte
5906 boundary. If so, we simply do a block copy. Likewise
5907 for a BLKmode-like TARGET. */
5908 if (GET_MODE (temp) == BLKmode
5909 && (GET_MODE (target) == BLKmode
5910 || (MEM_P (target)
5911 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5912 && (bitpos % BITS_PER_UNIT) == 0
5913 && (bitsize % BITS_PER_UNIT) == 0)))
5914 {
5915 gcc_assert (MEM_P (target) && MEM_P (temp)
5916 && (bitpos % BITS_PER_UNIT) == 0);
5917
5918 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5919 emit_block_move (target, temp,
5920 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5921 / BITS_PER_UNIT),
5922 BLOCK_OP_NORMAL);
5923
5924 return const0_rtx;
5925 }
5926
5927 /* Store the value in the bitfield. */
5928 store_bit_field (target, bitsize, bitpos, mode, temp);
5929
5930 return const0_rtx;
5931 }
5932 else
5933 {
5934 /* Now build a reference to just the desired component. */
5935 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5936
5937 if (to_rtx == target)
5938 to_rtx = copy_rtx (to_rtx);
5939
5940 if (!MEM_SCALAR_P (to_rtx))
5941 MEM_IN_STRUCT_P (to_rtx) = 1;
5942 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5943 set_mem_alias_set (to_rtx, alias_set);
5944
5945 return store_expr (exp, to_rtx, 0, nontemporal);
5946 }
5947 }
5948 \f
5949 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5950 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5951 codes and find the ultimate containing object, which we return.
5952
5953 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5954 bit position, and *PUNSIGNEDP to the signedness of the field.
5955 If the position of the field is variable, we store a tree
5956 giving the variable offset (in units) in *POFFSET.
5957 This offset is in addition to the bit position.
5958 If the position is not variable, we store 0 in *POFFSET.
5959
5960 If any of the extraction expressions is volatile,
5961 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5962
5963 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5964 Otherwise, it is a mode that can be used to access the field.
5965
5966 If the field describes a variable-sized object, *PMODE is set to
5967 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5968 this case, but the address of the object can be found.
5969
5970 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5971 look through nodes that serve as markers of a greater alignment than
5972 the one that can be deduced from the expression. These nodes make it
5973 possible for front-ends to prevent temporaries from being created by
5974 the middle-end on alignment considerations. For that purpose, the
5975 normal operating mode at high-level is to always pass FALSE so that
5976 the ultimate containing object is really returned; moreover, the
5977 associated predicate handled_component_p will always return TRUE
5978 on these nodes, thus indicating that they are essentially handled
5979 by get_inner_reference. TRUE should only be passed when the caller
5980 is scanning the expression in order to build another representation
5981 and specifically knows how to handle these nodes; as such, this is
5982 the normal operating mode in the RTL expanders. */
5983
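/* A worked example (the layout and sizes are assumed, not taken from a
   particular target): for a reference written as

     struct s { char c; int arr[4]; } *p;
     ... p->arr[2] ...

   the loop below peels the ARRAY_REF and the COMPONENT_REF, accumulating
   the byte offset of ARR plus 2 * sizeof (int); because everything is
   constant the total is folded into *PBITPOS (in bits), *POFFSET is set
   to 0, and the MEM_REF for *p is returned as the containing object.  */
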
5984 tree
5985 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5986 HOST_WIDE_INT *pbitpos, tree *poffset,
5987 enum machine_mode *pmode, int *punsignedp,
5988 int *pvolatilep, bool keep_aligning)
5989 {
5990 tree size_tree = 0;
5991 enum machine_mode mode = VOIDmode;
5992 bool blkmode_bitfield = false;
5993 tree offset = size_zero_node;
5994 double_int bit_offset = double_int_zero;
5995
5996 /* First get the mode, signedness, and size. We do this from just the
5997 outermost expression. */
5998 *pbitsize = -1;
5999 if (TREE_CODE (exp) == COMPONENT_REF)
6000 {
6001 tree field = TREE_OPERAND (exp, 1);
6002 size_tree = DECL_SIZE (field);
6003 if (!DECL_BIT_FIELD (field))
6004 mode = DECL_MODE (field);
6005 else if (DECL_MODE (field) == BLKmode)
6006 blkmode_bitfield = true;
6007 else if (TREE_THIS_VOLATILE (exp)
6008 && flag_strict_volatile_bitfields > 0)
6009 /* Volatile bitfields should be accessed in the mode of the
6010 field's type, not the mode computed based on the bit
6011 size. */
6012 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6013
6014 *punsignedp = DECL_UNSIGNED (field);
6015 }
6016 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6017 {
6018 size_tree = TREE_OPERAND (exp, 1);
6019 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6020 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6021
6022 /* For vector types, with the correct size of access, use the mode of
6023 inner type. */
6024 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6025 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6026 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6027 mode = TYPE_MODE (TREE_TYPE (exp));
6028 }
6029 else
6030 {
6031 mode = TYPE_MODE (TREE_TYPE (exp));
6032 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6033
6034 if (mode == BLKmode)
6035 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6036 else
6037 *pbitsize = GET_MODE_BITSIZE (mode);
6038 }
6039
6040 if (size_tree != 0)
6041 {
6042 if (! host_integerp (size_tree, 1))
6043 mode = BLKmode, *pbitsize = -1;
6044 else
6045 *pbitsize = tree_low_cst (size_tree, 1);
6046 }
6047
6048 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6049 and find the ultimate containing object. */
6050 while (1)
6051 {
6052 switch (TREE_CODE (exp))
6053 {
6054 case BIT_FIELD_REF:
6055 bit_offset
6056 = double_int_add (bit_offset,
6057 tree_to_double_int (TREE_OPERAND (exp, 2)));
6058 break;
6059
6060 case COMPONENT_REF:
6061 {
6062 tree field = TREE_OPERAND (exp, 1);
6063 tree this_offset = component_ref_field_offset (exp);
6064
6065 /* If this field hasn't been filled in yet, don't go past it.
6066 This should only happen when folding expressions made during
6067 type construction. */
6068 if (this_offset == 0)
6069 break;
6070
6071 offset = size_binop (PLUS_EXPR, offset, this_offset);
6072 bit_offset = double_int_add (bit_offset,
6073 tree_to_double_int
6074 (DECL_FIELD_BIT_OFFSET (field)));
6075
6076 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6077 }
6078 break;
6079
6080 case ARRAY_REF:
6081 case ARRAY_RANGE_REF:
6082 {
6083 tree index = TREE_OPERAND (exp, 1);
6084 tree low_bound = array_ref_low_bound (exp);
6085 tree unit_size = array_ref_element_size (exp);
6086
6087 /* We assume all arrays have sizes that are a multiple of a byte.
6088 First subtract the lower bound, if any, in the type of the
6089 index, then convert to sizetype and multiply by the size of
6090 the array element. */
6091 if (! integer_zerop (low_bound))
6092 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6093 index, low_bound);
6094
6095 offset = size_binop (PLUS_EXPR, offset,
6096 size_binop (MULT_EXPR,
6097 fold_convert (sizetype, index),
6098 unit_size));
6099 }
6100 break;
6101
6102 case REALPART_EXPR:
6103 break;
6104
6105 case IMAGPART_EXPR:
6106 bit_offset = double_int_add (bit_offset,
6107 uhwi_to_double_int (*pbitsize));
6108 break;
6109
6110 case VIEW_CONVERT_EXPR:
6111 if (keep_aligning && STRICT_ALIGNMENT
6112 && (TYPE_ALIGN (TREE_TYPE (exp))
6113 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6114 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6115 < BIGGEST_ALIGNMENT)
6116 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6117 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6118 goto done;
6119 break;
6120
6121 case MEM_REF:
6122 /* Hand back the decl for MEM[&decl, off]. */
6123 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6124 {
6125 tree off = TREE_OPERAND (exp, 1);
6126 if (!integer_zerop (off))
6127 {
6128 double_int boff, coff = mem_ref_offset (exp);
6129 boff = double_int_lshift (coff,
6130 BITS_PER_UNIT == 8
6131 ? 3 : exact_log2 (BITS_PER_UNIT),
6132 HOST_BITS_PER_DOUBLE_INT, true);
6133 bit_offset = double_int_add (bit_offset, boff);
6134 }
6135 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6136 }
6137 goto done;
6138
6139 default:
6140 goto done;
6141 }
6142
6143 /* If any reference in the chain is volatile, the effect is volatile. */
6144 if (TREE_THIS_VOLATILE (exp))
6145 *pvolatilep = 1;
6146
6147 exp = TREE_OPERAND (exp, 0);
6148 }
6149 done:
6150
6151 /* If OFFSET is constant, see if we can return the whole thing as a
6152 constant bit position. Make sure to handle overflow during
6153 this conversion. */
6154 if (host_integerp (offset, 0))
6155 {
6156 double_int tem = double_int_lshift (tree_to_double_int (offset),
6157 BITS_PER_UNIT == 8
6158 ? 3 : exact_log2 (BITS_PER_UNIT),
6159 HOST_BITS_PER_DOUBLE_INT, true);
6160 tem = double_int_add (tem, bit_offset);
6161 if (double_int_fits_in_shwi_p (tem))
6162 {
6163 *pbitpos = double_int_to_shwi (tem);
6164 *poffset = offset = NULL_TREE;
6165 }
6166 }
6167
6168 /* Otherwise, split it up. */
6169 if (offset)
6170 {
6171 *pbitpos = double_int_to_shwi (bit_offset);
6172 *poffset = offset;
6173 }
6174
6175 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6176 if (mode == VOIDmode
6177 && blkmode_bitfield
6178 && (*pbitpos % BITS_PER_UNIT) == 0
6179 && (*pbitsize % BITS_PER_UNIT) == 0)
6180 *pmode = BLKmode;
6181 else
6182 *pmode = mode;
6183
6184 return exp;
6185 }
6186
6187 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6188 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6189 EXP is marked as PACKED. */
6190
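/* For instance (an invented declaration), given

     struct __attribute__ ((packed)) hdr { char tag; int len; };
     struct hdr h;

   a reference such as h.len contains a packed COMPONENT_REF and this
   predicate returns true, while the same reference into an ordinary,
   unpacked struct does not.  */
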
6191 bool
6192 contains_packed_reference (const_tree exp)
6193 {
6194 bool packed_p = false;
6195
6196 while (1)
6197 {
6198 switch (TREE_CODE (exp))
6199 {
6200 case COMPONENT_REF:
6201 {
6202 tree field = TREE_OPERAND (exp, 1);
6203 packed_p = DECL_PACKED (field)
6204 || TYPE_PACKED (TREE_TYPE (field))
6205 || TYPE_PACKED (TREE_TYPE (exp));
6206 if (packed_p)
6207 goto done;
6208 }
6209 break;
6210
6211 case BIT_FIELD_REF:
6212 case ARRAY_REF:
6213 case ARRAY_RANGE_REF:
6214 case REALPART_EXPR:
6215 case IMAGPART_EXPR:
6216 case VIEW_CONVERT_EXPR:
6217 break;
6218
6219 default:
6220 goto done;
6221 }
6222 exp = TREE_OPERAND (exp, 0);
6223 }
6224 done:
6225 return packed_p;
6226 }
6227
6228 /* Return a tree of sizetype representing the size, in bytes, of the element
6229 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6230
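/* E.g. (hypothetical), for a reference a[i] where A is declared as
   "double a[N]" and no aligned size was recorded in operand 3, this
   simply hands back TYPE_SIZE_UNIT of double, i.e. sizeof (double) as a
   sizetype constant.  */
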
6231 tree
6232 array_ref_element_size (tree exp)
6233 {
6234 tree aligned_size = TREE_OPERAND (exp, 3);
6235 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6236 location_t loc = EXPR_LOCATION (exp);
6237
6238 /* If a size was specified in the ARRAY_REF, it's the size measured
6239 in alignment units of the element type. So multiply by that value. */
6240 if (aligned_size)
6241 {
6242 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6243 sizetype from another type of the same width and signedness. */
6244 if (TREE_TYPE (aligned_size) != sizetype)
6245 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6246 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6247 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6248 }
6249
6250 /* Otherwise, take the size from that of the element type. Substitute
6251 any PLACEHOLDER_EXPR that we have. */
6252 else
6253 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6254 }
6255
6256 /* Return a tree representing the lower bound of the array mentioned in
6257 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6258
6259 tree
6260 array_ref_low_bound (tree exp)
6261 {
6262 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6263
6264 /* If a lower bound is specified in EXP, use it. */
6265 if (TREE_OPERAND (exp, 2))
6266 return TREE_OPERAND (exp, 2);
6267
6268 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6269 substituting for a PLACEHOLDER_EXPR as needed. */
6270 if (domain_type && TYPE_MIN_VALUE (domain_type))
6271 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6272
6273 /* Otherwise, return a zero of the appropriate type. */
6274 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6275 }
6276
6277 /* Return a tree representing the upper bound of the array mentioned in
6278 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6279
6280 tree
6281 array_ref_up_bound (tree exp)
6282 {
6283 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6284
6285 /* If there is a domain type and it has an upper bound, use it, substituting
6286 for a PLACEHOLDER_EXPR as needed. */
6287 if (domain_type && TYPE_MAX_VALUE (domain_type))
6288 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6289
6290 /* Otherwise fail. */
6291 return NULL_TREE;
6292 }
6293
6294 /* Return a tree representing the offset, in bytes, of the field referenced
6295 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6296
6297 tree
6298 component_ref_field_offset (tree exp)
6299 {
6300 tree aligned_offset = TREE_OPERAND (exp, 2);
6301 tree field = TREE_OPERAND (exp, 1);
6302 location_t loc = EXPR_LOCATION (exp);
6303
6304 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6305 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6306 value. */
6307 if (aligned_offset)
6308 {
6309 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6310 sizetype from another type of the same width and signedness. */
6311 if (TREE_TYPE (aligned_offset) != sizetype)
6312 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6313 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6314 size_int (DECL_OFFSET_ALIGN (field)
6315 / BITS_PER_UNIT));
6316 }
6317
6318 /* Otherwise, take the offset from that of the field. Substitute
6319 any PLACEHOLDER_EXPR that we have. */
6320 else
6321 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6322 }
6323
6324 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6325
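/* For example (assuming a packed layout), for a target written as s.f,
   where the field F carries DECL_ALIGN of 8 bits because S is a packed
   struct, the recursion below returns MIN (8, alignment of S), so the
   assignment may only assume byte alignment regardless of the natural
   alignment of F's type.  */
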
6326 static unsigned HOST_WIDE_INT
6327 target_align (const_tree target)
6328 {
6329 /* We might have a chain of nested references with intermediate misaligning
6330      bit-field components, so we need to recurse to find out.  */
6331
6332 unsigned HOST_WIDE_INT this_align, outer_align;
6333
6334 switch (TREE_CODE (target))
6335 {
6336 case BIT_FIELD_REF:
6337 return 1;
6338
6339 case COMPONENT_REF:
6340 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6341 outer_align = target_align (TREE_OPERAND (target, 0));
6342 return MIN (this_align, outer_align);
6343
6344 case ARRAY_REF:
6345 case ARRAY_RANGE_REF:
6346 this_align = TYPE_ALIGN (TREE_TYPE (target));
6347 outer_align = target_align (TREE_OPERAND (target, 0));
6348 return MIN (this_align, outer_align);
6349
6350 CASE_CONVERT:
6351 case NON_LVALUE_EXPR:
6352 case VIEW_CONVERT_EXPR:
6353 this_align = TYPE_ALIGN (TREE_TYPE (target));
6354 outer_align = target_align (TREE_OPERAND (target, 0));
6355 return MAX (this_align, outer_align);
6356
6357 default:
6358 return TYPE_ALIGN (TREE_TYPE (target));
6359 }
6360 }
6361
6362 \f
6363 /* Given an rtx VALUE that may contain additions and multiplications, return
6364 an equivalent value that just refers to a register, memory, or constant.
6365 This is done by generating instructions to perform the arithmetic and
6366 returning a pseudo-register containing the value.
6367
6368 The returned value may be a REG, SUBREG, MEM or constant. */
6369
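/* A small sketch of the intended use (the register number is invented):
   given VALUE = (plus:SI (reg:SI 100) (const_int 4)), the code below
   emits an add into TARGET or a fresh pseudo via expand_simple_binop and
   returns that register, so callers never see the embedded arithmetic.  */
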
6370 rtx
6371 force_operand (rtx value, rtx target)
6372 {
6373 rtx op1, op2;
6374 /* Use subtarget as the target for operand 0 of a binary operation. */
6375 rtx subtarget = get_subtarget (target);
6376 enum rtx_code code = GET_CODE (value);
6377
6378 /* Check for subreg applied to an expression produced by loop optimizer. */
6379 if (code == SUBREG
6380 && !REG_P (SUBREG_REG (value))
6381 && !MEM_P (SUBREG_REG (value)))
6382 {
6383 value
6384 = simplify_gen_subreg (GET_MODE (value),
6385 force_reg (GET_MODE (SUBREG_REG (value)),
6386 force_operand (SUBREG_REG (value),
6387 NULL_RTX)),
6388 GET_MODE (SUBREG_REG (value)),
6389 SUBREG_BYTE (value));
6390 code = GET_CODE (value);
6391 }
6392
6393 /* Check for a PIC address load. */
6394 if ((code == PLUS || code == MINUS)
6395 && XEXP (value, 0) == pic_offset_table_rtx
6396 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6397 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6398 || GET_CODE (XEXP (value, 1)) == CONST))
6399 {
6400 if (!subtarget)
6401 subtarget = gen_reg_rtx (GET_MODE (value));
6402 emit_move_insn (subtarget, value);
6403 return subtarget;
6404 }
6405
6406 if (ARITHMETIC_P (value))
6407 {
6408 op2 = XEXP (value, 1);
6409 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6410 subtarget = 0;
6411 if (code == MINUS && CONST_INT_P (op2))
6412 {
6413 code = PLUS;
6414 op2 = negate_rtx (GET_MODE (value), op2);
6415 }
6416
6417 /* Check for an addition with OP2 a constant integer and our first
6418 operand a PLUS of a virtual register and something else. In that
6419 case, we want to emit the sum of the virtual register and the
6420 constant first and then add the other value. This allows virtual
6421 register instantiation to simply modify the constant rather than
6422 creating another one around this addition. */
6423 if (code == PLUS && CONST_INT_P (op2)
6424 && GET_CODE (XEXP (value, 0)) == PLUS
6425 && REG_P (XEXP (XEXP (value, 0), 0))
6426 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6427 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6428 {
6429 rtx temp = expand_simple_binop (GET_MODE (value), code,
6430 XEXP (XEXP (value, 0), 0), op2,
6431 subtarget, 0, OPTAB_LIB_WIDEN);
6432 return expand_simple_binop (GET_MODE (value), code, temp,
6433 force_operand (XEXP (XEXP (value,
6434 0), 1), 0),
6435 target, 0, OPTAB_LIB_WIDEN);
6436 }
6437
6438 op1 = force_operand (XEXP (value, 0), subtarget);
6439 op2 = force_operand (op2, NULL_RTX);
6440 switch (code)
6441 {
6442 case MULT:
6443 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6444 case DIV:
6445 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6446 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6447 target, 1, OPTAB_LIB_WIDEN);
6448 else
6449 return expand_divmod (0,
6450 FLOAT_MODE_P (GET_MODE (value))
6451 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6452 GET_MODE (value), op1, op2, target, 0);
6453 case MOD:
6454 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6455 target, 0);
6456 case UDIV:
6457 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6458 target, 1);
6459 case UMOD:
6460 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6461 target, 1);
6462 case ASHIFTRT:
6463 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6464 target, 0, OPTAB_LIB_WIDEN);
6465 default:
6466 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6467 target, 1, OPTAB_LIB_WIDEN);
6468 }
6469 }
6470 if (UNARY_P (value))
6471 {
6472 if (!target)
6473 target = gen_reg_rtx (GET_MODE (value));
6474 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6475 switch (code)
6476 {
6477 case ZERO_EXTEND:
6478 case SIGN_EXTEND:
6479 case TRUNCATE:
6480 case FLOAT_EXTEND:
6481 case FLOAT_TRUNCATE:
6482 convert_move (target, op1, code == ZERO_EXTEND);
6483 return target;
6484
6485 case FIX:
6486 case UNSIGNED_FIX:
6487 expand_fix (target, op1, code == UNSIGNED_FIX);
6488 return target;
6489
6490 case FLOAT:
6491 case UNSIGNED_FLOAT:
6492 expand_float (target, op1, code == UNSIGNED_FLOAT);
6493 return target;
6494
6495 default:
6496 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6497 }
6498 }
6499
6500 #ifdef INSN_SCHEDULING
6501 	/* On machines that have insn scheduling, we want all memory references to be
6502 explicit, so we need to deal with such paradoxical SUBREGs. */
6503 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6504 && (GET_MODE_SIZE (GET_MODE (value))
6505 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6506 value
6507 = simplify_gen_subreg (GET_MODE (value),
6508 force_reg (GET_MODE (SUBREG_REG (value)),
6509 force_operand (SUBREG_REG (value),
6510 NULL_RTX)),
6511 GET_MODE (SUBREG_REG (value)),
6512 SUBREG_BYTE (value));
6513 #endif
6514
6515 return value;
6516 }
6517 \f
6518 /* Subroutine of expand_expr: return nonzero iff there is no way that
6519 EXP can reference X, which is being modified. TOP_P is nonzero if this
6520 call is going to be used to determine whether we need a temporary
6521 for EXP, as opposed to a recursive call to this function.
6522
6523 It is always safe for this routine to return zero since it merely
6524 searches for optimization opportunities. */
6525
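/* A minimal usage sketch, mirroring expand_operands below:

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   i.e. TARGET is only reused for a result when the other expression
   provably cannot reference it while it is being overwritten; otherwise
   the caller falls back to a fresh temporary.  */
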
6526 int
6527 safe_from_p (const_rtx x, tree exp, int top_p)
6528 {
6529 rtx exp_rtl = 0;
6530 int i, nops;
6531
6532 if (x == 0
6533 /* If EXP has varying size, we MUST use a target since we currently
6534 have no way of allocating temporaries of variable size
6535 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6536 So we assume here that something at a higher level has prevented a
6537 clash. This is somewhat bogus, but the best we can do. Only
6538 do this when X is BLKmode and when we are at the top level. */
6539 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6540 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6541 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6542 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6543 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6544 != INTEGER_CST)
6545 && GET_MODE (x) == BLKmode)
6546 /* If X is in the outgoing argument area, it is always safe. */
6547 || (MEM_P (x)
6548 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6549 || (GET_CODE (XEXP (x, 0)) == PLUS
6550 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6551 return 1;
6552
6553 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6554 find the underlying pseudo. */
6555 if (GET_CODE (x) == SUBREG)
6556 {
6557 x = SUBREG_REG (x);
6558 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6559 return 0;
6560 }
6561
6562 /* Now look at our tree code and possibly recurse. */
6563 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6564 {
6565 case tcc_declaration:
6566 exp_rtl = DECL_RTL_IF_SET (exp);
6567 break;
6568
6569 case tcc_constant:
6570 return 1;
6571
6572 case tcc_exceptional:
6573 if (TREE_CODE (exp) == TREE_LIST)
6574 {
6575 while (1)
6576 {
6577 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6578 return 0;
6579 exp = TREE_CHAIN (exp);
6580 if (!exp)
6581 return 1;
6582 if (TREE_CODE (exp) != TREE_LIST)
6583 return safe_from_p (x, exp, 0);
6584 }
6585 }
6586 else if (TREE_CODE (exp) == CONSTRUCTOR)
6587 {
6588 constructor_elt *ce;
6589 unsigned HOST_WIDE_INT idx;
6590
6591 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce)
6592 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6593 || !safe_from_p (x, ce->value, 0))
6594 return 0;
6595 return 1;
6596 }
6597 else if (TREE_CODE (exp) == ERROR_MARK)
6598 return 1; /* An already-visited SAVE_EXPR? */
6599 else
6600 return 0;
6601
6602 case tcc_statement:
6603 /* The only case we look at here is the DECL_INITIAL inside a
6604 DECL_EXPR. */
6605 return (TREE_CODE (exp) != DECL_EXPR
6606 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6607 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6608 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6609
6610 case tcc_binary:
6611 case tcc_comparison:
6612 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6613 return 0;
6614 /* Fall through. */
6615
6616 case tcc_unary:
6617 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6618
6619 case tcc_expression:
6620 case tcc_reference:
6621 case tcc_vl_exp:
6622 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6623 the expression. If it is set, we conflict iff we are that rtx or
6624 both are in memory. Otherwise, we check all operands of the
6625 expression recursively. */
6626
6627 switch (TREE_CODE (exp))
6628 {
6629 case ADDR_EXPR:
6630 /* If the operand is static or we are static, we can't conflict.
6631 Likewise if we don't conflict with the operand at all. */
6632 if (staticp (TREE_OPERAND (exp, 0))
6633 || TREE_STATIC (exp)
6634 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6635 return 1;
6636
6637 /* Otherwise, the only way this can conflict is if we are taking
6638 	     the address of a DECL whose address is part of X, which is
6639 very rare. */
6640 exp = TREE_OPERAND (exp, 0);
6641 if (DECL_P (exp))
6642 {
6643 if (!DECL_RTL_SET_P (exp)
6644 || !MEM_P (DECL_RTL (exp)))
6645 return 0;
6646 else
6647 exp_rtl = XEXP (DECL_RTL (exp), 0);
6648 }
6649 break;
6650
6651 case MEM_REF:
6652 if (MEM_P (x)
6653 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6654 get_alias_set (exp)))
6655 return 0;
6656 break;
6657
6658 case CALL_EXPR:
6659 /* Assume that the call will clobber all hard registers and
6660 all of memory. */
6661 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6662 || MEM_P (x))
6663 return 0;
6664 break;
6665
6666 case WITH_CLEANUP_EXPR:
6667 case CLEANUP_POINT_EXPR:
6668 /* Lowered by gimplify.c. */
6669 gcc_unreachable ();
6670
6671 case SAVE_EXPR:
6672 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6673
6674 default:
6675 break;
6676 }
6677
6678 /* If we have an rtx, we do not need to scan our operands. */
6679 if (exp_rtl)
6680 break;
6681
6682 nops = TREE_OPERAND_LENGTH (exp);
6683 for (i = 0; i < nops; i++)
6684 if (TREE_OPERAND (exp, i) != 0
6685 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6686 return 0;
6687
6688 break;
6689
6690 case tcc_type:
6691 /* Should never get a type here. */
6692 gcc_unreachable ();
6693 }
6694
6695 /* If we have an rtl, find any enclosed object. Then see if we conflict
6696 with it. */
6697 if (exp_rtl)
6698 {
6699 if (GET_CODE (exp_rtl) == SUBREG)
6700 {
6701 exp_rtl = SUBREG_REG (exp_rtl);
6702 if (REG_P (exp_rtl)
6703 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6704 return 0;
6705 }
6706
6707 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6708 are memory and they conflict. */
6709 return ! (rtx_equal_p (x, exp_rtl)
6710 || (MEM_P (x) && MEM_P (exp_rtl)
6711 && true_dependence (exp_rtl, VOIDmode, x,
6712 rtx_addr_varies_p)));
6713 }
6714
6715 /* If we reach here, it is safe. */
6716 return 1;
6717 }
6718
6719 \f
6720 /* Return the highest power of two that EXP is known to be a multiple of.
6721 This is used in updating alignment of MEMs in array references. */
6722
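/* Worked example (an invented offset expression): for a tree of the form
   i * 12 + 4, the MULT_EXPR contributes 1 * 4 (4 being the largest power
   of two dividing 12), the PLUS_EXPR takes MIN (4, 4), and the result is
   4, so a MEM addressed by this expression may be marked as 4-byte
   aligned.  */
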
6723 unsigned HOST_WIDE_INT
6724 highest_pow2_factor (const_tree exp)
6725 {
6726 unsigned HOST_WIDE_INT c0, c1;
6727
6728 switch (TREE_CODE (exp))
6729 {
6730 case INTEGER_CST:
6731 /* We can find the lowest bit that's a one. If the low
6732 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6733 We need to handle this case since we can find it in a COND_EXPR,
6734 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6735 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6736 later ICE. */
6737 if (TREE_OVERFLOW (exp))
6738 return BIGGEST_ALIGNMENT;
6739 else
6740 {
6741 /* Note: tree_low_cst is intentionally not used here,
6742 we don't care about the upper bits. */
6743 c0 = TREE_INT_CST_LOW (exp);
6744 c0 &= -c0;
6745 return c0 ? c0 : BIGGEST_ALIGNMENT;
6746 }
6747 break;
6748
6749 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6750 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6751 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6752 return MIN (c0, c1);
6753
6754 case MULT_EXPR:
6755 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6756 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6757 return c0 * c1;
6758
6759 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6760 case CEIL_DIV_EXPR:
6761 if (integer_pow2p (TREE_OPERAND (exp, 1))
6762 && host_integerp (TREE_OPERAND (exp, 1), 1))
6763 {
6764 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6765 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6766 return MAX (1, c0 / c1);
6767 }
6768 break;
6769
6770 case BIT_AND_EXPR:
6771 /* The highest power of two of a bit-and expression is the maximum of
6772 that of its operands. We typically get here for a complex LHS and
6773 a constant negative power of two on the RHS to force an explicit
6774 alignment, so don't bother looking at the LHS. */
6775 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6776
6777 CASE_CONVERT:
6778 case SAVE_EXPR:
6779 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6780
6781 case COMPOUND_EXPR:
6782 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6783
6784 case COND_EXPR:
6785 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6786 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6787 return MIN (c0, c1);
6788
6789 default:
6790 break;
6791 }
6792
6793 return 1;
6794 }
6795
6796 /* Similar, except that the alignment requirements of TARGET are
6797 taken into account. Assume it is at least as aligned as its
6798 type, unless it is a COMPONENT_REF in which case the layout of
6799 the structure gives the alignment. */
6800
6801 static unsigned HOST_WIDE_INT
6802 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6803 {
6804 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
6805 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
6806
6807 return MAX (factor, talign);
6808 }
6809 \f
6810 /* Subroutine of expand_expr. Expand the two operands of a binary
6811 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6812 The value may be stored in TARGET if TARGET is nonzero. The
6813 MODIFIER argument is as documented by expand_expr. */
6814
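/* For example (an invented expression), when expanding the two sides of
   "x + x" the operand_equal_p test below fires and the second operand is
   produced as a copy_rtx of the first, so the common subexpression is
   only expanded once.  */
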
6815 static void
6816 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6817 enum expand_modifier modifier)
6818 {
6819 if (! safe_from_p (target, exp1, 1))
6820 target = 0;
6821 if (operand_equal_p (exp0, exp1, 0))
6822 {
6823 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6824 *op1 = copy_rtx (*op0);
6825 }
6826 else
6827 {
6828 /* If we need to preserve evaluation order, copy exp0 into its own
6829 temporary variable so that it can't be clobbered by exp1. */
6830 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6831 exp0 = save_expr (exp0);
6832 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6833 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6834 }
6835 }
6836
6837 \f
6838 /* Return a MEM that contains constant EXP. DEFER is as for
6839 output_constant_def and MODIFIER is as for expand_expr. */
6840
6841 static rtx
6842 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6843 {
6844 rtx mem;
6845
6846 mem = output_constant_def (exp, defer);
6847 if (modifier != EXPAND_INITIALIZER)
6848 mem = use_anchored_address (mem);
6849 return mem;
6850 }
6851
6852 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6853 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6854
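/* As an illustration (the fragment is invented), taking "&s.b[3]"
   decomposes via get_inner_reference into an inner object plus a
   constant BITPOS and possibly a variable OFFSET; the code below expands
   the address of the inner object recursively and then adds OFFSET and
   BITPOS / BITS_PER_UNIT back onto the result.  */
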
6855 static rtx
6856 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6857 enum expand_modifier modifier, addr_space_t as)
6858 {
6859 rtx result, subtarget;
6860 tree inner, offset;
6861 HOST_WIDE_INT bitsize, bitpos;
6862 int volatilep, unsignedp;
6863 enum machine_mode mode1;
6864
6865 /* If we are taking the address of a constant and are at the top level,
6866 we have to use output_constant_def since we can't call force_const_mem
6867 at top level. */
6868 /* ??? This should be considered a front-end bug. We should not be
6869 generating ADDR_EXPR of something that isn't an LVALUE. The only
6870 exception here is STRING_CST. */
6871 if (CONSTANT_CLASS_P (exp))
6872 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6873
6874 /* Everything must be something allowed by is_gimple_addressable. */
6875 switch (TREE_CODE (exp))
6876 {
6877 case INDIRECT_REF:
6878 /* This case will happen via recursion for &a->b. */
6879 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6880
6881 case MEM_REF:
6882 {
6883 tree tem = TREE_OPERAND (exp, 0);
6884 if (!integer_zerop (TREE_OPERAND (exp, 1)))
6885 tem = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6886 tem,
6887 double_int_to_tree (sizetype, mem_ref_offset (exp)));
6888 return expand_expr (tem, target, tmode, modifier);
6889 }
6890
6891 case CONST_DECL:
6892 /* Expand the initializer like constants above. */
6893 return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
6894
6895 case REALPART_EXPR:
6896 /* The real part of the complex number is always first, therefore
6897 the address is the same as the address of the parent object. */
6898 offset = 0;
6899 bitpos = 0;
6900 inner = TREE_OPERAND (exp, 0);
6901 break;
6902
6903 case IMAGPART_EXPR:
6904 /* The imaginary part of the complex number is always second.
6905 The expression is therefore always offset by the size of the
6906 scalar type. */
6907 offset = 0;
6908 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6909 inner = TREE_OPERAND (exp, 0);
6910 break;
6911
6912 default:
6913 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6914 expand_expr, as that can have various side effects; LABEL_DECLs for
6915 example, may not have their DECL_RTL set yet. Expand the rtl of
6916 CONSTRUCTORs too, which should yield a memory reference for the
6917 constructor's contents. Assume language specific tree nodes can
6918 be expanded in some interesting way. */
6919 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6920 if (DECL_P (exp)
6921 || TREE_CODE (exp) == CONSTRUCTOR
6922 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6923 {
6924 result = expand_expr (exp, target, tmode,
6925 modifier == EXPAND_INITIALIZER
6926 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6927
6928 /* If the DECL isn't in memory, then the DECL wasn't properly
6929 marked TREE_ADDRESSABLE, which will be either a front-end
6930 or a tree optimizer bug. */
6931 gcc_assert (MEM_P (result));
6932 result = XEXP (result, 0);
6933
6934 /* ??? Is this needed anymore? */
6935 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6936 {
6937 assemble_external (exp);
6938 TREE_USED (exp) = 1;
6939 }
6940
6941 if (modifier != EXPAND_INITIALIZER
6942 && modifier != EXPAND_CONST_ADDRESS)
6943 result = force_operand (result, target);
6944 return result;
6945 }
6946
6947 /* Pass FALSE as the last argument to get_inner_reference although
6948 we are expanding to RTL. The rationale is that we know how to
6949 handle "aligning nodes" here: we can just bypass them because
6950 they won't change the final object whose address will be returned
6951 (they actually exist only for that purpose). */
6952 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6953 &mode1, &unsignedp, &volatilep, false);
6954 break;
6955 }
6956
6957 /* We must have made progress. */
6958 gcc_assert (inner != exp);
6959
6960 subtarget = offset || bitpos ? NULL_RTX : target;
6961 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6962 inner alignment, force the inner to be sufficiently aligned. */
6963 if (CONSTANT_CLASS_P (inner)
6964 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
6965 {
6966 inner = copy_node (inner);
6967 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
6968 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
6969 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
6970 }
6971 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
6972
6973 if (offset)
6974 {
6975 rtx tmp;
6976
6977 if (modifier != EXPAND_NORMAL)
6978 result = force_operand (result, NULL);
6979 tmp = expand_expr (offset, NULL_RTX, tmode,
6980 modifier == EXPAND_INITIALIZER
6981 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6982
6983 result = convert_memory_address_addr_space (tmode, result, as);
6984 tmp = convert_memory_address_addr_space (tmode, tmp, as);
6985
6986 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6987 result = simplify_gen_binary (PLUS, tmode, result, tmp);
6988 else
6989 {
6990 subtarget = bitpos ? NULL_RTX : target;
6991 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6992 1, OPTAB_LIB_WIDEN);
6993 }
6994 }
6995
6996 if (bitpos)
6997 {
6998 /* Someone beforehand should have rejected taking the address
6999 of such an object. */
7000 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7001
7002 result = plus_constant (result, bitpos / BITS_PER_UNIT);
7003 if (modifier < EXPAND_SUM)
7004 result = force_operand (result, target);
7005 }
7006
7007 return result;
7008 }
7009
7010 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7011 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7012
7013 static rtx
7014 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7015 enum expand_modifier modifier)
7016 {
7017 addr_space_t as = ADDR_SPACE_GENERIC;
7018 enum machine_mode address_mode = Pmode;
7019 enum machine_mode pointer_mode = ptr_mode;
7020 enum machine_mode rmode;
7021 rtx result;
7022
7023 /* Target mode of VOIDmode says "whatever's natural". */
7024 if (tmode == VOIDmode)
7025 tmode = TYPE_MODE (TREE_TYPE (exp));
7026
7027 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7028 {
7029 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7030 address_mode = targetm.addr_space.address_mode (as);
7031 pointer_mode = targetm.addr_space.pointer_mode (as);
7032 }
7033
7034 /* We can get called with some Weird Things if the user does silliness
7035 like "(short) &a". In that case, convert_memory_address won't do
7036 the right thing, so ignore the given target mode. */
7037 if (tmode != address_mode && tmode != pointer_mode)
7038 tmode = address_mode;
7039
7040 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7041 tmode, modifier, as);
7042
7043 /* Despite expand_expr's claims about ignoring TMODE when not
7044 strictly convenient, things break if we don't honor it. Note
7045 that combined with the above, we only do this for pointer modes. */
7046 rmode = GET_MODE (result);
7047 if (rmode == VOIDmode)
7048 rmode = tmode;
7049 if (rmode != tmode)
7050 result = convert_memory_address_addr_space (tmode, result, as);
7051
7052 return result;
7053 }
7054
7055 /* Generate code for computing CONSTRUCTOR EXP.
7056 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7057 is TRUE, NULL is returned instead of creating a temporary variable
7058 in memory, and the caller must handle that case itself. */
7059
7060 static rtx
7061 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7062 bool avoid_temp_mem)
7063 {
7064 tree type = TREE_TYPE (exp);
7065 enum machine_mode mode = TYPE_MODE (type);
7066
7067 /* Try to avoid creating a temporary at all. This is possible
7068 if all of the initializer is zero.
7069 FIXME: try to handle all [0..255] initializers we can handle
7070 with memset. */
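/* For instance, a BLKmode aggregate whose initializer is all zeros
(e.g. "= { 0 }") can be handled below by a single clear_storage call
on TARGET instead of a piecewise store_constructor. */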
7071 if (TREE_STATIC (exp)
7072 && !TREE_ADDRESSABLE (exp)
7073 && target != 0 && mode == BLKmode
7074 && all_zeros_p (exp))
7075 {
7076 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7077 return target;
7078 }
7079
7080 /* All elts simple constants => refer to a constant in memory. But
7081 if this is a non-BLKmode mode, let it store a field at a time
7082 since that should make a CONST_INT or CONST_DOUBLE when we
7083 fold. Likewise, if we have a target we can use, it is best to
7084 store directly into the target unless the type is large enough
7085 that memcpy will be used. If we are making an initializer and
7086 all operands are constant, put it in memory as well.
7087
7088 FIXME: Avoid trying to fill vector constructors piecemeal.
7089 Output them with output_constant_def below unless we're sure
7090 they're zeros. This should go away when vector initializers
7091 are treated like VECTOR_CST instead of arrays. */
7092 if ((TREE_STATIC (exp)
7093 && ((mode == BLKmode
7094 && ! (target != 0 && safe_from_p (target, exp, 1)))
7095 || TREE_ADDRESSABLE (exp)
7096 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7097 && (! MOVE_BY_PIECES_P
7098 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7099 TYPE_ALIGN (type)))
7100 && ! mostly_zeros_p (exp))))
7101 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7102 && TREE_CONSTANT (exp)))
7103 {
7104 rtx constructor;
7105
7106 if (avoid_temp_mem)
7107 return NULL_RTX;
7108
7109 constructor = expand_expr_constant (exp, 1, modifier);
7110
7111 if (modifier != EXPAND_CONST_ADDRESS
7112 && modifier != EXPAND_INITIALIZER
7113 && modifier != EXPAND_SUM)
7114 constructor = validize_mem (constructor);
7115
7116 return constructor;
7117 }
7118
7119 /* Handle calls that pass values in multiple non-contiguous
7120 locations. The Irix 6 ABI has examples of this. */
7121 if (target == 0 || ! safe_from_p (target, exp, 1)
7122 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7123 {
7124 if (avoid_temp_mem)
7125 return NULL_RTX;
7126
7127 target
7128 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7129 | (TREE_READONLY (exp)
7130 * TYPE_QUAL_CONST))),
7131 0, TREE_ADDRESSABLE (exp), 1);
7132 }
7133
7134 store_constructor (exp, target, 0, int_expr_size (exp));
7135 return target;
7136 }
7137
7138
7139 /* expand_expr: generate code for computing expression EXP.
7140 An rtx for the computed value is returned. The value is never null.
7141 In the case of a void EXP, const0_rtx is returned.
7142
7143 The value may be stored in TARGET if TARGET is nonzero.
7144 TARGET is just a suggestion; callers must assume that
7145 the rtx returned may not be the same as TARGET.
7146
7147 If TARGET is CONST0_RTX, it means that the value will be ignored.
7148
7149 If TMODE is not VOIDmode, it suggests generating the
7150 result in mode TMODE. But this is done only when convenient.
7151 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7152 TMODE is just a suggestion; callers must assume that
7153 the rtx returned may not have mode TMODE.
7154
7155 Note that TARGET may have neither TMODE nor MODE. In that case, it
7156 probably will not be used.
7157
7158 If MODIFIER is EXPAND_SUM then when EXP is an addition
7159 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7160 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7161 products as above, or REG or MEM, or constant.
7162 Ordinarily in such cases we would output mul or add instructions
7163 and then return a pseudo reg containing the sum.
7164
7165 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7166 it also marks a label as absolutely required (it can't be dead).
7167 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7168 This is used for outputting expressions used in initializers.
7169
7170 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7171 with a constant address even if that address is not normally legitimate.
7172 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7173
7174 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7175 a call parameter. Such targets require special care as we haven't yet
7176 marked TARGET so that it's safe from being trashed by libcalls. We
7177 don't want to use TARGET for anything but the final result;
7178 intermediate values must go elsewhere. Additionally, calls to
7179 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7180
7181 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7182 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7183 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7184 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7185 recursively. */
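/* As a rough usage sketch: callers that just want the value in its
natural mode typically go through the expand_normal wrapper, which is
equivalent to

rtx x = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

while callers with a preferred destination pass it as TARGET and must
still be prepared for a different rtx to come back. */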
7186
7187 rtx
7188 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7189 enum expand_modifier modifier, rtx *alt_rtl)
7190 {
7191 rtx ret;
7192
7193 /* Handle ERROR_MARK before anybody tries to access its type. */
7194 if (TREE_CODE (exp) == ERROR_MARK
7195 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7196 {
7197 ret = CONST0_RTX (tmode);
7198 return ret ? ret : const0_rtx;
7199 }
7200
7201 /* If this is an expression of some kind and it has an associated line
7202 number, then emit the line number before expanding the expression.
7203
7204 We need to save and restore the file and line information so that
7205 errors discovered during expansion are emitted with the right
7206 information. It would be better if the diagnostic routines
7207 used the file/line information embedded in the tree nodes rather
7208 than globals. */
7209 if (cfun && EXPR_HAS_LOCATION (exp))
7210 {
7211 location_t saved_location = input_location;
7212 location_t saved_curr_loc = get_curr_insn_source_location ();
7213 tree saved_block = get_curr_insn_block ();
7214 input_location = EXPR_LOCATION (exp);
7215 set_curr_insn_source_location (input_location);
7216
7217 /* Record where the insns produced belong. */
7218 set_curr_insn_block (TREE_BLOCK (exp));
7219
7220 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7221
7222 input_location = saved_location;
7223 set_curr_insn_block (saved_block);
7224 set_curr_insn_source_location (saved_curr_loc);
7225 }
7226 else
7227 {
7228 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7229 }
7230
7231 return ret;
7232 }
7233
7234 rtx
7235 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7236 enum expand_modifier modifier)
7237 {
7238 rtx op0, op1, op2, temp;
7239 tree type;
7240 int unsignedp;
7241 enum machine_mode mode;
7242 enum tree_code code = ops->code;
7243 optab this_optab;
7244 rtx subtarget, original_target;
7245 int ignore;
7246 bool reduce_bit_field;
7247 location_t loc = ops->location;
7248 tree treeop0, treeop1, treeop2;
7249 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7250 ? reduce_to_bit_field_precision ((expr), \
7251 target, \
7252 type) \
7253 : (expr))
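/* REDUCE_BIT_FIELD narrows a result computed in the type's machine mode
back to the type's precision. For example, an addition on a signed
bit-field type of precision 3 is carried out in its integer mode (say
QImode); any overflow into bits 3..7 is then stripped by sign-extending
from bit 2 so the value fits the 3-bit type again. */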
7254
7255 type = ops->type;
7256 mode = TYPE_MODE (type);
7257 unsignedp = TYPE_UNSIGNED (type);
7258
7259 treeop0 = ops->op0;
7260 treeop1 = ops->op1;
7261 treeop2 = ops->op2;
7262
7263 /* We should be called only on simple (binary or unary) expressions,
7264 exactly those that are valid in gimple expressions that aren't
7265 GIMPLE_SINGLE_RHS (or invalid). */
7266 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7267 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7268 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7269
7270 ignore = (target == const0_rtx
7271 || ((CONVERT_EXPR_CODE_P (code)
7272 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7273 && TREE_CODE (type) == VOID_TYPE));
7274
7275 /* We should be called only if we need the result. */
7276 gcc_assert (!ignore);
7277
7278 /* An operation in what may be a bit-field type needs the
7279 result to be reduced to the precision of the bit-field type,
7280 which is narrower than that of the type's mode. */
7281 reduce_bit_field = (INTEGRAL_TYPE_P (type)
7282 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7283
7284 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7285 target = 0;
7286
7287 /* Use subtarget as the target for operand 0 of a binary operation. */
7288 subtarget = get_subtarget (target);
7289 original_target = target;
7290
7291 switch (code)
7292 {
7293 case NON_LVALUE_EXPR:
7294 case PAREN_EXPR:
7295 CASE_CONVERT:
7296 if (treeop0 == error_mark_node)
7297 return const0_rtx;
7298
7299 if (TREE_CODE (type) == UNION_TYPE)
7300 {
7301 tree valtype = TREE_TYPE (treeop0);
7302
7303 /* If both input and output are BLKmode, this conversion isn't doing
7304 anything except possibly changing memory attributes. */
7305 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7306 {
7307 rtx result = expand_expr (treeop0, target, tmode,
7308 modifier);
7309
7310 result = copy_rtx (result);
7311 set_mem_attributes (result, type, 0);
7312 return result;
7313 }
7314
7315 if (target == 0)
7316 {
7317 if (TYPE_MODE (type) != BLKmode)
7318 target = gen_reg_rtx (TYPE_MODE (type));
7319 else
7320 target = assign_temp (type, 0, 1, 1);
7321 }
7322
7323 if (MEM_P (target))
7324 /* Store data into beginning of memory target. */
7325 store_expr (treeop0,
7326 adjust_address (target, TYPE_MODE (valtype), 0),
7327 modifier == EXPAND_STACK_PARM,
7328 false);
7329
7330 else
7331 {
7332 gcc_assert (REG_P (target));
7333
7334 /* Store this field into a union of the proper type. */
7335 store_field (target,
7336 MIN ((int_size_in_bytes (TREE_TYPE
7337 (treeop0))
7338 * BITS_PER_UNIT),
7339 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7340 0, TYPE_MODE (valtype), treeop0,
7341 type, 0, false);
7342 }
7343
7344 /* Return the entire union. */
7345 return target;
7346 }
7347
7348 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7349 {
7350 op0 = expand_expr (treeop0, target, VOIDmode,
7351 modifier);
7352
7353 /* If the signedness of the conversion differs and OP0 is
7354 a promoted SUBREG, clear that indication since we now
7355 have to do the proper extension. */
7356 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7357 && GET_CODE (op0) == SUBREG)
7358 SUBREG_PROMOTED_VAR_P (op0) = 0;
7359
7360 return REDUCE_BIT_FIELD (op0);
7361 }
7362
7363 op0 = expand_expr (treeop0, NULL_RTX, mode,
7364 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7365 if (GET_MODE (op0) == mode)
7366 ;
7367
7368 /* If OP0 is a constant, just convert it into the proper mode. */
7369 else if (CONSTANT_P (op0))
7370 {
7371 tree inner_type = TREE_TYPE (treeop0);
7372 enum machine_mode inner_mode = GET_MODE (op0);
7373
7374 if (inner_mode == VOIDmode)
7375 inner_mode = TYPE_MODE (inner_type);
7376
7377 if (modifier == EXPAND_INITIALIZER)
7378 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7379 subreg_lowpart_offset (mode,
7380 inner_mode));
7381 else
7382 op0 = convert_modes (mode, inner_mode, op0,
7383 TYPE_UNSIGNED (inner_type));
7384 }
7385
7386 else if (modifier == EXPAND_INITIALIZER)
7387 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7388
7389 else if (target == 0)
7390 op0 = convert_to_mode (mode, op0,
7391 TYPE_UNSIGNED (TREE_TYPE
7392 (treeop0)));
7393 else
7394 {
7395 convert_move (target, op0,
7396 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7397 op0 = target;
7398 }
7399
7400 return REDUCE_BIT_FIELD (op0);
7401
7402 case ADDR_SPACE_CONVERT_EXPR:
7403 {
7404 tree treeop0_type = TREE_TYPE (treeop0);
7405 addr_space_t as_to;
7406 addr_space_t as_from;
7407
7408 gcc_assert (POINTER_TYPE_P (type));
7409 gcc_assert (POINTER_TYPE_P (treeop0_type));
7410
7411 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7412 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7413
7414 /* Conversions between pointers to the same address space should
7415 have been implemented via CONVERT_EXPR / NOP_EXPR. */
7416 gcc_assert (as_to != as_from);
7417
7418 /* Ask target code to handle conversion between pointers
7419 to overlapping address spaces. */
7420 if (targetm.addr_space.subset_p (as_to, as_from)
7421 || targetm.addr_space.subset_p (as_from, as_to))
7422 {
7423 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7424 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7425 gcc_assert (op0);
7426 return op0;
7427 }
7428
7429 /* For disjoint address spaces, converting anything but
7430 a null pointer invokes undefined behaviour. We simply
7431 always return a null pointer here. */
7432 return CONST0_RTX (mode);
7433 }
7434
7435 case POINTER_PLUS_EXPR:
7436 /* Even though the sizetype mode and the pointer's mode can be different,
7437 expand is able to handle this correctly and get the correct result out
7438 of the PLUS_EXPR code. */
7439 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7440 if sizetype precision is smaller than pointer precision. */
7441 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7442 treeop1 = fold_convert_loc (loc, type,
7443 fold_convert_loc (loc, ssizetype,
7444 treeop1));
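/* ... fall through to the PLUS_EXPR code below ... */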
7445 case PLUS_EXPR:
7446 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7447 something else, make sure we add the register to the constant and
7448 then to the other thing. This case can occur during strength
7449 reduction and doing it this way will produce better code if the
7450 frame pointer or argument pointer is eliminated.
7451
7452 fold-const.c will ensure that the constant is always in the inner
7453 PLUS_EXPR, so the only case we need to do anything about is if
7454 sp, ap, or fp is our second argument, in which case we must swap
7455 the innermost first argument and our second argument. */
7456
7457 if (TREE_CODE (treeop0) == PLUS_EXPR
7458 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7459 && TREE_CODE (treeop1) == VAR_DECL
7460 && (DECL_RTL (treeop1) == frame_pointer_rtx
7461 || DECL_RTL (treeop1) == stack_pointer_rtx
7462 || DECL_RTL (treeop1) == arg_pointer_rtx))
7463 {
7464 tree t = treeop1;
7465
7466 treeop1 = TREE_OPERAND (treeop0, 0);
7467 TREE_OPERAND (treeop0, 0) = t;
7468 }
7469
7470 /* If the result is to be ptr_mode and we are adding an integer to
7471 something, we might be forming a constant. So try to use
7472 plus_constant. If it produces a sum and we can't accept it,
7473 use force_operand. This allows P = &ARR[const] to generate
7474 efficient code on machines where a SYMBOL_REF is not a valid
7475 address.
7476
7477 If this is an EXPAND_SUM call, always return the sum. */
7478 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7479 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7480 {
7481 if (modifier == EXPAND_STACK_PARM)
7482 target = 0;
7483 if (TREE_CODE (treeop0) == INTEGER_CST
7484 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7485 && TREE_CONSTANT (treeop1))
7486 {
7487 rtx constant_part;
7488
7489 op1 = expand_expr (treeop1, subtarget, VOIDmode,
7490 EXPAND_SUM);
7491 /* Use immed_double_const to ensure that the constant is
7492 truncated according to the mode of OP1, then sign extended
7493 to a HOST_WIDE_INT. Using the constant directly can result
7494 in non-canonical RTL in a 64x32 cross compile. */
7495 constant_part
7496 = immed_double_const (TREE_INT_CST_LOW (treeop0),
7497 (HOST_WIDE_INT) 0,
7498 TYPE_MODE (TREE_TYPE (treeop1)));
7499 op1 = plus_constant (op1, INTVAL (constant_part));
7500 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7501 op1 = force_operand (op1, target);
7502 return REDUCE_BIT_FIELD (op1);
7503 }
7504
7505 else if (TREE_CODE (treeop1) == INTEGER_CST
7506 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7507 && TREE_CONSTANT (treeop0))
7508 {
7509 rtx constant_part;
7510
7511 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7512 (modifier == EXPAND_INITIALIZER
7513 ? EXPAND_INITIALIZER : EXPAND_SUM));
7514 if (! CONSTANT_P (op0))
7515 {
7516 op1 = expand_expr (treeop1, NULL_RTX,
7517 VOIDmode, modifier);
7518 /* Return a PLUS if modifier says it's OK. */
7519 if (modifier == EXPAND_SUM
7520 || modifier == EXPAND_INITIALIZER)
7521 return simplify_gen_binary (PLUS, mode, op0, op1);
7522 goto binop2;
7523 }
7524 /* Use immed_double_const to ensure that the constant is
7525 truncated according to the mode of OP0, then sign extended
7526 to a HOST_WIDE_INT. Using the constant directly can result
7527 in non-canonical RTL in a 64x32 cross compile. */
7528 constant_part
7529 = immed_double_const (TREE_INT_CST_LOW (treeop1),
7530 (HOST_WIDE_INT) 0,
7531 TYPE_MODE (TREE_TYPE (treeop0)));
7532 op0 = plus_constant (op0, INTVAL (constant_part));
7533 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7534 op0 = force_operand (op0, target);
7535 return REDUCE_BIT_FIELD (op0);
7536 }
7537 }
7538
7539 /* Use TER to expand pointer addition of a negated value
7540 as pointer subtraction. */
7541 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
7542 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
7543 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
7544 && TREE_CODE (treeop1) == SSA_NAME
7545 && TYPE_MODE (TREE_TYPE (treeop0))
7546 == TYPE_MODE (TREE_TYPE (treeop1)))
7547 {
7548 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
7549 if (def)
7550 {
7551 treeop1 = gimple_assign_rhs1 (def);
7552 code = MINUS_EXPR;
7553 goto do_minus;
7554 }
7555 }
7556
7557 /* No sense saving up arithmetic to be done
7558 if it's all in the wrong mode to form part of an address.
7559 And force_operand won't know whether to sign-extend or
7560 zero-extend. */
7561 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7562 || mode != ptr_mode)
7563 {
7564 expand_operands (treeop0, treeop1,
7565 subtarget, &op0, &op1, EXPAND_NORMAL);
7566 if (op0 == const0_rtx)
7567 return op1;
7568 if (op1 == const0_rtx)
7569 return op0;
7570 goto binop2;
7571 }
7572
7573 expand_operands (treeop0, treeop1,
7574 subtarget, &op0, &op1, modifier);
7575 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7576
7577 case MINUS_EXPR:
7578 do_minus:
7579 /* For initializers, we are allowed to return a MINUS of two
7580 symbolic constants. Here we handle all cases when both operands
7581 are constant. */
7582 /* Handle difference of two symbolic constants,
7583 for the sake of an initializer. */
7584 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7585 && really_constant_p (treeop0)
7586 && really_constant_p (treeop1))
7587 {
7588 expand_operands (treeop0, treeop1,
7589 NULL_RTX, &op0, &op1, modifier);
7590
7591 /* If the last operand is a CONST_INT, use plus_constant of
7592 the negated constant. Else make the MINUS. */
7593 if (CONST_INT_P (op1))
7594 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7595 else
7596 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7597 }
7598
7599 /* No sense saving up arithmetic to be done
7600 if it's all in the wrong mode to form part of an address.
7601 And force_operand won't know whether to sign-extend or
7602 zero-extend. */
7603 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7604 || mode != ptr_mode)
7605 goto binop;
7606
7607 expand_operands (treeop0, treeop1,
7608 subtarget, &op0, &op1, modifier);
7609
7610 /* Convert A - const to A + (-const). */
7611 if (CONST_INT_P (op1))
7612 {
7613 op1 = negate_rtx (mode, op1);
7614 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7615 }
7616
7617 goto binop2;
7618
7619 case WIDEN_MULT_PLUS_EXPR:
7620 case WIDEN_MULT_MINUS_EXPR:
7621 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
7622 op2 = expand_normal (treeop2);
7623 target = expand_widen_pattern_expr (ops, op0, op1, op2,
7624 target, unsignedp);
7625 return target;
7626
7627 case WIDEN_MULT_EXPR:
7628 /* If first operand is constant, swap them.
7629 Thus the following special case checks need only
7630 check the second operand. */
7631 if (TREE_CODE (treeop0) == INTEGER_CST)
7632 {
7633 tree t1 = treeop0;
7634 treeop0 = treeop1;
7635 treeop1 = t1;
7636 }
7637
7638 /* First, check if we have a multiplication of one signed and one
7639 unsigned operand. */
7640 if (TREE_CODE (treeop1) != INTEGER_CST
7641 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
7642 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
7643 {
7644 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
7645 this_optab = usmul_widen_optab;
7646 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7647 {
7648 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7649 {
7650 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7651 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7652 EXPAND_NORMAL);
7653 else
7654 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
7655 EXPAND_NORMAL);
7656 goto binop3;
7657 }
7658 }
7659 }
7660 /* Check for a multiplication with matching signedness. */
7661 else if ((TREE_CODE (treeop1) == INTEGER_CST
7662 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
7663 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
7664 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
7665 {
7666 tree op0type = TREE_TYPE (treeop0);
7667 enum machine_mode innermode = TYPE_MODE (op0type);
7668 bool zextend_p = TYPE_UNSIGNED (op0type);
7669 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7670 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7671
7672 if (mode == GET_MODE_2XWIDER_MODE (innermode)
7673 && TREE_CODE (treeop0) != INTEGER_CST)
7674 {
7675 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7676 {
7677 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7678 EXPAND_NORMAL);
7679 temp = expand_widening_mult (mode, op0, op1, target,
7680 unsignedp, this_optab);
7681 return REDUCE_BIT_FIELD (temp);
7682 }
7683 if (optab_handler (other_optab, mode) != CODE_FOR_nothing
7684 && innermode == word_mode)
7685 {
7686 rtx htem, hipart;
7687 op0 = expand_normal (treeop0);
7688 if (TREE_CODE (treeop1) == INTEGER_CST)
7689 op1 = convert_modes (innermode, mode,
7690 expand_normal (treeop1), unsignedp);
7691 else
7692 op1 = expand_normal (treeop1);
7693 temp = expand_binop (mode, other_optab, op0, op1, target,
7694 unsignedp, OPTAB_LIB_WIDEN);
7695 hipart = gen_highpart (innermode, temp);
7696 htem = expand_mult_highpart_adjust (innermode, hipart,
7697 op0, op1, hipart,
7698 zextend_p);
7699 if (htem != hipart)
7700 emit_move_insn (hipart, htem);
7701 return REDUCE_BIT_FIELD (temp);
7702 }
7703 }
7704 }
7705 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
7706 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
7707 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7708 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7709
7710 case FMA_EXPR:
7711 {
7712 optab opt = fma_optab;
7713 gimple def0, def2;
7714
7715 /* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
7716 call. */
7717 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
7718 {
7719 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
7720 tree call_expr;
7721
7722 gcc_assert (fn != NULL_TREE);
7723 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
7724 return expand_builtin (call_expr, target, subtarget, mode, false);
7725 }
7726
7727 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
7728 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
7729
7730 op0 = op2 = NULL;
7731
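/* FMA_EXPR computes op0 * op1 + op2. A negated multiplicand maps onto
the fnma optab (-a*b + c), a negated addend onto fms (a*b - c), and
both onto fnms (-a*b - c), when the target provides those patterns. */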
7732 if (def0 && def2
7733 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
7734 {
7735 opt = fnms_optab;
7736 op0 = expand_normal (gimple_assign_rhs1 (def0));
7737 op2 = expand_normal (gimple_assign_rhs1 (def2));
7738 }
7739 else if (def0
7740 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
7741 {
7742 opt = fnma_optab;
7743 op0 = expand_normal (gimple_assign_rhs1 (def0));
7744 }
7745 else if (def2
7746 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
7747 {
7748 opt = fms_optab;
7749 op2 = expand_normal (gimple_assign_rhs1 (def2));
7750 }
7751
7752 if (op0 == NULL)
7753 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
7754 if (op2 == NULL)
7755 op2 = expand_normal (treeop2);
7756 op1 = expand_normal (treeop1);
7757
7758 return expand_ternary_op (TYPE_MODE (type), opt,
7759 op0, op1, op2, target, 0);
7760 }
7761
7762 case MULT_EXPR:
7763 /* If this is a fixed-point operation, then we cannot use the code
7764 below because "expand_mult" doesn't support sat/no-sat fixed-point
7765 multiplications. */
7766 if (ALL_FIXED_POINT_MODE_P (mode))
7767 goto binop;
7768
7769 /* If first operand is constant, swap them.
7770 Thus the following special case checks need only
7771 check the second operand. */
7772 if (TREE_CODE (treeop0) == INTEGER_CST)
7773 {
7774 tree t1 = treeop0;
7775 treeop0 = treeop1;
7776 treeop1 = t1;
7777 }
7778
7779 /* Attempt to return something suitable for generating an
7780 indexed address, for machines that support that. */
7781
7782 if (modifier == EXPAND_SUM && mode == ptr_mode
7783 && host_integerp (treeop1, 0))
7784 {
7785 tree exp1 = treeop1;
7786
7787 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7788 EXPAND_SUM);
7789
7790 if (!REG_P (op0))
7791 op0 = force_operand (op0, NULL_RTX);
7792 if (!REG_P (op0))
7793 op0 = copy_to_mode_reg (mode, op0);
7794
7795 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7796 gen_int_mode (tree_low_cst (exp1, 0),
7797 TYPE_MODE (TREE_TYPE (exp1)))));
7798 }
7799
7800 if (modifier == EXPAND_STACK_PARM)
7801 target = 0;
7802
7803 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7804 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7805
7806 case TRUNC_DIV_EXPR:
7807 case FLOOR_DIV_EXPR:
7808 case CEIL_DIV_EXPR:
7809 case ROUND_DIV_EXPR:
7810 case EXACT_DIV_EXPR:
7811 /* If this is a fixed-point operation, then we cannot use the code
7812 below because "expand_divmod" doesn't support sat/no-sat fixed-point
7813 divisions. */
7814 if (ALL_FIXED_POINT_MODE_P (mode))
7815 goto binop;
7816
7817 if (modifier == EXPAND_STACK_PARM)
7818 target = 0;
7819 /* Possible optimization: compute the dividend with EXPAND_SUM;
7820 then, if the divisor is constant, we can optimize the case
7821 where some terms of the dividend have coefficients divisible by it. */
7822 expand_operands (treeop0, treeop1,
7823 subtarget, &op0, &op1, EXPAND_NORMAL);
7824 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7825
7826 case RDIV_EXPR:
7827 goto binop;
7828
7829 case TRUNC_MOD_EXPR:
7830 case FLOOR_MOD_EXPR:
7831 case CEIL_MOD_EXPR:
7832 case ROUND_MOD_EXPR:
7833 if (modifier == EXPAND_STACK_PARM)
7834 target = 0;
7835 expand_operands (treeop0, treeop1,
7836 subtarget, &op0, &op1, EXPAND_NORMAL);
7837 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7838
7839 case FIXED_CONVERT_EXPR:
7840 op0 = expand_normal (treeop0);
7841 if (target == 0 || modifier == EXPAND_STACK_PARM)
7842 target = gen_reg_rtx (mode);
7843
7844 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
7845 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7846 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
7847 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
7848 else
7849 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
7850 return target;
7851
7852 case FIX_TRUNC_EXPR:
7853 op0 = expand_normal (treeop0);
7854 if (target == 0 || modifier == EXPAND_STACK_PARM)
7855 target = gen_reg_rtx (mode);
7856 expand_fix (target, op0, unsignedp);
7857 return target;
7858
7859 case FLOAT_EXPR:
7860 op0 = expand_normal (treeop0);
7861 if (target == 0 || modifier == EXPAND_STACK_PARM)
7862 target = gen_reg_rtx (mode);
7863 /* expand_float can't figure out what to do if FROM has VOIDmode.
7864 So give it the correct mode. With -O, cse will optimize this. */
7865 if (GET_MODE (op0) == VOIDmode)
7866 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
7867 op0);
7868 expand_float (target, op0,
7869 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7870 return target;
7871
7872 case NEGATE_EXPR:
7873 op0 = expand_expr (treeop0, subtarget,
7874 VOIDmode, EXPAND_NORMAL);
7875 if (modifier == EXPAND_STACK_PARM)
7876 target = 0;
7877 temp = expand_unop (mode,
7878 optab_for_tree_code (NEGATE_EXPR, type,
7879 optab_default),
7880 op0, target, 0);
7881 gcc_assert (temp);
7882 return REDUCE_BIT_FIELD (temp);
7883
7884 case ABS_EXPR:
7885 op0 = expand_expr (treeop0, subtarget,
7886 VOIDmode, EXPAND_NORMAL);
7887 if (modifier == EXPAND_STACK_PARM)
7888 target = 0;
7889
7890 /* ABS_EXPR is not valid for complex arguments. */
7891 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7892 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7893
7894 /* Unsigned abs is simply the operand. Testing here means we don't
7895 risk generating incorrect code below. */
7896 if (TYPE_UNSIGNED (type))
7897 return op0;
7898
7899 return expand_abs (mode, op0, target, unsignedp,
7900 safe_from_p (target, treeop0, 1));
7901
7902 case MAX_EXPR:
7903 case MIN_EXPR:
7904 target = original_target;
7905 if (target == 0
7906 || modifier == EXPAND_STACK_PARM
7907 || (MEM_P (target) && MEM_VOLATILE_P (target))
7908 || GET_MODE (target) != mode
7909 || (REG_P (target)
7910 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7911 target = gen_reg_rtx (mode);
7912 expand_operands (treeop0, treeop1,
7913 target, &op0, &op1, EXPAND_NORMAL);
7914
7915 /* First try to do it with a special MIN or MAX instruction.
7916 If that does not win, use a conditional jump to select the proper
7917 value. */
7918 this_optab = optab_for_tree_code (code, type, optab_default);
7919 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7920 OPTAB_WIDEN);
7921 if (temp != 0)
7922 return temp;
7923
7924 /* At this point, a MEM target is no longer useful; we will get better
7925 code without it. */
7926
7927 if (! REG_P (target))
7928 target = gen_reg_rtx (mode);
7929
7930 /* If op1 was placed in target, swap op0 and op1. */
7931 if (target != op0 && target == op1)
7932 {
7933 temp = op0;
7934 op0 = op1;
7935 op1 = temp;
7936 }
7937
7938 /* We generate better code and avoid problems with op1 mentioning
7939 target by forcing op1 into a pseudo if it isn't a constant. */
7940 if (! CONSTANT_P (op1))
7941 op1 = force_reg (mode, op1);
7942
7943 {
7944 enum rtx_code comparison_code;
7945 rtx cmpop1 = op1;
7946
7947 if (code == MAX_EXPR)
7948 comparison_code = unsignedp ? GEU : GE;
7949 else
7950 comparison_code = unsignedp ? LEU : LE;
7951
7952 /* Canonicalize to comparisons against 0. */
7953 if (op1 == const1_rtx)
7954 {
7955 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
7956 or (a != 0 ? a : 1) for unsigned.
7957 For MIN we are safe converting (a <= 1 ? a : 1)
7958 into (a <= 0 ? a : 1) */
7959 cmpop1 = const0_rtx;
7960 if (code == MAX_EXPR)
7961 comparison_code = unsignedp ? NE : GT;
7962 }
7963 if (op1 == constm1_rtx && !unsignedp)
7964 {
7965 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
7966 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
7967 cmpop1 = const0_rtx;
7968 if (code == MIN_EXPR)
7969 comparison_code = LT;
7970 }
7971 #ifdef HAVE_conditional_move
7972 /* Use a conditional move if possible. */
7973 if (can_conditionally_move_p (mode))
7974 {
7975 rtx insn;
7976
7977 /* ??? Same problem as in expmed.c: emit_conditional_move
7978 forces a stack adjustment via compare_from_rtx, and we
7979 lose the stack adjustment if the sequence we are about
7980 to create is discarded. */
7981 do_pending_stack_adjust ();
7982
7983 start_sequence ();
7984
7985 /* Try to emit the conditional move. */
7986 insn = emit_conditional_move (target, comparison_code,
7987 op0, cmpop1, mode,
7988 op0, op1, mode,
7989 unsignedp);
7990
7991 /* If we could do the conditional move, emit the sequence,
7992 and return. */
7993 if (insn)
7994 {
7995 rtx seq = get_insns ();
7996 end_sequence ();
7997 emit_insn (seq);
7998 return target;
7999 }
8000
8001 /* Otherwise discard the sequence and fall back to code with
8002 branches. */
8003 end_sequence ();
8004 }
8005 #endif
8006 if (target != op0)
8007 emit_move_insn (target, op0);
8008
8009 temp = gen_label_rtx ();
8010 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8011 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8012 -1);
8013 }
8014 emit_move_insn (target, op1);
8015 emit_label (temp);
8016 return target;
8017
8018 case BIT_NOT_EXPR:
8019 op0 = expand_expr (treeop0, subtarget,
8020 VOIDmode, EXPAND_NORMAL);
8021 if (modifier == EXPAND_STACK_PARM)
8022 target = 0;
8023 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8024 gcc_assert (temp);
8025 return temp;
8026
8027 /* ??? Can optimize bitwise operations with one arg constant.
8028 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8029 and (a bitwise1 b) bitwise2 b (etc)
8030 but that is probably not worthwhile. */
8031
8032 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8033 boolean values when we want in all cases to compute both of them. In
8034 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8035 as actual zero-or-1 values and then bitwise anding. In cases where
8036 there cannot be any side effects, better code would be made by
8037 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8038 how to recognize those cases. */
8039
8040 case TRUTH_AND_EXPR:
8041 code = BIT_AND_EXPR;
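/* The TRUTH_* forms are expanded exactly like their bitwise
counterparts; fall through. */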
8042 case BIT_AND_EXPR:
8043 goto binop;
8044
8045 case TRUTH_OR_EXPR:
8046 code = BIT_IOR_EXPR;
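/* ... fall through ... */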
8047 case BIT_IOR_EXPR:
8048 goto binop;
8049
8050 case TRUTH_XOR_EXPR:
8051 code = BIT_XOR_EXPR;
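/* ... fall through ... */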
8052 case BIT_XOR_EXPR:
8053 goto binop;
8054
8055 case LROTATE_EXPR:
8056 case RROTATE_EXPR:
8057 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8058 || (GET_MODE_PRECISION (TYPE_MODE (type))
8059 == TYPE_PRECISION (type)));
8060 /* fall through */
8061
8062 case LSHIFT_EXPR:
8063 case RSHIFT_EXPR:
8064 /* If this is a fixed-point operation, then we cannot use the code
8065 below because "expand_shift" doesn't support sat/no-sat fixed-point
8066 shifts. */
8067 if (ALL_FIXED_POINT_MODE_P (mode))
8068 goto binop;
8069
8070 if (! safe_from_p (subtarget, treeop1, 1))
8071 subtarget = 0;
8072 if (modifier == EXPAND_STACK_PARM)
8073 target = 0;
8074 op0 = expand_expr (treeop0, subtarget,
8075 VOIDmode, EXPAND_NORMAL);
8076 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8077 unsignedp);
8078 if (code == LSHIFT_EXPR)
8079 temp = REDUCE_BIT_FIELD (temp);
8080 return temp;
8081
8082 /* Could determine the answer when only additive constants differ. Also,
8083 the addition of one can be handled by changing the condition. */
8084 case LT_EXPR:
8085 case LE_EXPR:
8086 case GT_EXPR:
8087 case GE_EXPR:
8088 case EQ_EXPR:
8089 case NE_EXPR:
8090 case UNORDERED_EXPR:
8091 case ORDERED_EXPR:
8092 case UNLT_EXPR:
8093 case UNLE_EXPR:
8094 case UNGT_EXPR:
8095 case UNGE_EXPR:
8096 case UNEQ_EXPR:
8097 case LTGT_EXPR:
8098 temp = do_store_flag (ops,
8099 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8100 tmode != VOIDmode ? tmode : mode);
8101 if (temp)
8102 return temp;
8103
8104 /* Use a compare and a jump for BLKmode comparisons, or for function
8105 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8106
8107 if ((target == 0
8108 || modifier == EXPAND_STACK_PARM
8109 || ! safe_from_p (target, treeop0, 1)
8110 || ! safe_from_p (target, treeop1, 1)
8111 /* Make sure we don't have a hard reg (such as function's return
8112 value) live across basic blocks, if not optimizing. */
8113 || (!optimize && REG_P (target)
8114 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8115 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8116
8117 emit_move_insn (target, const0_rtx);
8118
8119 op1 = gen_label_rtx ();
8120 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8121
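/* For a signed 1-bit type the only nonzero value is -1, so "true"
must be stored as -1 rather than 1. */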
8122 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8123 emit_move_insn (target, constm1_rtx);
8124 else
8125 emit_move_insn (target, const1_rtx);
8126
8127 emit_label (op1);
8128 return target;
8129
8130 case TRUTH_NOT_EXPR:
8131 if (modifier == EXPAND_STACK_PARM)
8132 target = 0;
8133 op0 = expand_expr (treeop0, target,
8134 VOIDmode, EXPAND_NORMAL);
8135 /* The parser is careful to generate TRUTH_NOT_EXPR
8136 only with operands that are always zero or one. */
8137 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8138 target, 1, OPTAB_LIB_WIDEN);
8139 gcc_assert (temp);
8140 return temp;
8141
8142 case COMPLEX_EXPR:
8143 /* Get the rtx code of the operands. */
8144 op0 = expand_normal (treeop0);
8145 op1 = expand_normal (treeop1);
8146
8147 if (!target)
8148 target = gen_reg_rtx (TYPE_MODE (type));
8149
8150 /* Move the real (op0) and imaginary (op1) parts to their location. */
8151 write_complex_part (target, op0, false);
8152 write_complex_part (target, op1, true);
8153
8154 return target;
8155
8156 case WIDEN_SUM_EXPR:
8157 {
8158 tree oprnd0 = treeop0;
8159 tree oprnd1 = treeop1;
8160
8161 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8162 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8163 target, unsignedp);
8164 return target;
8165 }
8166
8167 case REDUC_MAX_EXPR:
8168 case REDUC_MIN_EXPR:
8169 case REDUC_PLUS_EXPR:
8170 {
8171 op0 = expand_normal (treeop0);
8172 this_optab = optab_for_tree_code (code, type, optab_default);
8173 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8174 gcc_assert (temp);
8175 return temp;
8176 }
8177
8178 case VEC_EXTRACT_EVEN_EXPR:
8179 case VEC_EXTRACT_ODD_EXPR:
8180 {
8181 expand_operands (treeop0, treeop1,
8182 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8183 this_optab = optab_for_tree_code (code, type, optab_default);
8184 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8185 OPTAB_WIDEN);
8186 gcc_assert (temp);
8187 return temp;
8188 }
8189
8190 case VEC_INTERLEAVE_HIGH_EXPR:
8191 case VEC_INTERLEAVE_LOW_EXPR:
8192 {
8193 expand_operands (treeop0, treeop1,
8194 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8195 this_optab = optab_for_tree_code (code, type, optab_default);
8196 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8197 OPTAB_WIDEN);
8198 gcc_assert (temp);
8199 return temp;
8200 }
8201
8202 case VEC_LSHIFT_EXPR:
8203 case VEC_RSHIFT_EXPR:
8204 {
8205 target = expand_vec_shift_expr (ops, target);
8206 return target;
8207 }
8208
8209 case VEC_UNPACK_HI_EXPR:
8210 case VEC_UNPACK_LO_EXPR:
8211 {
8212 op0 = expand_normal (treeop0);
8213 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8214 target, unsignedp);
8215 gcc_assert (temp);
8216 return temp;
8217 }
8218
8219 case VEC_UNPACK_FLOAT_HI_EXPR:
8220 case VEC_UNPACK_FLOAT_LO_EXPR:
8221 {
8222 op0 = expand_normal (treeop0);
8223 /* The signedness is determined from the input operand. */
8224 temp = expand_widen_pattern_expr
8225 (ops, op0, NULL_RTX, NULL_RTX,
8226 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8227
8228 gcc_assert (temp);
8229 return temp;
8230 }
8231
8232 case VEC_WIDEN_MULT_HI_EXPR:
8233 case VEC_WIDEN_MULT_LO_EXPR:
8234 {
8235 tree oprnd0 = treeop0;
8236 tree oprnd1 = treeop1;
8237
8238 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8239 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8240 target, unsignedp);
8241 gcc_assert (target);
8242 return target;
8243 }
8244
8245 case VEC_PACK_TRUNC_EXPR:
8246 case VEC_PACK_SAT_EXPR:
8247 case VEC_PACK_FIX_TRUNC_EXPR:
8248 mode = TYPE_MODE (TREE_TYPE (treeop0));
8249 goto binop;
8250
8251 case DOT_PROD_EXPR:
8252 {
8253 tree oprnd0 = treeop0;
8254 tree oprnd1 = treeop1;
8255 tree oprnd2 = treeop2;
8256 rtx op2;
8257
8258 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8259 op2 = expand_normal (oprnd2);
8260 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8261 target, unsignedp);
8262 return target;
8263 }
8264
8265 case REALIGN_LOAD_EXPR:
8266 {
8267 tree oprnd0 = treeop0;
8268 tree oprnd1 = treeop1;
8269 tree oprnd2 = treeop2;
8270 rtx op2;
8271
8272 this_optab = optab_for_tree_code (code, type, optab_default);
8273 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8274 op2 = expand_normal (oprnd2);
8275 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8276 target, unsignedp);
8277 gcc_assert (temp);
8278 return temp;
8279 }
8280
8281 default:
8282 gcc_unreachable ();
8283 }
8284
8285 /* Here to do an ordinary binary operator. */
8286 binop:
8287 expand_operands (treeop0, treeop1,
8288 subtarget, &op0, &op1, EXPAND_NORMAL);
8289 binop2:
8290 this_optab = optab_for_tree_code (code, type, optab_default);
8291 binop3:
8292 if (modifier == EXPAND_STACK_PARM)
8293 target = 0;
8294 temp = expand_binop (mode, this_optab, op0, op1, target,
8295 unsignedp, OPTAB_LIB_WIDEN);
8296 gcc_assert (temp);
8297 return REDUCE_BIT_FIELD (temp);
8298 }
8299 #undef REDUCE_BIT_FIELD
8300
8301 rtx
8302 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8303 enum expand_modifier modifier, rtx *alt_rtl)
8304 {
8305 rtx op0, op1, temp, decl_rtl;
8306 tree type;
8307 int unsignedp;
8308 enum machine_mode mode;
8309 enum tree_code code = TREE_CODE (exp);
8310 rtx subtarget, original_target;
8311 int ignore;
8312 tree context;
8313 bool reduce_bit_field;
8314 location_t loc = EXPR_LOCATION (exp);
8315 struct separate_ops ops;
8316 tree treeop0, treeop1, treeop2;
8317 tree ssa_name = NULL_TREE;
8318 gimple g;
8319
8320 type = TREE_TYPE (exp);
8321 mode = TYPE_MODE (type);
8322 unsignedp = TYPE_UNSIGNED (type);
8323
8324 treeop0 = treeop1 = treeop2 = NULL_TREE;
8325 if (!VL_EXP_CLASS_P (exp))
8326 switch (TREE_CODE_LENGTH (code))
8327 {
8328 default:
8329 case 3: treeop2 = TREE_OPERAND (exp, 2);
8330 case 2: treeop1 = TREE_OPERAND (exp, 1);
8331 case 1: treeop0 = TREE_OPERAND (exp, 0);
8332 case 0: break;
8333 }
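/* Note the deliberate fall-through above: an expression with N operands
fills treeop0 .. treeop(N-1) and leaves the rest NULL_TREE. */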
8334 ops.code = code;
8335 ops.type = type;
8336 ops.op0 = treeop0;
8337 ops.op1 = treeop1;
8338 ops.op2 = treeop2;
8339 ops.location = loc;
8340
8341 ignore = (target == const0_rtx
8342 || ((CONVERT_EXPR_CODE_P (code)
8343 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8344 && TREE_CODE (type) == VOID_TYPE));
8345
8346 /* An operation in what may be a bit-field type needs the
8347 result to be reduced to the precision of the bit-field type,
8348 which is narrower than that of the type's mode. */
8349 reduce_bit_field = (!ignore
8350 && INTEGRAL_TYPE_P (type)
8351 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8352
8353 /* If we are going to ignore this result, we need only do something
8354 if there is a side-effect somewhere in the expression. If there
8355 is, short-circuit the most common cases here. Note that we must
8356 not call expand_expr with anything but const0_rtx in case this
8357 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
8358
8359 if (ignore)
8360 {
8361 if (! TREE_SIDE_EFFECTS (exp))
8362 return const0_rtx;
8363
8364 /* Ensure we reference a volatile object even if the value is ignored,
8365 but don't do this if all we are doing is taking its address. */
8366 if (TREE_THIS_VOLATILE (exp)
8367 && TREE_CODE (exp) != FUNCTION_DECL
8368 && mode != VOIDmode && mode != BLKmode
8369 && modifier != EXPAND_CONST_ADDRESS)
8370 {
8371 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8372 if (MEM_P (temp))
8373 copy_to_reg (temp);
8374 return const0_rtx;
8375 }
8376
8377 if (TREE_CODE_CLASS (code) == tcc_unary
8378 || code == COMPONENT_REF || code == INDIRECT_REF)
8379 return expand_expr (treeop0, const0_rtx, VOIDmode,
8380 modifier);
8381
8382 else if (TREE_CODE_CLASS (code) == tcc_binary
8383 || TREE_CODE_CLASS (code) == tcc_comparison
8384 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8385 {
8386 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8387 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8388 return const0_rtx;
8389 }
8390 else if (code == BIT_FIELD_REF)
8391 {
8392 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8393 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8394 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
8395 return const0_rtx;
8396 }
8397
8398 target = 0;
8399 }
8400
8401 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8402 target = 0;
8403
8404 /* Use subtarget as the target for operand 0 of a binary operation. */
8405 subtarget = get_subtarget (target);
8406 original_target = target;
8407
8408 switch (code)
8409 {
8410 case LABEL_DECL:
8411 {
8412 tree function = decl_function_context (exp);
8413
8414 temp = label_rtx (exp);
8415 temp = gen_rtx_LABEL_REF (Pmode, temp);
8416
8417 if (function != current_function_decl
8418 && function != 0)
8419 LABEL_REF_NONLOCAL_P (temp) = 1;
8420
8421 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8422 return temp;
8423 }
8424
8425 case SSA_NAME:
8426 /* ??? ivopts calls the expander without any preparation from
8427 out-of-ssa, so fake instructions as if this were an access to the
8428 base variable. This unnecessarily allocates a pseudo; see how we
8429 can reuse it if partition base vars have it set already. */
8430 if (!currently_expanding_to_rtl)
8431 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
8432 NULL);
8433
8434 g = get_gimple_for_ssa_name (exp);
8435 /* For EXPAND_INITIALIZER try harder to get something simpler. */
8436 if (g == NULL
8437 && modifier == EXPAND_INITIALIZER
8438 && !SSA_NAME_IS_DEFAULT_DEF (exp)
8439 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
8440 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
8441 g = SSA_NAME_DEF_STMT (exp);
8442 if (g)
8443 return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,
8444 modifier, NULL);
8445
8446 ssa_name = exp;
8447 decl_rtl = get_rtx_for_ssa_name (ssa_name);
8448 exp = SSA_NAME_VAR (ssa_name);
8449 goto expand_decl_rtl;
8450
8451 case PARM_DECL:
8452 case VAR_DECL:
8453 /* If a static var's type was incomplete when the decl was written,
8454 but the type is complete now, lay out the decl now. */
8455 if (DECL_SIZE (exp) == 0
8456 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
8457 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
8458 layout_decl (exp, 0);
8459
8460 /* ... fall through ... */
8461
8462 case FUNCTION_DECL:
8463 case RESULT_DECL:
8464 decl_rtl = DECL_RTL (exp);
8465 expand_decl_rtl:
8466 gcc_assert (decl_rtl);
8467 decl_rtl = copy_rtx (decl_rtl);
8468 /* Record writes to register variables. */
8469 if (modifier == EXPAND_WRITE
8470 && REG_P (decl_rtl)
8471 && HARD_REGISTER_P (decl_rtl))
8472 add_to_hard_reg_set (&crtl->asm_clobbers,
8473 GET_MODE (decl_rtl), REGNO (decl_rtl));
8474
8475 /* Ensure the variable is marked as used even if it doesn't go through
8476 a parser. If it hasn't been used yet, write out an external
8477 definition. */
8478 if (! TREE_USED (exp))
8479 {
8480 assemble_external (exp);
8481 TREE_USED (exp) = 1;
8482 }
8483
8484 /* Show we haven't gotten RTL for this yet. */
8485 temp = 0;
8486
8487 /* Variables inherited from containing functions should have
8488 been lowered by this point. */
8489 context = decl_function_context (exp);
8490 gcc_assert (!context
8491 || context == current_function_decl
8492 || TREE_STATIC (exp)
8493 || DECL_EXTERNAL (exp)
8494 /* ??? C++ creates functions that are not TREE_STATIC. */
8495 || TREE_CODE (exp) == FUNCTION_DECL);
8496
8497 /* This is the case of an array whose size is to be determined
8498 from its initializer, while the initializer is still being parsed.
8499 See expand_decl. */
8500
8501 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
8502 temp = validize_mem (decl_rtl);
8503
8504 /* If DECL_RTL is memory, we are in the normal case; if its
8505 address is not valid, get the address into a register. */
8506
8507 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
8508 {
8509 if (alt_rtl)
8510 *alt_rtl = decl_rtl;
8511 decl_rtl = use_anchored_address (decl_rtl);
8512 if (modifier != EXPAND_CONST_ADDRESS
8513 && modifier != EXPAND_SUM
8514 && !memory_address_addr_space_p (DECL_MODE (exp),
8515 XEXP (decl_rtl, 0),
8516 MEM_ADDR_SPACE (decl_rtl)))
8517 temp = replace_equiv_address (decl_rtl,
8518 copy_rtx (XEXP (decl_rtl, 0)));
8519 }
8520
8521 /* If we got something, return it. But first, set the alignment
8522 if the address is a register. */
8523 if (temp != 0)
8524 {
8525 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
8526 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
8527
8528 return temp;
8529 }
8530
8531 /* If the mode of DECL_RTL does not match that of the decl, it
8532 must be a promoted value. We return a SUBREG of the wanted mode,
8533 but mark it so that we know that it was already extended. */
8534 if (REG_P (decl_rtl) && GET_MODE (decl_rtl) != DECL_MODE (exp))
8535 {
8536 enum machine_mode pmode;
8537
8538 /* Get the signedness to be used for this variable. Ensure we get
8539 the same mode we got when the variable was declared. */
8540 if (code == SSA_NAME
8541 && (g = SSA_NAME_DEF_STMT (ssa_name))
8542 && gimple_code (g) == GIMPLE_CALL)
8543 {
8544 gcc_assert (!gimple_call_internal_p (g));
8545 pmode = promote_function_mode (type, mode, &unsignedp,
8546 gimple_call_fntype (g),
8547 2);
8548 }
8549 else
8550 pmode = promote_decl_mode (exp, &unsignedp);
8551 gcc_assert (GET_MODE (decl_rtl) == pmode);
8552
8553 temp = gen_lowpart_SUBREG (mode, decl_rtl);
8554 SUBREG_PROMOTED_VAR_P (temp) = 1;
8555 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
8556 return temp;
8557 }
8558
8559 return decl_rtl;
8560
8561 case INTEGER_CST:
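/* E.g. the tree constant 42 in SImode becomes (const_int 42); values
wider than a HOST_WIDE_INT come back as a CONST_DOUBLE. */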
8562 temp = immed_double_const (TREE_INT_CST_LOW (exp),
8563 TREE_INT_CST_HIGH (exp), mode);
8564
8565 return temp;
8566
8567 case VECTOR_CST:
8568 {
8569 tree tmp = NULL_TREE;
8570 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
8571 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
8572 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
8573 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
8574 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
8575 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
8576 return const_vector_from_tree (exp);
8577 if (GET_MODE_CLASS (mode) == MODE_INT)
8578 {
8579 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
8580 if (type_for_mode)
8581 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
8582 }
8583 if (!tmp)
8584 tmp = build_constructor_from_list (type,
8585 TREE_VECTOR_CST_ELTS (exp));
8586 return expand_expr (tmp, ignore ? const0_rtx : target,
8587 tmode, modifier);
8588 }
8589
8590 case CONST_DECL:
8591 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
8592
8593 case REAL_CST:
8594 /* If optimized, generate immediate CONST_DOUBLE
8595 which will be turned into memory by reload if necessary.
8596
8597 We used to force a register so that loop.c could see it. But
8598 this does not allow gen_* patterns to perform optimizations with
8599 the constants. It also produces two insns in cases like "x = 1.0;".
8600 On most machines, floating-point constants are not permitted in
8601 many insns, so we'd end up copying it to a register in any case.
8602
8603 Now, we do the copying in expand_binop, if appropriate. */
8604 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
8605 TYPE_MODE (TREE_TYPE (exp)));
8606
8607 case FIXED_CST:
8608 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
8609 TYPE_MODE (TREE_TYPE (exp)));
8610
8611 case COMPLEX_CST:
8612 /* Handle evaluating a complex constant in a CONCAT target. */
8613 if (original_target && GET_CODE (original_target) == CONCAT)
8614 {
8615 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8616 rtx rtarg, itarg;
8617
8618 rtarg = XEXP (original_target, 0);
8619 itarg = XEXP (original_target, 1);
8620
8621 /* Move the real and imaginary parts separately. */
8622 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
8623 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
8624
8625 if (op0 != rtarg)
8626 emit_move_insn (rtarg, op0);
8627 if (op1 != itarg)
8628 emit_move_insn (itarg, op1);
8629
8630 return original_target;
8631 }
8632
8633 /* ... fall through ... */
8634
8635 case STRING_CST:
8636 temp = expand_expr_constant (exp, 1, modifier);
8637
8638 /* temp contains a constant address.
8639 On RISC machines where a constant address isn't valid,
8640 make some insns to get that address into a register. */
8641 if (modifier != EXPAND_CONST_ADDRESS
8642 && modifier != EXPAND_INITIALIZER
8643 && modifier != EXPAND_SUM
8644 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
8645 MEM_ADDR_SPACE (temp)))
8646 return replace_equiv_address (temp,
8647 copy_rtx (XEXP (temp, 0)));
8648 return temp;
8649
8650 case SAVE_EXPR:
8651 {
8652 tree val = treeop0;
8653 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
8654
8655 if (!SAVE_EXPR_RESOLVED_P (exp))
8656 {
8657 /* We can indeed still hit this case, typically via builtin
8658 expanders calling save_expr immediately before expanding
8659 something. Assume this means that we only have to deal
8660 with non-BLKmode values. */
8661 gcc_assert (GET_MODE (ret) != BLKmode);
8662
8663 val = build_decl (EXPR_LOCATION (exp),
8664 VAR_DECL, NULL, TREE_TYPE (exp));
8665 DECL_ARTIFICIAL (val) = 1;
8666 DECL_IGNORED_P (val) = 1;
8667 treeop0 = val;
8668 TREE_OPERAND (exp, 0) = treeop0;
8669 SAVE_EXPR_RESOLVED_P (exp) = 1;
8670
8671 if (!CONSTANT_P (ret))
8672 ret = copy_to_reg (ret);
8673 SET_DECL_RTL (val, ret);
8674 }
8675
8676 return ret;
8677 }
8678
8679
8680 case CONSTRUCTOR:
8681 /* If we don't need the result, just ensure we evaluate any
8682 subexpressions. */
8683 if (ignore)
8684 {
8685 unsigned HOST_WIDE_INT idx;
8686 tree value;
8687
8688 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
8689 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
8690
8691 return const0_rtx;
8692 }
8693
8694 return expand_constructor (exp, target, modifier, false);
8695
8696 case TARGET_MEM_REF:
8697 {
8698 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
8699 struct mem_address addr;
8700 int icode, align;
8701
8702 get_address_description (exp, &addr);
8703 op0 = addr_for_mem_ref (&addr, as, true);
8704 op0 = memory_address_addr_space (mode, op0, as);
8705 temp = gen_rtx_MEM (mode, op0);
8706 set_mem_attributes (temp, exp, 0);
8707 set_mem_addr_space (temp, as);
8708 align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
8709 get_object_alignment (exp, BIGGEST_ALIGNMENT));
8710 if (mode != BLKmode
8711 && (unsigned) align < GET_MODE_ALIGNMENT (mode)
8712 /* If the target does not have special handling for unaligned
8713 loads of this mode, then it can use regular moves for them. */
8714 && ((icode = optab_handler (movmisalign_optab, mode))
8715 != CODE_FOR_nothing))
8716 {
8717 rtx reg, insn;
8718
8719 /* We've already validated the memory, and we're creating a
8720 new pseudo destination. The predicates really can't fail. */
8721 reg = gen_reg_rtx (mode);
8722
8723 /* Nor can the insn generator. */
8724 insn = GEN_FCN (icode) (reg, temp);
8725 gcc_assert (insn != NULL_RTX);
8726 emit_insn (insn);
8727
8728 return reg;
8729 }
8730 return temp;
8731 }
8732
8733 case MEM_REF:
8734 {
8735 addr_space_t as
8736 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8737 enum machine_mode address_mode;
8738 tree base = TREE_OPERAND (exp, 0);
8739 gimple def_stmt;
8740 int icode, align;
8741 /* Handle expansion of non-aliased memory with non-BLKmode. That
8742 might end up in a register. */
8743 if (TREE_CODE (base) == ADDR_EXPR)
8744 {
8745 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
8746 tree bit_offset;
8747 base = TREE_OPERAND (base, 0);
8748 if (!DECL_P (base))
8749 {
8750 HOST_WIDE_INT off;
8751 base = get_addr_base_and_unit_offset (base, &off);
8752 gcc_assert (base);
8753 offset += off;
8754 }
8755 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
8756 decl we must use bitfield operations. */
8757 if (DECL_P (base)
8758 && !TREE_ADDRESSABLE (base)
8759 && DECL_MODE (base) != BLKmode
8760 && DECL_RTL_SET_P (base)
8761 && !MEM_P (DECL_RTL (base)))
8762 {
8763 tree bftype;
8764 if (offset == 0
8765 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
8766 && (GET_MODE_BITSIZE (DECL_MODE (base))
8767 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
8768 return expand_expr (build1 (VIEW_CONVERT_EXPR,
8769 TREE_TYPE (exp), base),
8770 target, tmode, modifier);
8771 bit_offset = bitsize_int (offset * BITS_PER_UNIT);
8772 bftype = TREE_TYPE (base);
8773 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
8774 bftype = TREE_TYPE (exp);
8775 return expand_expr (build3 (BIT_FIELD_REF, bftype,
8776 base,
8777 TYPE_SIZE (TREE_TYPE (exp)),
8778 bit_offset),
8779 target, tmode, modifier);
8780 }
8781 }
8782 address_mode = targetm.addr_space.address_mode (as);
8783 base = TREE_OPERAND (exp, 0);
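/* If the base address is an SSA name defined by a BIT_AND_EXPR, fold
   the masking back into the address expression so it is expanded
   together with the address. */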
8784 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
8785 {
8786 tree mask = gimple_assign_rhs2 (def_stmt);
8787 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
8788 gimple_assign_rhs1 (def_stmt), mask);
8789 TREE_OPERAND (exp, 0) = base;
8790 }
8791 align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
8792 get_object_alignment (exp, BIGGEST_ALIGNMENT));
8793 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
8794 op0 = memory_address_addr_space (address_mode, op0, as);
8795 if (!integer_zerop (TREE_OPERAND (exp, 1)))
8796 {
8797 rtx off
8798 = immed_double_int_const (mem_ref_offset (exp), address_mode);
8799 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
8800 }
8801 op0 = memory_address_addr_space (mode, op0, as);
8802 temp = gen_rtx_MEM (mode, op0);
8803 set_mem_attributes (temp, exp, 0);
8804 set_mem_addr_space (temp, as);
8805 if (TREE_THIS_VOLATILE (exp))
8806 MEM_VOLATILE_P (temp) = 1;
8807 if (mode != BLKmode
8808 && (unsigned) align < GET_MODE_ALIGNMENT (mode)
8809 /* If the target does not have special handling for unaligned
8810 loads of mode then it can use regular moves for them. */
8811 && ((icode = optab_handler (movmisalign_optab, mode))
8812 != CODE_FOR_nothing))
8813 {
8814 rtx reg, insn;
8815
8816 /* We've already validated the memory, and we're creating a
8817 new pseudo destination. The predicates really can't fail. */
8818 reg = gen_reg_rtx (mode);
8819
8820 /* Nor can the insn generator. */
8821 insn = GEN_FCN (icode) (reg, temp);
8822 emit_insn (insn);
8823
8824 return reg;
8825 }
8826 return temp;
8827 }
8828
8829 case ARRAY_REF:
8830
8831 {
8832 tree array = treeop0;
8833 tree index = treeop1;
8834
8835 /* Fold an expression like: "foo"[2].
8836 This is not done in fold so it won't happen inside &.
8837 Don't fold if this is for wide characters since it's too
8838 difficult to do correctly and this is a very rare case. */
8839
8840 if (modifier != EXPAND_CONST_ADDRESS
8841 && modifier != EXPAND_INITIALIZER
8842 && modifier != EXPAND_MEMORY)
8843 {
8844 tree t = fold_read_from_constant_string (exp);
8845
8846 if (t)
8847 return expand_expr (t, target, tmode, modifier);
8848 }
8849
8850 /* If this is a constant index into a constant array,
8851 just get the value from the array. Handle both the cases when
8852 we have an explicit constructor and when our operand is a variable
8853 that was declared const. */
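/* For example, given static const int a[3] = {1, 2, 3}, the reference
   a[1] can be replaced here by the constant 2. */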
8854
8855 if (modifier != EXPAND_CONST_ADDRESS
8856 && modifier != EXPAND_INITIALIZER
8857 && modifier != EXPAND_MEMORY
8858 && TREE_CODE (array) == CONSTRUCTOR
8859 && ! TREE_SIDE_EFFECTS (array)
8860 && TREE_CODE (index) == INTEGER_CST)
8861 {
8862 unsigned HOST_WIDE_INT ix;
8863 tree field, value;
8864
8865 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
8866 field, value)
8867 if (tree_int_cst_equal (field, index))
8868 {
8869 if (!TREE_SIDE_EFFECTS (value))
8870 return expand_expr (fold (value), target, tmode, modifier);
8871 break;
8872 }
8873 }
8874
8875 else if (optimize >= 1
8876 && modifier != EXPAND_CONST_ADDRESS
8877 && modifier != EXPAND_INITIALIZER
8878 && modifier != EXPAND_MEMORY
8879 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8880 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8881 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
8882 && const_value_known_p (array))
8883 {
8884 if (TREE_CODE (index) == INTEGER_CST)
8885 {
8886 tree init = DECL_INITIAL (array);
8887
8888 if (TREE_CODE (init) == CONSTRUCTOR)
8889 {
8890 unsigned HOST_WIDE_INT ix;
8891 tree field, value;
8892
8893 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
8894 field, value)
8895 if (tree_int_cst_equal (field, index))
8896 {
8897 if (TREE_SIDE_EFFECTS (value))
8898 break;
8899
8900 if (TREE_CODE (value) == CONSTRUCTOR)
8901 {
8902 /* If VALUE is a CONSTRUCTOR, this
8903 optimization is only useful if
8904 this doesn't store the CONSTRUCTOR
8905 into memory. If it does, it is more
8906 efficient to just load the data from
8907 the array directly. */
8908 rtx ret = expand_constructor (value, target,
8909 modifier, true);
8910 if (ret == NULL_RTX)
8911 break;
8912 }
8913
8914 return expand_expr (fold (value), target, tmode,
8915 modifier);
8916 }
8917 }
8918 else if (TREE_CODE (init) == STRING_CST)
8919 {
8920 tree index1 = index;
8921 tree low_bound = array_ref_low_bound (exp);
8922 index1 = fold_convert_loc (loc, sizetype,
8923 treeop1);
8924
8925 /* Optimize the special-case of a zero lower bound.
8926
8927 We convert the low_bound to sizetype to avoid some problems
8928 with constant folding. (E.g. suppose the lower bound is 1,
8929 and its mode is QI. Without the conversion, (ARRAY
8930 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8931 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8932
8933 if (! integer_zerop (low_bound))
8934 index1 = size_diffop_loc (loc, index1,
8935 fold_convert_loc (loc, sizetype,
8936 low_bound));
8937
8938 if (0 > compare_tree_int (index1,
8939 TREE_STRING_LENGTH (init)))
8940 {
8941 tree type = TREE_TYPE (TREE_TYPE (init));
8942 enum machine_mode mode = TYPE_MODE (type);
8943
8944 if (GET_MODE_CLASS (mode) == MODE_INT
8945 && GET_MODE_SIZE (mode) == 1)
8946 return gen_int_mode (TREE_STRING_POINTER (init)
8947 [TREE_INT_CST_LOW (index1)],
8948 mode);
8949 }
8950 }
8951 }
8952 }
8953 }
8954 goto normal_inner_ref;
8955
8956 case COMPONENT_REF:
8957 /* If the operand is a CONSTRUCTOR, we can just extract the
8958 appropriate field if it is present. */
8959 if (TREE_CODE (treeop0) == CONSTRUCTOR)
8960 {
8961 unsigned HOST_WIDE_INT idx;
8962 tree field, value;
8963
8964 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
8965 idx, field, value)
8966 if (field == treeop1
8967 /* We can normally use the value of the field in the
8968 CONSTRUCTOR. However, if this is a bitfield in
8969 an integral mode that we can fit in a HOST_WIDE_INT,
8970 we must mask only the number of bits in the bitfield,
8971 since this is done implicitly by the constructor. If
8972 the bitfield does not meet either of those conditions,
8973 we can't do this optimization. */
8974 && (! DECL_BIT_FIELD (field)
8975 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
8976 && (GET_MODE_BITSIZE (DECL_MODE (field))
8977 <= HOST_BITS_PER_WIDE_INT))))
8978 {
8979 if (DECL_BIT_FIELD (field)
8980 && modifier == EXPAND_STACK_PARM)
8981 target = 0;
8982 op0 = expand_expr (value, target, tmode, modifier);
8983 if (DECL_BIT_FIELD (field))
8984 {
8985 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
8986 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
8987
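/* The CONSTRUCTOR element was expanded in its full mode; reduce it to
   the bitfield's width, by masking for unsigned fields or by a
   left/right shift pair for signed ones. */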
8988 if (TYPE_UNSIGNED (TREE_TYPE (field)))
8989 {
8990 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
8991 op0 = expand_and (imode, op0, op1, target);
8992 }
8993 else
8994 {
8995 int count = GET_MODE_BITSIZE (imode) - bitsize;
8996
8997 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
8998 target, 0);
8999 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9000 target, 0);
9001 }
9002 }
9003
9004 return op0;
9005 }
9006 }
9007 goto normal_inner_ref;
9008
9009 case BIT_FIELD_REF:
9010 case ARRAY_RANGE_REF:
9011 normal_inner_ref:
9012 {
9013 enum machine_mode mode1, mode2;
9014 HOST_WIDE_INT bitsize, bitpos;
9015 tree offset;
9016 int volatilep = 0, must_force_mem;
9017 bool packedp = false;
9018 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9019 &mode1, &unsignedp, &volatilep, true);
9020 rtx orig_op0, memloc;
9021
9022 /* If we got back the original object, something is wrong. Perhaps
9023 we are evaluating an expression too early. In any event, don't
9024 infinitely recurse. */
9025 gcc_assert (tem != exp);
9026
9027 if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
9028 || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
9029 && DECL_PACKED (TREE_OPERAND (exp, 1))))
9030 packedp = true;
9031
9032 /* If TEM's type is a union of variable size, pass TARGET to the inner
9033 computation, since it will need a temporary and TARGET is known
9034 to be suitable. This occurs in unchecked conversion in Ada. */
9035 orig_op0 = op0
9036 = expand_expr (tem,
9037 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9038 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9039 != INTEGER_CST)
9040 && modifier != EXPAND_STACK_PARM
9041 ? target : NULL_RTX),
9042 VOIDmode,
9043 (modifier == EXPAND_INITIALIZER
9044 || modifier == EXPAND_CONST_ADDRESS
9045 || modifier == EXPAND_STACK_PARM)
9046 ? modifier : EXPAND_NORMAL);
9047
9048
9049 /* If the bitfield is volatile, we want to access it in the
9050 field's mode, not the computed mode.
9051 If a MEM has VOIDmode (external with incomplete type),
9052 use BLKmode for it instead. */
9053 if (MEM_P (op0))
9054 {
9055 if (volatilep && flag_strict_volatile_bitfields > 0)
9056 op0 = adjust_address (op0, mode1, 0);
9057 else if (GET_MODE (op0) == VOIDmode)
9058 op0 = adjust_address (op0, BLKmode, 0);
9059 }
9060
9061 mode2
9062 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9063
9064 /* If we have either an offset, a BLKmode result, or a reference
9065 outside the underlying object, we must force it to memory.
9066 Such a case can occur in Ada if we have unchecked conversion
9067 of an expression from a scalar type to an aggregate type or
9068 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9069 passed a partially uninitialized object or a view-conversion
9070 to a larger size. */
9071 must_force_mem = (offset
9072 || mode1 == BLKmode
9073 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9074
9075 /* Handle CONCAT first. */
9076 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9077 {
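/* A reference to the whole CONCAT, or to exactly its first or second
   part, can be handled without going through memory. */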
9078 if (bitpos == 0
9079 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9080 return op0;
9081 if (bitpos == 0
9082 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9083 && bitsize)
9084 {
9085 op0 = XEXP (op0, 0);
9086 mode2 = GET_MODE (op0);
9087 }
9088 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9089 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9090 && bitpos
9091 && bitsize)
9092 {
9093 op0 = XEXP (op0, 1);
9094 bitpos = 0;
9095 mode2 = GET_MODE (op0);
9096 }
9097 else
9098 /* Otherwise force into memory. */
9099 must_force_mem = 1;
9100 }
9101
9102 /* If this is a constant, put it in a register if it is a legitimate
9103 constant and we don't need a memory reference. */
9104 if (CONSTANT_P (op0)
9105 && mode2 != BLKmode
9106 && targetm.legitimate_constant_p (mode2, op0)
9107 && !must_force_mem)
9108 op0 = force_reg (mode2, op0);
9109
9110 /* Otherwise, if this is a constant, try to force it to the constant
9111 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9112 is a legitimate constant. */
9113 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9114 op0 = validize_mem (memloc);
9115
9116 /* Otherwise, if this is a constant or the object is not in memory
9117 and needs to be, put it there. */
9118 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9119 {
9120 tree nt = build_qualified_type (TREE_TYPE (tem),
9121 (TYPE_QUALS (TREE_TYPE (tem))
9122 | TYPE_QUAL_CONST));
9123 memloc = assign_temp (nt, 1, 1, 1);
9124 emit_move_insn (memloc, op0);
9125 op0 = memloc;
9126 }
9127
9128 if (offset)
9129 {
9130 enum machine_mode address_mode;
9131 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9132 EXPAND_SUM);
9133
9134 gcc_assert (MEM_P (op0));
9135
9136 address_mode
9137 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9138 if (GET_MODE (offset_rtx) != address_mode)
9139 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9140
9141 if (GET_MODE (op0) == BLKmode
9142 /* A constant address in OP0 can have VOIDmode; we must
9143 not try to call force_reg in that case. */
9144 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9145 && bitsize != 0
9146 && (bitpos % bitsize) == 0
9147 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9148 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9149 {
9150 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9151 bitpos = 0;
9152 }
9153
9154 op0 = offset_address (op0, offset_rtx,
9155 highest_pow2_factor (offset));
9156 }
9157
9158 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9159 record its alignment as BIGGEST_ALIGNMENT. */
9160 if (MEM_P (op0) && bitpos == 0 && offset != 0
9161 && is_aligning_offset (offset, tem))
9162 set_mem_align (op0, BIGGEST_ALIGNMENT);
9163
9164 /* Don't forget about volatility even if this is a bitfield. */
9165 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9166 {
9167 if (op0 == orig_op0)
9168 op0 = copy_rtx (op0);
9169
9170 MEM_VOLATILE_P (op0) = 1;
9171 }
9172
9173 /* In cases where an aligned union has an unaligned object
9174 as a field, we might be extracting a BLKmode value from
9175 an integer-mode (e.g., SImode) object. Handle this case
9176 by doing the extract into an object as wide as the field
9177 (which we know to be the width of a basic mode), then
9178 storing into memory, and changing the mode to BLKmode. */
9179 if (mode1 == VOIDmode
9180 || REG_P (op0) || GET_CODE (op0) == SUBREG
9181 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9182 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9183 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9184 && modifier != EXPAND_CONST_ADDRESS
9185 && modifier != EXPAND_INITIALIZER)
9186 /* If the field is volatile, we always want an aligned
9187 access. Only do this if the access is not already naturally
9188 aligned, otherwise "normal" (non-bitfield) volatile fields
9189 become non-addressable. */
9190 || (volatilep && flag_strict_volatile_bitfields > 0
9191 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
9192 /* If the field isn't aligned enough to fetch as a memref,
9193 fetch it as a bit field. */
9194 || (mode1 != BLKmode
9195 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9196 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9197 || (MEM_P (op0)
9198 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9199 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9200 && ((modifier == EXPAND_CONST_ADDRESS
9201 || modifier == EXPAND_INITIALIZER)
9202 ? STRICT_ALIGNMENT
9203 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9204 || (bitpos % BITS_PER_UNIT != 0)))
9205 /* If the type and the field are a constant size and the
9206 size of the type isn't the same size as the bitfield,
9207 we must use bitfield operations. */
9208 || (bitsize >= 0
9209 && TYPE_SIZE (TREE_TYPE (exp))
9210 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9211 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9212 bitsize)))
9213 {
9214 enum machine_mode ext_mode = mode;
9215
9216 if (ext_mode == BLKmode
9217 && ! (target != 0 && MEM_P (op0)
9218 && MEM_P (target)
9219 && bitpos % BITS_PER_UNIT == 0))
9220 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9221
9222 if (ext_mode == BLKmode)
9223 {
9224 if (target == 0)
9225 target = assign_temp (type, 0, 1, 1);
9226
9227 if (bitsize == 0)
9228 return target;
9229
9230 /* In this case, BITPOS must start at a byte boundary and
9231 TARGET, if specified, must be a MEM. */
9232 gcc_assert (MEM_P (op0)
9233 && (!target || MEM_P (target))
9234 && !(bitpos % BITS_PER_UNIT));
9235
9236 emit_block_move (target,
9237 adjust_address (op0, VOIDmode,
9238 bitpos / BITS_PER_UNIT),
9239 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9240 / BITS_PER_UNIT),
9241 (modifier == EXPAND_STACK_PARM
9242 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9243
9244 return target;
9245 }
9246
9247 op0 = validize_mem (op0);
9248
9249 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9250 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9251
9252 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
9253 (modifier == EXPAND_STACK_PARM
9254 ? NULL_RTX : target),
9255 ext_mode, ext_mode);
9256
9257 /* If the result is a record type and BITSIZE is narrower than
9258 the mode of OP0, an integral mode, and this is a big endian
9259 machine, we must put the field into the high-order bits. */
9260 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9261 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9262 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9263 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9264 GET_MODE_BITSIZE (GET_MODE (op0))
9265 - bitsize, op0, 1);
9266
9267 /* If the result type is BLKmode, store the data into a temporary
9268 of the appropriate type, but with the mode corresponding to the
9269 mode for the data we have (op0's mode). It's tempting to make
9270 this a constant type, since we know it's only being stored once,
9271 but that can cause problems if we are taking the address of this
9272 COMPONENT_REF because the MEM of any reference via that address
9273 will have flags corresponding to the type, which will not
9274 necessarily be constant. */
9275 if (mode == BLKmode)
9276 {
9277 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9278 rtx new_rtx;
9279
9280 /* If the reference doesn't use the alias set of its type,
9281 we cannot create the temporary using that type. */
9282 if (component_uses_parent_alias_set (exp))
9283 {
9284 new_rtx = assign_stack_local (ext_mode, size, 0);
9285 set_mem_alias_set (new_rtx, get_alias_set (exp));
9286 }
9287 else
9288 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9289
9290 emit_move_insn (new_rtx, op0);
9291 op0 = copy_rtx (new_rtx);
9292 PUT_MODE (op0, BLKmode);
9293 set_mem_attributes (op0, exp, 1);
9294 }
9295
9296 return op0;
9297 }
9298
9299 /* If the result is BLKmode, use that to access the object
9300 now as well. */
9301 if (mode == BLKmode)
9302 mode1 = BLKmode;
9303
9304 /* Get a reference to just this component. */
9305 if (modifier == EXPAND_CONST_ADDRESS
9306 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9307 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9308 else
9309 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9310
9311 if (op0 == orig_op0)
9312 op0 = copy_rtx (op0);
9313
9314 set_mem_attributes (op0, exp, 0);
9315 if (REG_P (XEXP (op0, 0)))
9316 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9317
9318 MEM_VOLATILE_P (op0) |= volatilep;
9319 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9320 || modifier == EXPAND_CONST_ADDRESS
9321 || modifier == EXPAND_INITIALIZER)
9322 return op0;
9323 else if (target == 0)
9324 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9325
9326 convert_move (target, op0, unsignedp);
9327 return target;
9328 }
9329
9330 case OBJ_TYPE_REF:
9331 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9332
9333 case CALL_EXPR:
9334 /* All valid uses of __builtin_va_arg_pack () are removed during
9335 inlining. */
9336 if (CALL_EXPR_VA_ARG_PACK (exp))
9337 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9338 {
9339 tree fndecl = get_callee_fndecl (exp), attr;
9340
9341 if (fndecl
9342 && (attr = lookup_attribute ("error",
9343 DECL_ATTRIBUTES (fndecl))) != NULL)
9344 error ("%Kcall to %qs declared with attribute error: %s",
9345 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9346 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9347 if (fndecl
9348 && (attr = lookup_attribute ("warning",
9349 DECL_ATTRIBUTES (fndecl))) != NULL)
9350 warning_at (tree_nonartificial_location (exp),
9351 0, "%Kcall to %qs declared with attribute warning: %s",
9352 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9353 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9354
9355 /* Check for a built-in function. */
9356 if (fndecl && DECL_BUILT_IN (fndecl))
9357 {
9358 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9359 return expand_builtin (exp, target, subtarget, tmode, ignore);
9360 }
9361 }
9362 return expand_call (exp, target, ignore);
9363
9364 case VIEW_CONVERT_EXPR:
9365 op0 = NULL_RTX;
9366
9367 /* If we are converting to BLKmode, try to avoid an intermediate
9368 temporary by fetching an inner memory reference. */
9369 if (mode == BLKmode
9370 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9371 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9372 && handled_component_p (treeop0))
9373 {
9374 enum machine_mode mode1;
9375 HOST_WIDE_INT bitsize, bitpos;
9376 tree offset;
9377 int unsignedp;
9378 int volatilep = 0;
9379 tree tem
9380 = get_inner_reference (treeop0, &bitsize, &bitpos,
9381 &offset, &mode1, &unsignedp, &volatilep,
9382 true);
9383 rtx orig_op0;
9384
9385 /* ??? We should work harder and deal with non-zero offsets. */
9386 if (!offset
9387 && (bitpos % BITS_PER_UNIT) == 0
9388 && bitsize >= 0
9389 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9390 {
9391 /* See the normal_inner_ref case for the rationale. */
9392 orig_op0
9393 = expand_expr (tem,
9394 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9395 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9396 != INTEGER_CST)
9397 && modifier != EXPAND_STACK_PARM
9398 ? target : NULL_RTX),
9399 VOIDmode,
9400 (modifier == EXPAND_INITIALIZER
9401 || modifier == EXPAND_CONST_ADDRESS
9402 || modifier == EXPAND_STACK_PARM)
9403 ? modifier : EXPAND_NORMAL);
9404
9405 if (MEM_P (orig_op0))
9406 {
9407 op0 = orig_op0;
9408
9409 /* Get a reference to just this component. */
9410 if (modifier == EXPAND_CONST_ADDRESS
9411 || modifier == EXPAND_SUM
9412 || modifier == EXPAND_INITIALIZER)
9413 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9414 else
9415 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9416
9417 if (op0 == orig_op0)
9418 op0 = copy_rtx (op0);
9419
9420 set_mem_attributes (op0, treeop0, 0);
9421 if (REG_P (XEXP (op0, 0)))
9422 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9423
9424 MEM_VOLATILE_P (op0) |= volatilep;
9425 }
9426 }
9427 }
9428
9429 if (!op0)
9430 op0 = expand_expr (treeop0,
9431 NULL_RTX, VOIDmode, modifier);
9432
9433 /* If the input and output modes are both the same, we are done. */
9434 if (mode == GET_MODE (op0))
9435 ;
9436 /* If neither mode is BLKmode, and both modes are the same size
9437 then we can use gen_lowpart. */
9438 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9439 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
9440 && !COMPLEX_MODE_P (GET_MODE (op0)))
9441 {
9442 if (GET_CODE (op0) == SUBREG)
9443 op0 = force_reg (GET_MODE (op0), op0);
9444 temp = gen_lowpart_common (mode, op0);
9445 if (temp)
9446 op0 = temp;
9447 else
9448 {
9449 if (!REG_P (op0) && !MEM_P (op0))
9450 op0 = force_reg (GET_MODE (op0), op0);
9451 op0 = gen_lowpart (mode, op0);
9452 }
9453 }
9454 /* If both types are integral, convert from one mode to the other. */
9455 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
9456 op0 = convert_modes (mode, GET_MODE (op0), op0,
9457 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9458 /* As a last resort, spill op0 to memory, and reload it in a
9459 different mode. */
9460 else if (!MEM_P (op0))
9461 {
9462 /* If the operand is not a MEM, force it into memory. Since we
9463 are going to be changing the mode of the MEM, don't call
9464 force_const_mem for constants because we don't allow pool
9465 constants to change mode. */
9466 tree inner_type = TREE_TYPE (treeop0);
9467
9468 gcc_assert (!TREE_ADDRESSABLE (exp));
9469
9470 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
9471 target
9472 = assign_stack_temp_for_type
9473 (TYPE_MODE (inner_type),
9474 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
9475
9476 emit_move_insn (target, op0);
9477 op0 = target;
9478 }
9479
9480 /* At this point, OP0 is in the correct mode. If the output type is
9481 such that the operand is known to be aligned, indicate that it is.
9482 Otherwise, we need only be concerned about alignment for non-BLKmode
9483 results. */
9484 if (MEM_P (op0))
9485 {
9486 op0 = copy_rtx (op0);
9487
9488 if (TYPE_ALIGN_OK (type))
9489 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
9490 else if (STRICT_ALIGNMENT
9491 && mode != BLKmode
9492 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
9493 {
9494 tree inner_type = TREE_TYPE (treeop0);
9495 HOST_WIDE_INT temp_size
9496 = MAX (int_size_in_bytes (inner_type),
9497 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
9498 rtx new_rtx
9499 = assign_stack_temp_for_type (mode, temp_size, 0, type);
9500 rtx new_with_op0_mode
9501 = adjust_address (new_rtx, GET_MODE (op0), 0);
9502
9503 gcc_assert (!TREE_ADDRESSABLE (exp));
9504
9505 if (GET_MODE (op0) == BLKmode)
9506 emit_block_move (new_with_op0_mode, op0,
9507 GEN_INT (GET_MODE_SIZE (mode)),
9508 (modifier == EXPAND_STACK_PARM
9509 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9510 else
9511 emit_move_insn (new_with_op0_mode, op0);
9512
9513 op0 = new_rtx;
9514 }
9515
9516 op0 = adjust_address (op0, mode, 0);
9517 }
9518
9519 return op0;
9520
9521 /* Use a compare and a jump for BLKmode comparisons, or for function
9522 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
9523
9524 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9525 are occasionally created by folding during expansion. */
9526 case TRUTH_ANDIF_EXPR:
9527 case TRUTH_ORIF_EXPR:
9528 if (! ignore
9529 && (target == 0
9530 || modifier == EXPAND_STACK_PARM
9531 || ! safe_from_p (target, treeop0, 1)
9532 || ! safe_from_p (target, treeop1, 1)
9533 /* Make sure we don't have a hard reg (such as function's return
9534 value) live across basic blocks, if not optimizing. */
9535 || (!optimize && REG_P (target)
9536 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9537 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9538
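/* Expand roughly as: TARGET = 0; if (!<condition>) goto L; TARGET = 1; L:
   (the stores are omitted when there is no TARGET). */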
9539 if (target)
9540 emit_move_insn (target, const0_rtx);
9541
9542 op1 = gen_label_rtx ();
9543 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9544
9545 if (target)
9546 emit_move_insn (target, const1_rtx);
9547
9548 emit_label (op1);
9549 return ignore ? const0_rtx : target;
9550
9551 case STATEMENT_LIST:
9552 {
9553 tree_stmt_iterator iter;
9554
9555 gcc_assert (ignore);
9556
9557 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9558 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9559 }
9560 return const0_rtx;
9561
9562 case COND_EXPR:
9563 /* A COND_EXPR with its type being VOID_TYPE represents a
9564 conditional jump and is handled in
9565 expand_gimple_cond_expr. */
9566 gcc_assert (!VOID_TYPE_P (type));
9567
9568 /* Note that COND_EXPRs whose type is a structure or union
9569 are required to be constructed to contain assignments to
9570 a temporary variable, so that we can evaluate them here
9571 for side effect only. If type is void, we must do likewise. */
9572
9573 gcc_assert (!TREE_ADDRESSABLE (type)
9574 && !ignore
9575 && TREE_TYPE (treeop1) != void_type_node
9576 && TREE_TYPE (treeop2) != void_type_node);
9577
9578 /* If we are not to produce a result, we have no target. Otherwise,
9579 if a target was specified use it; it will not be used as an
9580 intermediate target unless it is safe. If no target, use a
9581 temporary. */
9582
9583 if (modifier != EXPAND_STACK_PARM
9584 && original_target
9585 && safe_from_p (original_target, treeop0, 1)
9586 && GET_MODE (original_target) == mode
9587 #ifdef HAVE_conditional_move
9588 && (! can_conditionally_move_p (mode)
9589 || REG_P (original_target))
9590 #endif
9591 && !MEM_P (original_target))
9592 temp = original_target;
9593 else
9594 temp = assign_temp (type, 0, 0, 1);
9595
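/* Expand roughly as:
   if (!<cond>) goto L0; TEMP = <then>; goto L1; L0: TEMP = <else>; L1:  */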
9596 do_pending_stack_adjust ();
9597 NO_DEFER_POP;
9598 op0 = gen_label_rtx ();
9599 op1 = gen_label_rtx ();
9600 jumpifnot (treeop0, op0, -1);
9601 store_expr (treeop1, temp,
9602 modifier == EXPAND_STACK_PARM,
9603 false);
9604
9605 emit_jump_insn (gen_jump (op1));
9606 emit_barrier ();
9607 emit_label (op0);
9608 store_expr (treeop2, temp,
9609 modifier == EXPAND_STACK_PARM,
9610 false);
9611
9612 emit_label (op1);
9613 OK_DEFER_POP;
9614 return temp;
9615
9616 case VEC_COND_EXPR:
9617 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9618 return target;
9619
9620 case MODIFY_EXPR:
9621 {
9622 tree lhs = treeop0;
9623 tree rhs = treeop1;
9624 gcc_assert (ignore);
9625
9626 /* Check for |= or &= of a bitfield of size one into another bitfield
9627 of size 1. In this case, (unless we need the result of the
9628 assignment) we can do this more efficiently with a
9629 test followed by an assignment, if necessary.
9630
9631 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9632 things change so we do, this code should be enhanced to
9633 support it. */
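/* For example, given struct { int a : 1, b : 1; } x; the statement
   x.a |= x.b is expanded roughly as: if (x.b) x.a = 1;  and
   x.a &= x.b as: if (!x.b) x.a = 0;  */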
9634 if (TREE_CODE (lhs) == COMPONENT_REF
9635 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9636 || TREE_CODE (rhs) == BIT_AND_EXPR)
9637 && TREE_OPERAND (rhs, 0) == lhs
9638 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9639 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9640 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9641 {
9642 rtx label = gen_label_rtx ();
9643 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9644 do_jump (TREE_OPERAND (rhs, 1),
9645 value ? label : 0,
9646 value ? 0 : label, -1);
9647 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9648 MOVE_NONTEMPORAL (exp));
9649 do_pending_stack_adjust ();
9650 emit_label (label);
9651 return const0_rtx;
9652 }
9653
9654 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9655 return const0_rtx;
9656 }
9657
9658 case ADDR_EXPR:
9659 return expand_expr_addr_expr (exp, target, tmode, modifier);
9660
9661 case REALPART_EXPR:
9662 op0 = expand_normal (treeop0);
9663 return read_complex_part (op0, false);
9664
9665 case IMAGPART_EXPR:
9666 op0 = expand_normal (treeop0);
9667 return read_complex_part (op0, true);
9668
9669 case RETURN_EXPR:
9670 case LABEL_EXPR:
9671 case GOTO_EXPR:
9672 case SWITCH_EXPR:
9673 case ASM_EXPR:
9674 /* Expanded in cfgexpand.c. */
9675 gcc_unreachable ();
9676
9677 case TRY_CATCH_EXPR:
9678 case CATCH_EXPR:
9679 case EH_FILTER_EXPR:
9680 case TRY_FINALLY_EXPR:
9681 /* Lowered by tree-eh.c. */
9682 gcc_unreachable ();
9683
9684 case WITH_CLEANUP_EXPR:
9685 case CLEANUP_POINT_EXPR:
9686 case TARGET_EXPR:
9687 case CASE_LABEL_EXPR:
9688 case VA_ARG_EXPR:
9689 case BIND_EXPR:
9690 case INIT_EXPR:
9691 case CONJ_EXPR:
9692 case COMPOUND_EXPR:
9693 case PREINCREMENT_EXPR:
9694 case PREDECREMENT_EXPR:
9695 case POSTINCREMENT_EXPR:
9696 case POSTDECREMENT_EXPR:
9697 case LOOP_EXPR:
9698 case EXIT_EXPR:
9699 /* Lowered by gimplify.c. */
9700 gcc_unreachable ();
9701
9702 case FDESC_EXPR:
9703 /* Function descriptors are not valid except for as
9704 initialization constants, and should not be expanded. */
9705 gcc_unreachable ();
9706
9707 case WITH_SIZE_EXPR:
9708 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9709 have pulled out the size to use in whatever context it needed. */
9710 return expand_expr_real (treeop0, original_target, tmode,
9711 modifier, alt_rtl);
9712
9713 case COMPOUND_LITERAL_EXPR:
9714 {
9715 /* Initialize the anonymous variable declared in the compound
9716 literal, then return the variable. */
9717 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9718
9719 /* Create RTL for this variable. */
9720 if (!DECL_RTL_SET_P (decl))
9721 {
9722 if (DECL_HARD_REGISTER (decl))
9723 /* The user specified an assembler name for this variable.
9724 Set that up now. */
9725 rest_of_decl_compilation (decl, 0, 0);
9726 else
9727 expand_decl (decl);
9728 }
9729
9730 return expand_expr_real (decl, original_target, tmode,
9731 modifier, alt_rtl);
9732 }
9733
9734 default:
9735 return expand_expr_real_2 (&ops, target, tmode, modifier);
9736 }
9737 }
9738 \f
9739 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9740 signedness of TYPE), possibly returning the result in TARGET. */
9741 static rtx
9742 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9743 {
9744 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9745 if (target && GET_MODE (target) != GET_MODE (exp))
9746 target = 0;
9747 /* For constant values, reduce using build_int_cst_type. */
9748 if (CONST_INT_P (exp))
9749 {
9750 HOST_WIDE_INT value = INTVAL (exp);
9751 tree t = build_int_cst_type (type, value);
9752 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9753 }
9754 else if (TYPE_UNSIGNED (type))
9755 {
9756 rtx mask = immed_double_int_const (double_int_mask (prec),
9757 GET_MODE (exp));
9758 return expand_and (GET_MODE (exp), exp, mask, target);
9759 }
9760 else
9761 {
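/* Sign-extend from bit PREC - 1: shift the value left so the field's
   sign bit becomes the mode's sign bit, then arithmetic-shift it back. */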
9762 int count = GET_MODE_BITSIZE (GET_MODE (exp)) - prec;
9763 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
9764 exp, count, target, 0);
9765 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
9766 exp, count, target, 0);
9767 }
9768 }
9769 \f
9770 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9771 when applied to the address of EXP produces an address known to be
9772 aligned more than BIGGEST_ALIGNMENT. */
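/* That is, OFFSET has the form (- (T) &EXP) & (ALIGN - 1) for some
   power-of-two ALIGN, so that &EXP + OFFSET rounds &EXP up to an ALIGN
   boundary. */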
9773
9774 static int
9775 is_aligning_offset (const_tree offset, const_tree exp)
9776 {
9777 /* Strip off any conversions. */
9778 while (CONVERT_EXPR_P (offset))
9779 offset = TREE_OPERAND (offset, 0);
9780
9781 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9782 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9783 if (TREE_CODE (offset) != BIT_AND_EXPR
9784 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9785 || compare_tree_int (TREE_OPERAND (offset, 1),
9786 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9787 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9788 return 0;
9789
9790 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9791 It must be NEGATE_EXPR. Then strip any more conversions. */
9792 offset = TREE_OPERAND (offset, 0);
9793 while (CONVERT_EXPR_P (offset))
9794 offset = TREE_OPERAND (offset, 0);
9795
9796 if (TREE_CODE (offset) != NEGATE_EXPR)
9797 return 0;
9798
9799 offset = TREE_OPERAND (offset, 0);
9800 while (CONVERT_EXPR_P (offset))
9801 offset = TREE_OPERAND (offset, 0);
9802
9803 /* This must now be the address of EXP. */
9804 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9805 }
9806 \f
9807 /* Return the tree node if an ARG corresponds to a string constant or zero
9808 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9809 in bytes within the string that ARG is accessing. The type of the
9810 offset will be `sizetype'. */
9811
9812 tree
9813 string_constant (tree arg, tree *ptr_offset)
9814 {
9815 tree array, offset, lower_bound;
9816 STRIP_NOPS (arg);
9817
9818 if (TREE_CODE (arg) == ADDR_EXPR)
9819 {
9820 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9821 {
9822 *ptr_offset = size_zero_node;
9823 return TREE_OPERAND (arg, 0);
9824 }
9825 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9826 {
9827 array = TREE_OPERAND (arg, 0);
9828 offset = size_zero_node;
9829 }
9830 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9831 {
9832 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9833 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9834 if (TREE_CODE (array) != STRING_CST
9835 && TREE_CODE (array) != VAR_DECL)
9836 return 0;
9837
9838 /* Check if the array has a nonzero lower bound. */
9839 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9840 if (!integer_zerop (lower_bound))
9841 {
9842 /* If the offset and base aren't both constants, return 0. */
9843 if (TREE_CODE (lower_bound) != INTEGER_CST)
9844 return 0;
9845 if (TREE_CODE (offset) != INTEGER_CST)
9846 return 0;
9847 /* Adjust offset by the lower bound. */
9848 offset = size_diffop (fold_convert (sizetype, offset),
9849 fold_convert (sizetype, lower_bound));
9850 }
9851 }
9852 else
9853 return 0;
9854 }
9855 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9856 {
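/* The argument is a sum; the address of the string or variable may
   appear as either operand, with the other operand being the offset. */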
9857 tree arg0 = TREE_OPERAND (arg, 0);
9858 tree arg1 = TREE_OPERAND (arg, 1);
9859
9860 STRIP_NOPS (arg0);
9861 STRIP_NOPS (arg1);
9862
9863 if (TREE_CODE (arg0) == ADDR_EXPR
9864 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9865 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9866 {
9867 array = TREE_OPERAND (arg0, 0);
9868 offset = arg1;
9869 }
9870 else if (TREE_CODE (arg1) == ADDR_EXPR
9871 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9872 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9873 {
9874 array = TREE_OPERAND (arg1, 0);
9875 offset = arg0;
9876 }
9877 else
9878 return 0;
9879 }
9880 else
9881 return 0;
9882
9883 if (TREE_CODE (array) == STRING_CST)
9884 {
9885 *ptr_offset = fold_convert (sizetype, offset);
9886 return array;
9887 }
9888 else if (TREE_CODE (array) == VAR_DECL
9889 || TREE_CODE (array) == CONST_DECL)
9890 {
9891 int length;
9892
9893 /* Variables initialized to string literals can be handled too. */
9894 if (!const_value_known_p (array)
9895 || !DECL_INITIAL (array)
9896 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9897 return 0;
9898
9899 /* Avoid const char foo[4] = "abcde"; */
9900 if (DECL_SIZE_UNIT (array) == NULL_TREE
9901 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9902 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9903 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9904 return 0;
9905
9906 /* If variable is bigger than the string literal, OFFSET must be constant
9907 and inside of the bounds of the string literal. */
9908 offset = fold_convert (sizetype, offset);
9909 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9910 && (! host_integerp (offset, 1)
9911 || compare_tree_int (offset, length) >= 0))
9912 return 0;
9913
9914 *ptr_offset = offset;
9915 return DECL_INITIAL (array);
9916 }
9917
9918 return 0;
9919 }
9920 \f
9921 /* Generate code to calculate OPS, an exploded expression,
9922 using a store-flag instruction and return an rtx for the result.
9923 OPS reflects a comparison.
9924
9925 If TARGET is nonzero, store the result there if convenient.
9926
9927 Return zero if there is no suitable set-flag instruction
9928 available on this machine.
9929
9930 Once expand_expr has been called on the arguments of the comparison,
9931 we are committed to doing the store flag, since it is not safe to
9932 re-evaluate the expression. We emit the store-flag insn by calling
9933 emit_store_flag, but only expand the arguments if we have a reason
9934 to believe that emit_store_flag will be successful. If we think that
9935 it will, but it isn't, we have to simulate the store-flag with a
9936 set/jump/set sequence. */
9937
9938 static rtx
9939 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
9940 {
9941 enum rtx_code code;
9942 tree arg0, arg1, type;
9943 tree tem;
9944 enum machine_mode operand_mode;
9945 int unsignedp;
9946 rtx op0, op1;
9947 rtx subtarget = target;
9948 location_t loc = ops->location;
9949
9950 arg0 = ops->op0;
9951 arg1 = ops->op1;
9952
9953 /* Don't crash if the comparison was erroneous. */
9954 if (arg0 == error_mark_node || arg1 == error_mark_node)
9955 return const0_rtx;
9956
9957 type = TREE_TYPE (arg0);
9958 operand_mode = TYPE_MODE (type);
9959 unsignedp = TYPE_UNSIGNED (type);
9960
9961 /* We won't bother with BLKmode store-flag operations because it would mean
9962 passing a lot of information to emit_store_flag. */
9963 if (operand_mode == BLKmode)
9964 return 0;
9965
9966 /* We won't bother with store-flag operations involving function pointers
9967 when function pointers must be canonicalized before comparisons. */
9968 #ifdef HAVE_canonicalize_funcptr_for_compare
9969 if (HAVE_canonicalize_funcptr_for_compare
9970 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
9971 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
9972 == FUNCTION_TYPE))
9973 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
9974 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
9975 == FUNCTION_TYPE))))
9976 return 0;
9977 #endif
9978
9979 STRIP_NOPS (arg0);
9980 STRIP_NOPS (arg1);
9981
9982 /* Get the rtx comparison code to use. We know that EXP is a comparison
9983 operation of some type. Some comparisons against 1 and -1 can be
9984 converted to comparisons with zero. Do so here so that the tests
9985 below will be aware that we have a comparison with zero. These
9986 tests will not catch constants in the first operand, but constants
9987 are rarely passed as the first operand. */
9988
9989 switch (ops->code)
9990 {
9991 case EQ_EXPR:
9992 code = EQ;
9993 break;
9994 case NE_EXPR:
9995 code = NE;
9996 break;
9997 case LT_EXPR:
9998 if (integer_onep (arg1))
9999 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10000 else
10001 code = unsignedp ? LTU : LT;
10002 break;
10003 case LE_EXPR:
10004 if (! unsignedp && integer_all_onesp (arg1))
10005 arg1 = integer_zero_node, code = LT;
10006 else
10007 code = unsignedp ? LEU : LE;
10008 break;
10009 case GT_EXPR:
10010 if (! unsignedp && integer_all_onesp (arg1))
10011 arg1 = integer_zero_node, code = GE;
10012 else
10013 code = unsignedp ? GTU : GT;
10014 break;
10015 case GE_EXPR:
10016 if (integer_onep (arg1))
10017 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10018 else
10019 code = unsignedp ? GEU : GE;
10020 break;
10021
10022 case UNORDERED_EXPR:
10023 code = UNORDERED;
10024 break;
10025 case ORDERED_EXPR:
10026 code = ORDERED;
10027 break;
10028 case UNLT_EXPR:
10029 code = UNLT;
10030 break;
10031 case UNLE_EXPR:
10032 code = UNLE;
10033 break;
10034 case UNGT_EXPR:
10035 code = UNGT;
10036 break;
10037 case UNGE_EXPR:
10038 code = UNGE;
10039 break;
10040 case UNEQ_EXPR:
10041 code = UNEQ;
10042 break;
10043 case LTGT_EXPR:
10044 code = LTGT;
10045 break;
10046
10047 default:
10048 gcc_unreachable ();
10049 }
10050
10051 /* Put a constant second. */
10052 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10053 || TREE_CODE (arg0) == FIXED_CST)
10054 {
10055 tem = arg0; arg0 = arg1; arg1 = tem;
10056 code = swap_condition (code);
10057 }
10058
10059 /* If this is an equality or inequality test of a single bit, we can
10060 do this by shifting the bit being tested to the low-order bit and
10061 masking the result with the constant 1. If the condition was EQ,
10062 we xor it with 1. This does not require an scc insn and is faster
10063 than an scc insn even if we have it.
10064
10065 The code to make this transformation was moved into fold_single_bit_test,
10066 so we just call into the folder and expand its result. */
10067
10068 if ((code == NE || code == EQ)
10069 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10070 && integer_pow2p (TREE_OPERAND (arg0, 1))
10071 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10072 {
10073 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10074 return expand_expr (fold_single_bit_test (loc,
10075 code == NE ? NE_EXPR : EQ_EXPR,
10076 arg0, arg1, type),
10077 target, VOIDmode, EXPAND_NORMAL);
10078 }
10079
10080 if (! get_subtarget (target)
10081 || GET_MODE (subtarget) != operand_mode)
10082 subtarget = 0;
10083
10084 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10085
10086 if (target == 0)
10087 target = gen_reg_rtx (mode);
10088
10089 /* Try a cstore if possible. */
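/* A signed one-bit result type represents true as -1, so ask for that
   normalization; otherwise normalize to 1. */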
10090 return emit_store_flag_force (target, code, op0, op1,
10091 operand_mode, unsignedp,
10092 (TYPE_PRECISION (ops->type) == 1
10093 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10094 }
10095 \f
10096
10097 /* Stubs in case we haven't got a casesi insn. */
10098 #ifndef HAVE_casesi
10099 # define HAVE_casesi 0
10100 # define gen_casesi(a, b, c, d, e) (0)
10101 # define CODE_FOR_casesi CODE_FOR_nothing
10102 #endif
10103
10104 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10105 0 otherwise (i.e. if there is no casesi instruction). */
10106 int
10107 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10108 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10109 rtx fallback_label ATTRIBUTE_UNUSED)
10110 {
10111 struct expand_operand ops[5];
10112 enum machine_mode index_mode = SImode;
10113 int index_bits = GET_MODE_BITSIZE (index_mode);
10114 rtx op1, op2, index;
10115
10116 if (! HAVE_casesi)
10117 return 0;
10118
10119 /* Convert the index to SImode. */
10120 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10121 {
10122 enum machine_mode omode = TYPE_MODE (index_type);
10123 rtx rangertx = expand_normal (range);
10124
10125 /* We must handle the endpoints in the original mode. */
10126 index_expr = build2 (MINUS_EXPR, index_type,
10127 index_expr, minval);
10128 minval = integer_zero_node;
10129 index = expand_normal (index_expr);
10130 if (default_label)
10131 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10132 omode, 1, default_label);
10133 /* Now we can safely truncate. */
10134 index = convert_to_mode (index_mode, index, 0);
10135 }
10136 else
10137 {
10138 if (TYPE_MODE (index_type) != index_mode)
10139 {
10140 index_type = lang_hooks.types.type_for_size (index_bits, 0);
10141 index_expr = fold_convert (index_type, index_expr);
10142 }
10143
10144 index = expand_normal (index_expr);
10145 }
10146
10147 do_pending_stack_adjust ();
10148
10149 op1 = expand_normal (minval);
10150 op2 = expand_normal (range);
10151
10152 create_input_operand (&ops[0], index, index_mode);
10153 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
10154 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
10155 create_fixed_operand (&ops[3], table_label);
10156 create_fixed_operand (&ops[4], (default_label
10157 ? default_label
10158 : fallback_label));
10159 expand_jump_insn (CODE_FOR_casesi, 5, ops);
10160 return 1;
10161 }
10162
10163 /* Attempt to generate a tablejump instruction; same concept. */
10164 #ifndef HAVE_tablejump
10165 #define HAVE_tablejump 0
10166 #define gen_tablejump(x, y) (0)
10167 #endif
10168
10169 /* Subroutine of the next function.
10170
10171 INDEX is the value being switched on, with the lowest value
10172 in the table already subtracted.
10173 MODE is its expected mode (needed if INDEX is constant).
10174 RANGE is the length of the jump table.
10175 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10176
10177 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10178 index value is out of range. */
10179
10180 static void
10181 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10182 rtx default_label)
10183 {
10184 rtx temp, vector;
10185
10186 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10187 cfun->cfg->max_jumptable_ents = INTVAL (range);
10188
10189 /* Do an unsigned comparison (in the proper mode) between the index
10190 expression and the value which represents the length of the range.
10191 Since we just finished subtracting the lower bound of the range
10192 from the index expression, this comparison allows us to simultaneously
10193 check that the original index expression value is both greater than
10194 or equal to the minimum value of the range and less than or equal to
10195 the maximum value of the range. */
10196
10197 if (default_label)
10198 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10199 default_label);
10200
10201 /* If index is in range, it must fit in Pmode.
10202 Convert to Pmode so we can index with it. */
10203 if (mode != Pmode)
10204 index = convert_to_mode (Pmode, index, 1);
10205
10206 /* Don't let a MEM slip through, because then INDEX that comes
10207 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10208 and break_out_memory_refs will go to work on it and mess it up. */
10209 #ifdef PIC_CASE_VECTOR_ADDRESS
10210 if (flag_pic && !REG_P (index))
10211 index = copy_to_mode_reg (Pmode, index);
10212 #endif
10213
10214 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10215 GET_MODE_SIZE, because this indicates how large insns are. The other
10216 uses should all be Pmode, because they are addresses. This code
10217 could fail if addresses and insns are not the same size. */
10218 index = gen_rtx_PLUS (Pmode,
10219 gen_rtx_MULT (Pmode, index,
10220 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10221 gen_rtx_LABEL_REF (Pmode, table_label));
10222 #ifdef PIC_CASE_VECTOR_ADDRESS
10223 if (flag_pic)
10224 index = PIC_CASE_VECTOR_ADDRESS (index);
10225 else
10226 #endif
10227 index = memory_address (CASE_VECTOR_MODE, index);
10228 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10229 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10230 convert_move (temp, vector, 0);
10231
10232 emit_jump_insn (gen_tablejump (temp, table_label));
10233
10234 /* If we are generating PIC code or if the table is PC-relative, the
10235 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10236 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10237 emit_barrier ();
10238 }
10239
10240 int
10241 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10242 rtx table_label, rtx default_label)
10243 {
10244 rtx index;
10245
10246 if (! HAVE_tablejump)
10247 return 0;
10248
10249 index_expr = fold_build2 (MINUS_EXPR, index_type,
10250 fold_convert (index_type, index_expr),
10251 fold_convert (index_type, minval));
10252 index = expand_normal (index_expr);
10253 do_pending_stack_adjust ();
10254
10255 do_tablejump (index, TYPE_MODE (index_type),
10256 convert_modes (TYPE_MODE (index_type),
10257 TYPE_MODE (TREE_TYPE (range)),
10258 expand_normal (range),
10259 TYPE_UNSIGNED (TREE_TYPE (range))),
10260 table_label, default_label);
10261 return 1;
10262 }
10263
10264 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10265 static rtx
10266 const_vector_from_tree (tree exp)
10267 {
10268 rtvec v;
10269 int units, i;
10270 tree link, elt;
10271 enum machine_mode inner, mode;
10272
10273 mode = TYPE_MODE (TREE_TYPE (exp));
10274
10275 if (initializer_zerop (exp))
10276 return CONST0_RTX (mode);
10277
10278 units = GET_MODE_NUNITS (mode);
10279 inner = GET_MODE_INNER (mode);
10280
10281 v = rtvec_alloc (units);
10282
10283 link = TREE_VECTOR_CST_ELTS (exp);
10284 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10285 {
10286 elt = TREE_VALUE (link);
10287
10288 if (TREE_CODE (elt) == REAL_CST)
10289 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10290 inner);
10291 else if (TREE_CODE (elt) == FIXED_CST)
10292 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10293 inner);
10294 else
10295 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
10296 inner);
10297 }
10298
10299 /* Initialize remaining elements to 0. */
10300 for (; i < units; ++i)
10301 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10302
10303 return gen_rtx_CONST_VECTOR (mode, v);
10304 }
10305
10306 /* Build a decl for a personality function given a language prefix. */
10307
10308 tree
10309 build_personality_function (const char *lang)
10310 {
10311 const char *unwind_and_version;
10312 tree decl, type;
10313 char *name;
10314
10315 switch (targetm_common.except_unwind_info (&global_options))
10316 {
10317 case UI_NONE:
10318 return NULL;
10319 case UI_SJLJ:
10320 unwind_and_version = "_sj0";
10321 break;
10322 case UI_DWARF2:
10323 case UI_TARGET:
10324 unwind_and_version = "_v0";
10325 break;
10326 default:
10327 gcc_unreachable ();
10328 }
10329
10330 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
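/* E.g. LANG "gxx" with DWARF-2 unwinding yields "__gxx_personality_v0". */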
10331
10332 type = build_function_type_list (integer_type_node, integer_type_node,
10333 long_long_unsigned_type_node,
10334 ptr_type_node, ptr_type_node, NULL_TREE);
10335 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10336 get_identifier (name), type);
10337 DECL_ARTIFICIAL (decl) = 1;
10338 DECL_EXTERNAL (decl) = 1;
10339 TREE_PUBLIC (decl) = 1;
10340
10341 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
10342 are the flags assigned by targetm.encode_section_info. */
10343 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
10344
10345 return decl;
10346 }
10347
10348 /* Extracts the personality function of DECL and returns the corresponding
10349 libfunc. */
10350
10351 rtx
10352 get_personality_function (tree decl)
10353 {
10354 tree personality = DECL_FUNCTION_PERSONALITY (decl);
10355 enum eh_personality_kind pk;
10356
10357 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10358 if (pk == eh_personality_none)
10359 return NULL;
10360
10361 if (!personality
10362 && pk == eh_personality_any)
10363 personality = lang_hooks.eh_personality ();
10364
10365 if (pk == eh_personality_lang)
10366 gcc_assert (personality != NULL_TREE);
10367
10368 return XEXP (DECL_RTL (personality), 0);
10369 }
10370
10371 #include "gt-expr.h"