1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "reload.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
48 #include "tree-iterator.h"
49 #include "tree-pass.h"
50 #include "tree-flow.h"
51 #include "target.h"
52 #include "common/common-target.h"
53 #include "timevar.h"
54 #include "df.h"
55 #include "diagnostic.h"
56 #include "ssaexpand.h"
57 #include "target-globals.h"
58 #include "params.h"
59
60 /* Decide whether a function's arguments should be processed
61 from first to last or from last to first.
62
63 They should if the stack and args grow in opposite directions, but
64 only if we have push insns. */
65
66 #ifdef PUSH_ROUNDING
67
68 #ifndef PUSH_ARGS_REVERSED
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
71 #endif
72 #endif
73
74 #endif
75
76 #ifndef STACK_PUSH_CODE
77 #ifdef STACK_GROWS_DOWNWARD
78 #define STACK_PUSH_CODE PRE_DEC
79 #else
80 #define STACK_PUSH_CODE PRE_INC
81 #endif
82 #endif
83
84
85 /* If this is nonzero, we do not bother generating VOLATILE
86 around volatile memory references, and we are willing to
87 output indirect addresses. If cse is to follow, we reject
88 indirect addresses so a useful potential cse is generated;
89 if it is used only once, instruction combination will produce
90 the same indirect address eventually. */
91 int cse_not_expected;
92
93 /* This structure is used by move_by_pieces to describe the move to
94 be performed. */
95 struct move_by_pieces_d
96 {
97 rtx to;
98 rtx to_addr;
99 int autinc_to;
100 int explicit_inc_to;
101 rtx from;
102 rtx from_addr;
103 int autinc_from;
104 int explicit_inc_from;
105 unsigned HOST_WIDE_INT len;
106 HOST_WIDE_INT offset;
107 int reverse;
108 };
109
110 /* This structure is used by store_by_pieces to describe the clear to
111 be performed. */
112
113 struct store_by_pieces_d
114 {
115 rtx to;
116 rtx to_addr;
117 int autinc_to;
118 int explicit_inc_to;
119 unsigned HOST_WIDE_INT len;
120 HOST_WIDE_INT offset;
121 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
122 void *constfundata;
123 int reverse;
124 };
125
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces_d *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces_d *);
137 static tree clear_storage_libcall_fn (int);
138 static rtx compress_float_constant (rtx, rtx);
139 static rtx get_subtarget (rtx);
140 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, alias_set_type);
143 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
144 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
145 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
146 enum machine_mode,
147 tree, tree, alias_set_type, bool);
148
149 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
150
151 static int is_aligning_offset (const_tree, const_tree);
152 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 enum expand_modifier);
154 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155 static rtx do_store_flag (sepops, rtx, enum machine_mode);
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn (enum machine_mode, rtx, tree);
158 #endif
159 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160 static rtx const_vector_from_tree (tree);
161 static void write_complex_part (rtx, rtx, bool);
162
163 /* This macro is used to determine whether move_by_pieces should be called
164 to perform a structure copy. */
165 #ifndef MOVE_BY_PIECES_P
166 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
167 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
168 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
169 #endif
170
171 /* This macro is used to determine whether clear_by_pieces should be
172 called to clear storage. */
173 #ifndef CLEAR_BY_PIECES_P
174 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
175 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
176 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
177 #endif
178
179 /* This macro is used to determine whether store_by_pieces should be
180 called to "memset" storage with byte values other than zero. */
181 #ifndef SET_BY_PIECES_P
182 #define SET_BY_PIECES_P(SIZE, ALIGN) \
183 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
184 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
185 #endif
186
187 /* This macro is used to determine whether store_by_pieces should be
188 called to "memcpy" storage when the source is a constant string. */
189 #ifndef STORE_BY_PIECES_P
190 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
191 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
192 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
193 #endif
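/* Each *_BY_PIECES_P default above is wrapped in #ifndef, so a target
   header can pre-empt the generic cost heuristic with its own test.
   A hypothetical override (illustrative only, not taken from any real
   target) might look like:

     #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
       ((SIZE) <= 8 && (ALIGN) >= 32)

   i.e. only copy by pieces when the block is small and well aligned.  */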
194
195 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
196
197 #ifndef SLOW_UNALIGNED_ACCESS
198 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
199 #endif
200 \f
201 /* This is run to set up which modes can be used
202 directly in memory and to initialize the block move optab. It is run
203 at the beginning of compilation and when the target is reinitialized. */
204
205 void
206 init_expr_target (void)
207 {
208 rtx insn, pat;
209 enum machine_mode mode;
210 int num_clobbers;
211 rtx mem, mem1;
212 rtx reg;
213
214 /* Try indexing by frame ptr and try by stack ptr.
215 It is known that on the Convex the stack ptr isn't a valid index.
216 With luck, one or the other is valid on any machine. */
217 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
218 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
219
220 /* A scratch register we can modify in-place below to avoid
221 useless RTL allocations. */
222 reg = gen_rtx_REG (VOIDmode, -1);
223
224 insn = rtx_alloc (INSN);
225 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
226 PATTERN (insn) = pat;
227
228 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
229 mode = (enum machine_mode) ((int) mode + 1))
230 {
231 int regno;
232
233 direct_load[(int) mode] = direct_store[(int) mode] = 0;
234 PUT_MODE (mem, mode);
235 PUT_MODE (mem1, mode);
236 PUT_MODE (reg, mode);
237
238 /* See if there is some register that can be used in this mode and
239 directly loaded or stored from memory. */
240
241 if (mode != VOIDmode && mode != BLKmode)
242 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
243 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
244 regno++)
245 {
246 if (! HARD_REGNO_MODE_OK (regno, mode))
247 continue;
248
249 SET_REGNO (reg, regno);
250
251 SET_SRC (pat) = mem;
252 SET_DEST (pat) = reg;
253 if (recog (pat, insn, &num_clobbers) >= 0)
254 direct_load[(int) mode] = 1;
255
256 SET_SRC (pat) = mem1;
257 SET_DEST (pat) = reg;
258 if (recog (pat, insn, &num_clobbers) >= 0)
259 direct_load[(int) mode] = 1;
260
261 SET_SRC (pat) = reg;
262 SET_DEST (pat) = mem;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_store[(int) mode] = 1;
265
266 SET_SRC (pat) = reg;
267 SET_DEST (pat) = mem1;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_store[(int) mode] = 1;
270 }
271 }
272
273 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
274
275 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
276 mode = GET_MODE_WIDER_MODE (mode))
277 {
278 enum machine_mode srcmode;
279 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
280 srcmode = GET_MODE_WIDER_MODE (srcmode))
281 {
282 enum insn_code ic;
283
284 ic = can_extend_p (mode, srcmode, 0);
285 if (ic == CODE_FOR_nothing)
286 continue;
287
288 PUT_MODE (mem, srcmode);
289
290 if (insn_operand_matches (ic, 1, mem))
291 float_extend_from_mem[mode][srcmode] = true;
292 }
293 }
294 }
295
296 /* This is run at the start of compiling a function. */
297
298 void
299 init_expr (void)
300 {
301 memset (&crtl->expr, 0, sizeof (crtl->expr));
302 }
303 \f
304 /* Copy data from FROM to TO, where the machine modes are not the same.
305 Both modes may be integer, or both may be floating, or both may be
306 fixed-point.
307 UNSIGNEDP should be nonzero if FROM is an unsigned type.
308 This causes zero-extension instead of sign-extension. */
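/* A rough map of the cases handled below, for orientation (descriptive
   sketch only): identical modes and VOIDmode constants are plain moves;
   same-size vector conversions go through a subreg; CONCAT pairs are
   converted part by part; scalar float conversions use a conversion
   optab or a libcall; MODE_PARTIAL_INT goes via the smallest full
   integer mode; fixed-point conversions use expand_fixed_convert; the
   remaining integer cases use a direct extend or truncate insn where
   one exists, an intermediate mode, an explicit word-by-word copy, a
   shift pair, or a plain lowpart move as a last resort.  */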
309
310 void
311 convert_move (rtx to, rtx from, int unsignedp)
312 {
313 enum machine_mode to_mode = GET_MODE (to);
314 enum machine_mode from_mode = GET_MODE (from);
315 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
316 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
317 enum insn_code code;
318 rtx libcall;
319
320 /* rtx code for making an equivalent value. */
321 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
322 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
323
324
325 gcc_assert (to_real == from_real);
326 gcc_assert (to_mode != BLKmode);
327 gcc_assert (from_mode != BLKmode);
328
329 /* If the source and destination are already the same, then there's
330 nothing to do. */
331 if (to == from)
332 return;
333
334 /* If FROM is a SUBREG that indicates that we have already done at least
335 the required extension, strip it. We don't handle such SUBREGs as
336 TO here. */
337
338 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
339 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
340 >= GET_MODE_PRECISION (to_mode))
341 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
342 from = gen_lowpart (to_mode, from), from_mode = to_mode;
343
344 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
345
346 if (to_mode == from_mode
347 || (from_mode == VOIDmode && CONSTANT_P (from)))
348 {
349 emit_move_insn (to, from);
350 return;
351 }
352
353 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
354 {
355 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
356
357 if (VECTOR_MODE_P (to_mode))
358 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
359 else
360 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
361
362 emit_move_insn (to, from);
363 return;
364 }
365
366 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
367 {
368 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
369 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
370 return;
371 }
372
373 if (to_real)
374 {
375 rtx value, insns;
376 convert_optab tab;
377
378 gcc_assert ((GET_MODE_PRECISION (from_mode)
379 != GET_MODE_PRECISION (to_mode))
380 || (DECIMAL_FLOAT_MODE_P (from_mode)
381 != DECIMAL_FLOAT_MODE_P (to_mode)));
382
383 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
384 /* Conversion between decimal float and binary float, same size. */
385 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
386 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
387 tab = sext_optab;
388 else
389 tab = trunc_optab;
390
391 /* Try converting directly if the insn is supported. */
392
393 code = convert_optab_handler (tab, to_mode, from_mode);
394 if (code != CODE_FOR_nothing)
395 {
396 emit_unop_insn (code, to, from,
397 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
398 return;
399 }
400
401 /* Otherwise use a libcall. */
402 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
403
404 /* Is this conversion implemented yet? */
405 gcc_assert (libcall);
406
407 start_sequence ();
408 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
409 1, from, from_mode);
410 insns = get_insns ();
411 end_sequence ();
412 emit_libcall_block (insns, to, value,
413 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
414 from)
415 : gen_rtx_FLOAT_EXTEND (to_mode, from));
416 return;
417 }
418
419 /* Handle pointer conversion. */ /* SPEE 900220. */
420 /* Targets are expected to provide conversion insns between PxImode and
421 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
422 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
423 {
424 enum machine_mode full_mode
425 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
426
427 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
428 != CODE_FOR_nothing);
429
430 if (full_mode != from_mode)
431 from = convert_to_mode (full_mode, from, unsignedp);
432 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
433 to, from, UNKNOWN);
434 return;
435 }
436 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
437 {
438 rtx new_from;
439 enum machine_mode full_mode
440 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
441 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
442 enum insn_code icode;
443
444 icode = convert_optab_handler (ctab, full_mode, from_mode);
445 gcc_assert (icode != CODE_FOR_nothing);
446
447 if (to_mode == full_mode)
448 {
449 emit_unop_insn (icode, to, from, UNKNOWN);
450 return;
451 }
452
453 new_from = gen_reg_rtx (full_mode);
454 emit_unop_insn (icode, new_from, from, UNKNOWN);
455
456 /* else proceed to integer conversions below. */
457 from_mode = full_mode;
458 from = new_from;
459 }
460
461 /* Make sure both are fixed-point modes or both are not. */
462 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
463 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
464 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
465 {
466 /* If we widen from_mode to to_mode and they are in the same class,
467 we won't saturate the result.
468 Otherwise, always saturate the result to play safe. */
469 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
470 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
471 expand_fixed_convert (to, from, 0, 0);
472 else
473 expand_fixed_convert (to, from, 0, 1);
474 return;
475 }
476
477 /* Now both modes are integers. */
478
479 /* Handle expanding beyond a word. */
480 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
481 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
482 {
483 rtx insns;
484 rtx lowpart;
485 rtx fill_value;
486 rtx lowfrom;
487 int i;
488 enum machine_mode lowpart_mode;
489 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
490
491 /* Try converting directly if the insn is supported. */
492 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
493 != CODE_FOR_nothing)
494 {
495 /* If FROM is a SUBREG, put it into a register. Do this
496 so that we always generate the same set of insns for
497 better cse'ing; if an intermediate assignment occurred,
498 we won't be doing the operation directly on the SUBREG. */
499 if (optimize > 0 && GET_CODE (from) == SUBREG)
500 from = force_reg (from_mode, from);
501 emit_unop_insn (code, to, from, equiv_code);
502 return;
503 }
504 /* Next, try converting via full word. */
505 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
506 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
507 != CODE_FOR_nothing))
508 {
509 rtx word_to = gen_reg_rtx (word_mode);
510 if (REG_P (to))
511 {
512 if (reg_overlap_mentioned_p (to, from))
513 from = force_reg (from_mode, from);
514 emit_clobber (to);
515 }
516 convert_move (word_to, from, unsignedp);
517 emit_unop_insn (code, to, word_to, equiv_code);
518 return;
519 }
520
521 /* No special multiword conversion insn; do it by hand. */
522 start_sequence ();
523
524 /* Since we will turn this into a no conflict block, we must ensure
525 that the source does not overlap the target. */
526
527 if (reg_overlap_mentioned_p (to, from))
528 from = force_reg (from_mode, from);
529
530 /* Get a copy of FROM widened to a word, if necessary. */
531 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
532 lowpart_mode = word_mode;
533 else
534 lowpart_mode = from_mode;
535
536 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
537
538 lowpart = gen_lowpart (lowpart_mode, to);
539 emit_move_insn (lowpart, lowfrom);
540
541 /* Compute the value to put in each remaining word. */
542 if (unsignedp)
543 fill_value = const0_rtx;
544 else
545 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
546 LT, lowfrom, const0_rtx,
547 VOIDmode, 0, -1);
548
549 /* Fill the remaining words. */
550 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
551 {
552 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
553 rtx subword = operand_subword (to, index, 1, to_mode);
554
555 gcc_assert (subword);
556
557 if (fill_value != subword)
558 emit_move_insn (subword, fill_value);
559 }
560
561 insns = get_insns ();
562 end_sequence ();
563
564 emit_insn (insns);
565 return;
566 }
567
568 /* Truncating multi-word to a word or less. */
569 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
570 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
571 {
572 if (!((MEM_P (from)
573 && ! MEM_VOLATILE_P (from)
574 && direct_load[(int) to_mode]
575 && ! mode_dependent_address_p (XEXP (from, 0)))
576 || REG_P (from)
577 || GET_CODE (from) == SUBREG))
578 from = force_reg (from_mode, from);
579 convert_move (to, gen_lowpart (word_mode, from), 0);
580 return;
581 }
582
583 /* Now follow all the conversions between integers
584 no more than a word long. */
585
586 /* For truncation, usually we can just refer to FROM in a narrower mode. */
587 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
588 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
589 {
590 if (!((MEM_P (from)
591 && ! MEM_VOLATILE_P (from)
592 && direct_load[(int) to_mode]
593 && ! mode_dependent_address_p (XEXP (from, 0)))
594 || REG_P (from)
595 || GET_CODE (from) == SUBREG))
596 from = force_reg (from_mode, from);
597 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
598 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
599 from = copy_to_reg (from);
600 emit_move_insn (to, gen_lowpart (to_mode, from));
601 return;
602 }
603
604 /* Handle extension. */
605 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
606 {
607 /* Convert directly if that works. */
608 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
609 != CODE_FOR_nothing)
610 {
611 emit_unop_insn (code, to, from, equiv_code);
612 return;
613 }
614 else
615 {
616 enum machine_mode intermediate;
617 rtx tmp;
618 int shift_amount;
619
620 /* Search for a mode to convert via. */
621 for (intermediate = from_mode; intermediate != VOIDmode;
622 intermediate = GET_MODE_WIDER_MODE (intermediate))
623 if (((can_extend_p (to_mode, intermediate, unsignedp)
624 != CODE_FOR_nothing)
625 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
626 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
627 && (can_extend_p (intermediate, from_mode, unsignedp)
628 != CODE_FOR_nothing))
629 {
630 convert_move (to, convert_to_mode (intermediate, from,
631 unsignedp), unsignedp);
632 return;
633 }
634
635 /* No suitable intermediate mode.
636 Generate what we need with shifts. */
637 shift_amount = (GET_MODE_PRECISION (to_mode)
638 - GET_MODE_PRECISION (from_mode));
639 from = gen_lowpart (to_mode, force_reg (from_mode, from));
640 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
641 to, unsignedp);
642 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
643 to, unsignedp);
644 if (tmp != to)
645 emit_move_insn (to, tmp);
646 return;
647 }
648 }
649
650 /* Support special truncate insns for certain modes. */
651 if (convert_optab_handler (trunc_optab, to_mode,
652 from_mode) != CODE_FOR_nothing)
653 {
654 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
655 to, from, UNKNOWN);
656 return;
657 }
658
659 /* Handle truncation of volatile memrefs, and so on;
660 the things that couldn't be truncated directly,
661 and for which there was no special instruction.
662
663 ??? Code above formerly short-circuited this, for most integer
664 mode pairs, with a force_reg in from_mode followed by a recursive
665 call to this routine. Appears always to have been wrong. */
666 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
667 {
668 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
669 emit_move_insn (to, temp);
670 return;
671 }
672
673 /* Mode combination is not recognized. */
674 gcc_unreachable ();
675 }
676
677 /* Return an rtx for a value that would result
678 from converting X to mode MODE.
679 Both X and MODE may be floating, or both integer.
680 UNSIGNEDP is nonzero if X is an unsigned value.
681 This can be done by referring to a part of X in place
682 or by copying to a new temporary with conversion. */
683
684 rtx
685 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
686 {
687 return convert_modes (mode, VOIDmode, x, unsignedp);
688 }
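/* For example (illustrative only, REG_QI is a hypothetical QImode
   register rtx): zero-extending it to SImode would be written

     rtx wide = convert_to_mode (SImode, reg_qi, 1);

   which simply forwards to convert_modes with VOIDmode for OLDMODE,
   letting the mode of REG_QI speak for itself.  */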
689
690 /* Return an rtx for a value that would result
691 from converting X from mode OLDMODE to mode MODE.
692 Both modes may be floating, or both integer.
693 UNSIGNEDP is nonzero if X is an unsigned value.
694
695 This can be done by referring to a part of X in place
696 or by copying to a new temporary with conversion.
697
698 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
699
700 rtx
701 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
702 {
703 rtx temp;
704
705 /* If FROM is a SUBREG that indicates that we have already done at least
706 the required extension, strip it. */
707
708 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
709 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
710 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
711 x = gen_lowpart (mode, x);
712
713 if (GET_MODE (x) != VOIDmode)
714 oldmode = GET_MODE (x);
715
716 if (mode == oldmode)
717 return x;
718
719 /* There is one case that we must handle specially: If we are converting
720 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
721 we are to interpret the constant as unsigned, gen_lowpart will do
722 the wrong thing if the constant appears negative. What we want to do is
723 make the high-order word of the constant zero, not all ones. */
724
725 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
726 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
727 && CONST_INT_P (x) && INTVAL (x) < 0)
728 {
729 double_int val = uhwi_to_double_int (INTVAL (x));
730
731 /* We need to zero extend VAL. */
732 if (oldmode != VOIDmode)
733 val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));
734
735 return immed_double_int_const (val, mode);
736 }
737
738 /* We can do this with a gen_lowpart if both desired and current modes
739 are integer, and this is either a constant integer, a register, or a
740 non-volatile MEM. Except for the constant case where MODE is no
741 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
742
743 if ((CONST_INT_P (x)
744 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
745 || (GET_MODE_CLASS (mode) == MODE_INT
746 && GET_MODE_CLASS (oldmode) == MODE_INT
747 && (GET_CODE (x) == CONST_DOUBLE
748 || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
749 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
750 && direct_load[(int) mode])
751 || (REG_P (x)
752 && (! HARD_REGISTER_P (x)
753 || HARD_REGNO_MODE_OK (REGNO (x), mode))
754 && TRULY_NOOP_TRUNCATION_MODES_P (mode,
755 GET_MODE (x))))))))
756 {
757 /* ?? If we don't know OLDMODE, we have to assume here that
758 X does not need sign- or zero-extension. This may not be
759 the case, but it's the best we can do. */
760 if (CONST_INT_P (x) && oldmode != VOIDmode
761 && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
762 {
763 HOST_WIDE_INT val = INTVAL (x);
764
765 /* We must sign or zero-extend in this case. Start by
766 zero-extending, then sign extend if we need to. */
767 val &= GET_MODE_MASK (oldmode);
768 if (! unsignedp
769 && val_signbit_known_set_p (oldmode, val))
770 val |= ~GET_MODE_MASK (oldmode);
771
772 return gen_int_mode (val, mode);
773 }
774
775 return gen_lowpart (mode, x);
776 }
777
 778 /* Converting an integer constant into a vector mode is always equivalent
 779 to a subreg operation. */
780 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
781 {
782 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
783 return simplify_gen_subreg (mode, x, oldmode, 0);
784 }
785
786 temp = gen_reg_rtx (mode);
787 convert_move (temp, x, unsignedp);
788 return temp;
789 }
790 \f
791 /* Return the largest alignment we can use for doing a move (or store)
792 of MAX_PIECES. ALIGN is the largest alignment we could use. */
793
794 static unsigned int
795 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
796 {
797 enum machine_mode tmode;
798
799 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
800 if (align >= GET_MODE_ALIGNMENT (tmode))
801 align = GET_MODE_ALIGNMENT (tmode);
802 else
803 {
804 enum machine_mode tmode, xmode;
805
806 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
807 tmode != VOIDmode;
808 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
809 if (GET_MODE_SIZE (tmode) > max_pieces
810 || SLOW_UNALIGNED_ACCESS (tmode, align))
811 break;
812
813 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
814 }
815
816 return align;
817 }
818
 819 /* Return the widest integer mode whose size in bytes is strictly less
 820 than SIZE. If no such mode can be found, return VOIDmode. */
821
822 static enum machine_mode
823 widest_int_mode_for_size (unsigned int size)
824 {
825 enum machine_mode tmode, mode = VOIDmode;
826
827 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
828 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
829 if (GET_MODE_SIZE (tmode) < size)
830 mode = tmode;
831
832 return mode;
833 }
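/* For example, with SIZE == 5 on a target providing the usual QImode,
   HImode, SImode and DImode integer modes, the widest mode whose size
   is strictly less than 5 bytes is SImode, so SImode is returned.  */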
834
835 /* STORE_MAX_PIECES is the number of bytes at a time that we can
836 store efficiently. Due to internal GCC limitations, this is
837 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
838 for an immediate constant. */
839
840 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
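/* For instance, on a host where HOST_WIDE_INT is 64 bits wide,
   2 * sizeof (HOST_WIDE_INT) is 16, so STORE_MAX_PIECES can never
   exceed 16 bytes no matter how large MOVE_MAX_PIECES is.  */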
841
842 /* Determine whether the LEN bytes can be moved by using several move
843 instructions. Return nonzero if a call to move_by_pieces should
844 succeed. */
845
846 int
847 can_move_by_pieces (unsigned HOST_WIDE_INT len,
848 unsigned int align ATTRIBUTE_UNUSED)
849 {
850 return MOVE_BY_PIECES_P (len, align);
851 }
852
853 /* Generate several move instructions to copy LEN bytes from block FROM to
854 block TO. (These are MEM rtx's with BLKmode).
855
856 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
857 used to push FROM to the stack.
858
859 ALIGN is maximum stack alignment we can assume.
860
 861 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
 862 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
 863 stpcpy. */
864
865 rtx
866 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
867 unsigned int align, int endp)
868 {
869 struct move_by_pieces_d data;
870 enum machine_mode to_addr_mode, from_addr_mode
871 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
872 rtx to_addr, from_addr = XEXP (from, 0);
873 unsigned int max_size = MOVE_MAX_PIECES + 1;
874 enum insn_code icode;
875
876 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
877
878 data.offset = 0;
879 data.from_addr = from_addr;
880 if (to)
881 {
882 to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
883 to_addr = XEXP (to, 0);
884 data.to = to;
885 data.autinc_to
886 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
887 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
888 data.reverse
889 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
890 }
891 else
892 {
893 to_addr_mode = VOIDmode;
894 to_addr = NULL_RTX;
895 data.to = NULL_RTX;
896 data.autinc_to = 1;
897 #ifdef STACK_GROWS_DOWNWARD
898 data.reverse = 1;
899 #else
900 data.reverse = 0;
901 #endif
902 }
903 data.to_addr = to_addr;
904 data.from = from;
905 data.autinc_from
906 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
907 || GET_CODE (from_addr) == POST_INC
908 || GET_CODE (from_addr) == POST_DEC);
909
910 data.explicit_inc_from = 0;
911 data.explicit_inc_to = 0;
912 if (data.reverse) data.offset = len;
913 data.len = len;
914
915 /* If copying requires more than two move insns,
916 copy addresses to registers (to make displacements shorter)
917 and use post-increment if available. */
918 if (!(data.autinc_from && data.autinc_to)
919 && move_by_pieces_ninsns (len, align, max_size) > 2)
920 {
921 /* Find the mode of the largest move...
922 MODE might not be used depending on the definitions of the
923 USE_* macros below. */
924 enum machine_mode mode ATTRIBUTE_UNUSED
925 = widest_int_mode_for_size (max_size);
926
927 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
928 {
929 data.from_addr = copy_to_mode_reg (from_addr_mode,
930 plus_constant (from_addr_mode,
931 from_addr, len));
932 data.autinc_from = 1;
933 data.explicit_inc_from = -1;
934 }
935 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
936 {
937 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
938 data.autinc_from = 1;
939 data.explicit_inc_from = 1;
940 }
941 if (!data.autinc_from && CONSTANT_P (from_addr))
942 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
943 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
944 {
945 data.to_addr = copy_to_mode_reg (to_addr_mode,
946 plus_constant (to_addr_mode,
947 to_addr, len));
948 data.autinc_to = 1;
949 data.explicit_inc_to = -1;
950 }
951 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
952 {
953 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
954 data.autinc_to = 1;
955 data.explicit_inc_to = 1;
956 }
957 if (!data.autinc_to && CONSTANT_P (to_addr))
958 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
959 }
960
961 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
962
963 /* First move what we can in the largest integer mode, then go to
964 successively smaller modes. */
965
966 while (max_size > 1)
967 {
968 enum machine_mode mode = widest_int_mode_for_size (max_size);
969
970 if (mode == VOIDmode)
971 break;
972
973 icode = optab_handler (mov_optab, mode);
974 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
975 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
976
977 max_size = GET_MODE_SIZE (mode);
978 }
979
980 /* The code above should have handled everything. */
981 gcc_assert (!data.len);
982
983 if (endp)
984 {
985 rtx to1;
986
987 gcc_assert (!data.reverse);
988 if (data.autinc_to)
989 {
990 if (endp == 2)
991 {
992 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
993 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
994 else
995 data.to_addr = copy_to_mode_reg (to_addr_mode,
996 plus_constant (to_addr_mode,
997 data.to_addr,
998 -1));
999 }
1000 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1001 data.offset);
1002 }
1003 else
1004 {
1005 if (endp == 2)
1006 --data.offset;
1007 to1 = adjust_address (data.to, QImode, data.offset);
1008 }
1009 return to1;
1010 }
1011 else
1012 return data.to;
1013 }
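/* A typical caller first applies the same cost check used by
   emit_block_move_hints below, along the lines of (sketch only, DST
   and SRC standing for two BLKmode MEMs):

     if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
       move_by_pieces (dst, src, INTVAL (size), align, 0);

   where the final 0 asks for the plain "return TO" behavior rather
   than the mempcpy- or stpcpy-style end pointers.  */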
1014
1015 /* Return number of insns required to move L bytes by pieces.
1016 ALIGN (in bits) is maximum alignment we can assume. */
1017
1018 unsigned HOST_WIDE_INT
1019 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1020 unsigned int max_size)
1021 {
1022 unsigned HOST_WIDE_INT n_insns = 0;
1023
1024 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1025
1026 while (max_size > 1)
1027 {
1028 enum machine_mode mode;
1029 enum insn_code icode;
1030
1031 mode = widest_int_mode_for_size (max_size);
1032
1033 if (mode == VOIDmode)
1034 break;
1035
1036 icode = optab_handler (mov_optab, mode);
1037 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1038 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1039
1040 max_size = GET_MODE_SIZE (mode);
1041 }
1042
1043 gcc_assert (!l);
1044 return n_insns;
1045 }
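/* Worked example (assuming every mov optab exists and alignment is
   sufficient): for L == 7 with a widest by-pieces mode of DImode, the
   loop counts 0 DImode moves (7 / 8), then 1 SImode move, 1 HImode
   move and 1 QImode move for the 7 % 8 remainder, i.e. 3 insns.  */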
1046
1047 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1048 with move instructions for mode MODE. GENFUN is the gen_... function
1049 to make a move insn for that mode. DATA has all the other info. */
1050
1051 static void
1052 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1053 struct move_by_pieces_d *data)
1054 {
1055 unsigned int size = GET_MODE_SIZE (mode);
1056 rtx to1 = NULL_RTX, from1;
1057
1058 while (data->len >= size)
1059 {
1060 if (data->reverse)
1061 data->offset -= size;
1062
1063 if (data->to)
1064 {
1065 if (data->autinc_to)
1066 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1067 data->offset);
1068 else
1069 to1 = adjust_address (data->to, mode, data->offset);
1070 }
1071
1072 if (data->autinc_from)
1073 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1074 data->offset);
1075 else
1076 from1 = adjust_address (data->from, mode, data->offset);
1077
1078 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1079 emit_insn (gen_add2_insn (data->to_addr,
1080 GEN_INT (-(HOST_WIDE_INT)size)));
1081 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1082 emit_insn (gen_add2_insn (data->from_addr,
1083 GEN_INT (-(HOST_WIDE_INT)size)));
1084
1085 if (data->to)
1086 emit_insn ((*genfun) (to1, from1));
1087 else
1088 {
1089 #ifdef PUSH_ROUNDING
1090 emit_single_push_insn (mode, from1, NULL);
1091 #else
1092 gcc_unreachable ();
1093 #endif
1094 }
1095
1096 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1097 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1098 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1099 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1100
1101 if (! data->reverse)
1102 data->offset += size;
1103
1104 data->len -= size;
1105 }
1106 }
1107 \f
1108 /* Emit code to move a block Y to a block X. This may be done with
1109 string-move instructions, with multiple scalar move instructions,
1110 or with a library call.
1111
1112 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1113 SIZE is an rtx that says how long they are.
1114 ALIGN is the maximum alignment we can assume they have.
1115 METHOD describes what kind of copy this is, and what mechanisms may be used.
1116
1117 Return the address of the new block, if memcpy is called and returns it,
1118 0 otherwise. */
1119
1120 rtx
1121 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1122 unsigned int expected_align, HOST_WIDE_INT expected_size)
1123 {
1124 bool may_use_call;
1125 rtx retval = 0;
1126 unsigned int align;
1127
1128 gcc_assert (size);
1129 if (CONST_INT_P (size)
1130 && INTVAL (size) == 0)
1131 return 0;
1132
1133 switch (method)
1134 {
1135 case BLOCK_OP_NORMAL:
1136 case BLOCK_OP_TAILCALL:
1137 may_use_call = true;
1138 break;
1139
1140 case BLOCK_OP_CALL_PARM:
1141 may_use_call = block_move_libcall_safe_for_call_parm ();
1142
1143 /* Make inhibit_defer_pop nonzero around the library call
1144 to force it to pop the arguments right away. */
1145 NO_DEFER_POP;
1146 break;
1147
1148 case BLOCK_OP_NO_LIBCALL:
1149 may_use_call = false;
1150 break;
1151
1152 default:
1153 gcc_unreachable ();
1154 }
1155
1156 gcc_assert (MEM_P (x) && MEM_P (y));
1157 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1158 gcc_assert (align >= BITS_PER_UNIT);
1159
1160 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1161 block copy is more efficient for other large modes, e.g. DCmode. */
1162 x = adjust_address (x, BLKmode, 0);
1163 y = adjust_address (y, BLKmode, 0);
1164
1165 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1166 can be incorrect is coming from __builtin_memcpy. */
1167 if (CONST_INT_P (size))
1168 {
1169 x = shallow_copy_rtx (x);
1170 y = shallow_copy_rtx (y);
1171 set_mem_size (x, INTVAL (size));
1172 set_mem_size (y, INTVAL (size));
1173 }
1174
1175 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1176 move_by_pieces (x, y, INTVAL (size), align, 0);
1177 else if (emit_block_move_via_movmem (x, y, size, align,
1178 expected_align, expected_size))
1179 ;
1180 else if (may_use_call
1181 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1182 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1183 {
1184 /* Since x and y are passed to a libcall, mark the corresponding
1185 tree EXPR as addressable. */
1186 tree y_expr = MEM_EXPR (y);
1187 tree x_expr = MEM_EXPR (x);
1188 if (y_expr)
1189 mark_addressable (y_expr);
1190 if (x_expr)
1191 mark_addressable (x_expr);
1192 retval = emit_block_move_via_libcall (x, y, size,
1193 method == BLOCK_OP_TAILCALL);
1194 }
1195
1196 else
1197 emit_block_move_via_loop (x, y, size, align);
1198
1199 if (method == BLOCK_OP_CALL_PARM)
1200 OK_DEFER_POP;
1201
1202 return retval;
1203 }
1204
1205 rtx
1206 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1207 {
1208 return emit_block_move_hints (x, y, size, method, 0, -1);
1209 }
1210
1211 /* A subroutine of emit_block_move. Returns true if calling the
1212 block move libcall will not clobber any parameters which may have
1213 already been placed on the stack. */
1214
1215 static bool
1216 block_move_libcall_safe_for_call_parm (void)
1217 {
1218 #if defined (REG_PARM_STACK_SPACE)
1219 tree fn;
1220 #endif
1221
1222 /* If arguments are pushed on the stack, then they're safe. */
1223 if (PUSH_ARGS)
1224 return true;
1225
1226 /* If registers go on the stack anyway, any argument is sure to clobber
1227 an outgoing argument. */
1228 #if defined (REG_PARM_STACK_SPACE)
1229 fn = emit_block_move_libcall_fn (false);
1230 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1231 depend on its argument. */
1232 (void) fn;
1233 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1234 && REG_PARM_STACK_SPACE (fn) != 0)
1235 return false;
1236 #endif
1237
1238 /* If any argument goes in memory, then it might clobber an outgoing
1239 argument. */
1240 {
1241 CUMULATIVE_ARGS args_so_far_v;
1242 cumulative_args_t args_so_far;
1243 tree fn, arg;
1244
1245 fn = emit_block_move_libcall_fn (false);
1246 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1247 args_so_far = pack_cumulative_args (&args_so_far_v);
1248
1249 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1250 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1251 {
1252 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1253 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1254 NULL_TREE, true);
1255 if (!tmp || !REG_P (tmp))
1256 return false;
1257 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1258 return false;
1259 targetm.calls.function_arg_advance (args_so_far, mode,
1260 NULL_TREE, true);
1261 }
1262 }
1263 return true;
1264 }
1265
1266 /* A subroutine of emit_block_move. Expand a movmem pattern;
1267 return true if successful. */
1268
1269 static bool
1270 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1271 unsigned int expected_align, HOST_WIDE_INT expected_size)
1272 {
1273 int save_volatile_ok = volatile_ok;
1274 enum machine_mode mode;
1275
1276 if (expected_align < align)
1277 expected_align = align;
1278
1279 /* Since this is a move insn, we don't care about volatility. */
1280 volatile_ok = 1;
1281
1282 /* Try the most limited insn first, because there's no point
1283 including more than one in the machine description unless
1284 the more limited one has some advantage. */
1285
1286 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1287 mode = GET_MODE_WIDER_MODE (mode))
1288 {
1289 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1290
1291 if (code != CODE_FOR_nothing
1292 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1293 here because if SIZE is less than the mode mask, as it is
1294 returned by the macro, it will definitely be less than the
1295 actual mode mask. */
1296 && ((CONST_INT_P (size)
1297 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1298 <= (GET_MODE_MASK (mode) >> 1)))
1299 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
1300 {
1301 struct expand_operand ops[6];
1302 unsigned int nops;
1303
1304 /* ??? When called via emit_block_move_for_call, it'd be
1305 nice if there were some way to inform the backend, so
1306 that it doesn't fail the expansion because it thinks
1307 emitting the libcall would be more efficient. */
1308 nops = insn_data[(int) code].n_generator_args;
1309 gcc_assert (nops == 4 || nops == 6);
1310
1311 create_fixed_operand (&ops[0], x);
1312 create_fixed_operand (&ops[1], y);
1313 /* The check above guarantees that this size conversion is valid. */
1314 create_convert_operand_to (&ops[2], size, mode, true);
1315 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1316 if (nops == 6)
1317 {
1318 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1319 create_integer_operand (&ops[5], expected_size);
1320 }
1321 if (maybe_expand_insn (code, nops, ops))
1322 {
1323 volatile_ok = save_volatile_ok;
1324 return true;
1325 }
1326 }
1327 }
1328
1329 volatile_ok = save_volatile_ok;
1330 return false;
1331 }
1332
1333 /* A subroutine of emit_block_move. Expand a call to memcpy.
1334 Return the return value from memcpy, 0 otherwise. */
1335
1336 rtx
1337 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1338 {
1339 rtx dst_addr, src_addr;
1340 tree call_expr, fn, src_tree, dst_tree, size_tree;
1341 enum machine_mode size_mode;
1342 rtx retval;
1343
1344 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1345 pseudos. We can then place those new pseudos into a VAR_DECL and
1346 use them later. */
1347
1348 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1349 src_addr = copy_addr_to_reg (XEXP (src, 0));
1350
1351 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1352 src_addr = convert_memory_address (ptr_mode, src_addr);
1353
1354 dst_tree = make_tree (ptr_type_node, dst_addr);
1355 src_tree = make_tree (ptr_type_node, src_addr);
1356
1357 size_mode = TYPE_MODE (sizetype);
1358
1359 size = convert_to_mode (size_mode, size, 1);
1360 size = copy_to_mode_reg (size_mode, size);
1361
1362 /* It is incorrect to use the libcall calling conventions to call
1363 memcpy in this context. This could be a user call to memcpy and
1364 the user may wish to examine the return value from memcpy. For
1365 targets where libcalls and normal calls have different conventions
1366 for returning pointers, we could end up generating incorrect code. */
1367
1368 size_tree = make_tree (sizetype, size);
1369
1370 fn = emit_block_move_libcall_fn (true);
1371 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1372 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1373
1374 retval = expand_normal (call_expr);
1375
1376 return retval;
1377 }
1378
1379 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1380 for the function we use for block copies. */
1381
1382 static GTY(()) tree block_move_fn;
1383
1384 void
1385 init_block_move_fn (const char *asmspec)
1386 {
1387 if (!block_move_fn)
1388 {
1389 tree args, fn;
1390
1391 fn = get_identifier ("memcpy");
1392 args = build_function_type_list (ptr_type_node, ptr_type_node,
1393 const_ptr_type_node, sizetype,
1394 NULL_TREE);
1395
1396 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1397 DECL_EXTERNAL (fn) = 1;
1398 TREE_PUBLIC (fn) = 1;
1399 DECL_ARTIFICIAL (fn) = 1;
1400 TREE_NOTHROW (fn) = 1;
1401 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1402 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1403
1404 block_move_fn = fn;
1405 }
1406
1407 if (asmspec)
1408 set_user_assembler_name (block_move_fn, asmspec);
1409 }
1410
1411 static tree
1412 emit_block_move_libcall_fn (int for_call)
1413 {
1414 static bool emitted_extern;
1415
1416 if (!block_move_fn)
1417 init_block_move_fn (NULL);
1418
1419 if (for_call && !emitted_extern)
1420 {
1421 emitted_extern = true;
1422 make_decl_rtl (block_move_fn);
1423 }
1424
1425 return block_move_fn;
1426 }
1427
1428 /* A subroutine of emit_block_move. Copy the data via an explicit
1429 loop. This is used only when libcalls are forbidden. */
1430 /* ??? It'd be nice to copy in hunks larger than QImode. */
1431
1432 static void
1433 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1434 unsigned int align ATTRIBUTE_UNUSED)
1435 {
1436 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1437 enum machine_mode x_addr_mode
1438 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
1439 enum machine_mode y_addr_mode
1440 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
1441 enum machine_mode iter_mode;
1442
1443 iter_mode = GET_MODE (size);
1444 if (iter_mode == VOIDmode)
1445 iter_mode = word_mode;
1446
1447 top_label = gen_label_rtx ();
1448 cmp_label = gen_label_rtx ();
1449 iter = gen_reg_rtx (iter_mode);
1450
1451 emit_move_insn (iter, const0_rtx);
1452
1453 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1454 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1455 do_pending_stack_adjust ();
1456
1457 emit_jump (cmp_label);
1458 emit_label (top_label);
1459
1460 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1461 x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);
1462
1463 if (x_addr_mode != y_addr_mode)
1464 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1465 y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);
1466
1467 x = change_address (x, QImode, x_addr);
1468 y = change_address (y, QImode, y_addr);
1469
1470 emit_move_insn (x, y);
1471
1472 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1473 true, OPTAB_LIB_WIDEN);
1474 if (tmp != iter)
1475 emit_move_insn (iter, tmp);
1476
1477 emit_label (cmp_label);
1478
1479 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1480 true, top_label);
1481 }
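/* The loop emitted above is equivalent to the following C, one byte
   per iteration (sketch; x, y and size stand for the rtx operands):

     for (iter = 0; iter < size; iter++)
       x[iter] = y[iter];

   which is why this path is taken only as a last resort, when a
   libcall is forbidden and neither a movmem pattern nor a by-pieces
   copy applied.  */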
1482 \f
1483 /* Copy all or part of a value X into registers starting at REGNO.
1484 The number of registers to be filled is NREGS. */
1485
1486 void
1487 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1488 {
1489 int i;
1490 #ifdef HAVE_load_multiple
1491 rtx pat;
1492 rtx last;
1493 #endif
1494
1495 if (nregs == 0)
1496 return;
1497
1498 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1499 x = validize_mem (force_const_mem (mode, x));
1500
1501 /* See if the machine can do this with a load multiple insn. */
1502 #ifdef HAVE_load_multiple
1503 if (HAVE_load_multiple)
1504 {
1505 last = get_last_insn ();
1506 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1507 GEN_INT (nregs));
1508 if (pat)
1509 {
1510 emit_insn (pat);
1511 return;
1512 }
1513 else
1514 delete_insns_since (last);
1515 }
1516 #endif
1517
1518 for (i = 0; i < nregs; i++)
1519 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1520 operand_subword_force (x, i, mode));
1521 }
1522
1523 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1524 The number of registers to be filled is NREGS. */
1525
1526 void
1527 move_block_from_reg (int regno, rtx x, int nregs)
1528 {
1529 int i;
1530
1531 if (nregs == 0)
1532 return;
1533
1534 /* See if the machine can do this with a store multiple insn. */
1535 #ifdef HAVE_store_multiple
1536 if (HAVE_store_multiple)
1537 {
1538 rtx last = get_last_insn ();
1539 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1540 GEN_INT (nregs));
1541 if (pat)
1542 {
1543 emit_insn (pat);
1544 return;
1545 }
1546 else
1547 delete_insns_since (last);
1548 }
1549 #endif
1550
1551 for (i = 0; i < nregs; i++)
1552 {
1553 rtx tem = operand_subword (x, i, 1, BLKmode);
1554
1555 gcc_assert (tem);
1556
1557 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1558 }
1559 }
1560
1561 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1562 ORIG, where ORIG is a non-consecutive group of registers represented by
1563 a PARALLEL. The clone is identical to the original except in that the
1564 original set of registers is replaced by a new set of pseudo registers.
1565 The new set has the same modes as the original set. */
1566
1567 rtx
1568 gen_group_rtx (rtx orig)
1569 {
1570 int i, length;
1571 rtx *tmps;
1572
1573 gcc_assert (GET_CODE (orig) == PARALLEL);
1574
1575 length = XVECLEN (orig, 0);
1576 tmps = XALLOCAVEC (rtx, length);
1577
1578 /* Skip a NULL entry in first slot. */
1579 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1580
1581 if (i)
1582 tmps[0] = 0;
1583
1584 for (; i < length; i++)
1585 {
1586 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1587 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1588
1589 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1590 }
1591
1592 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1593 }
1594
1595 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1596 except that values are placed in TMPS[i], and must later be moved
1597 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
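/* For reference, such a PARALLEL destination looks roughly like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   where each entry pairs a register with its byte offset within the
   block (register numbers here are illustrative).  A NULL register in
   the first entry means part of the value also lives on the stack.  */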
1598
1599 static void
1600 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1601 {
1602 rtx src;
1603 int start, i;
1604 enum machine_mode m = GET_MODE (orig_src);
1605
1606 gcc_assert (GET_CODE (dst) == PARALLEL);
1607
1608 if (m != VOIDmode
1609 && !SCALAR_INT_MODE_P (m)
1610 && !MEM_P (orig_src)
1611 && GET_CODE (orig_src) != CONCAT)
1612 {
1613 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1614 if (imode == BLKmode)
1615 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1616 else
1617 src = gen_reg_rtx (imode);
1618 if (imode != BLKmode)
1619 src = gen_lowpart (GET_MODE (orig_src), src);
1620 emit_move_insn (src, orig_src);
1621 /* ...and back again. */
1622 if (imode != BLKmode)
1623 src = gen_lowpart (imode, src);
1624 emit_group_load_1 (tmps, dst, src, type, ssize);
1625 return;
1626 }
1627
1628 /* Check for a NULL entry, used to indicate that the parameter goes
1629 both on the stack and in registers. */
1630 if (XEXP (XVECEXP (dst, 0, 0), 0))
1631 start = 0;
1632 else
1633 start = 1;
1634
1635 /* Process the pieces. */
1636 for (i = start; i < XVECLEN (dst, 0); i++)
1637 {
1638 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1639 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1640 unsigned int bytelen = GET_MODE_SIZE (mode);
1641 int shift = 0;
1642
1643 /* Handle trailing fragments that run over the size of the struct. */
1644 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1645 {
1646 /* Arrange to shift the fragment to where it belongs.
1647 extract_bit_field loads to the lsb of the reg. */
1648 if (
1649 #ifdef BLOCK_REG_PADDING
1650 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1651 == (BYTES_BIG_ENDIAN ? upward : downward)
1652 #else
1653 BYTES_BIG_ENDIAN
1654 #endif
1655 )
1656 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1657 bytelen = ssize - bytepos;
1658 gcc_assert (bytelen > 0);
1659 }
1660
1661 /* If we won't be loading directly from memory, protect the real source
1662 from strange tricks we might play; but make sure that the source can
1663 be loaded directly into the destination. */
1664 src = orig_src;
1665 if (!MEM_P (orig_src)
1666 && (!CONSTANT_P (orig_src)
1667 || (GET_MODE (orig_src) != mode
1668 && GET_MODE (orig_src) != VOIDmode)))
1669 {
1670 if (GET_MODE (orig_src) == VOIDmode)
1671 src = gen_reg_rtx (mode);
1672 else
1673 src = gen_reg_rtx (GET_MODE (orig_src));
1674
1675 emit_move_insn (src, orig_src);
1676 }
1677
1678 /* Optimize the access just a bit. */
1679 if (MEM_P (src)
1680 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1681 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1682 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1683 && bytelen == GET_MODE_SIZE (mode))
1684 {
1685 tmps[i] = gen_reg_rtx (mode);
1686 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1687 }
1688 else if (COMPLEX_MODE_P (mode)
1689 && GET_MODE (src) == mode
1690 && bytelen == GET_MODE_SIZE (mode))
1691 /* Let emit_move_complex do the bulk of the work. */
1692 tmps[i] = src;
1693 else if (GET_CODE (src) == CONCAT)
1694 {
1695 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1696 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1697
1698 if ((bytepos == 0 && bytelen == slen0)
1699 || (bytepos != 0 && bytepos + bytelen <= slen))
1700 {
1701 /* The following assumes that the concatenated objects all
1702 have the same size. In this case, a simple calculation
1703 can be used to determine the object and the bit field
1704 to be extracted. */
1705 tmps[i] = XEXP (src, bytepos / slen0);
1706 if (! CONSTANT_P (tmps[i])
1707 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1708 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1709 (bytepos % slen0) * BITS_PER_UNIT,
1710 1, false, NULL_RTX, mode, mode);
1711 }
1712 else
1713 {
1714 rtx mem;
1715
1716 gcc_assert (!bytepos);
1717 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1718 emit_move_insn (mem, src);
1719 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1720 0, 1, false, NULL_RTX, mode, mode);
1721 }
1722 }
1723 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
 1724 SIMD register, which is currently broken. Until we get GCC
 1725 to emit proper RTL for these cases, let's dump to memory. */
1726 else if (VECTOR_MODE_P (GET_MODE (dst))
1727 && REG_P (src))
1728 {
1729 int slen = GET_MODE_SIZE (GET_MODE (src));
1730 rtx mem;
1731
1732 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1733 emit_move_insn (mem, src);
1734 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1735 }
1736 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1737 && XVECLEN (dst, 0) > 1)
1738 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1739 else if (CONSTANT_P (src))
1740 {
1741 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1742
1743 if (len == ssize)
1744 tmps[i] = src;
1745 else
1746 {
1747 rtx first, second;
1748
1749 gcc_assert (2 * len == ssize);
1750 split_double (src, &first, &second);
1751 if (i)
1752 tmps[i] = second;
1753 else
1754 tmps[i] = first;
1755 }
1756 }
1757 else if (REG_P (src) && GET_MODE (src) == mode)
1758 tmps[i] = src;
1759 else
1760 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1761 bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
1762 mode, mode);
1763
1764 if (shift)
1765 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1766 shift, tmps[i], 0);
1767 }
1768 }
1769
1770 /* Emit code to move a block SRC of type TYPE to a block DST,
1771 where DST is non-consecutive registers represented by a PARALLEL.
1772 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1773 if not known. */
1774
1775 void
1776 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1777 {
1778 rtx *tmps;
1779 int i;
1780
1781 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1782 emit_group_load_1 (tmps, dst, src, type, ssize);
1783
1784 /* Copy the extracted pieces into the proper (probable) hard regs. */
1785 for (i = 0; i < XVECLEN (dst, 0); i++)
1786 {
1787 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1788 if (d == NULL)
1789 continue;
1790 emit_move_insn (d, tmps[i]);
1791 }
1792 }
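
/* Illustrative sketch (editorial addition, not part of the original
   file): a DST PARALLEL describing a value split across two 8-byte
   registers might look like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   where XEXP (entry, 0) is the destination register of a piece and
   XEXP (entry, 1) is the byte offset of that piece within the source.
   Register numbers and modes here are hypothetical. */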
1793
1794 /* Similar, but load SRC into new pseudos in a format that looks like
1795 PARALLEL. This can later be fed to emit_group_move to get things
1796 in the right place. */
1797
1798 rtx
1799 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1800 {
1801 rtvec vec;
1802 int i;
1803
1804 vec = rtvec_alloc (XVECLEN (parallel, 0));
1805 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1806
1807 /* Convert the vector to look just like the original PARALLEL, except
1808 with the computed values. */
1809 for (i = 0; i < XVECLEN (parallel, 0); i++)
1810 {
1811 rtx e = XVECEXP (parallel, 0, i);
1812 rtx d = XEXP (e, 0);
1813
1814 if (d)
1815 {
1816 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1817 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1818 }
1819 RTVEC_ELT (vec, i) = e;
1820 }
1821
1822 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1823 }
1824
1825 /* Emit code to move a block SRC to block DST, where SRC and DST are
1826 non-consecutive groups of registers, each represented by a PARALLEL. */
1827
1828 void
1829 emit_group_move (rtx dst, rtx src)
1830 {
1831 int i;
1832
1833 gcc_assert (GET_CODE (src) == PARALLEL
1834 && GET_CODE (dst) == PARALLEL
1835 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1836
1837 /* Skip first entry if NULL. */
1838 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1839 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1840 XEXP (XVECEXP (src, 0, i), 0));
1841 }
1842
1843 /* Move a group of registers represented by a PARALLEL into pseudos. */
1844
1845 rtx
1846 emit_group_move_into_temps (rtx src)
1847 {
1848 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1849 int i;
1850
1851 for (i = 0; i < XVECLEN (src, 0); i++)
1852 {
1853 rtx e = XVECEXP (src, 0, i);
1854 rtx d = XEXP (e, 0);
1855
1856 if (d)
1857 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1858 RTVEC_ELT (vec, i) = e;
1859 }
1860
1861 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1862 }
1863
1864 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1865 where SRC is non-consecutive registers represented by a PARALLEL.
1866 SSIZE represents the total size of block ORIG_DST, or -1 if not
1867 known. */
1868
1869 void
1870 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1871 {
1872 rtx *tmps, dst;
1873 int start, finish, i;
1874 enum machine_mode m = GET_MODE (orig_dst);
1875
1876 gcc_assert (GET_CODE (src) == PARALLEL);
1877
1878 if (!SCALAR_INT_MODE_P (m)
1879 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1880 {
1881 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1882 if (imode == BLKmode)
1883 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1884 else
1885 dst = gen_reg_rtx (imode);
1886 emit_group_store (dst, src, type, ssize);
1887 if (imode != BLKmode)
1888 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1889 emit_move_insn (orig_dst, dst);
1890 return;
1891 }
1892
1893 /* Check for a NULL entry, used to indicate that the parameter goes
1894 both on the stack and in registers. */
1895 if (XEXP (XVECEXP (src, 0, 0), 0))
1896 start = 0;
1897 else
1898 start = 1;
1899 finish = XVECLEN (src, 0);
1900
1901 tmps = XALLOCAVEC (rtx, finish);
1902
1903 /* Copy the (probable) hard regs into pseudos. */
1904 for (i = start; i < finish; i++)
1905 {
1906 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1907 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1908 {
1909 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1910 emit_move_insn (tmps[i], reg);
1911 }
1912 else
1913 tmps[i] = reg;
1914 }
1915
1916 /* If we won't be storing directly into memory, protect the real destination
1917 from strange tricks we might play. */
1918 dst = orig_dst;
1919 if (GET_CODE (dst) == PARALLEL)
1920 {
1921 rtx temp;
1922
1923 /* We can get a PARALLEL dst if there is a conditional expression in
1924 a return statement. In that case, the dst and src are the same,
1925 so no action is necessary. */
1926 if (rtx_equal_p (dst, src))
1927 return;
1928
1929 /* It is unclear if we can ever reach here, but we may as well handle
1930 it. Allocate a temporary, and split this into a store/load to/from
1931 the temporary. */
1932
1933 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1934 emit_group_store (temp, src, type, ssize);
1935 emit_group_load (dst, temp, type, ssize);
1936 return;
1937 }
1938 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1939 {
1940 enum machine_mode outer = GET_MODE (dst);
1941 enum machine_mode inner;
1942 HOST_WIDE_INT bytepos;
1943 bool done = false;
1944 rtx temp;
1945
1946 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1947 dst = gen_reg_rtx (outer);
1948
1949 /* Make life a bit easier for combine. */
1950 /* If the first element of the vector is the low part
1951 of the destination mode, use a paradoxical subreg to
1952 initialize the destination. */
1953 if (start < finish)
1954 {
1955 inner = GET_MODE (tmps[start]);
1956 bytepos = subreg_lowpart_offset (inner, outer);
1957 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1958 {
1959 temp = simplify_gen_subreg (outer, tmps[start],
1960 inner, 0);
1961 if (temp)
1962 {
1963 emit_move_insn (dst, temp);
1964 done = true;
1965 start++;
1966 }
1967 }
1968 }
1969
1970 /* If the first element wasn't the low part, try the last. */
1971 if (!done
1972 && start < finish - 1)
1973 {
1974 inner = GET_MODE (tmps[finish - 1]);
1975 bytepos = subreg_lowpart_offset (inner, outer);
1976 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1977 {
1978 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1979 inner, 0);
1980 if (temp)
1981 {
1982 emit_move_insn (dst, temp);
1983 done = true;
1984 finish--;
1985 }
1986 }
1987 }
1988
1989 /* Otherwise, simply initialize the result to zero. */
1990 if (!done)
1991 emit_move_insn (dst, CONST0_RTX (outer));
1992 }
1993
1994 /* Process the pieces. */
1995 for (i = start; i < finish; i++)
1996 {
1997 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1998 enum machine_mode mode = GET_MODE (tmps[i]);
1999 unsigned int bytelen = GET_MODE_SIZE (mode);
2000 unsigned int adj_bytelen = bytelen;
2001 rtx dest = dst;
2002
2003 /* Handle trailing fragments that run over the size of the struct. */
2004 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2005 adj_bytelen = ssize - bytepos;
2006
2007 if (GET_CODE (dst) == CONCAT)
2008 {
2009 if (bytepos + adj_bytelen
2010 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2011 dest = XEXP (dst, 0);
2012 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2013 {
2014 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2015 dest = XEXP (dst, 1);
2016 }
2017 else
2018 {
2019 enum machine_mode dest_mode = GET_MODE (dest);
2020 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2021
2022 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2023
2024 if (GET_MODE_ALIGNMENT (dest_mode)
2025 >= GET_MODE_ALIGNMENT (tmp_mode))
2026 {
2027 dest = assign_stack_temp (dest_mode,
2028 GET_MODE_SIZE (dest_mode),
2029 0);
2030 emit_move_insn (adjust_address (dest,
2031 tmp_mode,
2032 bytepos),
2033 tmps[i]);
2034 dst = dest;
2035 }
2036 else
2037 {
2038 dest = assign_stack_temp (tmp_mode,
2039 GET_MODE_SIZE (tmp_mode),
2040 0);
2041 emit_move_insn (dest, tmps[i]);
2042 dst = adjust_address (dest, dest_mode, bytepos);
2043 }
2044 break;
2045 }
2046 }
2047
2048 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2049 {
2050 /* store_bit_field always takes its value from the lsb.
2051 Move the fragment to the lsb if it's not already there. */
2052 if (
2053 #ifdef BLOCK_REG_PADDING
2054 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2055 == (BYTES_BIG_ENDIAN ? upward : downward)
2056 #else
2057 BYTES_BIG_ENDIAN
2058 #endif
2059 )
2060 {
2061 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2062 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2063 shift, tmps[i], 0);
2064 }
2065 bytelen = adj_bytelen;
2066 }
2067
2068 /* Optimize the access just a bit. */
2069 if (MEM_P (dest)
2070 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2071 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2072 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2073 && bytelen == GET_MODE_SIZE (mode))
2074 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2075 else
2076 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2077 0, 0, mode, tmps[i]);
2078 }
2079
2080 /* Copy from the pseudo into the (probable) hard reg. */
2081 if (orig_dst != dst)
2082 emit_move_insn (orig_dst, dst);
2083 }
2084
2085 /* Generate code to copy a BLKmode object of TYPE out of a
2086 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2087 is null, a stack temporary is created. TGTBLK is returned.
2088
2089 The purpose of this routine is to handle functions that return
2090 BLKmode structures in registers. Some machines (the PA for example)
2091 want to return all small structures in registers regardless of the
2092 structure's alignment. */
2093
2094 rtx
2095 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2096 {
2097 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2098 rtx src = NULL, dst = NULL;
2099 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2100 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2101 enum machine_mode copy_mode;
2102
2103 if (tgtblk == 0)
2104 {
2105 tgtblk = assign_temp (build_qualified_type (type,
2106 (TYPE_QUALS (type)
2107 | TYPE_QUAL_CONST)),
2108 0, 1, 1);
2109 preserve_temp_slots (tgtblk);
2110 }
2111
2112 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2113 into a new pseudo which is a full word. */
2114
2115 if (GET_MODE (srcreg) != BLKmode
2116 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2117 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2118
2119 /* If the structure doesn't take up a whole number of words, see whether
2120 SRCREG is padded on the left or on the right. If it's on the left,
2121 set PADDING_CORRECTION to the number of bits to skip.
2122
2123 In most ABIs, the structure will be returned at the least significant end of
2124 the register, which translates to right padding on little-endian
2125 targets and left padding on big-endian targets. The opposite
2126 holds if the structure is returned at the most significant
2127 end of the register. */
2128 if (bytes % UNITS_PER_WORD != 0
2129 && (targetm.calls.return_in_msb (type)
2130 ? !BYTES_BIG_ENDIAN
2131 : BYTES_BIG_ENDIAN))
2132 padding_correction
2133 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2134
2135 /* Copy the structure BITSIZE bits at a time. If the target lives in
2136 memory, take care of not reading/writing past its end by selecting
2137 a copy mode suited to BITSIZE. This should always be possible given
2138 how it is computed.
2139
2140 We could probably emit more efficient code for machines which do not use
2141 strict alignment, but it doesn't seem worth the effort at the current
2142 time. */
2143
2144 copy_mode = word_mode;
2145 if (MEM_P (tgtblk))
2146 {
2147 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2148 if (mem_mode != BLKmode)
2149 copy_mode = mem_mode;
2150 }
2151
2152 for (bitpos = 0, xbitpos = padding_correction;
2153 bitpos < bytes * BITS_PER_UNIT;
2154 bitpos += bitsize, xbitpos += bitsize)
2155 {
2156 /* We need a new source operand each time xbitpos is on a
2157 word boundary and when xbitpos == padding_correction
2158 (the first time through). */
2159 if (xbitpos % BITS_PER_WORD == 0
2160 || xbitpos == padding_correction)
2161 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2162 GET_MODE (srcreg));
2163
2164 /* We need a new destination operand each time bitpos is on
2165 a word boundary. */
2166 if (bitpos % BITS_PER_WORD == 0)
2167 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2168
2169 /* Use xbitpos for the source extraction (right justified) and
2170 bitpos for the destination store (left justified). */
2171 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2172 extract_bit_field (src, bitsize,
2173 xbitpos % BITS_PER_WORD, 1, false,
2174 NULL_RTX, copy_mode, copy_mode));
2175 }
2176
2177 return tgtblk;
2178 }
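
/* Worked example (editorial addition, not part of the original file):
   on a hypothetical big-endian target with BITS_PER_WORD == 32 that
   does not return values in the most significant end, a 6-byte
   structure gives bytes % UNITS_PER_WORD == 2, so padding_correction
   is 32 - 2 * BITS_PER_UNIT == 16 and the first extraction from
   SRCREG starts 16 bits into the register. */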
2179
2180 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2181 register if it contains any data, otherwise return null.
2182
2183 This is used on targets that return BLKmode values in registers. */
2184
2185 rtx
2186 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2187 {
2188 int i, n_regs;
2189 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2190 unsigned int bitsize;
2191 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2192 enum machine_mode dst_mode;
2193
2194 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2195
2196 x = expand_normal (src);
2197
2198 bytes = int_size_in_bytes (TREE_TYPE (src));
2199 if (bytes == 0)
2200 return NULL_RTX;
2201
2202 /* If the structure doesn't take up a whole number of words, see
2203 whether the register value should be padded on the left or on
2204 the right. Set PADDING_CORRECTION to the number of padding
2205 bits needed on the left side.
2206
2207 In most ABIs, the structure will be returned at the least significant end of
2208 the register, which translates to right padding on little-endian
2209 targets and left padding on big-endian targets. The opposite
2210 holds if the structure is returned at the most significant
2211 end of the register. */
2212 if (bytes % UNITS_PER_WORD != 0
2213 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2214 ? !BYTES_BIG_ENDIAN
2215 : BYTES_BIG_ENDIAN))
2216 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2217 * BITS_PER_UNIT));
2218
2219 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2220 dst_words = XALLOCAVEC (rtx, n_regs);
2221 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2222
2223 /* Copy the structure BITSIZE bits at a time. */
2224 for (bitpos = 0, xbitpos = padding_correction;
2225 bitpos < bytes * BITS_PER_UNIT;
2226 bitpos += bitsize, xbitpos += bitsize)
2227 {
2228 /* We need a new destination pseudo each time xbitpos is
2229 on a word boundary and when xbitpos == padding_correction
2230 (the first time through). */
2231 if (xbitpos % BITS_PER_WORD == 0
2232 || xbitpos == padding_correction)
2233 {
2234 /* Generate an appropriate register. */
2235 dst_word = gen_reg_rtx (word_mode);
2236 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2237
2238 /* Clear the destination before we move anything into it. */
2239 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2240 }
2241
2242 /* We need a new source operand each time bitpos is on a word
2243 boundary. */
2244 if (bitpos % BITS_PER_WORD == 0)
2245 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2246
2247 /* Use bitpos for the source extraction (left justified) and
2248 xbitpos for the destination store (right justified). */
2249 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2250 0, 0, word_mode,
2251 extract_bit_field (src_word, bitsize,
2252 bitpos % BITS_PER_WORD, 1, false,
2253 NULL_RTX, word_mode, word_mode));
2254 }
2255
2256 if (mode == BLKmode)
2257 {
2258 /* Find the smallest integer mode large enough to hold the
2259 entire structure. */
2260 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2261 mode != VOIDmode;
2262 mode = GET_MODE_WIDER_MODE (mode))
2263 /* Have we found a large enough mode? */
2264 if (GET_MODE_SIZE (mode) >= bytes)
2265 break;
2266
2267 /* A suitable mode should have been found. */
2268 gcc_assert (mode != VOIDmode);
2269 }
2270
2271 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2272 dst_mode = word_mode;
2273 else
2274 dst_mode = mode;
2275 dst = gen_reg_rtx (dst_mode);
2276
2277 for (i = 0; i < n_regs; i++)
2278 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2279
2280 if (mode != dst_mode)
2281 dst = gen_lowpart (mode, dst);
2282
2283 return dst;
2284 }
2285
2286 /* Add a USE expression for REG to the (possibly empty) list pointed
2287 to by CALL_FUSAGE. REG must denote a hard register. */
2288
2289 void
2290 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2291 {
2292 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2293
2294 *call_fusage
2295 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2296 }
2297
2298 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2299 starting at REGNO. All of these registers must be hard registers. */
2300
2301 void
2302 use_regs (rtx *call_fusage, int regno, int nregs)
2303 {
2304 int i;
2305
2306 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2307
2308 for (i = 0; i < nregs; i++)
2309 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2310 }
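
/* Usage sketch (editorial addition, not part of the original file):
   a hypothetical caller recording that a call reads hard registers 4
   and 5 might write

     rtx fusage = NULL_RTX;
     use_regs (&fusage, 4, 2);

   after which FUSAGE holds USE expressions for regno_reg_rtx[4] and
   regno_reg_rtx[5]. */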
2311
2312 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2313 PARALLEL REGS. This is for calls that pass values in multiple
2314 non-contiguous locations. The Irix 6 ABI has examples of this. */
2315
2316 void
2317 use_group_regs (rtx *call_fusage, rtx regs)
2318 {
2319 int i;
2320
2321 for (i = 0; i < XVECLEN (regs, 0); i++)
2322 {
2323 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2324
2325 /* A NULL entry means the parameter goes both on the stack and in
2326 registers. This can also be a MEM for targets that pass values
2327 partially on the stack and partially in registers. */
2328 if (reg != 0 && REG_P (reg))
2329 use_reg (call_fusage, reg);
2330 }
2331 }
2332
2333 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2334 assignment and the code of the expression on the RHS is CODE. Return
2335 NULL otherwise. */
2336
2337 static gimple
2338 get_def_for_expr (tree name, enum tree_code code)
2339 {
2340 gimple def_stmt;
2341
2342 if (TREE_CODE (name) != SSA_NAME)
2343 return NULL;
2344
2345 def_stmt = get_gimple_for_ssa_name (name);
2346 if (!def_stmt
2347 || gimple_assign_rhs_code (def_stmt) != code)
2348 return NULL;
2349
2350 return def_stmt;
2351 }
2352
2353 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2354 assignment and the class of the expression on the RHS is CLASS. Return
2355 NULL otherwise. */
2356
2357 static gimple
2358 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2359 {
2360 gimple def_stmt;
2361
2362 if (TREE_CODE (name) != SSA_NAME)
2363 return NULL;
2364
2365 def_stmt = get_gimple_for_ssa_name (name);
2366 if (!def_stmt
2367 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2368 return NULL;
2369
2370 return def_stmt;
2371 }
2372 \f
2373
2374 /* Determine whether the LEN bytes generated by CONSTFUN can be
2375 stored to memory using several move instructions. CONSTFUNDATA is
2376 a pointer which will be passed as argument in every CONSTFUN call.
2377 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2378 a memset operation and false if it's a copy of a constant string.
2379 Return nonzero if a call to store_by_pieces should succeed. */
2380
2381 int
2382 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2383 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2384 void *constfundata, unsigned int align, bool memsetp)
2385 {
2386 unsigned HOST_WIDE_INT l;
2387 unsigned int max_size;
2388 HOST_WIDE_INT offset = 0;
2389 enum machine_mode mode;
2390 enum insn_code icode;
2391 int reverse;
2392 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2393 rtx cst ATTRIBUTE_UNUSED;
2394
2395 if (len == 0)
2396 return 1;
2397
2398 if (! (memsetp
2399 ? SET_BY_PIECES_P (len, align)
2400 : STORE_BY_PIECES_P (len, align)))
2401 return 0;
2402
2403 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2404
2405 /* We would first store what we can in the largest integer mode, then go to
2406 successively smaller modes. */
2407
2408 for (reverse = 0;
2409 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2410 reverse++)
2411 {
2412 l = len;
2413 max_size = STORE_MAX_PIECES + 1;
2414 while (max_size > 1)
2415 {
2416 mode = widest_int_mode_for_size (max_size);
2417
2418 if (mode == VOIDmode)
2419 break;
2420
2421 icode = optab_handler (mov_optab, mode);
2422 if (icode != CODE_FOR_nothing
2423 && align >= GET_MODE_ALIGNMENT (mode))
2424 {
2425 unsigned int size = GET_MODE_SIZE (mode);
2426
2427 while (l >= size)
2428 {
2429 if (reverse)
2430 offset -= size;
2431
2432 cst = (*constfun) (constfundata, offset, mode);
2433 if (!targetm.legitimate_constant_p (mode, cst))
2434 return 0;
2435
2436 if (!reverse)
2437 offset += size;
2438
2439 l -= size;
2440 }
2441 }
2442
2443 max_size = GET_MODE_SIZE (mode);
2444 }
2445
2446 /* The code above should have handled everything. */
2447 gcc_assert (!l);
2448 }
2449
2450 return 1;
2451 }
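
/* Illustrative sketch (editorial addition, not part of the original
   file): CONSTFUN has the same shape as clear_by_pieces_1 below. A
   hypothetical callback producing an all-ones pattern for every piece
   could be

     static rtx
     all_ones_piece (void *data, HOST_WIDE_INT offset,
                     enum machine_mode mode)
     {
       return constm1_rtx;
     }

   and can_store_by_pieces (len, all_ones_piece, NULL, align, true)
   would then report whether such a memset-like store can be expanded
   piecewise. */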
2452
2453 /* Generate several move instructions to store LEN bytes generated by
2454 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2455 pointer which will be passed as argument in every CONSTFUN call.
2456 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2457 a memset operation and false if it's a copy of a constant string.
2458 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2459 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2460 stpcpy. */
2461
2462 rtx
2463 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2464 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2465 void *constfundata, unsigned int align, bool memsetp, int endp)
2466 {
2467 enum machine_mode to_addr_mode
2468 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2469 struct store_by_pieces_d data;
2470
2471 if (len == 0)
2472 {
2473 gcc_assert (endp != 2);
2474 return to;
2475 }
2476
2477 gcc_assert (memsetp
2478 ? SET_BY_PIECES_P (len, align)
2479 : STORE_BY_PIECES_P (len, align));
2480 data.constfun = constfun;
2481 data.constfundata = constfundata;
2482 data.len = len;
2483 data.to = to;
2484 store_by_pieces_1 (&data, align);
2485 if (endp)
2486 {
2487 rtx to1;
2488
2489 gcc_assert (!data.reverse);
2490 if (data.autinc_to)
2491 {
2492 if (endp == 2)
2493 {
2494 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2495 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2496 else
2497 data.to_addr = copy_to_mode_reg (to_addr_mode,
2498 plus_constant (to_addr_mode,
2499 data.to_addr,
2500 -1));
2501 }
2502 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2503 data.offset);
2504 }
2505 else
2506 {
2507 if (endp == 2)
2508 --data.offset;
2509 to1 = adjust_address (data.to, QImode, data.offset);
2510 }
2511 return to1;
2512 }
2513 else
2514 return data.to;
2515 }
2516
2517 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2518 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2519
2520 static void
2521 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2522 {
2523 struct store_by_pieces_d data;
2524
2525 if (len == 0)
2526 return;
2527
2528 data.constfun = clear_by_pieces_1;
2529 data.constfundata = NULL;
2530 data.len = len;
2531 data.to = to;
2532 store_by_pieces_1 (&data, align);
2533 }
2534
2535 /* Callback routine for clear_by_pieces.
2536 Return const0_rtx unconditionally. */
2537
2538 static rtx
2539 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2540 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2541 enum machine_mode mode ATTRIBUTE_UNUSED)
2542 {
2543 return const0_rtx;
2544 }
2545
2546 /* Subroutine of clear_by_pieces and store_by_pieces.
2547 Generate several move instructions to store LEN bytes of block TO. (A MEM
2548 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2549
2550 static void
2551 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2552 unsigned int align ATTRIBUTE_UNUSED)
2553 {
2554 enum machine_mode to_addr_mode
2555 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2556 rtx to_addr = XEXP (data->to, 0);
2557 unsigned int max_size = STORE_MAX_PIECES + 1;
2558 enum insn_code icode;
2559
2560 data->offset = 0;
2561 data->to_addr = to_addr;
2562 data->autinc_to
2563 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2564 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2565
2566 data->explicit_inc_to = 0;
2567 data->reverse
2568 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2569 if (data->reverse)
2570 data->offset = data->len;
2571
2572 /* If storing requires more than two move insns,
2573 copy addresses to registers (to make displacements shorter)
2574 and use post-increment if available. */
2575 if (!data->autinc_to
2576 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2577 {
2578 /* Determine the main mode we'll be using.
2579 MODE might not be used depending on the definitions of the
2580 USE_* macros below. */
2581 enum machine_mode mode ATTRIBUTE_UNUSED
2582 = widest_int_mode_for_size (max_size);
2583
2584 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2585 {
2586 data->to_addr = copy_to_mode_reg (to_addr_mode,
2587 plus_constant (to_addr_mode,
2588 to_addr,
2589 data->len));
2590 data->autinc_to = 1;
2591 data->explicit_inc_to = -1;
2592 }
2593
2594 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2595 && ! data->autinc_to)
2596 {
2597 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2598 data->autinc_to = 1;
2599 data->explicit_inc_to = 1;
2600 }
2601
2602 if (!data->autinc_to && CONSTANT_P (to_addr))
2603 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2604 }
2605
2606 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2607
2608 /* First store what we can in the largest integer mode, then go to
2609 successively smaller modes. */
2610
2611 while (max_size > 1)
2612 {
2613 enum machine_mode mode = widest_int_mode_for_size (max_size);
2614
2615 if (mode == VOIDmode)
2616 break;
2617
2618 icode = optab_handler (mov_optab, mode);
2619 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2620 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2621
2622 max_size = GET_MODE_SIZE (mode);
2623 }
2624
2625 /* The code above should have handled everything. */
2626 gcc_assert (!data->len);
2627 }
2628
2629 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2630 with move instructions for mode MODE. GENFUN is the gen_... function
2631 to make a move insn for that mode. DATA has all the other info. */
2632
2633 static void
2634 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2635 struct store_by_pieces_d *data)
2636 {
2637 unsigned int size = GET_MODE_SIZE (mode);
2638 rtx to1, cst;
2639
2640 while (data->len >= size)
2641 {
2642 if (data->reverse)
2643 data->offset -= size;
2644
2645 if (data->autinc_to)
2646 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2647 data->offset);
2648 else
2649 to1 = adjust_address (data->to, mode, data->offset);
2650
2651 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2652 emit_insn (gen_add2_insn (data->to_addr,
2653 GEN_INT (-(HOST_WIDE_INT) size)));
2654
2655 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2656 emit_insn ((*genfun) (to1, cst));
2657
2658 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2659 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2660
2661 if (! data->reverse)
2662 data->offset += size;
2663
2664 data->len -= size;
2665 }
2666 }
2667 \f
2668 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2669 its length in bytes. */
2670
2671 rtx
2672 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2673 unsigned int expected_align, HOST_WIDE_INT expected_size)
2674 {
2675 enum machine_mode mode = GET_MODE (object);
2676 unsigned int align;
2677
2678 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2679
2680 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2681 just move a zero. Otherwise, do this a piece at a time. */
2682 if (mode != BLKmode
2683 && CONST_INT_P (size)
2684 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2685 {
2686 rtx zero = CONST0_RTX (mode);
2687 if (zero != NULL)
2688 {
2689 emit_move_insn (object, zero);
2690 return NULL;
2691 }
2692
2693 if (COMPLEX_MODE_P (mode))
2694 {
2695 zero = CONST0_RTX (GET_MODE_INNER (mode));
2696 if (zero != NULL)
2697 {
2698 write_complex_part (object, zero, 0);
2699 write_complex_part (object, zero, 1);
2700 return NULL;
2701 }
2702 }
2703 }
2704
2705 if (size == const0_rtx)
2706 return NULL;
2707
2708 align = MEM_ALIGN (object);
2709
2710 if (CONST_INT_P (size)
2711 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2712 clear_by_pieces (object, INTVAL (size), align);
2713 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2714 expected_align, expected_size))
2715 ;
2716 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2717 return set_storage_via_libcall (object, size, const0_rtx,
2718 method == BLOCK_OP_TAILCALL);
2719 else
2720 gcc_unreachable ();
2721
2722 return NULL;
2723 }
2724
2725 rtx
2726 clear_storage (rtx object, rtx size, enum block_op_methods method)
2727 {
2728 return clear_storage_hints (object, size, method, 0, -1);
2729 }
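
/* Usage sketch (editorial addition, not part of the original file):
   a hypothetical caller zeroing N bytes of a BLKmode MEM could write

     clear_storage (mem, GEN_INT (n), BLOCK_OP_NORMAL);

   which picks clear_by_pieces, a setmem pattern or a memset libcall,
   whichever is available and profitable. */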
2730
2731
2732 /* A subroutine of clear_storage. Expand a call to memset.
2733 Return the return value of memset, 0 otherwise. */
2734
2735 rtx
2736 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2737 {
2738 tree call_expr, fn, object_tree, size_tree, val_tree;
2739 enum machine_mode size_mode;
2740 rtx retval;
2741
2742 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2743 place those pseudos into a VAR_DECL and use them later. */
2744
2745 object = copy_addr_to_reg (XEXP (object, 0));
2746
2747 size_mode = TYPE_MODE (sizetype);
2748 size = convert_to_mode (size_mode, size, 1);
2749 size = copy_to_mode_reg (size_mode, size);
2750
2751 /* It is incorrect to use the libcall calling conventions to call
2752 memset in this context. This could be a user call to memset and
2753 the user may wish to examine the return value from memset. For
2754 targets where libcalls and normal calls have different conventions
2755 for returning pointers, we could end up generating incorrect code. */
2756
2757 object_tree = make_tree (ptr_type_node, object);
2758 if (!CONST_INT_P (val))
2759 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2760 size_tree = make_tree (sizetype, size);
2761 val_tree = make_tree (integer_type_node, val);
2762
2763 fn = clear_storage_libcall_fn (true);
2764 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2765 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2766
2767 retval = expand_normal (call_expr);
2768
2769 return retval;
2770 }
2771
2772 /* A subroutine of set_storage_via_libcall. Create the tree node
2773 for the function we use for block clears. */
2774
2775 tree block_clear_fn;
2776
2777 void
2778 init_block_clear_fn (const char *asmspec)
2779 {
2780 if (!block_clear_fn)
2781 {
2782 tree fn, args;
2783
2784 fn = get_identifier ("memset");
2785 args = build_function_type_list (ptr_type_node, ptr_type_node,
2786 integer_type_node, sizetype,
2787 NULL_TREE);
2788
2789 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2790 DECL_EXTERNAL (fn) = 1;
2791 TREE_PUBLIC (fn) = 1;
2792 DECL_ARTIFICIAL (fn) = 1;
2793 TREE_NOTHROW (fn) = 1;
2794 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2795 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2796
2797 block_clear_fn = fn;
2798 }
2799
2800 if (asmspec)
2801 set_user_assembler_name (block_clear_fn, asmspec);
2802 }
2803
2804 static tree
2805 clear_storage_libcall_fn (int for_call)
2806 {
2807 static bool emitted_extern;
2808
2809 if (!block_clear_fn)
2810 init_block_clear_fn (NULL);
2811
2812 if (for_call && !emitted_extern)
2813 {
2814 emitted_extern = true;
2815 make_decl_rtl (block_clear_fn);
2816 }
2817
2818 return block_clear_fn;
2819 }
2820 \f
2821 /* Expand a setmem pattern; return true if successful. */
2822
2823 bool
2824 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2825 unsigned int expected_align, HOST_WIDE_INT expected_size)
2826 {
2827 /* Try the most limited insn first, because there's no point
2828 including more than one in the machine description unless
2829 the more limited one has some advantage. */
2830
2831 enum machine_mode mode;
2832
2833 if (expected_align < align)
2834 expected_align = align;
2835
2836 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2837 mode = GET_MODE_WIDER_MODE (mode))
2838 {
2839 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2840
2841 if (code != CODE_FOR_nothing
2842 /* We don't need MODE to be narrower than
2843 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2844 the mode mask, as it is returned by the macro, it will
2845 definitely be less than the actual mode mask. */
2846 && ((CONST_INT_P (size)
2847 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2848 <= (GET_MODE_MASK (mode) >> 1)))
2849 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
2850 {
2851 struct expand_operand ops[6];
2852 unsigned int nops;
2853
2854 nops = insn_data[(int) code].n_generator_args;
2855 gcc_assert (nops == 4 || nops == 6);
2856
2857 create_fixed_operand (&ops[0], object);
2858 /* The check above guarantees that this size conversion is valid. */
2859 create_convert_operand_to (&ops[1], size, mode, true);
2860 create_convert_operand_from (&ops[2], val, byte_mode, true);
2861 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2862 if (nops == 6)
2863 {
2864 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2865 create_integer_operand (&ops[5], expected_size);
2866 }
2867 if (maybe_expand_insn (code, nops, ops))
2868 return true;
2869 }
2870 }
2871
2872 return false;
2873 }
2874
2875 \f
2876 /* Write to one of the components of the complex value CPLX. Write VAL to
2877 the real part if IMAG_P is false, and the imaginary part if it's true. */
2878
2879 static void
2880 write_complex_part (rtx cplx, rtx val, bool imag_p)
2881 {
2882 enum machine_mode cmode;
2883 enum machine_mode imode;
2884 unsigned ibitsize;
2885
2886 if (GET_CODE (cplx) == CONCAT)
2887 {
2888 emit_move_insn (XEXP (cplx, imag_p), val);
2889 return;
2890 }
2891
2892 cmode = GET_MODE (cplx);
2893 imode = GET_MODE_INNER (cmode);
2894 ibitsize = GET_MODE_BITSIZE (imode);
2895
2896 /* For MEMs simplify_gen_subreg may generate an invalid new address
2897 because, e.g., the original address is considered mode-dependent
2898 by the target, which restricts simplify_subreg from invoking
2899 adjust_address_nv. Instead of preparing fallback support for an
2900 invalid address, we call adjust_address_nv directly. */
2901 if (MEM_P (cplx))
2902 {
2903 emit_move_insn (adjust_address_nv (cplx, imode,
2904 imag_p ? GET_MODE_SIZE (imode) : 0),
2905 val);
2906 return;
2907 }
2908
2909 /* If the sub-object is at least word sized, then we know that subregging
2910 will work. This special case is important, since store_bit_field
2911 wants to operate on integer modes, and there's rarely an OImode to
2912 correspond to TCmode. */
2913 if (ibitsize >= BITS_PER_WORD
2914 /* For hard regs we have exact predicates. Assume we can split
2915 the original object if it spans an even number of hard regs.
2916 This special case is important for SCmode on 64-bit platforms
2917 where the natural size of floating-point regs is 32-bit. */
2918 || (REG_P (cplx)
2919 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2920 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2921 {
2922 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2923 imag_p ? GET_MODE_SIZE (imode) : 0);
2924 if (part)
2925 {
2926 emit_move_insn (part, val);
2927 return;
2928 }
2929 else
2930 /* simplify_gen_subreg may fail for sub-word MEMs. */
2931 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2932 }
2933
2934 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
2935 }
2936
2937 /* Extract one of the components of the complex value CPLX. Extract the
2938 real part if IMAG_P is false, and the imaginary part if it's true. */
2939
2940 static rtx
2941 read_complex_part (rtx cplx, bool imag_p)
2942 {
2943 enum machine_mode cmode, imode;
2944 unsigned ibitsize;
2945
2946 if (GET_CODE (cplx) == CONCAT)
2947 return XEXP (cplx, imag_p);
2948
2949 cmode = GET_MODE (cplx);
2950 imode = GET_MODE_INNER (cmode);
2951 ibitsize = GET_MODE_BITSIZE (imode);
2952
2953 /* Special case reads from complex constants that got spilled to memory. */
2954 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2955 {
2956 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2957 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2958 {
2959 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2960 if (CONSTANT_CLASS_P (part))
2961 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2962 }
2963 }
2964
2965 /* For MEMs simplify_gen_subreg may generate an invalid new address
2966 because, e.g., the original address is considered mode-dependent
2967 by the target, which restricts simplify_subreg from invoking
2968 adjust_address_nv. Instead of preparing fallback support for an
2969 invalid address, we call adjust_address_nv directly. */
2970 if (MEM_P (cplx))
2971 return adjust_address_nv (cplx, imode,
2972 imag_p ? GET_MODE_SIZE (imode) : 0);
2973
2974 /* If the sub-object is at least word sized, then we know that subregging
2975 will work. This special case is important, since extract_bit_field
2976 wants to operate on integer modes, and there's rarely an OImode to
2977 correspond to TCmode. */
2978 if (ibitsize >= BITS_PER_WORD
2979 /* For hard regs we have exact predicates. Assume we can split
2980 the original object if it spans an even number of hard regs.
2981 This special case is important for SCmode on 64-bit platforms
2982 where the natural size of floating-point regs is 32-bit. */
2983 || (REG_P (cplx)
2984 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2985 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2986 {
2987 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2988 imag_p ? GET_MODE_SIZE (imode) : 0);
2989 if (ret)
2990 return ret;
2991 else
2992 /* simplify_gen_subreg may fail for sub-word MEMs. */
2993 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2994 }
2995
2996 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2997 true, false, NULL_RTX, imode, imode);
2998 }
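
/* Illustrative sketch (editorial addition, not part of the original
   file): for a complex value represented as a CONCAT, e.g.

     (concat:SC (reg:SF 100) (reg:SF 101))

   read_complex_part (x, true) simply returns (reg:SF 101), while
   write_complex_part (x, val, false) moves VAL into (reg:SF 100).
   Register numbers are hypothetical. */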
2999 \f
3000 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3001 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3002 represented in NEW_MODE. If FORCE is true, this will never happen, as
3003 we'll force-create a SUBREG if needed. */
3004
3005 static rtx
3006 emit_move_change_mode (enum machine_mode new_mode,
3007 enum machine_mode old_mode, rtx x, bool force)
3008 {
3009 rtx ret;
3010
3011 if (push_operand (x, GET_MODE (x)))
3012 {
3013 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3014 MEM_COPY_ATTRIBUTES (ret, x);
3015 }
3016 else if (MEM_P (x))
3017 {
3018 /* We don't have to worry about changing the address since the
3019 size in bytes is supposed to be the same. */
3020 if (reload_in_progress)
3021 {
3022 /* Copy the MEM to change the mode and move any
3023 substitutions from the old MEM to the new one. */
3024 ret = adjust_address_nv (x, new_mode, 0);
3025 copy_replacements (x, ret);
3026 }
3027 else
3028 ret = adjust_address (x, new_mode, 0);
3029 }
3030 else
3031 {
3032 /* Note that we do want simplify_subreg's behavior of validating
3033 that the new mode is ok for a hard register. If we were to use
3034 simplify_gen_subreg, we would create the subreg, but would
3035 probably run into the target not being able to implement it. */
3036 /* Except, of course, when FORCE is true, when this is exactly what
3037 we want, which is needed for CCmodes on some targets. */
3038 if (force)
3039 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3040 else
3041 ret = simplify_subreg (new_mode, x, old_mode, 0);
3042 }
3043
3044 return ret;
3045 }
3046
3047 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3048 an integer mode of the same size as MODE. Returns the instruction
3049 emitted, or NULL if such a move could not be generated. */
3050
3051 static rtx
3052 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3053 {
3054 enum machine_mode imode;
3055 enum insn_code code;
3056
3057 /* There must exist a mode of the exact size we require. */
3058 imode = int_mode_for_mode (mode);
3059 if (imode == BLKmode)
3060 return NULL_RTX;
3061
3062 /* The target must support moves in this mode. */
3063 code = optab_handler (mov_optab, imode);
3064 if (code == CODE_FOR_nothing)
3065 return NULL_RTX;
3066
3067 x = emit_move_change_mode (imode, mode, x, force);
3068 if (x == NULL_RTX)
3069 return NULL_RTX;
3070 y = emit_move_change_mode (imode, mode, y, force);
3071 if (y == NULL_RTX)
3072 return NULL_RTX;
3073 return emit_insn (GEN_FCN (code) (x, y));
3074 }
3075
3076 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3077 Return an equivalent MEM that does not use an auto-increment. */
3078
3079 static rtx
3080 emit_move_resolve_push (enum machine_mode mode, rtx x)
3081 {
3082 enum rtx_code code = GET_CODE (XEXP (x, 0));
3083 HOST_WIDE_INT adjust;
3084 rtx temp;
3085
3086 adjust = GET_MODE_SIZE (mode);
3087 #ifdef PUSH_ROUNDING
3088 adjust = PUSH_ROUNDING (adjust);
3089 #endif
3090 if (code == PRE_DEC || code == POST_DEC)
3091 adjust = -adjust;
3092 else if (code == PRE_MODIFY || code == POST_MODIFY)
3093 {
3094 rtx expr = XEXP (XEXP (x, 0), 1);
3095 HOST_WIDE_INT val;
3096
3097 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3098 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3099 val = INTVAL (XEXP (expr, 1));
3100 if (GET_CODE (expr) == MINUS)
3101 val = -val;
3102 gcc_assert (adjust == val || adjust == -val);
3103 adjust = val;
3104 }
3105
3106 /* Do not use anti_adjust_stack, since we don't want to update
3107 stack_pointer_delta. */
3108 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3109 GEN_INT (adjust), stack_pointer_rtx,
3110 0, OPTAB_LIB_WIDEN);
3111 if (temp != stack_pointer_rtx)
3112 emit_move_insn (stack_pointer_rtx, temp);
3113
3114 switch (code)
3115 {
3116 case PRE_INC:
3117 case PRE_DEC:
3118 case PRE_MODIFY:
3119 temp = stack_pointer_rtx;
3120 break;
3121 case POST_INC:
3122 case POST_DEC:
3123 case POST_MODIFY:
3124 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3125 break;
3126 default:
3127 gcc_unreachable ();
3128 }
3129
3130 return replace_equiv_address (x, temp);
3131 }
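
/* Worked example (editorial addition, not part of the original file):
   assuming PUSH_ROUNDING is the identity and MODE is 8 bytes wide, a
   PRE_DEC push gives ADJUST == -8: the stack pointer is moved down by
   8 and the returned MEM addresses the stack pointer itself. For a
   POST_DEC push the returned MEM instead addresses
   (plus stack_pointer_rtx 8), the location the auto-decrement form
   would have written. */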
3132
3133 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3134 X is known to satisfy push_operand, and MODE is known to be complex.
3135 Returns the last instruction emitted. */
3136
3137 rtx
3138 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3139 {
3140 enum machine_mode submode = GET_MODE_INNER (mode);
3141 bool imag_first;
3142
3143 #ifdef PUSH_ROUNDING
3144 unsigned int submodesize = GET_MODE_SIZE (submode);
3145
3146 /* In case we are pushing to the stack, but the parts are not of a size
3147 the machine can push exactly, we need to use move instructions. */
3148 if (PUSH_ROUNDING (submodesize) != submodesize)
3149 {
3150 x = emit_move_resolve_push (mode, x);
3151 return emit_move_insn (x, y);
3152 }
3153 #endif
3154
3155 /* Note that the real part always precedes the imag part in memory
3156 regardless of machine's endianness. */
3157 switch (GET_CODE (XEXP (x, 0)))
3158 {
3159 case PRE_DEC:
3160 case POST_DEC:
3161 imag_first = true;
3162 break;
3163 case PRE_INC:
3164 case POST_INC:
3165 imag_first = false;
3166 break;
3167 default:
3168 gcc_unreachable ();
3169 }
3170
3171 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3172 read_complex_part (y, imag_first));
3173 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3174 read_complex_part (y, !imag_first));
3175 }
3176
3177 /* A subroutine of emit_move_complex. Perform the move from Y to X
3178 via two moves of the parts. Returns the last instruction emitted. */
3179
3180 rtx
3181 emit_move_complex_parts (rtx x, rtx y)
3182 {
3183 /* Show the output dies here. This is necessary for SUBREGs
3184 of pseudos since we cannot track their lifetimes correctly;
3185 hard regs shouldn't appear here except as return values. */
3186 if (!reload_completed && !reload_in_progress
3187 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3188 emit_clobber (x);
3189
3190 write_complex_part (x, read_complex_part (y, false), false);
3191 write_complex_part (x, read_complex_part (y, true), true);
3192
3193 return get_last_insn ();
3194 }
3195
3196 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3197 MODE is known to be complex. Returns the last instruction emitted. */
3198
3199 static rtx
3200 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3201 {
3202 bool try_int;
3203
3204 /* Need to take special care for pushes, to maintain proper ordering
3205 of the data, and possibly extra padding. */
3206 if (push_operand (x, mode))
3207 return emit_move_complex_push (mode, x, y);
3208
3209 /* See if we can coerce the target into moving both values at once. */
3210
3211 /* Move floating point as parts. */
3212 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3213 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3214 try_int = false;
3215 /* Not possible if the values are inherently not adjacent. */
3216 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3217 try_int = false;
3218 /* Possible if both are registers (or subregs of registers). */
3219 else if (register_operand (x, mode) && register_operand (y, mode))
3220 try_int = true;
3221 /* If one of the operands is a memory, and alignment constraints
3222 are friendly enough, we may be able to do combined memory operations.
3223 We do not attempt this if Y is a constant because that combination is
3224 usually better with the by-parts thing below. */
3225 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3226 && (!STRICT_ALIGNMENT
3227 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3228 try_int = true;
3229 else
3230 try_int = false;
3231
3232 if (try_int)
3233 {
3234 rtx ret;
3235
3236 /* For memory to memory moves, optimal behavior can be had with the
3237 existing block move logic. */
3238 if (MEM_P (x) && MEM_P (y))
3239 {
3240 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3241 BLOCK_OP_NO_LIBCALL);
3242 return get_last_insn ();
3243 }
3244
3245 ret = emit_move_via_integer (mode, x, y, true);
3246 if (ret)
3247 return ret;
3248 }
3249
3250 return emit_move_complex_parts (x, y);
3251 }
3252
3253 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3254 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3255
3256 static rtx
3257 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3258 {
3259 rtx ret;
3260
3261 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3262 if (mode != CCmode)
3263 {
3264 enum insn_code code = optab_handler (mov_optab, CCmode);
3265 if (code != CODE_FOR_nothing)
3266 {
3267 x = emit_move_change_mode (CCmode, mode, x, true);
3268 y = emit_move_change_mode (CCmode, mode, y, true);
3269 return emit_insn (GEN_FCN (code) (x, y));
3270 }
3271 }
3272
3273 /* Otherwise, find the MODE_INT mode of the same width. */
3274 ret = emit_move_via_integer (mode, x, y, false);
3275 gcc_assert (ret != NULL);
3276 return ret;
3277 }
3278
3279 /* Return true if word I of OP lies entirely in the
3280 undefined bits of a paradoxical subreg. */
3281
3282 static bool
3283 undefined_operand_subword_p (const_rtx op, int i)
3284 {
3285 enum machine_mode innermode, innermostmode;
3286 int offset;
3287 if (GET_CODE (op) != SUBREG)
3288 return false;
3289 innermode = GET_MODE (op);
3290 innermostmode = GET_MODE (SUBREG_REG (op));
3291 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3292 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3293 memory, except for a paradoxical subreg where we define
3294 SUBREG_BYTE to be 0; undo this exception as in
3295 simplify_subreg. */
3296 if (SUBREG_BYTE (op) == 0
3297 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3298 {
3299 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3300 if (WORDS_BIG_ENDIAN)
3301 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3302 if (BYTES_BIG_ENDIAN)
3303 offset += difference % UNITS_PER_WORD;
3304 }
3305 if (offset >= GET_MODE_SIZE (innermostmode)
3306 || offset <= -GET_MODE_SIZE (word_mode))
3307 return true;
3308 return false;
3309 }
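
/* Worked example (editorial addition, not part of the original file):
   on a hypothetical little-endian target with 4-byte words, OP being
   (subreg:DI (reg:SI 100) 0) is a paradoxical subreg whose upper word
   is undefined: for I == 1 the computed offset is 4, which is not
   below GET_MODE_SIZE (SImode), so the function returns true; for
   I == 0 it returns false. */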
3310
3311 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3312 MODE is any multi-word or full-word mode that lacks a move_insn
3313 pattern. Note that you will get better code if you define such
3314 patterns, even if they must turn into multiple assembler instructions. */
3315
3316 static rtx
3317 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3318 {
3319 rtx last_insn = 0;
3320 rtx seq, inner;
3321 bool need_clobber;
3322 int i;
3323
3324 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3325
3326 /* If X is a push on the stack, do the push now and replace
3327 X with a reference to the stack pointer. */
3328 if (push_operand (x, mode))
3329 x = emit_move_resolve_push (mode, x);
3330
3331 /* If we are in reload, see if either operand is a MEM whose address
3332 is scheduled for replacement. */
3333 if (reload_in_progress && MEM_P (x)
3334 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3335 x = replace_equiv_address_nv (x, inner);
3336 if (reload_in_progress && MEM_P (y)
3337 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3338 y = replace_equiv_address_nv (y, inner);
3339
3340 start_sequence ();
3341
3342 need_clobber = false;
3343 for (i = 0;
3344 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3345 i++)
3346 {
3347 rtx xpart = operand_subword (x, i, 1, mode);
3348 rtx ypart;
3349
3350 /* Do not generate code for a move if it would come entirely
3351 from the undefined bits of a paradoxical subreg. */
3352 if (undefined_operand_subword_p (y, i))
3353 continue;
3354
3355 ypart = operand_subword (y, i, 1, mode);
3356
3357 /* If we can't get a part of Y, put Y into memory if it is a
3358 constant. Otherwise, force it into a register. Then we must
3359 be able to get a part of Y. */
3360 if (ypart == 0 && CONSTANT_P (y))
3361 {
3362 y = use_anchored_address (force_const_mem (mode, y));
3363 ypart = operand_subword (y, i, 1, mode);
3364 }
3365 else if (ypart == 0)
3366 ypart = operand_subword_force (y, i, mode);
3367
3368 gcc_assert (xpart && ypart);
3369
3370 need_clobber |= (GET_CODE (xpart) == SUBREG);
3371
3372 last_insn = emit_move_insn (xpart, ypart);
3373 }
3374
3375 seq = get_insns ();
3376 end_sequence ();
3377
3378 /* Show the output dies here. This is necessary for SUBREGs
3379 of pseudos since we cannot track their lifetimes correctly;
3380 hard regs shouldn't appear here except as return values.
3381 We never want to emit such a clobber after reload. */
3382 if (x != y
3383 && ! (reload_in_progress || reload_completed)
3384 && need_clobber != 0)
3385 emit_clobber (x);
3386
3387 emit_insn (seq);
3388
3389 return last_insn;
3390 }
3391
3392 /* Low level part of emit_move_insn.
3393 Called just like emit_move_insn, but assumes X and Y
3394 are basically valid. */
3395
3396 rtx
3397 emit_move_insn_1 (rtx x, rtx y)
3398 {
3399 enum machine_mode mode = GET_MODE (x);
3400 enum insn_code code;
3401
3402 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3403
3404 code = optab_handler (mov_optab, mode);
3405 if (code != CODE_FOR_nothing)
3406 return emit_insn (GEN_FCN (code) (x, y));
3407
3408 /* Expand complex moves by moving real part and imag part. */
3409 if (COMPLEX_MODE_P (mode))
3410 return emit_move_complex (mode, x, y);
3411
3412 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3413 || ALL_FIXED_POINT_MODE_P (mode))
3414 {
3415 rtx result = emit_move_via_integer (mode, x, y, true);
3416
3417 /* If we can't find an integer mode, use multi words. */
3418 if (result)
3419 return result;
3420 else
3421 return emit_move_multi_word (mode, x, y);
3422 }
3423
3424 if (GET_MODE_CLASS (mode) == MODE_CC)
3425 return emit_move_ccmode (mode, x, y);
3426
3427 /* Try using a move pattern for the corresponding integer mode. This is
3428 only safe when simplify_subreg can convert MODE constants into integer
3429 constants. At present, it can only do this reliably if the value
3430 fits within a HOST_WIDE_INT. */
3431 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3432 {
3433 rtx ret = emit_move_via_integer (mode, x, y, false);
3434 if (ret)
3435 return ret;
3436 }
3437
3438 return emit_move_multi_word (mode, x, y);
3439 }
3440
3441 /* Generate code to copy Y into X.
3442 Both Y and X must have the same mode, except that
3443 Y can be a constant with VOIDmode.
3444 This mode cannot be BLKmode; use emit_block_move for that.
3445
3446 Return the last instruction emitted. */
3447
3448 rtx
3449 emit_move_insn (rtx x, rtx y)
3450 {
3451 enum machine_mode mode = GET_MODE (x);
3452 rtx y_cst = NULL_RTX;
3453 rtx last_insn, set;
3454
3455 gcc_assert (mode != BLKmode
3456 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3457
3458 if (CONSTANT_P (y))
3459 {
3460 if (optimize
3461 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3462 && (last_insn = compress_float_constant (x, y)))
3463 return last_insn;
3464
3465 y_cst = y;
3466
3467 if (!targetm.legitimate_constant_p (mode, y))
3468 {
3469 y = force_const_mem (mode, y);
3470
3471 /* If the target's cannot_force_const_mem prevented the spill,
3472 assume that the target's move expanders will also take care
3473 of the non-legitimate constant. */
3474 if (!y)
3475 y = y_cst;
3476 else
3477 y = use_anchored_address (y);
3478 }
3479 }
3480
3481 /* If X or Y are memory references, verify that their addresses are valid
3482 for the machine. */
3483 if (MEM_P (x)
3484 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3485 MEM_ADDR_SPACE (x))
3486 && ! push_operand (x, GET_MODE (x))))
3487 x = validize_mem (x);
3488
3489 if (MEM_P (y)
3490 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3491 MEM_ADDR_SPACE (y)))
3492 y = validize_mem (y);
3493
3494 gcc_assert (mode != BLKmode);
3495
3496 last_insn = emit_move_insn_1 (x, y);
3497
3498 if (y_cst && REG_P (x)
3499 && (set = single_set (last_insn)) != NULL_RTX
3500 && SET_DEST (set) == x
3501 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3502 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3503
3504 return last_insn;
3505 }
3506
3507 /* If Y is representable exactly in a narrower mode, and the target can
3508 perform the extension directly from constant or memory, then emit the
3509 move as an extension. */
3510
3511 static rtx
3512 compress_float_constant (rtx x, rtx y)
3513 {
3514 enum machine_mode dstmode = GET_MODE (x);
3515 enum machine_mode orig_srcmode = GET_MODE (y);
3516 enum machine_mode srcmode;
3517 REAL_VALUE_TYPE r;
3518 int oldcost, newcost;
3519 bool speed = optimize_insn_for_speed_p ();
3520
3521 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3522
3523 if (targetm.legitimate_constant_p (dstmode, y))
3524 oldcost = set_src_cost (y, speed);
3525 else
3526 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3527
3528 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3529 srcmode != orig_srcmode;
3530 srcmode = GET_MODE_WIDER_MODE (srcmode))
3531 {
3532 enum insn_code ic;
3533 rtx trunc_y, last_insn;
3534
3535 /* Skip if the target can't extend this way. */
3536 ic = can_extend_p (dstmode, srcmode, 0);
3537 if (ic == CODE_FOR_nothing)
3538 continue;
3539
3540 /* Skip if the narrowed value isn't exact. */
3541 if (! exact_real_truncate (srcmode, &r))
3542 continue;
3543
3544 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3545
3546 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3547 {
3548 /* Skip if the target needs extra instructions to perform
3549 the extension. */
3550 if (!insn_operand_matches (ic, 1, trunc_y))
3551 continue;
3552 /* This is valid, but may not be cheaper than the original. */
3553 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3554 speed);
3555 if (oldcost < newcost)
3556 continue;
3557 }
3558 else if (float_extend_from_mem[dstmode][srcmode])
3559 {
3560 trunc_y = force_const_mem (srcmode, trunc_y);
3561 /* This is valid, but may not be cheaper than the original. */
3562 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3563 speed);
3564 if (oldcost < newcost)
3565 continue;
3566 trunc_y = validize_mem (trunc_y);
3567 }
3568 else
3569 continue;
3570
3571 /* For CSE's benefit, force the compressed constant pool entry
3572 into a new pseudo. This constant may be used in different modes,
3573 and if not, combine will put things back together for us. */
3574 trunc_y = force_reg (srcmode, trunc_y);
3575 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3576 last_insn = get_last_insn ();
3577
3578 if (REG_P (x))
3579 set_unique_reg_note (last_insn, REG_EQUAL, y);
3580
3581 return last_insn;
3582 }
3583
3584 return NULL_RTX;
3585 }
3586 \f
3587 /* Pushing data onto the stack. */
3588
3589 /* Push a block of length SIZE (perhaps variable)
3590 and return an rtx to address the beginning of the block.
3591 The value may be virtual_outgoing_args_rtx.
3592
3593 EXTRA is the number of bytes of padding to push in addition to SIZE.
3594 BELOW nonzero means this padding comes at low addresses;
3595 otherwise, the padding comes at high addresses. */
3596
3597 rtx
3598 push_block (rtx size, int extra, int below)
3599 {
3600 rtx temp;
3601
3602 size = convert_modes (Pmode, ptr_mode, size, 1);
3603 if (CONSTANT_P (size))
3604 anti_adjust_stack (plus_constant (Pmode, size, extra));
3605 else if (REG_P (size) && extra == 0)
3606 anti_adjust_stack (size);
3607 else
3608 {
3609 temp = copy_to_mode_reg (Pmode, size);
3610 if (extra != 0)
3611 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3612 temp, 0, OPTAB_LIB_WIDEN);
3613 anti_adjust_stack (temp);
3614 }
3615
3616 #ifndef STACK_GROWS_DOWNWARD
3617 if (0)
3618 #else
3619 if (1)
3620 #endif
3621 {
3622 temp = virtual_outgoing_args_rtx;
3623 if (extra != 0 && below)
3624 temp = plus_constant (Pmode, temp, extra);
3625 }
3626 else
3627 {
3628 if (CONST_INT_P (size))
3629 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3630 -INTVAL (size) - (below ? 0 : extra));
3631 else if (extra != 0 && !below)
3632 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3633 negate_rtx (Pmode, plus_constant (Pmode, size,
3634 extra)));
3635 else
3636 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3637 negate_rtx (Pmode, size));
3638 }
3639
3640 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3641 }
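
/* A hypothetical caller allocating a 16-byte argument block with no
   extra padding might do

       rtx addr = push_block (GEN_INT (16), 0, 0);
       rtx blk = gen_rtx_MEM (BLKmode, addr);

   ADDR then addresses the start of the freshly allocated stack space
   (possibly virtual_outgoing_args_rtx itself).  */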
3642
3643 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3644
3645 static rtx
3646 mem_autoinc_base (rtx mem)
3647 {
3648 if (MEM_P (mem))
3649 {
3650 rtx addr = XEXP (mem, 0);
3651 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3652 return XEXP (addr, 0);
3653 }
3654 return NULL;
3655 }
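
/* For example, for a push destination such as
   (mem:SI (pre_dec:P (reg sp))) this returns the stack pointer
   register, while for a plain (mem:SI (reg addr)) it returns NULL.  */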
3656
3657 /* A utility routine used here, in reload, and in try_split. The insns
3658 after PREV up to and including LAST are known to adjust the stack,
3659 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3660 placing notes as appropriate. PREV may be NULL, indicating the
3661 entire insn sequence prior to LAST should be scanned.
3662
3663 The set of allowed stack pointer modifications is small:
3664 (1) One or more auto-inc style memory references (aka pushes),
3665 (2) One or more addition/subtraction with the SP as destination,
3666 (3) A single move insn with the SP as destination,
3667 (4) A call_pop insn,
3668 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3669
3670 Insns in the sequence that do not modify the SP are ignored,
3671 except for noreturn calls.
3672
3673 The return value is the amount of adjustment that can be trivially
3674 verified, via immediate operand or auto-inc. If the adjustment
3675 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
3676
3677 HOST_WIDE_INT
3678 find_args_size_adjust (rtx insn)
3679 {
3680 rtx dest, set, pat;
3681 int i;
3682
3683 pat = PATTERN (insn);
3684 set = NULL;
3685
3686 /* Look for a call_pop pattern. */
3687 if (CALL_P (insn))
3688 {
3689 /* We have to allow non-call_pop patterns for the case
3690 of emit_single_push_insn of a TLS address. */
3691 if (GET_CODE (pat) != PARALLEL)
3692 return 0;
3693
3694 /* All call_pop patterns have a stack pointer adjust in the parallel.
3695 The call itself is always first, and the stack adjust is
3696 usually last, so search from the end. */
3697 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3698 {
3699 set = XVECEXP (pat, 0, i);
3700 if (GET_CODE (set) != SET)
3701 continue;
3702 dest = SET_DEST (set);
3703 if (dest == stack_pointer_rtx)
3704 break;
3705 }
3706 /* We'd better have found the stack pointer adjust. */
3707 if (i == 0)
3708 return 0;
3709 /* Fall through to process the extracted SET and DEST
3710 as if it were a standalone insn. */
3711 }
3712 else if (GET_CODE (pat) == SET)
3713 set = pat;
3714 else if ((set = single_set (insn)) != NULL)
3715 ;
3716 else if (GET_CODE (pat) == PARALLEL)
3717 {
3718 /* ??? Some older ports use a parallel with a stack adjust
3719 and a store for a PUSH_ROUNDING pattern, rather than a
3720 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3721 /* ??? See h8300 and m68k, pushqi1. */
3722 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3723 {
3724 set = XVECEXP (pat, 0, i);
3725 if (GET_CODE (set) != SET)
3726 continue;
3727 dest = SET_DEST (set);
3728 if (dest == stack_pointer_rtx)
3729 break;
3730
3731 /* We do not expect an auto-inc of the sp in the parallel. */
3732 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3733 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3734 != stack_pointer_rtx);
3735 }
3736 if (i < 0)
3737 return 0;
3738 }
3739 else
3740 return 0;
3741
3742 dest = SET_DEST (set);
3743
3744 /* Look for direct modifications of the stack pointer. */
3745 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3746 {
3747 /* Look for a trivial adjustment, otherwise assume nothing. */
3748 /* Note that the SPU restore_stack_block pattern refers to
3749 the stack pointer in V4SImode. Consider that non-trivial. */
3750 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3751 && GET_CODE (SET_SRC (set)) == PLUS
3752 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3753 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3754 return INTVAL (XEXP (SET_SRC (set), 1));
3755 /* ??? Reload can generate no-op moves, which will be cleaned
3756 up later. Recognize it and continue searching. */
3757 else if (rtx_equal_p (dest, SET_SRC (set)))
3758 return 0;
3759 else
3760 return HOST_WIDE_INT_MIN;
3761 }
3762 else
3763 {
3764 rtx mem, addr;
3765
3766 /* Otherwise only think about autoinc patterns. */
3767 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3768 {
3769 mem = dest;
3770 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3771 != stack_pointer_rtx);
3772 }
3773 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3774 mem = SET_SRC (set);
3775 else
3776 return 0;
3777
3778 addr = XEXP (mem, 0);
3779 switch (GET_CODE (addr))
3780 {
3781 case PRE_INC:
3782 case POST_INC:
3783 return GET_MODE_SIZE (GET_MODE (mem));
3784 case PRE_DEC:
3785 case POST_DEC:
3786 return -GET_MODE_SIZE (GET_MODE (mem));
3787 case PRE_MODIFY:
3788 case POST_MODIFY:
3789 addr = XEXP (addr, 1);
3790 gcc_assert (GET_CODE (addr) == PLUS);
3791 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3792 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3793 return INTVAL (XEXP (addr, 1));
3794 default:
3795 gcc_unreachable ();
3796 }
3797 }
3798 }
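
/* A sketch of the cases handled above: a push expressed as
       (set (mem:SI (pre_dec (reg sp))) (reg X))
   yields -GET_MODE_SIZE (SImode); an explicit adjustment
       (set (reg sp) (plus (reg sp) (const_int -16)))
   yields -16; and an untraceable update such as
       (set (reg sp) (reg Y))
   yields HOST_WIDE_INT_MIN.  */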
3799
3800 int
3801 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3802 {
3803 int args_size = end_args_size;
3804 bool saw_unknown = false;
3805 rtx insn;
3806
3807 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3808 {
3809 HOST_WIDE_INT this_delta;
3810
3811 if (!NONDEBUG_INSN_P (insn))
3812 continue;
3813
3814 this_delta = find_args_size_adjust (insn);
3815 if (this_delta == 0)
3816 {
3817 if (!CALL_P (insn)
3818 || ACCUMULATE_OUTGOING_ARGS
3819 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3820 continue;
3821 }
3822
3823 gcc_assert (!saw_unknown);
3824 if (this_delta == HOST_WIDE_INT_MIN)
3825 saw_unknown = true;
3826
3827 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3828 #ifdef STACK_GROWS_DOWNWARD
3829 this_delta = -this_delta;
3830 #endif
3831 args_size -= this_delta;
3832 }
3833
3834 return saw_unknown ? INT_MIN : args_size;
3835 }
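
/* As an illustration, assuming two SImode pushes on a
   STACK_GROWS_DOWNWARD target with a final args_size of 8, the loop
   above annotates them as

       (set (mem:SI (pre_dec (reg sp))) ...)   REG_ARGS_SIZE 4
       (set (mem:SI (pre_dec (reg sp))) ...)   REG_ARGS_SIZE 8

   i.e. each note records the args_size in effect after its insn.  */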
3836
3837 #ifdef PUSH_ROUNDING
3838 /* Emit single push insn. */
3839
3840 static void
3841 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3842 {
3843 rtx dest_addr;
3844 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3845 rtx dest;
3846 enum insn_code icode;
3847
3848 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3849 /* If there is a push pattern, use it. Otherwise fall back to the old
3850 way of handing a MEM representing the push operation to the move expander. */
3851 icode = optab_handler (push_optab, mode);
3852 if (icode != CODE_FOR_nothing)
3853 {
3854 struct expand_operand ops[1];
3855
3856 create_input_operand (&ops[0], x, mode);
3857 if (maybe_expand_insn (icode, 1, ops))
3858 return;
3859 }
3860 if (GET_MODE_SIZE (mode) == rounded_size)
3861 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3862 /* If we are to pad downward, adjust the stack pointer first and
3863 then store X into the stack location using an offset. This is
3864 because emit_move_insn does not know how to pad; it does not have
3865 access to type. */
3866 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3867 {
3868 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3869 HOST_WIDE_INT offset;
3870
3871 emit_move_insn (stack_pointer_rtx,
3872 expand_binop (Pmode,
3873 #ifdef STACK_GROWS_DOWNWARD
3874 sub_optab,
3875 #else
3876 add_optab,
3877 #endif
3878 stack_pointer_rtx,
3879 GEN_INT (rounded_size),
3880 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3881
3882 offset = (HOST_WIDE_INT) padding_size;
3883 #ifdef STACK_GROWS_DOWNWARD
3884 if (STACK_PUSH_CODE == POST_DEC)
3885 /* We have already decremented the stack pointer, so get the
3886 previous value. */
3887 offset += (HOST_WIDE_INT) rounded_size;
3888 #else
3889 if (STACK_PUSH_CODE == POST_INC)
3890 /* We have already incremented the stack pointer, so get the
3891 previous value. */
3892 offset -= (HOST_WIDE_INT) rounded_size;
3893 #endif
3894 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3895 }
3896 else
3897 {
3898 #ifdef STACK_GROWS_DOWNWARD
3899 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3900 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3901 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3902 #else
3903 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3904 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3905 GEN_INT (rounded_size));
3906 #endif
3907 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3908 }
3909
3910 dest = gen_rtx_MEM (mode, dest_addr);
3911
3912 if (type != 0)
3913 {
3914 set_mem_attributes (dest, type, 1);
3915
3916 if (flag_optimize_sibling_calls)
3917 /* Function incoming arguments may overlap with sibling call
3918 outgoing arguments and we cannot allow reordering of reads
3919 from function arguments with stores to outgoing arguments
3920 of sibling calls. */
3921 set_mem_alias_set (dest, 0);
3922 }
3923 emit_move_insn (dest, x);
3924 }
3925
3926 /* Emit and annotate a single push insn. */
3927
3928 static void
3929 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3930 {
3931 int delta, old_delta = stack_pointer_delta;
3932 rtx prev = get_last_insn ();
3933 rtx last;
3934
3935 emit_single_push_insn_1 (mode, x, type);
3936
3937 last = get_last_insn ();
3938
3939 /* Notice the common case where we emitted exactly one insn. */
3940 if (PREV_INSN (last) == prev)
3941 {
3942 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
3943 return;
3944 }
3945
3946 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
3947 gcc_assert (delta == INT_MIN || delta == old_delta);
3948 }
3949 #endif
3950
3951 /* Generate code to push X onto the stack, assuming it has mode MODE and
3952 type TYPE.
3953 MODE is redundant except when X is a CONST_INT (since they don't
3954 carry mode info).
3955 SIZE is an rtx for the size of data to be copied (in bytes),
3956 needed only if X is BLKmode.
3957
3958 ALIGN (in bits) is maximum alignment we can assume.
3959
3960 If PARTIAL and REG are both nonzero, then copy that many of the first
3961 bytes of X into registers starting with REG, and push the rest of X.
3962 The amount of space pushed is decreased by PARTIAL bytes.
3963 REG must be a hard register in this case.
3964 If REG is zero but PARTIAL is not, take all other actions for an
3965 argument partially in registers, but do not actually load any
3966 registers.
3967
3968 EXTRA is the amount in bytes of extra space to leave next to this arg.
3969 This is ignored if an argument block has already been allocated.
3970
3971 On a machine that lacks real push insns, ARGS_ADDR is the address of
3972 the bottom of the argument block for this call. We use indexing off there
3973 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3974 argument block has not been preallocated.
3975
3976 ARGS_SO_FAR is the size of args previously pushed for this call.
3977
3978 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3979 for arguments passed in registers. If nonzero, it will be the number
3980 of bytes required. */
3981
3982 void
3983 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3984 unsigned int align, int partial, rtx reg, int extra,
3985 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3986 rtx alignment_pad)
3987 {
3988 rtx xinner;
3989 enum direction stack_direction
3990 #ifdef STACK_GROWS_DOWNWARD
3991 = downward;
3992 #else
3993 = upward;
3994 #endif
3995
3996 /* Decide where to pad the argument: `downward' for below,
3997 `upward' for above, or `none' for don't pad it.
3998 Default is below for small data on big-endian machines; else above. */
3999 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4000
4001 /* Invert direction if stack is post-decrement.
4002 FIXME: why? */
4003 if (STACK_PUSH_CODE == POST_DEC)
4004 if (where_pad != none)
4005 where_pad = (where_pad == downward ? upward : downward);
4006
4007 xinner = x;
4008
4009 if (mode == BLKmode
4010 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4011 {
4012 /* Copy a block into the stack, entirely or partially. */
4013
4014 rtx temp;
4015 int used;
4016 int offset;
4017 int skip;
4018
4019 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4020 used = partial - offset;
4021
4022 if (mode != BLKmode)
4023 {
4024 /* A value is to be stored in an insufficiently aligned
4025 stack slot; copy via a suitably aligned slot if
4026 necessary. */
4027 size = GEN_INT (GET_MODE_SIZE (mode));
4028 if (!MEM_P (xinner))
4029 {
4030 temp = assign_temp (type, 0, 1, 1);
4031 emit_move_insn (temp, xinner);
4032 xinner = temp;
4033 }
4034 }
4035
4036 gcc_assert (size);
4037
4038 /* USED is now the # of bytes we need not copy to the stack
4039 because registers will take care of them. */
4040
4041 if (partial != 0)
4042 xinner = adjust_address (xinner, BLKmode, used);
4043
4044 /* If the partial register-part of the arg counts in its stack size,
4045 skip the part of stack space corresponding to the registers.
4046 Otherwise, start copying to the beginning of the stack space,
4047 by setting SKIP to 0. */
4048 skip = (reg_parm_stack_space == 0) ? 0 : used;
4049
4050 #ifdef PUSH_ROUNDING
4051 /* Do it with several push insns if that doesn't take lots of insns
4052 and if there is no difficulty with push insns that skip bytes
4053 on the stack for alignment purposes. */
4054 if (args_addr == 0
4055 && PUSH_ARGS
4056 && CONST_INT_P (size)
4057 && skip == 0
4058 && MEM_ALIGN (xinner) >= align
4059 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4060 /* Here we avoid the case of a structure whose weak alignment
4061 forces many pushes of a small amount of data,
4062 and such small pushes do rounding that causes trouble. */
4063 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4064 || align >= BIGGEST_ALIGNMENT
4065 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4066 == (align / BITS_PER_UNIT)))
4067 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4068 {
4069 /* Push padding now if padding above and stack grows down,
4070 or if padding below and stack grows up.
4071 But if space already allocated, this has already been done. */
4072 if (extra && args_addr == 0
4073 && where_pad != none && where_pad != stack_direction)
4074 anti_adjust_stack (GEN_INT (extra));
4075
4076 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4077 }
4078 else
4079 #endif /* PUSH_ROUNDING */
4080 {
4081 rtx target;
4082
4083 /* Otherwise make space on the stack and copy the data
4084 to the address of that space. */
4085
4086 /* Deduct words put into registers from the size we must copy. */
4087 if (partial != 0)
4088 {
4089 if (CONST_INT_P (size))
4090 size = GEN_INT (INTVAL (size) - used);
4091 else
4092 size = expand_binop (GET_MODE (size), sub_optab, size,
4093 GEN_INT (used), NULL_RTX, 0,
4094 OPTAB_LIB_WIDEN);
4095 }
4096
4097 /* Get the address of the stack space.
4098 In this case, we do not deal with EXTRA separately.
4099 A single stack adjust will do. */
4100 if (! args_addr)
4101 {
4102 temp = push_block (size, extra, where_pad == downward);
4103 extra = 0;
4104 }
4105 else if (CONST_INT_P (args_so_far))
4106 temp = memory_address (BLKmode,
4107 plus_constant (Pmode, args_addr,
4108 skip + INTVAL (args_so_far)));
4109 else
4110 temp = memory_address (BLKmode,
4111 plus_constant (Pmode,
4112 gen_rtx_PLUS (Pmode,
4113 args_addr,
4114 args_so_far),
4115 skip));
4116
4117 if (!ACCUMULATE_OUTGOING_ARGS)
4118 {
4119 /* If the source is referenced relative to the stack pointer,
4120 copy it to another register to stabilize it. We do not need
4121 to do this if we know that we won't be changing sp. */
4122
4123 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4124 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4125 temp = copy_to_reg (temp);
4126 }
4127
4128 target = gen_rtx_MEM (BLKmode, temp);
4129
4130 /* We do *not* set_mem_attributes here, because incoming arguments
4131 may overlap with sibling call outgoing arguments and we cannot
4132 allow reordering of reads from function arguments with stores
4133 to outgoing arguments of sibling calls. We do, however, want
4134 to record the alignment of the stack slot. */
4135 /* ALIGN may well be better aligned than TYPE, e.g. due to
4136 PARM_BOUNDARY. Assume the caller isn't lying. */
4137 set_mem_align (target, align);
4138
4139 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4140 }
4141 }
4142 else if (partial > 0)
4143 {
4144 /* Scalar partly in registers. */
4145
4146 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4147 int i;
4148 int not_stack;
4149 /* # bytes of start of argument
4150 that we must make space for but need not store. */
4151 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4152 int args_offset = INTVAL (args_so_far);
4153 int skip;
4154
4155 /* Push padding now if padding above and stack grows down,
4156 or if padding below and stack grows up.
4157 But if space already allocated, this has already been done. */
4158 if (extra && args_addr == 0
4159 && where_pad != none && where_pad != stack_direction)
4160 anti_adjust_stack (GEN_INT (extra));
4161
4162 /* If we make space by pushing it, we might as well push
4163 the real data. Otherwise, we can leave OFFSET nonzero
4164 and leave the space uninitialized. */
4165 if (args_addr == 0)
4166 offset = 0;
4167
4168 /* Now NOT_STACK gets the number of words that we don't need to
4169 allocate on the stack. Convert OFFSET to words too. */
4170 not_stack = (partial - offset) / UNITS_PER_WORD;
4171 offset /= UNITS_PER_WORD;
4172
4173 /* If the partial register-part of the arg counts in its stack size,
4174 skip the part of stack space corresponding to the registers.
4175 Otherwise, start copying to the beginning of the stack space,
4176 by setting SKIP to 0. */
4177 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4178
4179 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4180 x = validize_mem (force_const_mem (mode, x));
4181
4182 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4183 SUBREGs of such registers are not allowed. */
4184 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4185 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4186 x = copy_to_reg (x);
4187
4188 /* Loop over all the words allocated on the stack for this arg. */
4189 /* We can do it by words, because any scalar bigger than a word
4190 has a size a multiple of a word. */
4191 #ifndef PUSH_ARGS_REVERSED
4192 for (i = not_stack; i < size; i++)
4193 #else
4194 for (i = size - 1; i >= not_stack; i--)
4195 #endif
4196 if (i >= not_stack + offset)
4197 emit_push_insn (operand_subword_force (x, i, mode),
4198 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4199 0, args_addr,
4200 GEN_INT (args_offset + ((i - not_stack + skip)
4201 * UNITS_PER_WORD)),
4202 reg_parm_stack_space, alignment_pad);
4203 }
4204 else
4205 {
4206 rtx addr;
4207 rtx dest;
4208
4209 /* Push padding now if padding above and stack grows down,
4210 or if padding below and stack grows up.
4211 But if space already allocated, this has already been done. */
4212 if (extra && args_addr == 0
4213 && where_pad != none && where_pad != stack_direction)
4214 anti_adjust_stack (GEN_INT (extra));
4215
4216 #ifdef PUSH_ROUNDING
4217 if (args_addr == 0 && PUSH_ARGS)
4218 emit_single_push_insn (mode, x, type);
4219 else
4220 #endif
4221 {
4222 if (CONST_INT_P (args_so_far))
4223 addr
4224 = memory_address (mode,
4225 plus_constant (Pmode, args_addr,
4226 INTVAL (args_so_far)));
4227 else
4228 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4229 args_so_far));
4230 dest = gen_rtx_MEM (mode, addr);
4231
4232 /* We do *not* set_mem_attributes here, because incoming arguments
4233 may overlap with sibling call outgoing arguments and we cannot
4234 allow reordering of reads from function arguments with stores
4235 to outgoing arguments of sibling calls. We do, however, want
4236 to record the alignment of the stack slot. */
4237 /* ALIGN may well be better aligned than TYPE, e.g. due to
4238 PARM_BOUNDARY. Assume the caller isn't lying. */
4239 set_mem_align (dest, align);
4240
4241 emit_move_insn (dest, x);
4242 }
4243 }
4244
4245 /* If part should go in registers, copy that part
4246 into the appropriate registers. Do this now, at the end,
4247 since mem-to-mem copies above may do function calls. */
4248 if (partial > 0 && reg != 0)
4249 {
4250 /* Handle calls that pass values in multiple non-contiguous locations.
4251 The Irix 6 ABI has examples of this. */
4252 if (GET_CODE (reg) == PARALLEL)
4253 emit_group_load (reg, x, type, -1);
4254 else
4255 {
4256 gcc_assert (partial % UNITS_PER_WORD == 0);
4257 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4258 }
4259 }
4260
4261 if (extra && args_addr == 0 && where_pad == stack_direction)
4262 anti_adjust_stack (GEN_INT (extra));
4263
4264 if (alignment_pad && args_addr == 0)
4265 anti_adjust_stack (alignment_pad);
4266 }
4267 \f
4268 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4269 operations. */
4270
4271 static rtx
4272 get_subtarget (rtx x)
4273 {
4274 return (optimize
4275 || x == 0
4276 /* Only registers can be subtargets. */
4277 || !REG_P (x)
4278 /* Don't use hard regs to avoid extending their life. */
4279 || REGNO (x) < FIRST_PSEUDO_REGISTER
4280 ? 0 : x);
4281 }
4282
4283 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4284 FIELD is a bitfield. Returns true if the optimization was successful,
4285 and there's nothing else to do. */
4286
4287 static bool
4288 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4289 unsigned HOST_WIDE_INT bitpos,
4290 unsigned HOST_WIDE_INT bitregion_start,
4291 unsigned HOST_WIDE_INT bitregion_end,
4292 enum machine_mode mode1, rtx str_rtx,
4293 tree to, tree src)
4294 {
4295 enum machine_mode str_mode = GET_MODE (str_rtx);
4296 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4297 tree op0, op1;
4298 rtx value, result;
4299 optab binop;
4300 gimple srcstmt;
4301 enum tree_code code;
4302
4303 if (mode1 != VOIDmode
4304 || bitsize >= BITS_PER_WORD
4305 || str_bitsize > BITS_PER_WORD
4306 || TREE_SIDE_EFFECTS (to)
4307 || TREE_THIS_VOLATILE (to))
4308 return false;
4309
4310 STRIP_NOPS (src);
4311 if (TREE_CODE (src) != SSA_NAME)
4312 return false;
4313 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4314 return false;
4315
4316 srcstmt = get_gimple_for_ssa_name (src);
4317 if (!srcstmt
4318 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4319 return false;
4320
4321 code = gimple_assign_rhs_code (srcstmt);
4322
4323 op0 = gimple_assign_rhs1 (srcstmt);
4324
4325 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4326 to find its initialization. Hopefully the initialization will
4327 be from a bitfield load. */
4328 if (TREE_CODE (op0) == SSA_NAME)
4329 {
4330 gimple op0stmt = get_gimple_for_ssa_name (op0);
4331
4332 /* We want to eventually have OP0 be the same as TO, which
4333 should be a bitfield. */
4334 if (!op0stmt
4335 || !is_gimple_assign (op0stmt)
4336 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4337 return false;
4338 op0 = gimple_assign_rhs1 (op0stmt);
4339 }
4340
4341 op1 = gimple_assign_rhs2 (srcstmt);
4342
4343 if (!operand_equal_p (to, op0, 0))
4344 return false;
4345
4346 if (MEM_P (str_rtx))
4347 {
4348 unsigned HOST_WIDE_INT offset1;
4349
4350 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4351 str_mode = word_mode;
4352 str_mode = get_best_mode (bitsize, bitpos,
4353 bitregion_start, bitregion_end,
4354 MEM_ALIGN (str_rtx), str_mode, 0);
4355 if (str_mode == VOIDmode)
4356 return false;
4357 str_bitsize = GET_MODE_BITSIZE (str_mode);
4358
4359 offset1 = bitpos;
4360 bitpos %= str_bitsize;
4361 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4362 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4363 }
4364 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4365 return false;
4366
4367 /* If the bit field covers the whole REG/MEM, store_field
4368 will likely generate better code. */
4369 if (bitsize >= str_bitsize)
4370 return false;
4371
4372 /* We can't handle fields split across multiple entities. */
4373 if (bitpos + bitsize > str_bitsize)
4374 return false;
4375
4376 if (BYTES_BIG_ENDIAN)
4377 bitpos = str_bitsize - bitpos - bitsize;
4378
4379 switch (code)
4380 {
4381 case PLUS_EXPR:
4382 case MINUS_EXPR:
4383 /* For now, just optimize the case of the topmost bitfield
4384 where we don't need to do any masking and also
4385 1 bit bitfields where xor can be used.
4386 We might win by one instruction for the other bitfields
4387 too if insv/extv instructions aren't used, so that
4388 can be added later. */
4389 if (bitpos + bitsize != str_bitsize
4390 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4391 break;
4392
4393 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4394 value = convert_modes (str_mode,
4395 TYPE_MODE (TREE_TYPE (op1)), value,
4396 TYPE_UNSIGNED (TREE_TYPE (op1)));
4397
4398 /* We may be accessing data outside the field, which means
4399 we can alias adjacent data. */
4400 if (MEM_P (str_rtx))
4401 {
4402 str_rtx = shallow_copy_rtx (str_rtx);
4403 set_mem_alias_set (str_rtx, 0);
4404 set_mem_expr (str_rtx, 0);
4405 }
4406
4407 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4408 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4409 {
4410 value = expand_and (str_mode, value, const1_rtx, NULL);
4411 binop = xor_optab;
4412 }
4413 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4414 result = expand_binop (str_mode, binop, str_rtx,
4415 value, str_rtx, 1, OPTAB_WIDEN);
4416 if (result != str_rtx)
4417 emit_move_insn (str_rtx, result);
4418 return true;
4419
4420 case BIT_IOR_EXPR:
4421 case BIT_XOR_EXPR:
4422 if (TREE_CODE (op1) != INTEGER_CST)
4423 break;
4424 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4425 value = convert_modes (str_mode,
4426 TYPE_MODE (TREE_TYPE (op1)), value,
4427 TYPE_UNSIGNED (TREE_TYPE (op1)));
4428
4429 /* We may be accessing data outside the field, which means
4430 we can alias adjacent data. */
4431 if (MEM_P (str_rtx))
4432 {
4433 str_rtx = shallow_copy_rtx (str_rtx);
4434 set_mem_alias_set (str_rtx, 0);
4435 set_mem_expr (str_rtx, 0);
4436 }
4437
4438 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4439 if (bitpos + bitsize != str_bitsize)
4440 {
4441 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1);
4442 value = expand_and (str_mode, value, mask, NULL_RTX);
4443 }
4444 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4445 result = expand_binop (str_mode, binop, str_rtx,
4446 value, str_rtx, 1, OPTAB_WIDEN);
4447 if (result != str_rtx)
4448 emit_move_insn (str_rtx, result);
4449 return true;
4450
4451 default:
4452 break;
4453 }
4454
4455 return false;
4456 }
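
/* As a hypothetical example of what the function above catches:

       struct S { unsigned lo : 12; unsigned hi : 4; } s;
       s.hi += 1;

   When HI happens to be the topmost field of the accessed word, no
   masking is needed and the read-modify-write collapses to a single
   add on the underlying word; a 1-bit field would use xor instead.  */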
4457
4458 /* In the C++ memory model, consecutive bit fields in a structure are
4459 considered one memory location.
4460
4461 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4462 returns the bit range of consecutive bits in which this COMPONENT_REF
4463 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4464 and *OFFSET may be adjusted in the process.
4465
4466 If the access does not need to be restricted, 0 is returned in both
4467 *BITSTART and *BITEND. */
4468
4469 static void
4470 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4471 unsigned HOST_WIDE_INT *bitend,
4472 tree exp,
4473 HOST_WIDE_INT *bitpos,
4474 tree *offset)
4475 {
4476 HOST_WIDE_INT bitoffset;
4477 tree field, repr;
4478
4479 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4480
4481 field = TREE_OPERAND (exp, 1);
4482 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4483 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4484 need to limit the range we can access. */
4485 if (!repr)
4486 {
4487 *bitstart = *bitend = 0;
4488 return;
4489 }
4490
4491 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4492 part of a larger bit field, then the representative does not serve any
4493 useful purpose. This can occur in Ada. */
4494 if (handled_component_p (TREE_OPERAND (exp, 0)))
4495 {
4496 enum machine_mode rmode;
4497 HOST_WIDE_INT rbitsize, rbitpos;
4498 tree roffset;
4499 int unsignedp;
4500 int volatilep = 0;
4501 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4502 &roffset, &rmode, &unsignedp, &volatilep, false);
4503 if ((rbitpos % BITS_PER_UNIT) != 0)
4504 {
4505 *bitstart = *bitend = 0;
4506 return;
4507 }
4508 }
4509
4510 /* Compute the adjustment to bitpos from the offset of the field
4511 relative to the representative. DECL_FIELD_OFFSET of field and
4512 repr are the same by construction if they are not constants,
4513 see finish_bitfield_layout. */
4514 if (host_integerp (DECL_FIELD_OFFSET (field), 1)
4515 && host_integerp (DECL_FIELD_OFFSET (repr), 1))
4516 bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
4517 - tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
4518 else
4519 bitoffset = 0;
4520 bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
4521 - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
4522
4523 /* If the adjustment is larger than bitpos, we would have a negative bit
4524 position for the lower bound and this may wreak havoc later. This can
4525 occur only if we have a non-null offset, so adjust offset and bitpos
4526 to make the lower bound non-negative. */
4527 if (bitoffset > *bitpos)
4528 {
4529 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4530
4531 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4532 gcc_assert (*offset != NULL_TREE);
4533
4534 *bitpos += adjust;
4535 *offset
4536 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4537 *bitstart = 0;
4538 }
4539 else
4540 *bitstart = *bitpos - bitoffset;
4541
4542 *bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
4543 }
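
/* For a hypothetical

       struct S { char c; int f1 : 3; int f2 : 7; char d; };

   F1 and F2 share one representative, so a store to either of them
   gets a bit range covering both fields but excluding C and D, which
   the generated read-modify-write sequence must not touch.  */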
4544
4545 /* Returns true if the MEM_REF REF refers to an object that does not
4546 reside in memory and has non-BLKmode. */
4547
4548 static bool
4549 mem_ref_refers_to_non_mem_p (tree ref)
4550 {
4551 tree base = TREE_OPERAND (ref, 0);
4552 if (TREE_CODE (base) != ADDR_EXPR)
4553 return false;
4554 base = TREE_OPERAND (base, 0);
4555 return (DECL_P (base)
4556 && !TREE_ADDRESSABLE (base)
4557 && DECL_MODE (base) != BLKmode
4558 && DECL_RTL_SET_P (base)
4559 && !MEM_P (DECL_RTL (base)));
4560 }
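
/* E.g. a gimple store through MEM[(int *)&x], where the scalar local X
   is not addressable and lives in a register, satisfies this
   predicate; expand_assignment below then treats it as a store into
   (part of) a register rather than into memory.  */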
4561
4562 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4563 is true, try generating a nontemporal store. */
4564
4565 void
4566 expand_assignment (tree to, tree from, bool nontemporal)
4567 {
4568 rtx to_rtx = 0;
4569 rtx result;
4570 enum machine_mode mode;
4571 unsigned int align;
4572 enum insn_code icode;
4573
4574 /* Don't crash if the lhs of the assignment was erroneous. */
4575 if (TREE_CODE (to) == ERROR_MARK)
4576 {
4577 expand_normal (from);
4578 return;
4579 }
4580
4581 /* Optimize away no-op moves without side-effects. */
4582 if (operand_equal_p (to, from, 0))
4583 return;
4584
4585 /* Handle misaligned stores. */
4586 mode = TYPE_MODE (TREE_TYPE (to));
4587 if ((TREE_CODE (to) == MEM_REF
4588 || TREE_CODE (to) == TARGET_MEM_REF)
4589 && mode != BLKmode
4590 && !mem_ref_refers_to_non_mem_p (to)
4591 && ((align = get_object_or_type_alignment (to))
4592 < GET_MODE_ALIGNMENT (mode))
4593 && (((icode = optab_handler (movmisalign_optab, mode))
4594 != CODE_FOR_nothing)
4595 || SLOW_UNALIGNED_ACCESS (mode, align)))
4596 {
4597 rtx reg, mem;
4598
4599 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4600 reg = force_not_mem (reg);
4601 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4602
4603 if (icode != CODE_FOR_nothing)
4604 {
4605 struct expand_operand ops[2];
4606
4607 create_fixed_operand (&ops[0], mem);
4608 create_input_operand (&ops[1], reg, mode);
4609 /* The movmisalign<mode> pattern cannot fail, else the assignment
4610 would silently be omitted. */
4611 expand_insn (icode, 2, ops);
4612 }
4613 else
4614 store_bit_field (mem, GET_MODE_BITSIZE (mode),
4615 0, 0, 0, mode, reg);
4616 return;
4617 }
4618
4619 /* Assignment of a structure component needs special treatment
4620 if the structure component's rtx is not simply a MEM.
4621 Assignment of an array element at a constant index, and assignment of
4622 an array element in an unaligned packed structure field, have the same
4623 problem. Same for (partially) storing into a non-memory object. */
4624 if (handled_component_p (to)
4625 || (TREE_CODE (to) == MEM_REF
4626 && mem_ref_refers_to_non_mem_p (to))
4627 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4628 {
4629 enum machine_mode mode1;
4630 HOST_WIDE_INT bitsize, bitpos;
4631 unsigned HOST_WIDE_INT bitregion_start = 0;
4632 unsigned HOST_WIDE_INT bitregion_end = 0;
4633 tree offset;
4634 int unsignedp;
4635 int volatilep = 0;
4636 tree tem;
4637 bool misalignp;
4638 rtx mem = NULL_RTX;
4639
4640 push_temp_slots ();
4641 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4642 &unsignedp, &volatilep, true);
4643
4644 if (TREE_CODE (to) == COMPONENT_REF
4645 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4646 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4647
4648 /* If we are going to use store_bit_field and extract_bit_field,
4649 make sure to_rtx will be safe for multiple use. */
4650 mode = TYPE_MODE (TREE_TYPE (tem));
4651 if (TREE_CODE (tem) == MEM_REF
4652 && mode != BLKmode
4653 && ((align = get_object_or_type_alignment (tem))
4654 < GET_MODE_ALIGNMENT (mode))
4655 && ((icode = optab_handler (movmisalign_optab, mode))
4656 != CODE_FOR_nothing))
4657 {
4658 struct expand_operand ops[2];
4659
4660 misalignp = true;
4661 to_rtx = gen_reg_rtx (mode);
4662 mem = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4663
4664 /* If the misaligned store doesn't overwrite all bits, perform
4665 rmw cycle on MEM. */
4666 if (bitsize != GET_MODE_BITSIZE (mode))
4667 {
4668 create_input_operand (&ops[0], to_rtx, mode);
4669 create_fixed_operand (&ops[1], mem);
4670 /* The movmisalign<mode> pattern cannot fail, else the assignment
4671 would silently be omitted. */
4672 expand_insn (icode, 2, ops);
4673
4674 mem = copy_rtx (mem);
4675 }
4676 }
4677 else
4678 {
4679 misalignp = false;
4680 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4681 }
4682
4683 /* If the bitfield is volatile, we want to access it in the
4684 field's mode, not the computed mode.
4685 If a MEM has VOIDmode (external with incomplete type),
4686 use BLKmode for it instead. */
4687 if (MEM_P (to_rtx))
4688 {
4689 if (volatilep && flag_strict_volatile_bitfields > 0)
4690 to_rtx = adjust_address (to_rtx, mode1, 0);
4691 else if (GET_MODE (to_rtx) == VOIDmode)
4692 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4693 }
4694
4695 if (offset != 0)
4696 {
4697 enum machine_mode address_mode;
4698 rtx offset_rtx;
4699
4700 if (!MEM_P (to_rtx))
4701 {
4702 /* We can get constant negative offsets into arrays with broken
4703 user code. Translate this to a trap instead of ICEing. */
4704 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4705 expand_builtin_trap ();
4706 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4707 }
4708
4709 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4710 address_mode
4711 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4712 if (GET_MODE (offset_rtx) != address_mode)
4713 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4714
4715 /* A constant address in TO_RTX can have VOIDmode; we must not
4716 call force_reg in that case, so avoid it here. */
4717 if (MEM_P (to_rtx)
4718 && GET_MODE (to_rtx) == BLKmode
4719 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4720 && bitsize > 0
4721 && (bitpos % bitsize) == 0
4722 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4723 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4724 {
4725 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4726 bitpos = 0;
4727 }
4728
4729 to_rtx = offset_address (to_rtx, offset_rtx,
4730 highest_pow2_factor_for_target (to,
4731 offset));
4732 }
4733
4734 /* No action is needed if the target is not a memory and the field
4735 lies completely outside that target. This can occur if the source
4736 code contains an out-of-bounds access to a small array. */
4737 if (!MEM_P (to_rtx)
4738 && GET_MODE (to_rtx) != BLKmode
4739 && (unsigned HOST_WIDE_INT) bitpos
4740 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4741 {
4742 expand_normal (from);
4743 result = NULL;
4744 }
4745 /* Handle expand_expr of a complex value returning a CONCAT. */
4746 else if (GET_CODE (to_rtx) == CONCAT)
4747 {
4748 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4749 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4750 && bitpos == 0
4751 && bitsize == mode_bitsize)
4752 result = store_expr (from, to_rtx, false, nontemporal);
4753 else if (bitsize == mode_bitsize / 2
4754 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4755 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4756 nontemporal);
4757 else if (bitpos + bitsize <= mode_bitsize / 2)
4758 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4759 bitregion_start, bitregion_end,
4760 mode1, from, TREE_TYPE (tem),
4761 get_alias_set (to), nontemporal);
4762 else if (bitpos >= mode_bitsize / 2)
4763 result = store_field (XEXP (to_rtx, 1), bitsize,
4764 bitpos - mode_bitsize / 2,
4765 bitregion_start, bitregion_end,
4766 mode1, from,
4767 TREE_TYPE (tem), get_alias_set (to),
4768 nontemporal);
4769 else if (bitpos == 0 && bitsize == mode_bitsize)
4770 {
4771 rtx from_rtx;
4772 result = expand_normal (from);
4773 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4774 TYPE_MODE (TREE_TYPE (from)), 0);
4775 emit_move_insn (XEXP (to_rtx, 0),
4776 read_complex_part (from_rtx, false));
4777 emit_move_insn (XEXP (to_rtx, 1),
4778 read_complex_part (from_rtx, true));
4779 }
4780 else
4781 {
4782 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4783 GET_MODE_SIZE (GET_MODE (to_rtx)),
4784 0);
4785 write_complex_part (temp, XEXP (to_rtx, 0), false);
4786 write_complex_part (temp, XEXP (to_rtx, 1), true);
4787 result = store_field (temp, bitsize, bitpos,
4788 bitregion_start, bitregion_end,
4789 mode1, from,
4790 TREE_TYPE (tem), get_alias_set (to),
4791 nontemporal);
4792 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4793 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4794 }
4795 }
4796 else
4797 {
4798 if (MEM_P (to_rtx))
4799 {
4800 /* If the field is at offset zero, we could have been given the
4801 DECL_RTX of the parent struct. Don't munge it. */
4802 to_rtx = shallow_copy_rtx (to_rtx);
4803
4804 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4805
4806 /* Deal with volatile and readonly fields. The former is only
4807 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4808 if (volatilep)
4809 MEM_VOLATILE_P (to_rtx) = 1;
4810 if (component_uses_parent_alias_set (to))
4811 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4812 }
4813
4814 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4815 bitregion_start, bitregion_end,
4816 mode1,
4817 to_rtx, to, from))
4818 result = NULL;
4819 else
4820 result = store_field (to_rtx, bitsize, bitpos,
4821 bitregion_start, bitregion_end,
4822 mode1, from,
4823 TREE_TYPE (tem), get_alias_set (to),
4824 nontemporal);
4825 }
4826
4827 if (misalignp)
4828 {
4829 struct expand_operand ops[2];
4830
4831 create_fixed_operand (&ops[0], mem);
4832 create_input_operand (&ops[1], to_rtx, mode);
4833 /* The movmisalign<mode> pattern cannot fail, else the assignment
4834 would silently be omitted. */
4835 expand_insn (icode, 2, ops);
4836 }
4837
4838 if (result)
4839 preserve_temp_slots (result);
4840 free_temp_slots ();
4841 pop_temp_slots ();
4842 return;
4843 }
4844
4845 /* If the rhs is a function call and its value is not an aggregate,
4846 call the function before we start to compute the lhs.
4847 This is needed for correct code for cases such as
4848 val = setjmp (buf) on machines where reference to val
4849 requires loading up part of an address in a separate insn.
4850
4851 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4852 since it might be a promoted variable where the zero- or sign- extension
4853 needs to be done. Handling this in the normal way is safe because no
4854 computation is done before the call. The same is true for SSA names. */
4855 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4856 && COMPLETE_TYPE_P (TREE_TYPE (from))
4857 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4858 && ! (((TREE_CODE (to) == VAR_DECL
4859 || TREE_CODE (to) == PARM_DECL
4860 || TREE_CODE (to) == RESULT_DECL)
4861 && REG_P (DECL_RTL (to)))
4862 || TREE_CODE (to) == SSA_NAME))
4863 {
4864 rtx value;
4865
4866 push_temp_slots ();
4867 value = expand_normal (from);
4868 if (to_rtx == 0)
4869 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4870
4871 /* Handle calls that return values in multiple non-contiguous locations.
4872 The Irix 6 ABI has examples of this. */
4873 if (GET_CODE (to_rtx) == PARALLEL)
4874 emit_group_load (to_rtx, value, TREE_TYPE (from),
4875 int_size_in_bytes (TREE_TYPE (from)));
4876 else if (GET_MODE (to_rtx) == BLKmode)
4877 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4878 else
4879 {
4880 if (POINTER_TYPE_P (TREE_TYPE (to)))
4881 value = convert_memory_address_addr_space
4882 (GET_MODE (to_rtx), value,
4883 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4884
4885 emit_move_insn (to_rtx, value);
4886 }
4887 preserve_temp_slots (to_rtx);
4888 free_temp_slots ();
4889 pop_temp_slots ();
4890 return;
4891 }
4892
4893 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
4894 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4895
4896 /* Don't move directly into a return register. */
4897 if (TREE_CODE (to) == RESULT_DECL
4898 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4899 {
4900 rtx temp;
4901
4902 push_temp_slots ();
4903 if (REG_P (to_rtx) && TYPE_MODE (TREE_TYPE (from)) == BLKmode)
4904 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
4905 else
4906 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4907
4908 if (GET_CODE (to_rtx) == PARALLEL)
4909 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4910 int_size_in_bytes (TREE_TYPE (from)));
4911 else if (temp)
4912 emit_move_insn (to_rtx, temp);
4913
4914 preserve_temp_slots (to_rtx);
4915 free_temp_slots ();
4916 pop_temp_slots ();
4917 return;
4918 }
4919
4920 /* In case we are returning the contents of an object which overlaps
4921 the place the value is being stored, use a safe function when copying
4922 a value through a pointer into a structure value return block. */
4923 if (TREE_CODE (to) == RESULT_DECL
4924 && TREE_CODE (from) == INDIRECT_REF
4925 && ADDR_SPACE_GENERIC_P
4926 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4927 && refs_may_alias_p (to, from)
4928 && cfun->returns_struct
4929 && !cfun->returns_pcc_struct)
4930 {
4931 rtx from_rtx, size;
4932
4933 push_temp_slots ();
4934 size = expr_size (from);
4935 from_rtx = expand_normal (from);
4936
4937 emit_library_call (memmove_libfunc, LCT_NORMAL,
4938 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4939 XEXP (from_rtx, 0), Pmode,
4940 convert_to_mode (TYPE_MODE (sizetype),
4941 size, TYPE_UNSIGNED (sizetype)),
4942 TYPE_MODE (sizetype));
4943
4944 preserve_temp_slots (to_rtx);
4945 free_temp_slots ();
4946 pop_temp_slots ();
4947 return;
4948 }
4949
4950 /* Compute FROM and store the value in the rtx we got. */
4951
4952 push_temp_slots ();
4953 result = store_expr (from, to_rtx, 0, nontemporal);
4954 preserve_temp_slots (result);
4955 free_temp_slots ();
4956 pop_temp_slots ();
4957 return;
4958 }
4959
4960 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4961 succeeded, false otherwise. */
4962
4963 bool
4964 emit_storent_insn (rtx to, rtx from)
4965 {
4966 struct expand_operand ops[2];
4967 enum machine_mode mode = GET_MODE (to);
4968 enum insn_code code = optab_handler (storent_optab, mode);
4969
4970 if (code == CODE_FOR_nothing)
4971 return false;
4972
4973 create_fixed_operand (&ops[0], to);
4974 create_input_operand (&ops[1], from, mode);
4975 return maybe_expand_insn (code, 2, ops);
4976 }
4977
4978 /* Generate code for computing expression EXP,
4979 and storing the value into TARGET.
4980
4981 If the mode is BLKmode then we may return TARGET itself.
4982 It turns out that in BLKmode it doesn't cause a problem,
4983 because C has no operators that could combine two different
4984 assignments into the same BLKmode object with different values
4985 with no sequence point. Will other languages need this to
4986 be more thorough?
4987
4988 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4989 stack, and block moves may need to be treated specially.
4990
4991 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4992
4993 rtx
4994 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4995 {
4996 rtx temp;
4997 rtx alt_rtl = NULL_RTX;
4998 location_t loc = EXPR_LOCATION (exp);
4999
5000 if (VOID_TYPE_P (TREE_TYPE (exp)))
5001 {
5002 /* C++ can generate ?: expressions with a throw expression in one
5003 branch and an rvalue in the other. Here, we resolve attempts to
5004 store the throw expression's nonexistent result. */
5005 gcc_assert (!call_param_p);
5006 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5007 return NULL_RTX;
5008 }
5009 if (TREE_CODE (exp) == COMPOUND_EXPR)
5010 {
5011 /* Perform first part of compound expression, then assign from second
5012 part. */
5013 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5014 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5015 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5016 nontemporal);
5017 }
5018 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5019 {
5020 /* For conditional expression, get safe form of the target. Then
5021 test the condition, doing the appropriate assignment on either
5022 side. This avoids the creation of unnecessary temporaries.
5023 For non-BLKmode, it is more efficient not to do this. */
5024
5025 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5026
5027 do_pending_stack_adjust ();
5028 NO_DEFER_POP;
5029 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5030 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5031 nontemporal);
5032 emit_jump_insn (gen_jump (lab2));
5033 emit_barrier ();
5034 emit_label (lab1);
5035 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5036 nontemporal);
5037 emit_label (lab2);
5038 OK_DEFER_POP;
5039
5040 return NULL_RTX;
5041 }
5042 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5043 /* If this is a scalar in a register that is stored in a wider mode
5044 than the declared mode, compute the result into its declared mode
5045 and then convert to the wider mode. Our value is the computed
5046 expression. */
5047 {
5048 rtx inner_target = 0;
5049
5050 /* We can do the conversion inside EXP, which will often result
5051 in some optimizations. Do the conversion in two steps: first
5052 change the signedness, if needed, then the extend. But don't
5053 do this if the type of EXP is a subtype of something else
5054 since then the conversion might involve more than just
5055 converting modes. */
5056 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5057 && TREE_TYPE (TREE_TYPE (exp)) == 0
5058 && GET_MODE_PRECISION (GET_MODE (target))
5059 == TYPE_PRECISION (TREE_TYPE (exp)))
5060 {
5061 if (TYPE_UNSIGNED (TREE_TYPE (exp))
5062 != SUBREG_PROMOTED_UNSIGNED_P (target))
5063 {
5064 /* Some types, e.g. Fortran's logical*4, won't have a signed
5065 version, so use the mode instead. */
5066 tree ntype
5067 = (signed_or_unsigned_type_for
5068 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5069 if (ntype == NULL)
5070 ntype = lang_hooks.types.type_for_mode
5071 (TYPE_MODE (TREE_TYPE (exp)),
5072 SUBREG_PROMOTED_UNSIGNED_P (target));
5073
5074 exp = fold_convert_loc (loc, ntype, exp);
5075 }
5076
5077 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5078 (GET_MODE (SUBREG_REG (target)),
5079 SUBREG_PROMOTED_UNSIGNED_P (target)),
5080 exp);
5081
5082 inner_target = SUBREG_REG (target);
5083 }
5084
5085 temp = expand_expr (exp, inner_target, VOIDmode,
5086 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5087
5088 /* If TEMP is a VOIDmode constant, use convert_modes to make
5089 sure that we properly convert it. */
5090 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5091 {
5092 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5093 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5094 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5095 GET_MODE (target), temp,
5096 SUBREG_PROMOTED_UNSIGNED_P (target));
5097 }
5098
5099 convert_move (SUBREG_REG (target), temp,
5100 SUBREG_PROMOTED_UNSIGNED_P (target));
5101
5102 return NULL_RTX;
5103 }
5104 else if ((TREE_CODE (exp) == STRING_CST
5105 || (TREE_CODE (exp) == MEM_REF
5106 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5107 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5108 == STRING_CST
5109 && integer_zerop (TREE_OPERAND (exp, 1))))
5110 && !nontemporal && !call_param_p
5111 && MEM_P (target))
5112 {
5113 /* Optimize initialization of an array with a STRING_CST. */
5114 HOST_WIDE_INT exp_len, str_copy_len;
5115 rtx dest_mem;
5116 tree str = TREE_CODE (exp) == STRING_CST
5117 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5118
5119 exp_len = int_expr_size (exp);
5120 if (exp_len <= 0)
5121 goto normal_expr;
5122
5123 if (TREE_STRING_LENGTH (str) <= 0)
5124 goto normal_expr;
5125
5126 str_copy_len = strlen (TREE_STRING_POINTER (str));
5127 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5128 goto normal_expr;
5129
5130 str_copy_len = TREE_STRING_LENGTH (str);
5131 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5132 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5133 {
5134 str_copy_len += STORE_MAX_PIECES - 1;
5135 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5136 }
5137 str_copy_len = MIN (str_copy_len, exp_len);
5138 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5139 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5140 MEM_ALIGN (target), false))
5141 goto normal_expr;
5142
5143 dest_mem = target;
5144
5145 dest_mem = store_by_pieces (dest_mem,
5146 str_copy_len, builtin_strncpy_read_str,
5147 CONST_CAST (char *,
5148 TREE_STRING_POINTER (str)),
5149 MEM_ALIGN (target), false,
5150 exp_len > str_copy_len ? 1 : 0);
5151 if (exp_len > str_copy_len)
5152 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5153 GEN_INT (exp_len - str_copy_len),
5154 BLOCK_OP_NORMAL);
5155 return NULL_RTX;
5156 }
5157 else
5158 {
5159 rtx tmp_target;
5160
5161 normal_expr:
5162 /* If we want to use a nontemporal store, force the value to
5163 register first. */
5164 tmp_target = nontemporal ? NULL_RTX : target;
5165 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5166 (call_param_p
5167 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5168 &alt_rtl);
5169 }
5170
5171 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5172 the same as that of TARGET, adjust the constant. This is needed, for
5173 example, in case it is a CONST_DOUBLE and we want only a word-sized
5174 value. */
5175 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5176 && TREE_CODE (exp) != ERROR_MARK
5177 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5178 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5179 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5180
5181 /* If value was not generated in the target, store it there.
5182 Convert the value to TARGET's type first if necessary and emit the
5183 pending incrementations that have been queued when expanding EXP.
5184 Note that we cannot emit the whole queue blindly because this will
5185 effectively disable the POST_INC optimization later.
5186
5187 If TEMP and TARGET compare equal according to rtx_equal_p, but
5188 one or both of them are volatile memory refs, we have to distinguish
5189 two cases:
5190 - expand_expr has used TARGET. In this case, we must not generate
5191 another copy. This can be detected by TARGET being equal according
5192 to == .
5193 - expand_expr has not used TARGET - that means that the source just
5194 happens to have the same RTX form. Since temp will have been created
5195 by expand_expr, it will compare unequal according to == .
5196 We must generate a copy in this case, to reach the correct number
5197 of volatile memory references. */
5198
5199 if ((! rtx_equal_p (temp, target)
5200 || (temp != target && (side_effects_p (temp)
5201 || side_effects_p (target))))
5202 && TREE_CODE (exp) != ERROR_MARK
5203 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5204 but TARGET is not a valid memory reference, TEMP will differ
5205 from TARGET although it is really the same location. */
5206 && !(alt_rtl
5207 && rtx_equal_p (alt_rtl, target)
5208 && !side_effects_p (alt_rtl)
5209 && !side_effects_p (target))
5210 /* If there's nothing to copy, don't bother. Don't call
5211 expr_size unless necessary, because some front ends' (e.g. C++'s)
5212 expr_size hook must not be given objects that are not
5213 supposed to be bit-copied or bit-initialized. */
5214 && expr_size (exp) != const0_rtx)
5215 {
5216 if (GET_MODE (temp) != GET_MODE (target)
5217 && GET_MODE (temp) != VOIDmode)
5218 {
5219 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5220 if (GET_MODE (target) == BLKmode
5221 && GET_MODE (temp) == BLKmode)
5222 emit_block_move (target, temp, expr_size (exp),
5223 (call_param_p
5224 ? BLOCK_OP_CALL_PARM
5225 : BLOCK_OP_NORMAL));
5226 else if (GET_MODE (target) == BLKmode)
5227 store_bit_field (target, INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5228 0, 0, 0, GET_MODE (temp), temp);
5229 else
5230 convert_move (target, temp, unsignedp);
5231 }
5232
5233 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5234 {
5235 /* Handle copying a string constant into an array. The string
5236 constant may be shorter than the array. So copy just the string's
5237 actual length, and clear the rest. First get the size of the data
5238 type of the string, which is actually the size of the target. */
5239 rtx size = expr_size (exp);
5240
5241 if (CONST_INT_P (size)
5242 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5243 emit_block_move (target, temp, size,
5244 (call_param_p
5245 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5246 else
5247 {
5248 enum machine_mode pointer_mode
5249 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5250 enum machine_mode address_mode
5251 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
5252
5253 /* Compute the size of the data to copy from the string. */
5254 tree copy_size
5255 = size_binop_loc (loc, MIN_EXPR,
5256 make_tree (sizetype, size),
5257 size_int (TREE_STRING_LENGTH (exp)));
5258 rtx copy_size_rtx
5259 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5260 (call_param_p
5261 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5262 rtx label = 0;
5263
5264 /* Copy that much. */
5265 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5266 TYPE_UNSIGNED (sizetype));
5267 emit_block_move (target, temp, copy_size_rtx,
5268 (call_param_p
5269 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5270
5271 /* Figure out how much is left in TARGET that we have to clear.
5272 Do all calculations in pointer_mode. */
5273 if (CONST_INT_P (copy_size_rtx))
5274 {
5275 size = plus_constant (address_mode, size,
5276 -INTVAL (copy_size_rtx));
5277 target = adjust_address (target, BLKmode,
5278 INTVAL (copy_size_rtx));
5279 }
5280 else
5281 {
5282 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5283 copy_size_rtx, NULL_RTX, 0,
5284 OPTAB_LIB_WIDEN);
5285
5286 if (GET_MODE (copy_size_rtx) != address_mode)
5287 copy_size_rtx = convert_to_mode (address_mode,
5288 copy_size_rtx,
5289 TYPE_UNSIGNED (sizetype));
5290
5291 target = offset_address (target, copy_size_rtx,
5292 highest_pow2_factor (copy_size));
5293 label = gen_label_rtx ();
5294 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5295 GET_MODE (size), 0, label);
5296 }
5297
5298 if (size != const0_rtx)
5299 clear_storage (target, size, BLOCK_OP_NORMAL);
5300
5301 if (label)
5302 emit_label (label);
5303 }
5304 }
5305 /* Handle calls that return values in multiple non-contiguous locations.
5306 The Irix 6 ABI has examples of this. */
5307 else if (GET_CODE (target) == PARALLEL)
5308 emit_group_load (target, temp, TREE_TYPE (exp),
5309 int_size_in_bytes (TREE_TYPE (exp)));
5310 else if (GET_MODE (temp) == BLKmode)
5311 emit_block_move (target, temp, expr_size (exp),
5312 (call_param_p
5313 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5314 else if (nontemporal
5315 && emit_storent_insn (target, temp))
5316 /* If we managed to emit a nontemporal store, there is nothing else to
5317 do. */
5318 ;
5319 else
5320 {
5321 temp = force_operand (temp, target);
5322 if (temp != target)
5323 emit_move_insn (target, temp);
5324 }
5325 }
5326
5327 return NULL_RTX;
5328 }
5329 \f
5330 /* Return true if field F of structure TYPE is a flexible array. */
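/* As an illustration (the type below is hypothetical, not taken from the
   sources): given

       struct packet { int len; char payload[]; };

   the field PAYLOAD would satisfy this predicate, since it is the last
   field, its array type has a zero lower bound and no upper bound, and
   the enclosing structure still has a known size.  */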
5331
5332 static bool
5333 flexible_array_member_p (const_tree f, const_tree type)
5334 {
5335 const_tree tf;
5336
5337 tf = TREE_TYPE (f);
5338 return (DECL_CHAIN (f) == NULL
5339 && TREE_CODE (tf) == ARRAY_TYPE
5340 && TYPE_DOMAIN (tf)
5341 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5342 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5343 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5344 && int_size_in_bytes (type) >= 0);
5345 }
5346
5347 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5348 must have in order for it to completely initialize a value of type TYPE.
5349 Return -1 if the number isn't known.
5350
5351 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
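/* For illustration, assuming a hypothetical type

       struct s { int a; double b[3]; };

   a call with FOR_CTOR_P true would return 2 (the two top-level fields a
   constructor must provide), while FOR_CTOR_P false would return 4 (one
   scalar for A plus three for the elements of B).  */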
5352
5353 static HOST_WIDE_INT
5354 count_type_elements (const_tree type, bool for_ctor_p)
5355 {
5356 switch (TREE_CODE (type))
5357 {
5358 case ARRAY_TYPE:
5359 {
5360 tree nelts;
5361
5362 nelts = array_type_nelts (type);
5363 if (nelts && host_integerp (nelts, 1))
5364 {
5365 unsigned HOST_WIDE_INT n;
5366
5367 n = tree_low_cst (nelts, 1) + 1;
5368 if (n == 0 || for_ctor_p)
5369 return n;
5370 else
5371 return n * count_type_elements (TREE_TYPE (type), false);
5372 }
5373 return for_ctor_p ? -1 : 1;
5374 }
5375
5376 case RECORD_TYPE:
5377 {
5378 unsigned HOST_WIDE_INT n;
5379 tree f;
5380
5381 n = 0;
5382 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5383 if (TREE_CODE (f) == FIELD_DECL)
5384 {
5385 if (!for_ctor_p)
5386 n += count_type_elements (TREE_TYPE (f), false);
5387 else if (!flexible_array_member_p (f, type))
5388 /* Don't count flexible arrays, which are not supposed
5389 to be initialized. */
5390 n += 1;
5391 }
5392
5393 return n;
5394 }
5395
5396 case UNION_TYPE:
5397 case QUAL_UNION_TYPE:
5398 {
5399 tree f;
5400 HOST_WIDE_INT n, m;
5401
5402 gcc_assert (!for_ctor_p);
5403 /* Estimate the number of scalars in each field and pick the
5404 maximum. Other estimates would do instead; the idea is simply
5405 to make sure that the estimate is not sensitive to the ordering
5406 of the fields. */
5407 n = 1;
5408 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5409 if (TREE_CODE (f) == FIELD_DECL)
5410 {
5411 m = count_type_elements (TREE_TYPE (f), false);
5412 /* If the field doesn't span the whole union, add an extra
5413 scalar for the rest. */
5414 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5415 TYPE_SIZE (type)) != 1)
5416 m++;
5417 if (n < m)
5418 n = m;
5419 }
5420 return n;
5421 }
5422
5423 case COMPLEX_TYPE:
5424 return 2;
5425
5426 case VECTOR_TYPE:
5427 return TYPE_VECTOR_SUBPARTS (type);
5428
5429 case INTEGER_TYPE:
5430 case REAL_TYPE:
5431 case FIXED_POINT_TYPE:
5432 case ENUMERAL_TYPE:
5433 case BOOLEAN_TYPE:
5434 case POINTER_TYPE:
5435 case OFFSET_TYPE:
5436 case REFERENCE_TYPE:
5437 case NULLPTR_TYPE:
5438 return 1;
5439
5440 case ERROR_MARK:
5441 return 0;
5442
5443 case VOID_TYPE:
5444 case METHOD_TYPE:
5445 case FUNCTION_TYPE:
5446 case LANG_TYPE:
5447 default:
5448 gcc_unreachable ();
5449 }
5450 }
5451
5452 /* Helper for categorize_ctor_elements. Identical interface. */
5453
5454 static bool
5455 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5456 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5457 {
5458 unsigned HOST_WIDE_INT idx;
5459 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5460 tree value, purpose, elt_type;
5461
5462 /* Whether CTOR is a valid constant initializer, in accordance with what
5463 initializer_constant_valid_p does. If inferred from the constructor
5464 elements, true until proven otherwise. */
5465 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5466 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5467
5468 nz_elts = 0;
5469 init_elts = 0;
5470 num_fields = 0;
5471 elt_type = NULL_TREE;
5472
5473 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5474 {
5475 HOST_WIDE_INT mult = 1;
5476
5477 if (TREE_CODE (purpose) == RANGE_EXPR)
5478 {
5479 tree lo_index = TREE_OPERAND (purpose, 0);
5480 tree hi_index = TREE_OPERAND (purpose, 1);
5481
5482 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
5483 mult = (tree_low_cst (hi_index, 1)
5484 - tree_low_cst (lo_index, 1) + 1);
5485 }
5486 num_fields += mult;
5487 elt_type = TREE_TYPE (value);
5488
5489 switch (TREE_CODE (value))
5490 {
5491 case CONSTRUCTOR:
5492 {
5493 HOST_WIDE_INT nz = 0, ic = 0;
5494
5495 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5496 p_complete);
5497
5498 nz_elts += mult * nz;
5499 init_elts += mult * ic;
5500
5501 if (const_from_elts_p && const_p)
5502 const_p = const_elt_p;
5503 }
5504 break;
5505
5506 case INTEGER_CST:
5507 case REAL_CST:
5508 case FIXED_CST:
5509 if (!initializer_zerop (value))
5510 nz_elts += mult;
5511 init_elts += mult;
5512 break;
5513
5514 case STRING_CST:
5515 nz_elts += mult * TREE_STRING_LENGTH (value);
5516 init_elts += mult * TREE_STRING_LENGTH (value);
5517 break;
5518
5519 case COMPLEX_CST:
5520 if (!initializer_zerop (TREE_REALPART (value)))
5521 nz_elts += mult;
5522 if (!initializer_zerop (TREE_IMAGPART (value)))
5523 nz_elts += mult;
5524 init_elts += mult;
5525 break;
5526
5527 case VECTOR_CST:
5528 {
5529 unsigned i;
5530 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5531 {
5532 tree v = VECTOR_CST_ELT (value, i);
5533 if (!initializer_zerop (v))
5534 nz_elts += mult;
5535 init_elts += mult;
5536 }
5537 }
5538 break;
5539
5540 default:
5541 {
5542 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5543 nz_elts += mult * tc;
5544 init_elts += mult * tc;
5545
5546 if (const_from_elts_p && const_p)
5547 const_p = initializer_constant_valid_p (value, elt_type)
5548 != NULL_TREE;
5549 }
5550 break;
5551 }
5552 }
5553
5554 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5555 num_fields, elt_type))
5556 *p_complete = false;
5557
5558 *p_nz_elts += nz_elts;
5559 *p_init_elts += init_elts;
5560
5561 return const_p;
5562 }
5563
5564 /* Examine CTOR to discover:
5565 * how many scalar fields are set to nonzero values,
5566 and place it in *P_NZ_ELTS;
5567 * how many scalar fields in total are in CTOR,
5568      and place it in *P_INIT_ELTS;
5569 * whether the constructor is complete -- in the sense that every
5570 meaningful byte is explicitly given a value --
5571 and place it in *P_COMPLETE.
5572
5573 Return whether or not CTOR is a valid static constant initializer, the same
5574 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
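/* For illustration, assuming a hypothetical CONSTRUCTOR for an int[4]
   object whose four elements are the INTEGER_CSTs 0, 5, 0 and 7, this
   would set *P_NZ_ELTS to 2, *P_INIT_ELTS to 4 and *P_COMPLETE to true,
   and would normally return true, since every element is itself a valid
   static constant initializer.  */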
5575
5576 bool
5577 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5578 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5579 {
5580 *p_nz_elts = 0;
5581 *p_init_elts = 0;
5582 *p_complete = true;
5583
5584 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5585 }
5586
5587 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5588 of which had type LAST_TYPE. Each element was itself a complete
5589 initializer, in the sense that every meaningful byte was explicitly
5590 given a value. Return true if the same is true for the constructor
5591 as a whole. */
5592
5593 bool
5594 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5595 const_tree last_type)
5596 {
5597 if (TREE_CODE (type) == UNION_TYPE
5598 || TREE_CODE (type) == QUAL_UNION_TYPE)
5599 {
5600 if (num_elts == 0)
5601 return false;
5602
5603 gcc_assert (num_elts == 1 && last_type);
5604
5605       /* ??? We could look at each element of the union and find the
5606 	 largest element, which would avoid comparing the size of the
5607 	 initialized element against any tail padding in the union.
5608 	 It doesn't seem worth the effort...  */
5609 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5610 }
5611
5612 return count_type_elements (type, true) == num_elts;
5613 }
5614
5615 /* Return 1 if EXP contains mostly (3/4) zeros. */
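/* For illustration: with the test below, a complete eight-element
   constructor with a single nonzero entry would count as mostly zero
   (1 < 8 / 4), whereas a complete four-element constructor with one
   nonzero entry would not (1 < 4 / 4 fails).  An incomplete constructor
   is always treated as mostly zero, since the uninitialized part has to
   be cleared anyway.  */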
5616
5617 static int
5618 mostly_zeros_p (const_tree exp)
5619 {
5620 if (TREE_CODE (exp) == CONSTRUCTOR)
5621 {
5622 HOST_WIDE_INT nz_elts, init_elts;
5623 bool complete_p;
5624
5625 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5626 return !complete_p || nz_elts < init_elts / 4;
5627 }
5628
5629 return initializer_zerop (exp);
5630 }
5631
5632 /* Return 1 if EXP contains all zeros. */
5633
5634 static int
5635 all_zeros_p (const_tree exp)
5636 {
5637 if (TREE_CODE (exp) == CONSTRUCTOR)
5638 {
5639 HOST_WIDE_INT nz_elts, init_elts;
5640 bool complete_p;
5641
5642 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5643 return nz_elts == 0;
5644 }
5645
5646 return initializer_zerop (exp);
5647 }
5648 \f
5649 /* Helper function for store_constructor.
5650 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5651 TYPE is the type of the CONSTRUCTOR, not the element type.
5652 CLEARED is as for store_constructor.
5653 ALIAS_SET is the alias set to use for any stores.
5654
5655 This provides a recursive shortcut back to store_constructor when it isn't
5656 necessary to go through store_field. This is so that we can pass through
5657 the cleared field to let store_constructor know that we may not have to
5658 clear a substructure if the outer structure has already been cleared. */
5659
5660 static void
5661 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5662 HOST_WIDE_INT bitpos, enum machine_mode mode,
5663 tree exp, tree type, int cleared,
5664 alias_set_type alias_set)
5665 {
5666 if (TREE_CODE (exp) == CONSTRUCTOR
5667 /* We can only call store_constructor recursively if the size and
5668 bit position are on a byte boundary. */
5669 && bitpos % BITS_PER_UNIT == 0
5670 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5671 /* If we have a nonzero bitpos for a register target, then we just
5672 let store_field do the bitfield handling. This is unlikely to
5673 	 generate unnecessary clear instructions anyway.  */
5674 && (bitpos == 0 || MEM_P (target)))
5675 {
5676 if (MEM_P (target))
5677 target
5678 = adjust_address (target,
5679 GET_MODE (target) == BLKmode
5680 || 0 != (bitpos
5681 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5682 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5683
5684
5685 /* Update the alias set, if required. */
5686 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5687 && MEM_ALIAS_SET (target) != 0)
5688 {
5689 target = copy_rtx (target);
5690 set_mem_alias_set (target, alias_set);
5691 }
5692
5693 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5694 }
5695 else
5696 store_field (target, bitsize, bitpos, 0, 0, mode, exp, type, alias_set,
5697 false);
5698 }
5699
5700 /* Store the value of constructor EXP into the rtx TARGET.
5701 TARGET is either a REG or a MEM; we know it cannot conflict, since
5702 safe_from_p has been called.
5703    CLEARED is true if TARGET is known to have been zeroed.
5704 SIZE is the number of bytes of TARGET we are allowed to modify: this
5705 may not be the same as the size of EXP if we are assigning to a field
5706 which has been packed to exclude padding bits. */
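/* For illustration, assuming a hypothetical two-field aggregate

       struct pt { int x, y; };

   a store whose source is the CONSTRUCTOR { 1, 2 } would take the
   RECORD_TYPE case below: the target is cleared first if fields are
   missing or the value is mostly zero, and each element is then written
   through store_constructor_field.  */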
5707
5708 static void
5709 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5710 {
5711 tree type = TREE_TYPE (exp);
5712 #ifdef WORD_REGISTER_OPERATIONS
5713 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5714 #endif
5715
5716 switch (TREE_CODE (type))
5717 {
5718 case RECORD_TYPE:
5719 case UNION_TYPE:
5720 case QUAL_UNION_TYPE:
5721 {
5722 unsigned HOST_WIDE_INT idx;
5723 tree field, value;
5724
5725 /* If size is zero or the target is already cleared, do nothing. */
5726 if (size == 0 || cleared)
5727 cleared = 1;
5728 /* We either clear the aggregate or indicate the value is dead. */
5729 else if ((TREE_CODE (type) == UNION_TYPE
5730 || TREE_CODE (type) == QUAL_UNION_TYPE)
5731 && ! CONSTRUCTOR_ELTS (exp))
5732 /* If the constructor is empty, clear the union. */
5733 {
5734 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5735 cleared = 1;
5736 }
5737
5738 /* If we are building a static constructor into a register,
5739 set the initial value as zero so we can fold the value into
5740 a constant. But if more than one register is involved,
5741 this probably loses. */
5742 else if (REG_P (target) && TREE_STATIC (exp)
5743 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5744 {
5745 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5746 cleared = 1;
5747 }
5748
5749 /* If the constructor has fewer fields than the structure or
5750 if we are initializing the structure to mostly zeros, clear
5751 the whole structure first. Don't do this if TARGET is a
5752 register whose mode size isn't equal to SIZE since
5753 clear_storage can't handle this case. */
5754 else if (size > 0
5755 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5756 != fields_length (type))
5757 || mostly_zeros_p (exp))
5758 && (!REG_P (target)
5759 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5760 == size)))
5761 {
5762 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5763 cleared = 1;
5764 }
5765
5766 if (REG_P (target) && !cleared)
5767 emit_clobber (target);
5768
5769 /* Store each element of the constructor into the
5770 corresponding field of TARGET. */
5771 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5772 {
5773 enum machine_mode mode;
5774 HOST_WIDE_INT bitsize;
5775 HOST_WIDE_INT bitpos = 0;
5776 tree offset;
5777 rtx to_rtx = target;
5778
5779 /* Just ignore missing fields. We cleared the whole
5780 structure, above, if any fields are missing. */
5781 if (field == 0)
5782 continue;
5783
5784 if (cleared && initializer_zerop (value))
5785 continue;
5786
5787 if (host_integerp (DECL_SIZE (field), 1))
5788 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5789 else
5790 bitsize = -1;
5791
5792 mode = DECL_MODE (field);
5793 if (DECL_BIT_FIELD (field))
5794 mode = VOIDmode;
5795
5796 offset = DECL_FIELD_OFFSET (field);
5797 if (host_integerp (offset, 0)
5798 && host_integerp (bit_position (field), 0))
5799 {
5800 bitpos = int_bit_position (field);
5801 offset = 0;
5802 }
5803 else
5804 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5805
5806 if (offset)
5807 {
5808 enum machine_mode address_mode;
5809 rtx offset_rtx;
5810
5811 offset
5812 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5813 make_tree (TREE_TYPE (exp),
5814 target));
5815
5816 offset_rtx = expand_normal (offset);
5817 gcc_assert (MEM_P (to_rtx));
5818
5819 address_mode
5820 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5821 if (GET_MODE (offset_rtx) != address_mode)
5822 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5823
5824 to_rtx = offset_address (to_rtx, offset_rtx,
5825 highest_pow2_factor (offset));
5826 }
5827
5828 #ifdef WORD_REGISTER_OPERATIONS
5829 /* If this initializes a field that is smaller than a
5830 word, at the start of a word, try to widen it to a full
5831 word. This special case allows us to output C++ member
5832 function initializations in a form that the optimizers
5833 can understand. */
5834 if (REG_P (target)
5835 && bitsize < BITS_PER_WORD
5836 && bitpos % BITS_PER_WORD == 0
5837 && GET_MODE_CLASS (mode) == MODE_INT
5838 && TREE_CODE (value) == INTEGER_CST
5839 && exp_size >= 0
5840 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5841 {
5842 tree type = TREE_TYPE (value);
5843
5844 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5845 {
5846 type = lang_hooks.types.type_for_mode
5847 (word_mode, TYPE_UNSIGNED (type));
5848 value = fold_convert (type, value);
5849 }
5850
5851 if (BYTES_BIG_ENDIAN)
5852 value
5853 = fold_build2 (LSHIFT_EXPR, type, value,
5854 build_int_cst (type,
5855 BITS_PER_WORD - bitsize));
5856 bitsize = BITS_PER_WORD;
5857 mode = word_mode;
5858 }
5859 #endif
5860
5861 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5862 && DECL_NONADDRESSABLE_P (field))
5863 {
5864 to_rtx = copy_rtx (to_rtx);
5865 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5866 }
5867
5868 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5869 value, type, cleared,
5870 get_alias_set (TREE_TYPE (field)));
5871 }
5872 break;
5873 }
5874 case ARRAY_TYPE:
5875 {
5876 tree value, index;
5877 unsigned HOST_WIDE_INT i;
5878 int need_to_clear;
5879 tree domain;
5880 tree elttype = TREE_TYPE (type);
5881 int const_bounds_p;
5882 HOST_WIDE_INT minelt = 0;
5883 HOST_WIDE_INT maxelt = 0;
5884
5885 domain = TYPE_DOMAIN (type);
5886 const_bounds_p = (TYPE_MIN_VALUE (domain)
5887 && TYPE_MAX_VALUE (domain)
5888 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5889 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5890
5891 /* If we have constant bounds for the range of the type, get them. */
5892 if (const_bounds_p)
5893 {
5894 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5895 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5896 }
5897
5898 /* If the constructor has fewer elements than the array, clear
5899 	   the whole array first.  Similarly if this is a static
5900 	   constructor of a non-BLKmode object.  */
5901 if (cleared)
5902 need_to_clear = 0;
5903 else if (REG_P (target) && TREE_STATIC (exp))
5904 need_to_clear = 1;
5905 else
5906 {
5907 unsigned HOST_WIDE_INT idx;
5908 tree index, value;
5909 HOST_WIDE_INT count = 0, zero_count = 0;
5910 need_to_clear = ! const_bounds_p;
5911
5912 /* This loop is a more accurate version of the loop in
5913 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5914 is also needed to check for missing elements. */
5915 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5916 {
5917 HOST_WIDE_INT this_node_count;
5918
5919 if (need_to_clear)
5920 break;
5921
5922 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5923 {
5924 tree lo_index = TREE_OPERAND (index, 0);
5925 tree hi_index = TREE_OPERAND (index, 1);
5926
5927 if (! host_integerp (lo_index, 1)
5928 || ! host_integerp (hi_index, 1))
5929 {
5930 need_to_clear = 1;
5931 break;
5932 }
5933
5934 this_node_count = (tree_low_cst (hi_index, 1)
5935 - tree_low_cst (lo_index, 1) + 1);
5936 }
5937 else
5938 this_node_count = 1;
5939
5940 count += this_node_count;
5941 if (mostly_zeros_p (value))
5942 zero_count += this_node_count;
5943 }
5944
5945 /* Clear the entire array first if there are any missing
5946 elements, or if the incidence of zero elements is >=
5947 75%. */
5948 if (! need_to_clear
5949 && (count < maxelt - minelt + 1
5950 || 4 * zero_count >= 3 * count))
5951 need_to_clear = 1;
5952 }
5953
5954 if (need_to_clear && size > 0)
5955 {
5956 if (REG_P (target))
5957 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5958 else
5959 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5960 cleared = 1;
5961 }
5962
5963 if (!cleared && REG_P (target))
5964 /* Inform later passes that the old value is dead. */
5965 emit_clobber (target);
5966
5967 /* Store each element of the constructor into the
5968 corresponding element of TARGET, determined by counting the
5969 elements. */
5970 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5971 {
5972 enum machine_mode mode;
5973 HOST_WIDE_INT bitsize;
5974 HOST_WIDE_INT bitpos;
5975 rtx xtarget = target;
5976
5977 if (cleared && initializer_zerop (value))
5978 continue;
5979
5980 mode = TYPE_MODE (elttype);
5981 if (mode == BLKmode)
5982 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5983 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5984 : -1);
5985 else
5986 bitsize = GET_MODE_BITSIZE (mode);
5987
5988 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5989 {
5990 tree lo_index = TREE_OPERAND (index, 0);
5991 tree hi_index = TREE_OPERAND (index, 1);
5992 rtx index_r, pos_rtx;
5993 HOST_WIDE_INT lo, hi, count;
5994 tree position;
5995
5996 /* If the range is constant and "small", unroll the loop. */
5997 if (const_bounds_p
5998 && host_integerp (lo_index, 0)
5999 && host_integerp (hi_index, 0)
6000 && (lo = tree_low_cst (lo_index, 0),
6001 hi = tree_low_cst (hi_index, 0),
6002 count = hi - lo + 1,
6003 (!MEM_P (target)
6004 || count <= 2
6005 || (host_integerp (TYPE_SIZE (elttype), 1)
6006 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
6007 <= 40 * 8)))))
6008 {
6009 lo -= minelt; hi -= minelt;
6010 for (; lo <= hi; lo++)
6011 {
6012 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
6013
6014 if (MEM_P (target)
6015 && !MEM_KEEP_ALIAS_SET_P (target)
6016 && TREE_CODE (type) == ARRAY_TYPE
6017 && TYPE_NONALIASED_COMPONENT (type))
6018 {
6019 target = copy_rtx (target);
6020 MEM_KEEP_ALIAS_SET_P (target) = 1;
6021 }
6022
6023 store_constructor_field
6024 (target, bitsize, bitpos, mode, value, type, cleared,
6025 get_alias_set (elttype));
6026 }
6027 }
6028 else
6029 {
6030 rtx loop_start = gen_label_rtx ();
6031 rtx loop_end = gen_label_rtx ();
6032 tree exit_cond;
6033
6034 expand_normal (hi_index);
6035
6036 index = build_decl (EXPR_LOCATION (exp),
6037 VAR_DECL, NULL_TREE, domain);
6038 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6039 SET_DECL_RTL (index, index_r);
6040 store_expr (lo_index, index_r, 0, false);
6041
6042 /* Build the head of the loop. */
6043 do_pending_stack_adjust ();
6044 emit_label (loop_start);
6045
6046 /* Assign value to element index. */
6047 position =
6048 fold_convert (ssizetype,
6049 fold_build2 (MINUS_EXPR,
6050 TREE_TYPE (index),
6051 index,
6052 TYPE_MIN_VALUE (domain)));
6053
6054 position =
6055 size_binop (MULT_EXPR, position,
6056 fold_convert (ssizetype,
6057 TYPE_SIZE_UNIT (elttype)));
6058
6059 pos_rtx = expand_normal (position);
6060 xtarget = offset_address (target, pos_rtx,
6061 highest_pow2_factor (position));
6062 xtarget = adjust_address (xtarget, mode, 0);
6063 if (TREE_CODE (value) == CONSTRUCTOR)
6064 store_constructor (value, xtarget, cleared,
6065 bitsize / BITS_PER_UNIT);
6066 else
6067 store_expr (value, xtarget, 0, false);
6068
6069 /* Generate a conditional jump to exit the loop. */
6070 exit_cond = build2 (LT_EXPR, integer_type_node,
6071 index, hi_index);
6072 jumpif (exit_cond, loop_end, -1);
6073
6074 /* Update the loop counter, and jump to the head of
6075 the loop. */
6076 expand_assignment (index,
6077 build2 (PLUS_EXPR, TREE_TYPE (index),
6078 index, integer_one_node),
6079 false);
6080
6081 emit_jump (loop_start);
6082
6083 /* Build the end of the loop. */
6084 emit_label (loop_end);
6085 }
6086 }
6087 else if ((index != 0 && ! host_integerp (index, 0))
6088 || ! host_integerp (TYPE_SIZE (elttype), 1))
6089 {
6090 tree position;
6091
6092 if (index == 0)
6093 index = ssize_int (1);
6094
6095 if (minelt)
6096 index = fold_convert (ssizetype,
6097 fold_build2 (MINUS_EXPR,
6098 TREE_TYPE (index),
6099 index,
6100 TYPE_MIN_VALUE (domain)));
6101
6102 position =
6103 size_binop (MULT_EXPR, index,
6104 fold_convert (ssizetype,
6105 TYPE_SIZE_UNIT (elttype)));
6106 xtarget = offset_address (target,
6107 expand_normal (position),
6108 highest_pow2_factor (position));
6109 xtarget = adjust_address (xtarget, mode, 0);
6110 store_expr (value, xtarget, 0, false);
6111 }
6112 else
6113 {
6114 if (index != 0)
6115 bitpos = ((tree_low_cst (index, 0) - minelt)
6116 * tree_low_cst (TYPE_SIZE (elttype), 1));
6117 else
6118 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
6119
6120 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6121 && TREE_CODE (type) == ARRAY_TYPE
6122 && TYPE_NONALIASED_COMPONENT (type))
6123 {
6124 target = copy_rtx (target);
6125 MEM_KEEP_ALIAS_SET_P (target) = 1;
6126 }
6127 store_constructor_field (target, bitsize, bitpos, mode, value,
6128 type, cleared, get_alias_set (elttype));
6129 }
6130 }
6131 break;
6132 }
6133
6134 case VECTOR_TYPE:
6135 {
6136 unsigned HOST_WIDE_INT idx;
6137 constructor_elt *ce;
6138 int i;
6139 int need_to_clear;
6140 int icode = 0;
6141 tree elttype = TREE_TYPE (type);
6142 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
6143 enum machine_mode eltmode = TYPE_MODE (elttype);
6144 HOST_WIDE_INT bitsize;
6145 HOST_WIDE_INT bitpos;
6146 rtvec vector = NULL;
6147 unsigned n_elts;
6148 alias_set_type alias;
6149
6150 gcc_assert (eltmode != BLKmode);
6151
6152 n_elts = TYPE_VECTOR_SUBPARTS (type);
6153 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6154 {
6155 enum machine_mode mode = GET_MODE (target);
6156
6157 icode = (int) optab_handler (vec_init_optab, mode);
6158 if (icode != CODE_FOR_nothing)
6159 {
6160 unsigned int i;
6161
6162 vector = rtvec_alloc (n_elts);
6163 for (i = 0; i < n_elts; i++)
6164 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6165 }
6166 }
6167
6168 /* If the constructor has fewer elements than the vector,
6169 	   clear the whole vector first.  Similarly if this is a static
6170 	   constructor of a non-BLKmode object.  */
6171 if (cleared)
6172 need_to_clear = 0;
6173 else if (REG_P (target) && TREE_STATIC (exp))
6174 need_to_clear = 1;
6175 else
6176 {
6177 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6178 tree value;
6179
6180 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6181 {
6182 int n_elts_here = tree_low_cst
6183 (int_const_binop (TRUNC_DIV_EXPR,
6184 TYPE_SIZE (TREE_TYPE (value)),
6185 TYPE_SIZE (elttype)), 1);
6186
6187 count += n_elts_here;
6188 if (mostly_zeros_p (value))
6189 zero_count += n_elts_here;
6190 }
6191
6192 /* Clear the entire vector first if there are any missing elements,
6193 or if the incidence of zero elements is >= 75%. */
6194 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6195 }
6196
6197 if (need_to_clear && size > 0 && !vector)
6198 {
6199 if (REG_P (target))
6200 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6201 else
6202 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6203 cleared = 1;
6204 }
6205
6206 /* Inform later passes that the old value is dead. */
6207 if (!cleared && !vector && REG_P (target))
6208 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6209
6210 if (MEM_P (target))
6211 alias = MEM_ALIAS_SET (target);
6212 else
6213 alias = get_alias_set (elttype);
6214
6215 /* Store each element of the constructor into the corresponding
6216 element of TARGET, determined by counting the elements. */
6217 for (idx = 0, i = 0;
6218 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6219 idx++, i += bitsize / elt_size)
6220 {
6221 HOST_WIDE_INT eltpos;
6222 tree value = ce->value;
6223
6224 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
6225 if (cleared && initializer_zerop (value))
6226 continue;
6227
6228 if (ce->index)
6229 eltpos = tree_low_cst (ce->index, 1);
6230 else
6231 eltpos = i;
6232
6233 if (vector)
6234 {
6235 /* Vector CONSTRUCTORs should only be built from smaller
6236 vectors in the case of BLKmode vectors. */
6237 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6238 RTVEC_ELT (vector, eltpos)
6239 = expand_normal (value);
6240 }
6241 else
6242 {
6243 enum machine_mode value_mode =
6244 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6245 ? TYPE_MODE (TREE_TYPE (value))
6246 : eltmode;
6247 bitpos = eltpos * elt_size;
6248 store_constructor_field (target, bitsize, bitpos,
6249 value_mode, value, type,
6250 cleared, alias);
6251 }
6252 }
6253
6254 if (vector)
6255 emit_insn (GEN_FCN (icode)
6256 (target,
6257 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6258 break;
6259 }
6260
6261 default:
6262 gcc_unreachable ();
6263 }
6264 }
6265
6266 /* Store the value of EXP (an expression tree)
6267 into a subfield of TARGET which has mode MODE and occupies
6268 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6269 If MODE is VOIDmode, it means that we are storing into a bit-field.
6270
6271    BITREGION_START is the bitpos of the first bitfield in this region.
6272 BITREGION_END is the bitpos of the ending bitfield in this region.
6273 These two fields are 0, if the C++ memory model does not apply,
6274 or we are not interested in keeping track of bitfield regions.
6275
6276 Always return const0_rtx unless we have something particular to
6277 return.
6278
6279    TYPE is the type of the underlying object.
6280
6281 ALIAS_SET is the alias set for the destination. This value will
6282 (in general) be different from that for TARGET, since TARGET is a
6283 reference to the containing structure.
6284
6285 If NONTEMPORAL is true, try generating a nontemporal store. */
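/* For illustration, assuming a hypothetical bit-field assignment

       struct s { unsigned a : 5, b : 3; } x;
       x.b = v;

   this would typically be reached with BITSIZE 3, a small constant
   BITPOS determined by the layout of S, and MODE VOIDmode (VOIDmode
   marking a bit-field store), and would fall through to the
   store_bit_field call below.  */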
6286
6287 static rtx
6288 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6289 unsigned HOST_WIDE_INT bitregion_start,
6290 unsigned HOST_WIDE_INT bitregion_end,
6291 enum machine_mode mode, tree exp, tree type,
6292 alias_set_type alias_set, bool nontemporal)
6293 {
6294 if (TREE_CODE (exp) == ERROR_MARK)
6295 return const0_rtx;
6296
6297 /* If we have nothing to store, do nothing unless the expression has
6298 side-effects. */
6299 if (bitsize == 0)
6300 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6301
6302 /* If we are storing into an unaligned field of an aligned union that is
6303 in a register, we may have the mode of TARGET being an integer mode but
6304 MODE == BLKmode. In that case, get an aligned object whose size and
6305 alignment are the same as TARGET and store TARGET into it (we can avoid
6306 the store if the field being stored is the entire width of TARGET). Then
6307 call ourselves recursively to store the field into a BLKmode version of
6308 that object. Finally, load from the object into TARGET. This is not
6309 very efficient in general, but should only be slightly more expensive
6310 than the otherwise-required unaligned accesses. Perhaps this can be
6311 cleaned up later. It's tempting to make OBJECT readonly, but it's set
6312 twice, once with emit_move_insn and once via store_field. */
6313
6314 if (mode == BLKmode
6315 && (REG_P (target) || GET_CODE (target) == SUBREG))
6316 {
6317 rtx object = assign_temp (type, 0, 1, 1);
6318 rtx blk_object = adjust_address (object, BLKmode, 0);
6319
6320 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
6321 emit_move_insn (object, target);
6322
6323 store_field (blk_object, bitsize, bitpos,
6324 bitregion_start, bitregion_end,
6325 mode, exp, type, MEM_ALIAS_SET (blk_object), nontemporal);
6326
6327 emit_move_insn (target, object);
6328
6329 /* We want to return the BLKmode version of the data. */
6330 return blk_object;
6331 }
6332
6333 if (GET_CODE (target) == CONCAT)
6334 {
6335 /* We're storing into a struct containing a single __complex. */
6336
6337 gcc_assert (!bitpos);
6338 return store_expr (exp, target, 0, nontemporal);
6339 }
6340
6341 /* If the structure is in a register or if the component
6342 is a bit field, we cannot use addressing to access it.
6343 Use bit-field techniques or SUBREG to store in it. */
6344
6345 if (mode == VOIDmode
6346 || (mode != BLKmode && ! direct_store[(int) mode]
6347 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6348 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6349 || REG_P (target)
6350 || GET_CODE (target) == SUBREG
6351 /* If the field isn't aligned enough to store as an ordinary memref,
6352 store it as a bit field. */
6353 || (mode != BLKmode
6354 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6355 || bitpos % GET_MODE_ALIGNMENT (mode))
6356 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6357 || (bitpos % BITS_PER_UNIT != 0)))
6358 || (bitsize >= 0 && mode != BLKmode
6359 && GET_MODE_BITSIZE (mode) > bitsize)
6360 /* If the RHS and field are a constant size and the size of the
6361 RHS isn't the same size as the bitfield, we must use bitfield
6362 operations. */
6363 || (bitsize >= 0
6364 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6365 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6366 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6367 decl we must use bitfield operations. */
6368 || (bitsize >= 0
6369 && TREE_CODE (exp) == MEM_REF
6370 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6371 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6372 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6373 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6374 {
6375 rtx temp;
6376 gimple nop_def;
6377
6378 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6379 implies a mask operation. If the precision is the same size as
6380 the field we're storing into, that mask is redundant. This is
6381 particularly common with bit field assignments generated by the
6382 C front end. */
6383 nop_def = get_def_for_expr (exp, NOP_EXPR);
6384 if (nop_def)
6385 {
6386 tree type = TREE_TYPE (exp);
6387 if (INTEGRAL_TYPE_P (type)
6388 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6389 && bitsize == TYPE_PRECISION (type))
6390 {
6391 tree op = gimple_assign_rhs1 (nop_def);
6392 type = TREE_TYPE (op);
6393 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6394 exp = op;
6395 }
6396 }
6397
6398 temp = expand_normal (exp);
6399
6400 /* If BITSIZE is narrower than the size of the type of EXP
6401 we will be narrowing TEMP. Normally, what's wanted are the
6402 	   low-order bits.  However, if EXP's type is a record and this is a
6403 	   big-endian machine, we want the upper BITSIZE bits.  */
6404 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6405 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6406 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6407 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6408 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6409 NULL_RTX, 1);
6410
6411 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6412 if (mode != VOIDmode && mode != BLKmode
6413 && mode != TYPE_MODE (TREE_TYPE (exp)))
6414 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6415
6416 /* If the modes of TEMP and TARGET are both BLKmode, both
6417 must be in memory and BITPOS must be aligned on a byte
6418 boundary. If so, we simply do a block copy. Likewise
6419 for a BLKmode-like TARGET. */
6420 if (GET_MODE (temp) == BLKmode
6421 && (GET_MODE (target) == BLKmode
6422 || (MEM_P (target)
6423 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6424 && (bitpos % BITS_PER_UNIT) == 0
6425 && (bitsize % BITS_PER_UNIT) == 0)))
6426 {
6427 gcc_assert (MEM_P (target) && MEM_P (temp)
6428 && (bitpos % BITS_PER_UNIT) == 0);
6429
6430 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6431 emit_block_move (target, temp,
6432 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6433 / BITS_PER_UNIT),
6434 BLOCK_OP_NORMAL);
6435
6436 return const0_rtx;
6437 }
6438
6439 /* Store the value in the bitfield. */
6440 store_bit_field (target, bitsize, bitpos,
6441 bitregion_start, bitregion_end,
6442 mode, temp);
6443
6444 return const0_rtx;
6445 }
6446 else
6447 {
6448 /* Now build a reference to just the desired component. */
6449 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6450
6451 if (to_rtx == target)
6452 to_rtx = copy_rtx (to_rtx);
6453
6454 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6455 set_mem_alias_set (to_rtx, alias_set);
6456
6457 return store_expr (exp, to_rtx, 0, nontemporal);
6458 }
6459 }
6460 \f
6461 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6462 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6463 codes and find the ultimate containing object, which we return.
6464
6465 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6466 bit position, and *PUNSIGNEDP to the signedness of the field.
6467 If the position of the field is variable, we store a tree
6468 giving the variable offset (in units) in *POFFSET.
6469 This offset is in addition to the bit position.
6470 If the position is not variable, we store 0 in *POFFSET.
6471
6472 If any of the extraction expressions is volatile,
6473 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6474
6475 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6476 Otherwise, it is a mode that can be used to access the field.
6477
6478 If the field describes a variable-sized object, *PMODE is set to
6479 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6480 this case, but the address of the object can be found.
6481
6482 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6483 look through nodes that serve as markers of a greater alignment than
6484 the one that can be deduced from the expression. These nodes make it
6485 possible for front-ends to prevent temporaries from being created by
6486 the middle-end on alignment considerations. For that purpose, the
6487 normal operating mode at high-level is to always pass FALSE so that
6488 the ultimate containing object is really returned; moreover, the
6489 associated predicate handled_component_p will always return TRUE
6490 on these nodes, thus indicating that they are essentially handled
6491 by get_inner_reference. TRUE should only be passed when the caller
6492 is scanning the expression in order to build another representation
6493 and specifically knows how to handle these nodes; as such, this is
6494 the normal operating mode in the RTL expanders. */
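/* For illustration, given a hypothetical reference

       x.f[i].g

   the loop below peels the COMPONENT_REF for G, the ARRAY_REF for [i]
   and the COMPONENT_REF for F, and returns the declaration X.  The
   DECL_FIELD_BIT_OFFSETs of F and G accumulate into *PBITPOS, while the
   byte-sized parts, including the variable term i times the element
   size, are summed into the tree returned in *POFFSET.  */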
6495
6496 tree
6497 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6498 HOST_WIDE_INT *pbitpos, tree *poffset,
6499 enum machine_mode *pmode, int *punsignedp,
6500 int *pvolatilep, bool keep_aligning)
6501 {
6502 tree size_tree = 0;
6503 enum machine_mode mode = VOIDmode;
6504 bool blkmode_bitfield = false;
6505 tree offset = size_zero_node;
6506 double_int bit_offset = double_int_zero;
6507
6508 /* First get the mode, signedness, and size. We do this from just the
6509 outermost expression. */
6510 *pbitsize = -1;
6511 if (TREE_CODE (exp) == COMPONENT_REF)
6512 {
6513 tree field = TREE_OPERAND (exp, 1);
6514 size_tree = DECL_SIZE (field);
6515 if (!DECL_BIT_FIELD (field))
6516 mode = DECL_MODE (field);
6517 else if (DECL_MODE (field) == BLKmode)
6518 blkmode_bitfield = true;
6519 else if (TREE_THIS_VOLATILE (exp)
6520 && flag_strict_volatile_bitfields > 0)
6521 /* Volatile bitfields should be accessed in the mode of the
6522 field's type, not the mode computed based on the bit
6523 size. */
6524 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6525
6526 *punsignedp = DECL_UNSIGNED (field);
6527 }
6528 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6529 {
6530 size_tree = TREE_OPERAND (exp, 1);
6531 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6532 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6533
6534 /* For vector types, with the correct size of access, use the mode of
6535 inner type. */
6536 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6537 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6538 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6539 mode = TYPE_MODE (TREE_TYPE (exp));
6540 }
6541 else
6542 {
6543 mode = TYPE_MODE (TREE_TYPE (exp));
6544 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6545
6546 if (mode == BLKmode)
6547 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6548 else
6549 *pbitsize = GET_MODE_BITSIZE (mode);
6550 }
6551
6552 if (size_tree != 0)
6553 {
6554 if (! host_integerp (size_tree, 1))
6555 mode = BLKmode, *pbitsize = -1;
6556 else
6557 *pbitsize = tree_low_cst (size_tree, 1);
6558 }
6559
6560 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6561 and find the ultimate containing object. */
6562 while (1)
6563 {
6564 switch (TREE_CODE (exp))
6565 {
6566 case BIT_FIELD_REF:
6567 bit_offset
6568 = double_int_add (bit_offset,
6569 tree_to_double_int (TREE_OPERAND (exp, 2)));
6570 break;
6571
6572 case COMPONENT_REF:
6573 {
6574 tree field = TREE_OPERAND (exp, 1);
6575 tree this_offset = component_ref_field_offset (exp);
6576
6577 /* If this field hasn't been filled in yet, don't go past it.
6578 This should only happen when folding expressions made during
6579 type construction. */
6580 if (this_offset == 0)
6581 break;
6582
6583 offset = size_binop (PLUS_EXPR, offset, this_offset);
6584 bit_offset = double_int_add (bit_offset,
6585 tree_to_double_int
6586 (DECL_FIELD_BIT_OFFSET (field)));
6587
6588 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6589 }
6590 break;
6591
6592 case ARRAY_REF:
6593 case ARRAY_RANGE_REF:
6594 {
6595 tree index = TREE_OPERAND (exp, 1);
6596 tree low_bound = array_ref_low_bound (exp);
6597 tree unit_size = array_ref_element_size (exp);
6598
6599 /* We assume all arrays have sizes that are a multiple of a byte.
6600 First subtract the lower bound, if any, in the type of the
6601 index, then convert to sizetype and multiply by the size of
6602 the array element. */
6603 if (! integer_zerop (low_bound))
6604 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6605 index, low_bound);
6606
6607 offset = size_binop (PLUS_EXPR, offset,
6608 size_binop (MULT_EXPR,
6609 fold_convert (sizetype, index),
6610 unit_size));
6611 }
6612 break;
6613
6614 case REALPART_EXPR:
6615 break;
6616
6617 case IMAGPART_EXPR:
6618 bit_offset = double_int_add (bit_offset,
6619 uhwi_to_double_int (*pbitsize));
6620 break;
6621
6622 case VIEW_CONVERT_EXPR:
6623 if (keep_aligning && STRICT_ALIGNMENT
6624 && (TYPE_ALIGN (TREE_TYPE (exp))
6625 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6626 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6627 < BIGGEST_ALIGNMENT)
6628 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6629 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6630 goto done;
6631 break;
6632
6633 case MEM_REF:
6634 /* Hand back the decl for MEM[&decl, off]. */
6635 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6636 {
6637 tree off = TREE_OPERAND (exp, 1);
6638 if (!integer_zerop (off))
6639 {
6640 double_int boff, coff = mem_ref_offset (exp);
6641 boff = double_int_lshift (coff,
6642 BITS_PER_UNIT == 8
6643 ? 3 : exact_log2 (BITS_PER_UNIT),
6644 HOST_BITS_PER_DOUBLE_INT, true);
6645 bit_offset = double_int_add (bit_offset, boff);
6646 }
6647 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6648 }
6649 goto done;
6650
6651 default:
6652 goto done;
6653 }
6654
6655 /* If any reference in the chain is volatile, the effect is volatile. */
6656 if (TREE_THIS_VOLATILE (exp))
6657 *pvolatilep = 1;
6658
6659 exp = TREE_OPERAND (exp, 0);
6660 }
6661 done:
6662
6663 /* If OFFSET is constant, see if we can return the whole thing as a
6664 constant bit position. Make sure to handle overflow during
6665 this conversion. */
6666 if (TREE_CODE (offset) == INTEGER_CST)
6667 {
6668 double_int tem = tree_to_double_int (offset);
6669 tem = double_int_sext (tem, TYPE_PRECISION (sizetype));
6670 tem = double_int_lshift (tem,
6671 BITS_PER_UNIT == 8
6672 ? 3 : exact_log2 (BITS_PER_UNIT),
6673 HOST_BITS_PER_DOUBLE_INT, true);
6674 tem = double_int_add (tem, bit_offset);
6675 if (double_int_fits_in_shwi_p (tem))
6676 {
6677 *pbitpos = double_int_to_shwi (tem);
6678 *poffset = offset = NULL_TREE;
6679 }
6680 }
6681
6682 /* Otherwise, split it up. */
6683 if (offset)
6684 {
6685 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6686 if (double_int_negative_p (bit_offset))
6687 {
6688 double_int mask
6689 = double_int_mask (BITS_PER_UNIT == 8
6690 ? 3 : exact_log2 (BITS_PER_UNIT));
6691 double_int tem = double_int_and_not (bit_offset, mask);
6692 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6693 	     Subtract it from BIT_OFFSET and add it (scaled) to OFFSET.  */
6694 bit_offset = double_int_sub (bit_offset, tem);
6695 tem = double_int_rshift (tem,
6696 BITS_PER_UNIT == 8
6697 ? 3 : exact_log2 (BITS_PER_UNIT),
6698 HOST_BITS_PER_DOUBLE_INT, true);
6699 offset = size_binop (PLUS_EXPR, offset,
6700 double_int_to_tree (sizetype, tem));
6701 }
6702
6703 *pbitpos = double_int_to_shwi (bit_offset);
6704 *poffset = offset;
6705 }
6706
6707 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6708 if (mode == VOIDmode
6709 && blkmode_bitfield
6710 && (*pbitpos % BITS_PER_UNIT) == 0
6711 && (*pbitsize % BITS_PER_UNIT) == 0)
6712 *pmode = BLKmode;
6713 else
6714 *pmode = mode;
6715
6716 return exp;
6717 }
6718
6719 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6720 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6721 EXP is marked as PACKED. */
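/* For illustration, given a hypothetical access X.F where F belongs to

       struct __attribute__ ((packed)) s { char c; int f; };

   the walk below would normally find DECL_PACKED set on F (or
   TYPE_PACKED on one of the enclosing types) and return true; a
   reference made up only of naturally aligned components returns
   false.  */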
6722
6723 bool
6724 contains_packed_reference (const_tree exp)
6725 {
6726 bool packed_p = false;
6727
6728 while (1)
6729 {
6730 switch (TREE_CODE (exp))
6731 {
6732 case COMPONENT_REF:
6733 {
6734 tree field = TREE_OPERAND (exp, 1);
6735 packed_p = DECL_PACKED (field)
6736 || TYPE_PACKED (TREE_TYPE (field))
6737 || TYPE_PACKED (TREE_TYPE (exp));
6738 if (packed_p)
6739 goto done;
6740 }
6741 break;
6742
6743 case BIT_FIELD_REF:
6744 case ARRAY_REF:
6745 case ARRAY_RANGE_REF:
6746 case REALPART_EXPR:
6747 case IMAGPART_EXPR:
6748 case VIEW_CONVERT_EXPR:
6749 break;
6750
6751 default:
6752 goto done;
6753 }
6754 exp = TREE_OPERAND (exp, 0);
6755 }
6756 done:
6757 return packed_p;
6758 }
6759
6760 /* Return a tree of sizetype representing the size, in bytes, of the element
6761 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6762
6763 tree
6764 array_ref_element_size (tree exp)
6765 {
6766 tree aligned_size = TREE_OPERAND (exp, 3);
6767 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6768 location_t loc = EXPR_LOCATION (exp);
6769
6770 /* If a size was specified in the ARRAY_REF, it's the size measured
6771 in alignment units of the element type. So multiply by that value. */
6772 if (aligned_size)
6773 {
6774 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6775 sizetype from another type of the same width and signedness. */
6776 if (TREE_TYPE (aligned_size) != sizetype)
6777 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6778 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6779 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6780 }
6781
6782 /* Otherwise, take the size from that of the element type. Substitute
6783 any PLACEHOLDER_EXPR that we have. */
6784 else
6785 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6786 }
6787
6788 /* Return a tree representing the lower bound of the array mentioned in
6789 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6790
6791 tree
6792 array_ref_low_bound (tree exp)
6793 {
6794 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6795
6796 /* If a lower bound is specified in EXP, use it. */
6797 if (TREE_OPERAND (exp, 2))
6798 return TREE_OPERAND (exp, 2);
6799
6800 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6801 substituting for a PLACEHOLDER_EXPR as needed. */
6802 if (domain_type && TYPE_MIN_VALUE (domain_type))
6803 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6804
6805 /* Otherwise, return a zero of the appropriate type. */
6806 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6807 }
6808
6809 /* Returns true if REF is an array reference to an array at the end of
6810 a structure. If this is the case, the array may be allocated larger
6811 than its upper bound implies. */
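/* For illustration, given a hypothetical

       struct msg { int len; char tail[1]; };

   an access P->tail[i] through a pointer P may address an object that
   was allocated with extra room after TAIL, so this returns true for
   it, whereas the same access on a declared object "struct msg m"
   returns false, because the base is a DECL whose size fixes the
   array.  */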
6812
6813 bool
6814 array_at_struct_end_p (tree ref)
6815 {
6816 if (TREE_CODE (ref) != ARRAY_REF
6817 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6818 return false;
6819
6820 while (handled_component_p (ref))
6821 {
6822 /* If the reference chain contains a component reference to a
6823          non-union type and another field follows, the reference
6824 is not at the end of a structure. */
6825 if (TREE_CODE (ref) == COMPONENT_REF
6826 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6827 {
6828 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6829 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6830 nextf = DECL_CHAIN (nextf);
6831 if (nextf)
6832 return false;
6833 }
6834
6835 ref = TREE_OPERAND (ref, 0);
6836 }
6837
6838 /* If the reference is based on a declared entity, the size of the array
6839 is constrained by its given domain. */
6840 if (DECL_P (ref))
6841 return false;
6842
6843 return true;
6844 }
6845
6846 /* Return a tree representing the upper bound of the array mentioned in
6847 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6848
6849 tree
6850 array_ref_up_bound (tree exp)
6851 {
6852 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6853
6854 /* If there is a domain type and it has an upper bound, use it, substituting
6855 for a PLACEHOLDER_EXPR as needed. */
6856 if (domain_type && TYPE_MAX_VALUE (domain_type))
6857 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6858
6859 /* Otherwise fail. */
6860 return NULL_TREE;
6861 }
6862
6863 /* Return a tree representing the offset, in bytes, of the field referenced
6864 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6865
6866 tree
6867 component_ref_field_offset (tree exp)
6868 {
6869 tree aligned_offset = TREE_OPERAND (exp, 2);
6870 tree field = TREE_OPERAND (exp, 1);
6871 location_t loc = EXPR_LOCATION (exp);
6872
6873 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6874 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6875 value. */
6876 if (aligned_offset)
6877 {
6878 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6879 sizetype from another type of the same width and signedness. */
6880 if (TREE_TYPE (aligned_offset) != sizetype)
6881 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6882 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6883 size_int (DECL_OFFSET_ALIGN (field)
6884 / BITS_PER_UNIT));
6885 }
6886
6887 /* Otherwise, take the offset from that of the field. Substitute
6888 any PLACEHOLDER_EXPR that we have. */
6889 else
6890 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6891 }
6892
6893 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6894
6895 static unsigned HOST_WIDE_INT
6896 target_align (const_tree target)
6897 {
6898 /* We might have a chain of nested references with intermediate misaligning
6899      bitfield components, so we need to recurse to find out.  */
6900
6901 unsigned HOST_WIDE_INT this_align, outer_align;
6902
6903 switch (TREE_CODE (target))
6904 {
6905 case BIT_FIELD_REF:
6906 return 1;
6907
6908 case COMPONENT_REF:
6909 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6910 outer_align = target_align (TREE_OPERAND (target, 0));
6911 return MIN (this_align, outer_align);
6912
6913 case ARRAY_REF:
6914 case ARRAY_RANGE_REF:
6915 this_align = TYPE_ALIGN (TREE_TYPE (target));
6916 outer_align = target_align (TREE_OPERAND (target, 0));
6917 return MIN (this_align, outer_align);
6918
6919 CASE_CONVERT:
6920 case NON_LVALUE_EXPR:
6921 case VIEW_CONVERT_EXPR:
6922 this_align = TYPE_ALIGN (TREE_TYPE (target));
6923 outer_align = target_align (TREE_OPERAND (target, 0));
6924 return MAX (this_align, outer_align);
6925
6926 default:
6927 return TYPE_ALIGN (TREE_TYPE (target));
6928 }
6929 }
6930
6931 \f
6932 /* Given an rtx VALUE that may contain additions and multiplications, return
6933 an equivalent value that just refers to a register, memory, or constant.
6934 This is done by generating instructions to perform the arithmetic and
6935 returning a pseudo-register containing the value.
6936
6937 The returned value may be a REG, SUBREG, MEM or constant. */
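/* For illustration, given a hypothetical rtx such as

       (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (const_int 8))

   this would emit the multiplication and the addition as real insns and
   return a pseudo register (or TARGET, if one was supplied and usable)
   holding the result.  */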
6938
6939 rtx
6940 force_operand (rtx value, rtx target)
6941 {
6942 rtx op1, op2;
6943 /* Use subtarget as the target for operand 0 of a binary operation. */
6944 rtx subtarget = get_subtarget (target);
6945 enum rtx_code code = GET_CODE (value);
6946
6947 /* Check for subreg applied to an expression produced by loop optimizer. */
6948 if (code == SUBREG
6949 && !REG_P (SUBREG_REG (value))
6950 && !MEM_P (SUBREG_REG (value)))
6951 {
6952 value
6953 = simplify_gen_subreg (GET_MODE (value),
6954 force_reg (GET_MODE (SUBREG_REG (value)),
6955 force_operand (SUBREG_REG (value),
6956 NULL_RTX)),
6957 GET_MODE (SUBREG_REG (value)),
6958 SUBREG_BYTE (value));
6959 code = GET_CODE (value);
6960 }
6961
6962 /* Check for a PIC address load. */
6963 if ((code == PLUS || code == MINUS)
6964 && XEXP (value, 0) == pic_offset_table_rtx
6965 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6966 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6967 || GET_CODE (XEXP (value, 1)) == CONST))
6968 {
6969 if (!subtarget)
6970 subtarget = gen_reg_rtx (GET_MODE (value));
6971 emit_move_insn (subtarget, value);
6972 return subtarget;
6973 }
6974
6975 if (ARITHMETIC_P (value))
6976 {
6977 op2 = XEXP (value, 1);
6978 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6979 subtarget = 0;
6980 if (code == MINUS && CONST_INT_P (op2))
6981 {
6982 code = PLUS;
6983 op2 = negate_rtx (GET_MODE (value), op2);
6984 }
6985
6986 /* Check for an addition with OP2 a constant integer and our first
6987 operand a PLUS of a virtual register and something else. In that
6988 case, we want to emit the sum of the virtual register and the
6989 constant first and then add the other value. This allows virtual
6990 register instantiation to simply modify the constant rather than
6991 creating another one around this addition. */
6992 if (code == PLUS && CONST_INT_P (op2)
6993 && GET_CODE (XEXP (value, 0)) == PLUS
6994 && REG_P (XEXP (XEXP (value, 0), 0))
6995 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6996 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6997 {
6998 rtx temp = expand_simple_binop (GET_MODE (value), code,
6999 XEXP (XEXP (value, 0), 0), op2,
7000 subtarget, 0, OPTAB_LIB_WIDEN);
7001 return expand_simple_binop (GET_MODE (value), code, temp,
7002 force_operand (XEXP (XEXP (value,
7003 0), 1), 0),
7004 target, 0, OPTAB_LIB_WIDEN);
7005 }
7006
7007 op1 = force_operand (XEXP (value, 0), subtarget);
7008 op2 = force_operand (op2, NULL_RTX);
7009 switch (code)
7010 {
7011 case MULT:
7012 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7013 case DIV:
7014 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7015 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7016 target, 1, OPTAB_LIB_WIDEN);
7017 else
7018 return expand_divmod (0,
7019 FLOAT_MODE_P (GET_MODE (value))
7020 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7021 GET_MODE (value), op1, op2, target, 0);
7022 case MOD:
7023 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7024 target, 0);
7025 case UDIV:
7026 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7027 target, 1);
7028 case UMOD:
7029 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7030 target, 1);
7031 case ASHIFTRT:
7032 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7033 target, 0, OPTAB_LIB_WIDEN);
7034 default:
7035 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7036 target, 1, OPTAB_LIB_WIDEN);
7037 }
7038 }
7039 if (UNARY_P (value))
7040 {
7041 if (!target)
7042 target = gen_reg_rtx (GET_MODE (value));
7043 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7044 switch (code)
7045 {
7046 case ZERO_EXTEND:
7047 case SIGN_EXTEND:
7048 case TRUNCATE:
7049 case FLOAT_EXTEND:
7050 case FLOAT_TRUNCATE:
7051 convert_move (target, op1, code == ZERO_EXTEND);
7052 return target;
7053
7054 case FIX:
7055 case UNSIGNED_FIX:
7056 expand_fix (target, op1, code == UNSIGNED_FIX);
7057 return target;
7058
7059 case FLOAT:
7060 case UNSIGNED_FLOAT:
7061 expand_float (target, op1, code == UNSIGNED_FLOAT);
7062 return target;
7063
7064 default:
7065 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7066 }
7067 }
7068
7069 #ifdef INSN_SCHEDULING
7070 /* On machines that have insn scheduling, we want all memory references to be
7071 explicit, so we need to deal with such paradoxical SUBREGs. */
7072 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7073 value
7074 = simplify_gen_subreg (GET_MODE (value),
7075 force_reg (GET_MODE (SUBREG_REG (value)),
7076 force_operand (SUBREG_REG (value),
7077 NULL_RTX)),
7078 GET_MODE (SUBREG_REG (value)),
7079 SUBREG_BYTE (value));
7080 #endif
7081
7082 return value;
7083 }
7084 \f
7085 /* Subroutine of expand_expr: return nonzero iff there is no way that
7086 EXP can reference X, which is being modified. TOP_P is nonzero if this
7087 call is going to be used to determine whether we need a temporary
7088 for EXP, as opposed to a recursive call to this function.
7089
7090 It is always safe for this routine to return zero since it merely
7091 searches for optimization opportunities. */
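/* Illustrative example (not part of the original source): when
   expanding an expression such as B + C directly into the memory
   that holds A, the expander calls safe_from_p with A's rtx to check
   that evaluating B and C cannot read or modify A; if that cannot be
   proven, a separate temporary is used instead.  */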
7092
7093 int
7094 safe_from_p (const_rtx x, tree exp, int top_p)
7095 {
7096 rtx exp_rtl = 0;
7097 int i, nops;
7098
7099 if (x == 0
7100 /* If EXP has varying size, we MUST use a target since we currently
7101 have no way of allocating temporaries of variable size
7102 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7103 So we assume here that something at a higher level has prevented a
7104 clash. This is somewhat bogus, but the best we can do. Only
7105 do this when X is BLKmode and when we are at the top level. */
7106 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7107 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7108 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7109 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7110 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7111 != INTEGER_CST)
7112 && GET_MODE (x) == BLKmode)
7113 /* If X is in the outgoing argument area, it is always safe. */
7114 || (MEM_P (x)
7115 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7116 || (GET_CODE (XEXP (x, 0)) == PLUS
7117 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7118 return 1;
7119
7120 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7121 find the underlying pseudo. */
7122 if (GET_CODE (x) == SUBREG)
7123 {
7124 x = SUBREG_REG (x);
7125 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7126 return 0;
7127 }
7128
7129 /* Now look at our tree code and possibly recurse. */
7130 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7131 {
7132 case tcc_declaration:
7133 exp_rtl = DECL_RTL_IF_SET (exp);
7134 break;
7135
7136 case tcc_constant:
7137 return 1;
7138
7139 case tcc_exceptional:
7140 if (TREE_CODE (exp) == TREE_LIST)
7141 {
7142 while (1)
7143 {
7144 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7145 return 0;
7146 exp = TREE_CHAIN (exp);
7147 if (!exp)
7148 return 1;
7149 if (TREE_CODE (exp) != TREE_LIST)
7150 return safe_from_p (x, exp, 0);
7151 }
7152 }
7153 else if (TREE_CODE (exp) == CONSTRUCTOR)
7154 {
7155 constructor_elt *ce;
7156 unsigned HOST_WIDE_INT idx;
7157
7158 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce)
7159 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7160 || !safe_from_p (x, ce->value, 0))
7161 return 0;
7162 return 1;
7163 }
7164 else if (TREE_CODE (exp) == ERROR_MARK)
7165 return 1; /* An already-visited SAVE_EXPR? */
7166 else
7167 return 0;
7168
7169 case tcc_statement:
7170 /* The only case we look at here is the DECL_INITIAL inside a
7171 DECL_EXPR. */
7172 return (TREE_CODE (exp) != DECL_EXPR
7173 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7174 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7175 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7176
7177 case tcc_binary:
7178 case tcc_comparison:
7179 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7180 return 0;
7181 /* Fall through. */
7182
7183 case tcc_unary:
7184 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7185
7186 case tcc_expression:
7187 case tcc_reference:
7188 case tcc_vl_exp:
7189 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7190 the expression. If it is set, we conflict iff we are that rtx or
7191 both are in memory. Otherwise, we check all operands of the
7192 expression recursively. */
7193
7194 switch (TREE_CODE (exp))
7195 {
7196 case ADDR_EXPR:
7197 /* If the operand is static or we are static, we can't conflict.
7198 Likewise if we don't conflict with the operand at all. */
7199 if (staticp (TREE_OPERAND (exp, 0))
7200 || TREE_STATIC (exp)
7201 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7202 return 1;
7203
7204 /* Otherwise, the only way this can conflict is if we are taking
7205 the address of a DECL whose address is part of X, which is
7206 very rare. */
7207 exp = TREE_OPERAND (exp, 0);
7208 if (DECL_P (exp))
7209 {
7210 if (!DECL_RTL_SET_P (exp)
7211 || !MEM_P (DECL_RTL (exp)))
7212 return 0;
7213 else
7214 exp_rtl = XEXP (DECL_RTL (exp), 0);
7215 }
7216 break;
7217
7218 case MEM_REF:
7219 if (MEM_P (x)
7220 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7221 get_alias_set (exp)))
7222 return 0;
7223 break;
7224
7225 case CALL_EXPR:
7226 /* Assume that the call will clobber all hard registers and
7227 all of memory. */
7228 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7229 || MEM_P (x))
7230 return 0;
7231 break;
7232
7233 case WITH_CLEANUP_EXPR:
7234 case CLEANUP_POINT_EXPR:
7235 /* Lowered by gimplify.c. */
7236 gcc_unreachable ();
7237
7238 case SAVE_EXPR:
7239 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7240
7241 default:
7242 break;
7243 }
7244
7245 /* If we have an rtx, we do not need to scan our operands. */
7246 if (exp_rtl)
7247 break;
7248
7249 nops = TREE_OPERAND_LENGTH (exp);
7250 for (i = 0; i < nops; i++)
7251 if (TREE_OPERAND (exp, i) != 0
7252 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7253 return 0;
7254
7255 break;
7256
7257 case tcc_type:
7258 /* Should never get a type here. */
7259 gcc_unreachable ();
7260 }
7261
7262 /* If we have an rtl, find any enclosed object. Then see if we conflict
7263 with it. */
7264 if (exp_rtl)
7265 {
7266 if (GET_CODE (exp_rtl) == SUBREG)
7267 {
7268 exp_rtl = SUBREG_REG (exp_rtl);
7269 if (REG_P (exp_rtl)
7270 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7271 return 0;
7272 }
7273
7274 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7275 are memory and they conflict. */
7276 return ! (rtx_equal_p (x, exp_rtl)
7277 || (MEM_P (x) && MEM_P (exp_rtl)
7278 && true_dependence (exp_rtl, VOIDmode, x)));
7279 }
7280
7281 /* If we reach here, it is safe. */
7282 return 1;
7283 }
7284
7285 \f
7286 /* Return the highest power of two that EXP is known to be a multiple of.
7287 This is used in updating alignment of MEMs in array references. */
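/* Worked example (not part of the original source): for the
   expression i * 12 + 8, the MULT_EXPR case yields 1 * 4 = 4
   (4 being the largest power of two dividing 12), the INTEGER_CST 8
   yields 8, and the PLUS_EXPR case returns MIN (4, 8) = 4, i.e. the
   sum is known to be a multiple of 4.  */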
7288
7289 unsigned HOST_WIDE_INT
7290 highest_pow2_factor (const_tree exp)
7291 {
7292 unsigned HOST_WIDE_INT c0, c1;
7293
7294 switch (TREE_CODE (exp))
7295 {
7296 case INTEGER_CST:
7297 /* We can find the lowest bit that's a one. If the low
7298 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
7299 We need to handle this case since we can find it in a COND_EXPR,
7300 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
7301 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
7302 later ICE. */
7303 if (TREE_OVERFLOW (exp))
7304 return BIGGEST_ALIGNMENT;
7305 else
7306 {
7307 /* Note: tree_low_cst is intentionally not used here,
7308 we don't care about the upper bits. */
7309 c0 = TREE_INT_CST_LOW (exp);
7310 c0 &= -c0;
7311 return c0 ? c0 : BIGGEST_ALIGNMENT;
7312 }
7313 break;
7314
7315 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
7316 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7317 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7318 return MIN (c0, c1);
7319
7320 case MULT_EXPR:
7321 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7322 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7323 return c0 * c1;
7324
7325 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
7326 case CEIL_DIV_EXPR:
7327 if (integer_pow2p (TREE_OPERAND (exp, 1))
7328 && host_integerp (TREE_OPERAND (exp, 1), 1))
7329 {
7330 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7331 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
7332 return MAX (1, c0 / c1);
7333 }
7334 break;
7335
7336 case BIT_AND_EXPR:
7337 /* The highest power of two of a bit-and expression is the maximum of
7338 that of its operands. We typically get here for a complex LHS and
7339 a constant negative power of two on the RHS to force an explicit
7340 alignment, so don't bother looking at the LHS. */
7341 return highest_pow2_factor (TREE_OPERAND (exp, 1));
7342
7343 CASE_CONVERT:
7344 case SAVE_EXPR:
7345 return highest_pow2_factor (TREE_OPERAND (exp, 0));
7346
7347 case COMPOUND_EXPR:
7348 return highest_pow2_factor (TREE_OPERAND (exp, 1));
7349
7350 case COND_EXPR:
7351 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7352 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
7353 return MIN (c0, c1);
7354
7355 default:
7356 break;
7357 }
7358
7359 return 1;
7360 }
7361
7362 /* Similar, except that the alignment requirements of TARGET are
7363 taken into account. Assume it is at least as aligned as its
7364 type, unless it is a COMPONENT_REF in which case the layout of
7365 the structure gives the alignment. */
7366
7367 static unsigned HOST_WIDE_INT
7368 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7369 {
7370 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7371 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7372
7373 return MAX (factor, talign);
7374 }
7375 \f
7376 /* Convert the tree comparison code TCODE to the rtl one where the
7377 signedness is UNSIGNEDP. */
7378
7379 static enum rtx_code
7380 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7381 {
7382 enum rtx_code code;
7383 switch (tcode)
7384 {
7385 case EQ_EXPR:
7386 code = EQ;
7387 break;
7388 case NE_EXPR:
7389 code = NE;
7390 break;
7391 case LT_EXPR:
7392 code = unsignedp ? LTU : LT;
7393 break;
7394 case LE_EXPR:
7395 code = unsignedp ? LEU : LE;
7396 break;
7397 case GT_EXPR:
7398 code = unsignedp ? GTU : GT;
7399 break;
7400 case GE_EXPR:
7401 code = unsignedp ? GEU : GE;
7402 break;
7403 case UNORDERED_EXPR:
7404 code = UNORDERED;
7405 break;
7406 case ORDERED_EXPR:
7407 code = ORDERED;
7408 break;
7409 case UNLT_EXPR:
7410 code = UNLT;
7411 break;
7412 case UNLE_EXPR:
7413 code = UNLE;
7414 break;
7415 case UNGT_EXPR:
7416 code = UNGT;
7417 break;
7418 case UNGE_EXPR:
7419 code = UNGE;
7420 break;
7421 case UNEQ_EXPR:
7422 code = UNEQ;
7423 break;
7424 case LTGT_EXPR:
7425 code = LTGT;
7426 break;
7427
7428 default:
7429 gcc_unreachable ();
7430 }
7431 return code;
7432 }
7433
7434 /* Subroutine of expand_expr. Expand the two operands of a binary
7435 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7436 The value may be stored in TARGET if TARGET is nonzero. The
7437 MODIFIER argument is as documented by expand_expr. */
7438
7439 static void
7440 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7441 enum expand_modifier modifier)
7442 {
7443 if (! safe_from_p (target, exp1, 1))
7444 target = 0;
7445 if (operand_equal_p (exp0, exp1, 0))
7446 {
7447 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7448 *op1 = copy_rtx (*op0);
7449 }
7450 else
7451 {
7452 /* If we need to preserve evaluation order, copy exp0 into its own
7453 temporary variable so that it can't be clobbered by exp1. */
7454 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7455 exp0 = save_expr (exp0);
7456 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7457 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7458 }
7459 }
7460
7461 \f
7462 /* Return a MEM that contains constant EXP. DEFER is as for
7463 output_constant_def and MODIFIER is as for expand_expr. */
7464
7465 static rtx
7466 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7467 {
7468 rtx mem;
7469
7470 mem = output_constant_def (exp, defer);
7471 if (modifier != EXPAND_INITIALIZER)
7472 mem = use_anchored_address (mem);
7473 return mem;
7474 }
7475
7476 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7477 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7478
7479 static rtx
7480 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7481 enum expand_modifier modifier, addr_space_t as)
7482 {
7483 rtx result, subtarget;
7484 tree inner, offset;
7485 HOST_WIDE_INT bitsize, bitpos;
7486 int volatilep, unsignedp;
7487 enum machine_mode mode1;
7488
7489 /* If we are taking the address of a constant and are at the top level,
7490 we have to use output_constant_def since we can't call force_const_mem
7491 at top level. */
7492 /* ??? This should be considered a front-end bug. We should not be
7493 generating ADDR_EXPR of something that isn't an LVALUE. The only
7494 exception here is STRING_CST. */
7495 if (CONSTANT_CLASS_P (exp))
7496 {
7497 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7498 if (modifier < EXPAND_SUM)
7499 result = force_operand (result, target);
7500 return result;
7501 }
7502
7503 /* Everything must be something allowed by is_gimple_addressable. */
7504 switch (TREE_CODE (exp))
7505 {
7506 case INDIRECT_REF:
7507 /* This case will happen via recursion for &a->b. */
7508 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7509
7510 case MEM_REF:
7511 {
7512 tree tem = TREE_OPERAND (exp, 0);
7513 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7514 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7515 return expand_expr (tem, target, tmode, modifier);
7516 }
7517
7518 case CONST_DECL:
7519 /* Expand the initializer like constants above. */
7520 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7521 0, modifier), 0);
7522 if (modifier < EXPAND_SUM)
7523 result = force_operand (result, target);
7524 return result;
7525
7526 case REALPART_EXPR:
7527 /* The real part of the complex number is always first, therefore
7528 the address is the same as the address of the parent object. */
7529 offset = 0;
7530 bitpos = 0;
7531 inner = TREE_OPERAND (exp, 0);
7532 break;
7533
7534 case IMAGPART_EXPR:
7535 /* The imaginary part of the complex number is always second.
7536 The expression is therefore always offset by the size of the
7537 scalar type. */
7538 offset = 0;
7539 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7540 inner = TREE_OPERAND (exp, 0);
7541 break;
7542
7543 default:
7544 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7545 expand_expr, as that can have various side effects; LABEL_DECLs for
7546 example, may not have their DECL_RTL set yet. Expand the rtl of
7547 CONSTRUCTORs too, which should yield a memory reference for the
7548 constructor's contents. Assume language specific tree nodes can
7549 be expanded in some interesting way. */
7550 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7551 if (DECL_P (exp)
7552 || TREE_CODE (exp) == CONSTRUCTOR
7553 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7554 {
7555 result = expand_expr (exp, target, tmode,
7556 modifier == EXPAND_INITIALIZER
7557 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7558
7559 /* If the DECL isn't in memory, then the DECL wasn't properly
7560 marked TREE_ADDRESSABLE, which will be either a front-end
7561 or a tree optimizer bug. */
7562
7563 if (TREE_ADDRESSABLE (exp)
7564 && ! MEM_P (result)
7565 && ! targetm.calls.allocate_stack_slots_for_args ())
7566 {
7567 error ("local frame unavailable (naked function?)");
7568 return result;
7569 }
7570 else
7571 gcc_assert (MEM_P (result));
7572 result = XEXP (result, 0);
7573
7574 /* ??? Is this needed anymore? */
7575 if (DECL_P (exp))
7576 TREE_USED (exp) = 1;
7577
7578 if (modifier != EXPAND_INITIALIZER
7579 && modifier != EXPAND_CONST_ADDRESS
7580 && modifier != EXPAND_SUM)
7581 result = force_operand (result, target);
7582 return result;
7583 }
7584
7585 /* Pass FALSE as the last argument to get_inner_reference although
7586 we are expanding to RTL. The rationale is that we know how to
7587 handle "aligning nodes" here: we can just bypass them because
7588 they won't change the final object whose address will be returned
7589 (they actually exist only for that purpose). */
7590 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7591 &mode1, &unsignedp, &volatilep, false);
7592 break;
7593 }
7594
7595 /* We must have made progress. */
7596 gcc_assert (inner != exp);
7597
7598 subtarget = offset || bitpos ? NULL_RTX : target;
7599 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7600 inner alignment, force the inner to be sufficiently aligned. */
7601 if (CONSTANT_CLASS_P (inner)
7602 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7603 {
7604 inner = copy_node (inner);
7605 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7606 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7607 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7608 }
7609 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7610
7611 if (offset)
7612 {
7613 rtx tmp;
7614
7615 if (modifier != EXPAND_NORMAL)
7616 result = force_operand (result, NULL);
7617 tmp = expand_expr (offset, NULL_RTX, tmode,
7618 modifier == EXPAND_INITIALIZER
7619 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7620
7621 result = convert_memory_address_addr_space (tmode, result, as);
7622 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7623
7624 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7625 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7626 else
7627 {
7628 subtarget = bitpos ? NULL_RTX : target;
7629 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7630 1, OPTAB_LIB_WIDEN);
7631 }
7632 }
7633
7634 if (bitpos)
7635 {
7636 /* Someone beforehand should have rejected taking the address
7637 of such an object. */
7638 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7639
7640 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7641 if (modifier < EXPAND_SUM)
7642 result = force_operand (result, target);
7643 }
7644
7645 return result;
7646 }
7647
7648 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7649 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7650
7651 static rtx
7652 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7653 enum expand_modifier modifier)
7654 {
7655 addr_space_t as = ADDR_SPACE_GENERIC;
7656 enum machine_mode address_mode = Pmode;
7657 enum machine_mode pointer_mode = ptr_mode;
7658 enum machine_mode rmode;
7659 rtx result;
7660
7661 /* Target mode of VOIDmode says "whatever's natural". */
7662 if (tmode == VOIDmode)
7663 tmode = TYPE_MODE (TREE_TYPE (exp));
7664
7665 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7666 {
7667 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7668 address_mode = targetm.addr_space.address_mode (as);
7669 pointer_mode = targetm.addr_space.pointer_mode (as);
7670 }
7671
7672 /* We can get called with some Weird Things if the user does silliness
7673 like "(short) &a". In that case, convert_memory_address won't do
7674 the right thing, so ignore the given target mode. */
7675 if (tmode != address_mode && tmode != pointer_mode)
7676 tmode = address_mode;
7677
7678 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7679 tmode, modifier, as);
7680
7681 /* Despite expand_expr's claims concerning ignoring TMODE when not
7682 strictly convenient, stuff breaks if we don't honor it. Note
7683 that combined with the above, we only do this for pointer modes. */
7684 rmode = GET_MODE (result);
7685 if (rmode == VOIDmode)
7686 rmode = tmode;
7687 if (rmode != tmode)
7688 result = convert_memory_address_addr_space (tmode, result, as);
7689
7690 return result;
7691 }
7692
7693 /* Generate code for computing CONSTRUCTOR EXP.
7694 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7695 is TRUE, instead of creating a temporary variable in memory
7696 NULL is returned and the caller needs to handle it differently. */
7697
7698 static rtx
7699 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7700 bool avoid_temp_mem)
7701 {
7702 tree type = TREE_TYPE (exp);
7703 enum machine_mode mode = TYPE_MODE (type);
7704
7705 /* Try to avoid creating a temporary at all. This is possible
7706 if all of the initializer is zero.
7707 FIXME: try to handle all [0..255] initializers we can handle
7708 with memset. */
7709 if (TREE_STATIC (exp)
7710 && !TREE_ADDRESSABLE (exp)
7711 && target != 0 && mode == BLKmode
7712 && all_zeros_p (exp))
7713 {
7714 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7715 return target;
7716 }
7717
7718 /* All elts simple constants => refer to a constant in memory. But
7719 if this is a non-BLKmode mode, let it store a field at a time
7720 since that should make a CONST_INT or CONST_DOUBLE when we
7721 fold. Likewise, if we have a target we can use, it is best to
7722 store directly into the target unless the type is large enough
7723 that memcpy will be used. If we are making an initializer and
7724 all operands are constant, put it in memory as well.
7725
7726 FIXME: Avoid trying to fill vector constructors piece-meal.
7727 Output them with output_constant_def below unless we're sure
7728 they're zeros. This should go away when vector initializers
7729 are treated like VECTOR_CST instead of arrays. */
7730 if ((TREE_STATIC (exp)
7731 && ((mode == BLKmode
7732 && ! (target != 0 && safe_from_p (target, exp, 1)))
7733 || TREE_ADDRESSABLE (exp)
7734 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7735 && (! MOVE_BY_PIECES_P
7736 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7737 TYPE_ALIGN (type)))
7738 && ! mostly_zeros_p (exp))))
7739 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7740 && TREE_CONSTANT (exp)))
7741 {
7742 rtx constructor;
7743
7744 if (avoid_temp_mem)
7745 return NULL_RTX;
7746
7747 constructor = expand_expr_constant (exp, 1, modifier);
7748
7749 if (modifier != EXPAND_CONST_ADDRESS
7750 && modifier != EXPAND_INITIALIZER
7751 && modifier != EXPAND_SUM)
7752 constructor = validize_mem (constructor);
7753
7754 return constructor;
7755 }
7756
7757 /* Handle calls that pass values in multiple non-contiguous
7758 locations. The Irix 6 ABI has examples of this. */
7759 if (target == 0 || ! safe_from_p (target, exp, 1)
7760 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7761 {
7762 if (avoid_temp_mem)
7763 return NULL_RTX;
7764
7765 target
7766 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7767 | (TREE_READONLY (exp)
7768 * TYPE_QUAL_CONST))),
7769 0, TREE_ADDRESSABLE (exp), 1);
7770 }
7771
7772 store_constructor (exp, target, 0, int_expr_size (exp));
7773 return target;
7774 }
7775
7776
7777 /* expand_expr: generate code for computing expression EXP.
7778 An rtx for the computed value is returned. The value is never null.
7779 In the case of a void EXP, const0_rtx is returned.
7780
7781 The value may be stored in TARGET if TARGET is nonzero.
7782 TARGET is just a suggestion; callers must assume that
7783 the rtx returned may not be the same as TARGET.
7784
7785 If TARGET is CONST0_RTX, it means that the value will be ignored.
7786
7787 If TMODE is not VOIDmode, it suggests generating the
7788 result in mode TMODE. But this is done only when convenient.
7789 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7790 TMODE is just a suggestion; callers must assume that
7791 the rtx returned may not have mode TMODE.
7792
7793 Note that TARGET may have neither TMODE nor MODE. In that case, it
7794 probably will not be used.
7795
7796 If MODIFIER is EXPAND_SUM then when EXP is an addition
7797 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7798 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7799 products as above, or REG or MEM, or constant.
7800 Ordinarily in such cases we would output mul or add instructions
7801 and then return a pseudo reg containing the sum.
7802
7803 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7804 it also marks a label as absolutely required (it can't be dead).
7805 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7806 This is used for outputting expressions used in initializers.
7807
7808 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7809 with a constant address even if that address is not normally legitimate.
7810 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7811
7812 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7813 a call parameter. Such targets require special care as we haven't yet
7814 marked TARGET so that it's safe from being trashed by libcalls. We
7815 don't want to use TARGET for anything but the final result;
7816 intermediate values must go elsewhere. Additionally, calls to
7817 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7818
7819 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7820 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7821 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7822 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7823 recursively. */
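/* Illustrative example (not part of the original source): expanding
   the address computation for a[i] (with 4-byte elements) under
   EXPAND_SUM may return something like
   (plus (mult (reg i) (const_int 4)) (symbol_ref "a"))
   instead of computing the sum into a pseudo, so that the caller can
   try to fold the whole expression into an addressing mode.  */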
7824
7825 rtx
7826 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7827 enum expand_modifier modifier, rtx *alt_rtl)
7828 {
7829 rtx ret;
7830
7831 /* Handle ERROR_MARK before anybody tries to access its type. */
7832 if (TREE_CODE (exp) == ERROR_MARK
7833 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7834 {
7835 ret = CONST0_RTX (tmode);
7836 return ret ? ret : const0_rtx;
7837 }
7838
7839 /* If this is an expression of some kind and it has an associated line
7840 number, then emit the line number before expanding the expression.
7841
7842 We need to save and restore the file and line information so that
7843 errors discovered during expansion are emitted with the right
7844 information. It would be better if the diagnostic routines
7845 used the file/line information embedded in the tree nodes rather
7846 than globals. */
7847 if (cfun && EXPR_HAS_LOCATION (exp))
7848 {
7849 location_t saved_location = input_location;
7850 location_t saved_curr_loc = get_curr_insn_source_location ();
7851 tree saved_block = get_curr_insn_block ();
7852 input_location = EXPR_LOCATION (exp);
7853 set_curr_insn_source_location (input_location);
7854
7855 /* Record where the insns produced belong. */
7856 set_curr_insn_block (TREE_BLOCK (exp));
7857
7858 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7859
7860 input_location = saved_location;
7861 set_curr_insn_block (saved_block);
7862 set_curr_insn_source_location (saved_curr_loc);
7863 }
7864 else
7865 {
7866 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7867 }
7868
7869 return ret;
7870 }
7871
7872 /* Try to expand the conditional expression which is represented by
7873 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds,
7874 return the rtl reg which represents the result.  Otherwise return
7875 NULL_RTX. */
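/* Illustrative example (not part of the original source): on a
   target providing HAVE_conditional_move, x = a < b ? p : q can be
   expanded as a compare followed by a conditional move into a
   temporary, avoiding the branch that the generic COND_EXPR
   expansion would emit.  */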
7876
7877 static rtx
7878 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7879 tree treeop1 ATTRIBUTE_UNUSED,
7880 tree treeop2 ATTRIBUTE_UNUSED)
7881 {
7882 #ifdef HAVE_conditional_move
7883 rtx insn;
7884 rtx op00, op01, op1, op2;
7885 enum rtx_code comparison_code;
7886 enum machine_mode comparison_mode;
7887 gimple srcstmt;
7888 rtx temp;
7889 tree type = TREE_TYPE (treeop1);
7890 int unsignedp = TYPE_UNSIGNED (type);
7891 enum machine_mode mode = TYPE_MODE (type);
7892
7893 temp = assign_temp (type, 0, 0, 1);
7894
7895 /* If we cannot do a conditional move on the mode, try doing it
7896 with the promoted mode. */
7897 if (!can_conditionally_move_p (mode))
7898 mode = promote_mode (type, mode, &unsignedp);
7899
7900 if (!can_conditionally_move_p (mode))
7901 return NULL_RTX;
7902
7903 start_sequence ();
7904 expand_operands (treeop1, treeop2,
7905 temp, &op1, &op2, EXPAND_NORMAL);
7906
7907 if (TREE_CODE (treeop0) == SSA_NAME
7908 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7909 {
7910 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7911 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7912 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7913 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7914 comparison_mode = TYPE_MODE (type);
7915 unsignedp = TYPE_UNSIGNED (type);
7916 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7917 }
7918 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7919 {
7920 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7921 enum tree_code cmpcode = TREE_CODE (treeop0);
7922 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7923 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7924 unsignedp = TYPE_UNSIGNED (type);
7925 comparison_mode = TYPE_MODE (type);
7926 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7927 }
7928 else
7929 {
7930 op00 = expand_normal (treeop0);
7931 op01 = const0_rtx;
7932 comparison_code = NE;
7933 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7934 }
7935
7936 if (GET_MODE (op1) != mode)
7937 op1 = gen_lowpart (mode, op1);
7938
7939 if (GET_MODE (op2) != mode)
7940 op2 = gen_lowpart (mode, op2);
7941
7942 /* Try to emit the conditional move. */
7943 insn = emit_conditional_move (temp, comparison_code,
7944 op00, op01, comparison_mode,
7945 op1, op2, mode,
7946 unsignedp);
7947
7948 /* If we could do the conditional move, emit the sequence,
7949 and return. */
7950 if (insn)
7951 {
7952 rtx seq = get_insns ();
7953 end_sequence ();
7954 emit_insn (seq);
7955 return temp;
7956 }
7957
7958 /* Otherwise discard the sequence and fall back to code with
7959 branches. */
7960 end_sequence ();
7961 #endif
7962 return NULL_RTX;
7963 }
7964
7965 rtx
7966 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7967 enum expand_modifier modifier)
7968 {
7969 rtx op0, op1, op2, temp;
7970 tree type;
7971 int unsignedp;
7972 enum machine_mode mode;
7973 enum tree_code code = ops->code;
7974 optab this_optab;
7975 rtx subtarget, original_target;
7976 int ignore;
7977 bool reduce_bit_field;
7978 location_t loc = ops->location;
7979 tree treeop0, treeop1, treeop2;
7980 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7981 ? reduce_to_bit_field_precision ((expr), \
7982 target, \
7983 type) \
7984 : (expr))
7985
7986 type = ops->type;
7987 mode = TYPE_MODE (type);
7988 unsignedp = TYPE_UNSIGNED (type);
7989
7990 treeop0 = ops->op0;
7991 treeop1 = ops->op1;
7992 treeop2 = ops->op2;
7993
7994 /* We should be called only on simple (binary or unary) expressions,
7995 exactly those that are valid in gimple expressions that aren't
7996 GIMPLE_SINGLE_RHS (or invalid). */
7997 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7998 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7999 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8000
8001 ignore = (target == const0_rtx
8002 || ((CONVERT_EXPR_CODE_P (code)
8003 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8004 && TREE_CODE (type) == VOID_TYPE));
8005
8006 /* We should be called only if we need the result. */
8007 gcc_assert (!ignore);
8008
8009 /* An operation in what may be a bit-field type needs the
8010 result to be reduced to the precision of the bit-field type,
8011 which is narrower than that of the type's mode. */
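/* Illustrative example (hypothetical declaration, not part of the
   original source): for
   struct s { unsigned f : 3; };
   arithmetic on the 3-bit field is carried out in the field's
   containing mode, and reduce_to_bit_field_precision then masks the
   result back down to 3 bits (or sign-extends it for a signed
   field).  */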
8012 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8013 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8014
8015 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8016 target = 0;
8017
8018 /* Use subtarget as the target for operand 0 of a binary operation. */
8019 subtarget = get_subtarget (target);
8020 original_target = target;
8021
8022 switch (code)
8023 {
8024 case NON_LVALUE_EXPR:
8025 case PAREN_EXPR:
8026 CASE_CONVERT:
8027 if (treeop0 == error_mark_node)
8028 return const0_rtx;
8029
8030 if (TREE_CODE (type) == UNION_TYPE)
8031 {
8032 tree valtype = TREE_TYPE (treeop0);
8033
8034 /* If both input and output are BLKmode, this conversion isn't doing
8035 anything except possibly changing memory attributes. */
8036 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8037 {
8038 rtx result = expand_expr (treeop0, target, tmode,
8039 modifier);
8040
8041 result = copy_rtx (result);
8042 set_mem_attributes (result, type, 0);
8043 return result;
8044 }
8045
8046 if (target == 0)
8047 {
8048 if (TYPE_MODE (type) != BLKmode)
8049 target = gen_reg_rtx (TYPE_MODE (type));
8050 else
8051 target = assign_temp (type, 0, 1, 1);
8052 }
8053
8054 if (MEM_P (target))
8055 /* Store data into beginning of memory target. */
8056 store_expr (treeop0,
8057 adjust_address (target, TYPE_MODE (valtype), 0),
8058 modifier == EXPAND_STACK_PARM,
8059 false);
8060
8061 else
8062 {
8063 gcc_assert (REG_P (target));
8064
8065 /* Store this field into a union of the proper type. */
8066 store_field (target,
8067 MIN ((int_size_in_bytes (TREE_TYPE
8068 (treeop0))
8069 * BITS_PER_UNIT),
8070 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8071 0, 0, 0, TYPE_MODE (valtype), treeop0,
8072 type, 0, false);
8073 }
8074
8075 /* Return the entire union. */
8076 return target;
8077 }
8078
8079 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8080 {
8081 op0 = expand_expr (treeop0, target, VOIDmode,
8082 modifier);
8083
8084 /* If the signedness of the conversion differs and OP0 is
8085 a promoted SUBREG, clear that indication since we now
8086 have to do the proper extension. */
8087 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8088 && GET_CODE (op0) == SUBREG)
8089 SUBREG_PROMOTED_VAR_P (op0) = 0;
8090
8091 return REDUCE_BIT_FIELD (op0);
8092 }
8093
8094 op0 = expand_expr (treeop0, NULL_RTX, mode,
8095 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8096 if (GET_MODE (op0) == mode)
8097 ;
8098
8099 /* If OP0 is a constant, just convert it into the proper mode. */
8100 else if (CONSTANT_P (op0))
8101 {
8102 tree inner_type = TREE_TYPE (treeop0);
8103 enum machine_mode inner_mode = GET_MODE (op0);
8104
8105 if (inner_mode == VOIDmode)
8106 inner_mode = TYPE_MODE (inner_type);
8107
8108 if (modifier == EXPAND_INITIALIZER)
8109 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8110 subreg_lowpart_offset (mode,
8111 inner_mode));
8112 else
8113 op0 = convert_modes (mode, inner_mode, op0,
8114 TYPE_UNSIGNED (inner_type));
8115 }
8116
8117 else if (modifier == EXPAND_INITIALIZER)
8118 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8119
8120 else if (target == 0)
8121 op0 = convert_to_mode (mode, op0,
8122 TYPE_UNSIGNED (TREE_TYPE
8123 (treeop0)));
8124 else
8125 {
8126 convert_move (target, op0,
8127 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8128 op0 = target;
8129 }
8130
8131 return REDUCE_BIT_FIELD (op0);
8132
8133 case ADDR_SPACE_CONVERT_EXPR:
8134 {
8135 tree treeop0_type = TREE_TYPE (treeop0);
8136 addr_space_t as_to;
8137 addr_space_t as_from;
8138
8139 gcc_assert (POINTER_TYPE_P (type));
8140 gcc_assert (POINTER_TYPE_P (treeop0_type));
8141
8142 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8143 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8144
8145 /* Conversions between pointers to the same address space should
8146 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8147 gcc_assert (as_to != as_from);
8148
8149 /* Ask target code to handle conversion between pointers
8150 to overlapping address spaces. */
8151 if (targetm.addr_space.subset_p (as_to, as_from)
8152 || targetm.addr_space.subset_p (as_from, as_to))
8153 {
8154 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8155 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8156 gcc_assert (op0);
8157 return op0;
8158 }
8159
8160 /* For disjoint address spaces, converting anything but
8161 a null pointer invokes undefined behaviour. We simply
8162 always return a null pointer here. */
8163 return CONST0_RTX (mode);
8164 }
8165
8166 case POINTER_PLUS_EXPR:
8167 /* Even though the sizetype mode and the pointer's mode can be different,
8168 expand is able to handle this correctly and get the correct result out
8169 of the PLUS_EXPR code. */
8170 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8171 if sizetype precision is smaller than pointer precision. */
8172 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8173 treeop1 = fold_convert_loc (loc, type,
8174 fold_convert_loc (loc, ssizetype,
8175 treeop1));
8176 /* If sizetype precision is larger than pointer precision, truncate the
8177 offset to have matching modes. */
8178 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8179 treeop1 = fold_convert_loc (loc, type, treeop1);
8180
8181 case PLUS_EXPR:
8182 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8183 something else, make sure we add the register to the constant and
8184 then to the other thing. This case can occur during strength
8185 reduction and doing it this way will produce better code if the
8186 frame pointer or argument pointer is eliminated.
8187
8188 fold-const.c will ensure that the constant is always in the inner
8189 PLUS_EXPR, so the only case we need to do anything about is if
8190 sp, ap, or fp is our second argument, in which case we must swap
8191 the innermost first argument and our second argument. */
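/* Illustrative example (not part of the original source): for
   (x + 4) + fp this swaps x and fp, giving (fp + 4) + x, so the
   constant 4 is combined with the frame pointer first and can later
   be folded into the elimination offset.  */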
8192
8193 if (TREE_CODE (treeop0) == PLUS_EXPR
8194 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8195 && TREE_CODE (treeop1) == VAR_DECL
8196 && (DECL_RTL (treeop1) == frame_pointer_rtx
8197 || DECL_RTL (treeop1) == stack_pointer_rtx
8198 || DECL_RTL (treeop1) == arg_pointer_rtx))
8199 {
8200 tree t = treeop1;
8201
8202 treeop1 = TREE_OPERAND (treeop0, 0);
8203 TREE_OPERAND (treeop0, 0) = t;
8204 }
8205
8206 /* If the result is to be ptr_mode and we are adding an integer to
8207 something, we might be forming a constant. So try to use
8208 plus_constant. If it produces a sum and we can't accept it,
8209 use force_operand. This allows P = &ARR[const] to generate
8210 efficient code on machines where a SYMBOL_REF is not a valid
8211 address.
8212
8213 If this is an EXPAND_SUM call, always return the sum. */
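/* Illustrative example (not part of the original source): for
   P = &ARR[3] with 4-byte elements, plus_constant turns
   (symbol_ref "ARR") plus 12 into the single constant address
   (const (plus (symbol_ref "ARR") (const_int 12))) instead of
   emitting a run-time addition.  */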
8214 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8215 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8216 {
8217 if (modifier == EXPAND_STACK_PARM)
8218 target = 0;
8219 if (TREE_CODE (treeop0) == INTEGER_CST
8220 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8221 && TREE_CONSTANT (treeop1))
8222 {
8223 rtx constant_part;
8224
8225 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8226 EXPAND_SUM);
8227 /* Use immed_double_const to ensure that the constant is
8228 truncated according to the mode of OP1, then sign extended
8229 to a HOST_WIDE_INT. Using the constant directly can result
8230 in non-canonical RTL in a 64x32 cross compile. */
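/* Illustrative example (not part of the original source): with a
   64-bit HOST_WIDE_INT and a 32-bit mode for OP1, a constant whose
   low word is 0xffffffff must become (const_int -1) after truncation
   and sign extension; using the raw INTEGER_CST low part directly
   could yield the non-canonical (const_int 0xffffffff).  */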
8231 constant_part
8232 = immed_double_const (TREE_INT_CST_LOW (treeop0),
8233 (HOST_WIDE_INT) 0,
8234 TYPE_MODE (TREE_TYPE (treeop1)));
8235 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8236 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8237 op1 = force_operand (op1, target);
8238 return REDUCE_BIT_FIELD (op1);
8239 }
8240
8241 else if (TREE_CODE (treeop1) == INTEGER_CST
8242 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8243 && TREE_CONSTANT (treeop0))
8244 {
8245 rtx constant_part;
8246
8247 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8248 (modifier == EXPAND_INITIALIZER
8249 ? EXPAND_INITIALIZER : EXPAND_SUM));
8250 if (! CONSTANT_P (op0))
8251 {
8252 op1 = expand_expr (treeop1, NULL_RTX,
8253 VOIDmode, modifier);
8254 /* Return a PLUS if modifier says it's OK. */
8255 if (modifier == EXPAND_SUM
8256 || modifier == EXPAND_INITIALIZER)
8257 return simplify_gen_binary (PLUS, mode, op0, op1);
8258 goto binop2;
8259 }
8260 /* Use immed_double_const to ensure that the constant is
8261 truncated according to the mode of OP1, then sign extended
8262 to a HOST_WIDE_INT. Using the constant directly can result
8263 in non-canonical RTL in a 64x32 cross compile. */
8264 constant_part
8265 = immed_double_const (TREE_INT_CST_LOW (treeop1),
8266 (HOST_WIDE_INT) 0,
8267 TYPE_MODE (TREE_TYPE (treeop0)));
8268 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8269 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8270 op0 = force_operand (op0, target);
8271 return REDUCE_BIT_FIELD (op0);
8272 }
8273 }
8274
8275 /* Use TER to expand pointer addition of a negated value
8276 as pointer subtraction. */
8277 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8278 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8279 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8280 && TREE_CODE (treeop1) == SSA_NAME
8281 && TYPE_MODE (TREE_TYPE (treeop0))
8282 == TYPE_MODE (TREE_TYPE (treeop1)))
8283 {
8284 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8285 if (def)
8286 {
8287 treeop1 = gimple_assign_rhs1 (def);
8288 code = MINUS_EXPR;
8289 goto do_minus;
8290 }
8291 }
8292
8293 /* No sense saving up arithmetic to be done
8294 if it's all in the wrong mode to form part of an address.
8295 And force_operand won't know whether to sign-extend or
8296 zero-extend. */
8297 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8298 || mode != ptr_mode)
8299 {
8300 expand_operands (treeop0, treeop1,
8301 subtarget, &op0, &op1, EXPAND_NORMAL);
8302 if (op0 == const0_rtx)
8303 return op1;
8304 if (op1 == const0_rtx)
8305 return op0;
8306 goto binop2;
8307 }
8308
8309 expand_operands (treeop0, treeop1,
8310 subtarget, &op0, &op1, modifier);
8311 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8312
8313 case MINUS_EXPR:
8314 do_minus:
8315 /* For initializers, we are allowed to return a MINUS of two
8316 symbolic constants. Here we handle all cases when both operands
8317 are constant. */
8318 /* Handle difference of two symbolic constants,
8319 for the sake of an initializer. */
8320 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8321 && really_constant_p (treeop0)
8322 && really_constant_p (treeop1))
8323 {
8324 expand_operands (treeop0, treeop1,
8325 NULL_RTX, &op0, &op1, modifier);
8326
8327 /* If the last operand is a CONST_INT, use plus_constant of
8328 the negated constant. Else make the MINUS. */
8329 if (CONST_INT_P (op1))
8330 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8331 -INTVAL (op1)));
8332 else
8333 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8334 }
8335
8336 /* No sense saving up arithmetic to be done
8337 if it's all in the wrong mode to form part of an address.
8338 And force_operand won't know whether to sign-extend or
8339 zero-extend. */
8340 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8341 || mode != ptr_mode)
8342 goto binop;
8343
8344 expand_operands (treeop0, treeop1,
8345 subtarget, &op0, &op1, modifier);
8346
8347 /* Convert A - const to A + (-const). */
8348 if (CONST_INT_P (op1))
8349 {
8350 op1 = negate_rtx (mode, op1);
8351 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8352 }
8353
8354 goto binop2;
8355
8356 case WIDEN_MULT_PLUS_EXPR:
8357 case WIDEN_MULT_MINUS_EXPR:
8358 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8359 op2 = expand_normal (treeop2);
8360 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8361 target, unsignedp);
8362 return target;
8363
8364 case WIDEN_MULT_EXPR:
8365 /* If first operand is constant, swap them.
8366 Thus the following special case checks need only
8367 check the second operand. */
8368 if (TREE_CODE (treeop0) == INTEGER_CST)
8369 {
8370 tree t1 = treeop0;
8371 treeop0 = treeop1;
8372 treeop1 = t1;
8373 }
8374
8375 /* First, check if we have a multiplication of one signed and one
8376 unsigned operand. */
8377 if (TREE_CODE (treeop1) != INTEGER_CST
8378 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8379 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8380 {
8381 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8382 this_optab = usmul_widen_optab;
8383 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8384 != CODE_FOR_nothing)
8385 {
8386 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8387 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8388 EXPAND_NORMAL);
8389 else
8390 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8391 EXPAND_NORMAL);
8392 goto binop3;
8393 }
8394 }
8395 /* Check for a multiplication with matching signedness. */
8396 else if ((TREE_CODE (treeop1) == INTEGER_CST
8397 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8398 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8399 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8400 {
8401 tree op0type = TREE_TYPE (treeop0);
8402 enum machine_mode innermode = TYPE_MODE (op0type);
8403 bool zextend_p = TYPE_UNSIGNED (op0type);
8404 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8405 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8406
8407 if (TREE_CODE (treeop0) != INTEGER_CST)
8408 {
8409 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8410 != CODE_FOR_nothing)
8411 {
8412 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8413 EXPAND_NORMAL);
8414 temp = expand_widening_mult (mode, op0, op1, target,
8415 unsignedp, this_optab);
8416 return REDUCE_BIT_FIELD (temp);
8417 }
8418 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8419 != CODE_FOR_nothing
8420 && innermode == word_mode)
8421 {
8422 rtx htem, hipart;
8423 op0 = expand_normal (treeop0);
8424 if (TREE_CODE (treeop1) == INTEGER_CST)
8425 op1 = convert_modes (innermode, mode,
8426 expand_normal (treeop1), unsignedp);
8427 else
8428 op1 = expand_normal (treeop1);
8429 temp = expand_binop (mode, other_optab, op0, op1, target,
8430 unsignedp, OPTAB_LIB_WIDEN);
8431 hipart = gen_highpart (innermode, temp);
8432 htem = expand_mult_highpart_adjust (innermode, hipart,
8433 op0, op1, hipart,
8434 zextend_p);
8435 if (htem != hipart)
8436 emit_move_insn (hipart, htem);
8437 return REDUCE_BIT_FIELD (temp);
8438 }
8439 }
8440 }
8441 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8442 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8443 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8444 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8445
8446 case FMA_EXPR:
8447 {
8448 optab opt = fma_optab;
8449 gimple def0, def2;
8450
8451 /* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
8452 call. */
8453 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8454 {
8455 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8456 tree call_expr;
8457
8458 gcc_assert (fn != NULL_TREE);
8459 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8460 return expand_builtin (call_expr, target, subtarget, mode, false);
8461 }
8462
8463 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8464 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8465
8466 op0 = op2 = NULL;
8467
8468 if (def0 && def2
8469 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8470 {
8471 opt = fnms_optab;
8472 op0 = expand_normal (gimple_assign_rhs1 (def0));
8473 op2 = expand_normal (gimple_assign_rhs1 (def2));
8474 }
8475 else if (def0
8476 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8477 {
8478 opt = fnma_optab;
8479 op0 = expand_normal (gimple_assign_rhs1 (def0));
8480 }
8481 else if (def2
8482 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8483 {
8484 opt = fms_optab;
8485 op2 = expand_normal (gimple_assign_rhs1 (def2));
8486 }
8487
8488 if (op0 == NULL)
8489 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8490 if (op2 == NULL)
8491 op2 = expand_normal (treeop2);
8492 op1 = expand_normal (treeop1);
8493
8494 return expand_ternary_op (TYPE_MODE (type), opt,
8495 op0, op1, op2, target, 0);
8496 }
8497
8498 case MULT_EXPR:
8499 /* If this is a fixed-point operation, then we cannot use the code
8500 below because "expand_mult" doesn't support sat/no-sat fixed-point
8501 multiplications. */
8502 if (ALL_FIXED_POINT_MODE_P (mode))
8503 goto binop;
8504
8505 /* If first operand is constant, swap them.
8506 Thus the following special case checks need only
8507 check the second operand. */
8508 if (TREE_CODE (treeop0) == INTEGER_CST)
8509 {
8510 tree t1 = treeop0;
8511 treeop0 = treeop1;
8512 treeop1 = t1;
8513 }
8514
8515 /* Attempt to return something suitable for generating an
8516 indexed address, for machines that support that. */
8517
8518 if (modifier == EXPAND_SUM && mode == ptr_mode
8519 && host_integerp (treeop1, 0))
8520 {
8521 tree exp1 = treeop1;
8522
8523 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8524 EXPAND_SUM);
8525
8526 if (!REG_P (op0))
8527 op0 = force_operand (op0, NULL_RTX);
8528 if (!REG_P (op0))
8529 op0 = copy_to_mode_reg (mode, op0);
8530
8531 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8532 gen_int_mode (tree_low_cst (exp1, 0),
8533 TYPE_MODE (TREE_TYPE (exp1)))));
8534 }
8535
8536 if (modifier == EXPAND_STACK_PARM)
8537 target = 0;
8538
8539 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8540 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8541
8542 case TRUNC_DIV_EXPR:
8543 case FLOOR_DIV_EXPR:
8544 case CEIL_DIV_EXPR:
8545 case ROUND_DIV_EXPR:
8546 case EXACT_DIV_EXPR:
8547 /* If this is a fixed-point operation, then we cannot use the code
8548 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8549 divisions. */
8550 if (ALL_FIXED_POINT_MODE_P (mode))
8551 goto binop;
8552
8553 if (modifier == EXPAND_STACK_PARM)
8554 target = 0;
8555 /* Possible optimization: compute the dividend with EXPAND_SUM
8556 then, if the divisor is constant, we can optimize the case
8557 where some terms of the dividend have coefficients divisible by it. */
8558 expand_operands (treeop0, treeop1,
8559 subtarget, &op0, &op1, EXPAND_NORMAL);
8560 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8561
8562 case RDIV_EXPR:
8563 goto binop;
8564
8565 case TRUNC_MOD_EXPR:
8566 case FLOOR_MOD_EXPR:
8567 case CEIL_MOD_EXPR:
8568 case ROUND_MOD_EXPR:
8569 if (modifier == EXPAND_STACK_PARM)
8570 target = 0;
8571 expand_operands (treeop0, treeop1,
8572 subtarget, &op0, &op1, EXPAND_NORMAL);
8573 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8574
8575 case FIXED_CONVERT_EXPR:
8576 op0 = expand_normal (treeop0);
8577 if (target == 0 || modifier == EXPAND_STACK_PARM)
8578 target = gen_reg_rtx (mode);
8579
8580 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8581 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8582 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8583 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8584 else
8585 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8586 return target;
8587
8588 case FIX_TRUNC_EXPR:
8589 op0 = expand_normal (treeop0);
8590 if (target == 0 || modifier == EXPAND_STACK_PARM)
8591 target = gen_reg_rtx (mode);
8592 expand_fix (target, op0, unsignedp);
8593 return target;
8594
8595 case FLOAT_EXPR:
8596 op0 = expand_normal (treeop0);
8597 if (target == 0 || modifier == EXPAND_STACK_PARM)
8598 target = gen_reg_rtx (mode);
8599 /* expand_float can't figure out what to do if FROM has VOIDmode.
8600 So give it the correct mode. With -O, cse will optimize this. */
8601 if (GET_MODE (op0) == VOIDmode)
8602 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8603 op0);
8604 expand_float (target, op0,
8605 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8606 return target;
8607
8608 case NEGATE_EXPR:
8609 op0 = expand_expr (treeop0, subtarget,
8610 VOIDmode, EXPAND_NORMAL);
8611 if (modifier == EXPAND_STACK_PARM)
8612 target = 0;
8613 temp = expand_unop (mode,
8614 optab_for_tree_code (NEGATE_EXPR, type,
8615 optab_default),
8616 op0, target, 0);
8617 gcc_assert (temp);
8618 return REDUCE_BIT_FIELD (temp);
8619
8620 case ABS_EXPR:
8621 op0 = expand_expr (treeop0, subtarget,
8622 VOIDmode, EXPAND_NORMAL);
8623 if (modifier == EXPAND_STACK_PARM)
8624 target = 0;
8625
8626 /* ABS_EXPR is not valid for complex arguments. */
8627 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8628 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8629
8630 /* Unsigned abs is simply the operand. Testing here means we don't
8631 risk generating incorrect code below. */
8632 if (TYPE_UNSIGNED (type))
8633 return op0;
8634
8635 return expand_abs (mode, op0, target, unsignedp,
8636 safe_from_p (target, treeop0, 1));
8637
8638 case MAX_EXPR:
8639 case MIN_EXPR:
8640 target = original_target;
8641 if (target == 0
8642 || modifier == EXPAND_STACK_PARM
8643 || (MEM_P (target) && MEM_VOLATILE_P (target))
8644 || GET_MODE (target) != mode
8645 || (REG_P (target)
8646 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8647 target = gen_reg_rtx (mode);
8648 expand_operands (treeop0, treeop1,
8649 target, &op0, &op1, EXPAND_NORMAL);
8650
8651 /* First try to do it with a special MIN or MAX instruction.
8652 If that does not win, use a conditional jump to select the proper
8653 value. */
8654 this_optab = optab_for_tree_code (code, type, optab_default);
8655 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8656 OPTAB_WIDEN);
8657 if (temp != 0)
8658 return temp;
8659
8660 /* At this point, a MEM target is no longer useful; we will get better
8661 code without it. */
8662
8663 if (! REG_P (target))
8664 target = gen_reg_rtx (mode);
8665
8666 /* If op1 was placed in target, swap op0 and op1. */
8667 if (target != op0 && target == op1)
8668 {
8669 temp = op0;
8670 op0 = op1;
8671 op1 = temp;
8672 }
8673
8674 /* We generate better code and avoid problems with op1 mentioning
8675 target by forcing op1 into a pseudo if it isn't a constant. */
8676 if (! CONSTANT_P (op1))
8677 op1 = force_reg (mode, op1);
8678
8679 {
8680 enum rtx_code comparison_code;
8681 rtx cmpop1 = op1;
8682
8683 if (code == MAX_EXPR)
8684 comparison_code = unsignedp ? GEU : GE;
8685 else
8686 comparison_code = unsignedp ? LEU : LE;
8687
8688 /* Canonicalize to comparisons against 0. */
8689 if (op1 == const1_rtx)
8690 {
8691 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8692 or (a != 0 ? a : 1) for unsigned.
8693 For MIN we are safe converting (a <= 1 ? a : 1)
8694 into (a <= 0 ? a : 1) */
8695 cmpop1 = const0_rtx;
8696 if (code == MAX_EXPR)
8697 comparison_code = unsignedp ? NE : GT;
8698 }
8699 if (op1 == constm1_rtx && !unsignedp)
8700 {
8701 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8702 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8703 cmpop1 = const0_rtx;
8704 if (code == MIN_EXPR)
8705 comparison_code = LT;
8706 }
8707 #ifdef HAVE_conditional_move
8708 /* Use a conditional move if possible. */
8709 if (can_conditionally_move_p (mode))
8710 {
8711 rtx insn;
8712
8713 /* ??? Same problem as in expmed.c: emit_conditional_move
8714 forces a stack adjustment via compare_from_rtx, and we
8715 lose the stack adjustment if the sequence we are about
8716 to create is discarded. */
8717 do_pending_stack_adjust ();
8718
8719 start_sequence ();
8720
8721 /* Try to emit the conditional move. */
8722 insn = emit_conditional_move (target, comparison_code,
8723 op0, cmpop1, mode,
8724 op0, op1, mode,
8725 unsignedp);
8726
8727 /* If we could do the conditional move, emit the sequence,
8728 and return. */
8729 if (insn)
8730 {
8731 rtx seq = get_insns ();
8732 end_sequence ();
8733 emit_insn (seq);
8734 return target;
8735 }
8736
8737 /* Otherwise discard the sequence and fall back to code with
8738 branches. */
8739 end_sequence ();
8740 }
8741 #endif
8742 if (target != op0)
8743 emit_move_insn (target, op0);
8744
8745 temp = gen_label_rtx ();
8746 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8747 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8748 -1);
8749 }
8750 emit_move_insn (target, op1);
8751 emit_label (temp);
8752 return target;
8753
8754 case BIT_NOT_EXPR:
8755 op0 = expand_expr (treeop0, subtarget,
8756 VOIDmode, EXPAND_NORMAL);
8757 if (modifier == EXPAND_STACK_PARM)
8758 target = 0;
8759 /* In case we have to reduce the result to bitfield precision
8760 for unsigned bitfield expand this as XOR with a proper constant
8761 instead. */
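	 /* For a bit-field of precision N this computes op0 ^ ((1 << N) - 1),
	    i.e. ~op0 reduced to the field's precision (op0 is assumed to be
	    already truncated to N bits).  */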
8762 if (reduce_bit_field && TYPE_UNSIGNED (type))
8763 temp = expand_binop (mode, xor_optab, op0,
8764 immed_double_int_const
8765 (double_int_mask (TYPE_PRECISION (type)), mode),
8766 target, 1, OPTAB_LIB_WIDEN);
8767 else
8768 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8769 gcc_assert (temp);
8770 return temp;
8771
8772 /* ??? Can optimize bitwise operations with one arg constant.
8773 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8774 and (a bitwise1 b) bitwise2 b (etc)
8775 but that is probably not worth while. */
8776
8777 case BIT_AND_EXPR:
8778 case BIT_IOR_EXPR:
8779 case BIT_XOR_EXPR:
8780 goto binop;
8781
8782 case LROTATE_EXPR:
8783 case RROTATE_EXPR:
8784 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8785 || (GET_MODE_PRECISION (TYPE_MODE (type))
8786 == TYPE_PRECISION (type)));
8787 /* fall through */
8788
8789 case LSHIFT_EXPR:
8790 case RSHIFT_EXPR:
8791 /* If this is a fixed-point operation, then we cannot use the code
8792 below because "expand_shift" doesn't support sat/no-sat fixed-point
8793 shifts. */
8794 if (ALL_FIXED_POINT_MODE_P (mode))
8795 goto binop;
8796
8797 if (! safe_from_p (subtarget, treeop1, 1))
8798 subtarget = 0;
8799 if (modifier == EXPAND_STACK_PARM)
8800 target = 0;
8801 op0 = expand_expr (treeop0, subtarget,
8802 VOIDmode, EXPAND_NORMAL);
8803 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8804 unsignedp);
8805 if (code == LSHIFT_EXPR)
8806 temp = REDUCE_BIT_FIELD (temp);
8807 return temp;
8808
8809 /* Could determine the answer when only additive constants differ. Also,
8810 the addition of one can be handled by changing the condition. */
8811 case LT_EXPR:
8812 case LE_EXPR:
8813 case GT_EXPR:
8814 case GE_EXPR:
8815 case EQ_EXPR:
8816 case NE_EXPR:
8817 case UNORDERED_EXPR:
8818 case ORDERED_EXPR:
8819 case UNLT_EXPR:
8820 case UNLE_EXPR:
8821 case UNGT_EXPR:
8822 case UNGE_EXPR:
8823 case UNEQ_EXPR:
8824 case LTGT_EXPR:
8825 temp = do_store_flag (ops,
8826 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8827 tmode != VOIDmode ? tmode : mode);
8828 if (temp)
8829 return temp;
8830
8831 /* Use a compare and a jump for BLKmode comparisons, or for function
8832 	 type comparisons if HAVE_canonicalize_funcptr_for_compare.  */
8833
8834 if ((target == 0
8835 || modifier == EXPAND_STACK_PARM
8836 || ! safe_from_p (target, treeop0, 1)
8837 || ! safe_from_p (target, treeop1, 1)
8838 /* Make sure we don't have a hard reg (such as function's return
8839 value) live across basic blocks, if not optimizing. */
8840 || (!optimize && REG_P (target)
8841 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8842 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8843
8844 emit_move_insn (target, const0_rtx);
8845
8846 op1 = gen_label_rtx ();
8847 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8848
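      /* A signed 1-bit type can only hold 0 and -1, so use -1 rather than 1
	 as the "true" value.  */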
8849 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8850 emit_move_insn (target, constm1_rtx);
8851 else
8852 emit_move_insn (target, const1_rtx);
8853
8854 emit_label (op1);
8855 return target;
8856
8857 case COMPLEX_EXPR:
8858 /* Get the rtx code of the operands. */
8859 op0 = expand_normal (treeop0);
8860 op1 = expand_normal (treeop1);
8861
8862 if (!target)
8863 target = gen_reg_rtx (TYPE_MODE (type));
8864
8865 /* Move the real (op0) and imaginary (op1) parts to their location. */
8866 write_complex_part (target, op0, false);
8867 write_complex_part (target, op1, true);
8868
8869 return target;
8870
8871 case WIDEN_SUM_EXPR:
8872 {
8873 tree oprnd0 = treeop0;
8874 tree oprnd1 = treeop1;
8875
8876 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8877 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8878 target, unsignedp);
8879 return target;
8880 }
8881
8882 case REDUC_MAX_EXPR:
8883 case REDUC_MIN_EXPR:
8884 case REDUC_PLUS_EXPR:
8885 {
8886 op0 = expand_normal (treeop0);
8887 this_optab = optab_for_tree_code (code, type, optab_default);
8888 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8889 gcc_assert (temp);
8890 return temp;
8891 }
8892
8893 case VEC_LSHIFT_EXPR:
8894 case VEC_RSHIFT_EXPR:
8895 {
8896 target = expand_vec_shift_expr (ops, target);
8897 return target;
8898 }
8899
8900 case VEC_UNPACK_HI_EXPR:
8901 case VEC_UNPACK_LO_EXPR:
8902 {
8903 op0 = expand_normal (treeop0);
8904 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8905 target, unsignedp);
8906 gcc_assert (temp);
8907 return temp;
8908 }
8909
8910 case VEC_UNPACK_FLOAT_HI_EXPR:
8911 case VEC_UNPACK_FLOAT_LO_EXPR:
8912 {
8913 op0 = expand_normal (treeop0);
8914 /* The signedness is determined from input operand. */
8915 temp = expand_widen_pattern_expr
8916 (ops, op0, NULL_RTX, NULL_RTX,
8917 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8918
8919 gcc_assert (temp);
8920 return temp;
8921 }
8922
8923 case VEC_WIDEN_MULT_HI_EXPR:
8924 case VEC_WIDEN_MULT_LO_EXPR:
8925 {
8926 tree oprnd0 = treeop0;
8927 tree oprnd1 = treeop1;
8928
8929 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8930 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8931 target, unsignedp);
8932 gcc_assert (target);
8933 return target;
8934 }
8935
8936 case VEC_WIDEN_LSHIFT_HI_EXPR:
8937 case VEC_WIDEN_LSHIFT_LO_EXPR:
8938 {
8939 tree oprnd0 = treeop0;
8940 tree oprnd1 = treeop1;
8941
8942 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8943 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8944 target, unsignedp);
8945 gcc_assert (target);
8946 return target;
8947 }
8948
8949 case VEC_PACK_TRUNC_EXPR:
8950 case VEC_PACK_SAT_EXPR:
8951 case VEC_PACK_FIX_TRUNC_EXPR:
8952 mode = TYPE_MODE (TREE_TYPE (treeop0));
8953 goto binop;
8954
8955 case VEC_PERM_EXPR:
8956 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
8957 op2 = expand_normal (treeop2);
8958
8959 /* Careful here: if the target doesn't support integral vector modes,
8960 a constant selection vector could wind up smooshed into a normal
8961 integral constant. */
8962 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
8963 {
8964 tree sel_type = TREE_TYPE (treeop2);
8965 enum machine_mode vmode
8966 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
8967 TYPE_VECTOR_SUBPARTS (sel_type));
8968 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
8969 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
8970 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
8971 }
8972 else
8973 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
8974
8975 temp = expand_vec_perm (mode, op0, op1, op2, target);
8976 gcc_assert (temp);
8977 return temp;
8978
8979 case DOT_PROD_EXPR:
8980 {
8981 tree oprnd0 = treeop0;
8982 tree oprnd1 = treeop1;
8983 tree oprnd2 = treeop2;
8984 rtx op2;
8985
8986 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8987 op2 = expand_normal (oprnd2);
8988 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8989 target, unsignedp);
8990 return target;
8991 }
8992
8993 case REALIGN_LOAD_EXPR:
8994 {
8995 tree oprnd0 = treeop0;
8996 tree oprnd1 = treeop1;
8997 tree oprnd2 = treeop2;
8998 rtx op2;
8999
9000 this_optab = optab_for_tree_code (code, type, optab_default);
9001 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9002 op2 = expand_normal (oprnd2);
9003 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9004 target, unsignedp);
9005 gcc_assert (temp);
9006 return temp;
9007 }
9008
9009 case COND_EXPR:
9010 /* A COND_EXPR with its type being VOID_TYPE represents a
9011 conditional jump and is handled in
9012 expand_gimple_cond_expr. */
9013 gcc_assert (!VOID_TYPE_P (type));
9014
9015 /* Note that COND_EXPRs whose type is a structure or union
9016 are required to be constructed to contain assignments of
9017 a temporary variable, so that we can evaluate them here
9018 for side effect only. If type is void, we must do likewise. */
9019
9020 gcc_assert (!TREE_ADDRESSABLE (type)
9021 && !ignore
9022 && TREE_TYPE (treeop1) != void_type_node
9023 && TREE_TYPE (treeop2) != void_type_node);
9024
9025 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9026 if (temp)
9027 return temp;
9028
9029 /* If we are not to produce a result, we have no target. Otherwise,
9030 if a target was specified use it; it will not be used as an
9031 intermediate target unless it is safe. If no target, use a
9032 temporary. */
9033
9034 if (modifier != EXPAND_STACK_PARM
9035 && original_target
9036 && safe_from_p (original_target, treeop0, 1)
9037 && GET_MODE (original_target) == mode
9038 && !MEM_P (original_target))
9039 temp = original_target;
9040 else
9041 temp = assign_temp (type, 0, 0, 1);
9042
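      /* Lay this out as: jump to label OP0 if the condition is false, store
	 TREEOP1 into TEMP and jump to OP1, store TREEOP2 at OP0, and join at
	 OP1.  */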
9043 do_pending_stack_adjust ();
9044 NO_DEFER_POP;
9045 op0 = gen_label_rtx ();
9046 op1 = gen_label_rtx ();
9047 jumpifnot (treeop0, op0, -1);
9048 store_expr (treeop1, temp,
9049 modifier == EXPAND_STACK_PARM,
9050 false);
9051
9052 emit_jump_insn (gen_jump (op1));
9053 emit_barrier ();
9054 emit_label (op0);
9055 store_expr (treeop2, temp,
9056 modifier == EXPAND_STACK_PARM,
9057 false);
9058
9059 emit_label (op1);
9060 OK_DEFER_POP;
9061 return temp;
9062
9063 case VEC_COND_EXPR:
9064 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9065 return target;
9066
9067 default:
9068 gcc_unreachable ();
9069 }
9070
9071 /* Here to do an ordinary binary operator. */
9072 binop:
9073 expand_operands (treeop0, treeop1,
9074 subtarget, &op0, &op1, EXPAND_NORMAL);
9075 binop2:
9076 this_optab = optab_for_tree_code (code, type, optab_default);
9077 binop3:
9078 if (modifier == EXPAND_STACK_PARM)
9079 target = 0;
9080 temp = expand_binop (mode, this_optab, op0, op1, target,
9081 unsignedp, OPTAB_LIB_WIDEN);
9082 gcc_assert (temp);
9083 /* Bitwise operations do not need bitfield reduction as we expect their
9084 	 operands to be properly truncated.  */
9085 if (code == BIT_XOR_EXPR
9086 || code == BIT_AND_EXPR
9087 || code == BIT_IOR_EXPR)
9088 return temp;
9089 return REDUCE_BIT_FIELD (temp);
9090 }
9091 #undef REDUCE_BIT_FIELD
9092
9093 rtx
9094 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9095 enum expand_modifier modifier, rtx *alt_rtl)
9096 {
9097 rtx op0, op1, temp, decl_rtl;
9098 tree type;
9099 int unsignedp;
9100 enum machine_mode mode;
9101 enum tree_code code = TREE_CODE (exp);
9102 rtx subtarget, original_target;
9103 int ignore;
9104 tree context;
9105 bool reduce_bit_field;
9106 location_t loc = EXPR_LOCATION (exp);
9107 struct separate_ops ops;
9108 tree treeop0, treeop1, treeop2;
9109 tree ssa_name = NULL_TREE;
9110 gimple g;
9111
9112 type = TREE_TYPE (exp);
9113 mode = TYPE_MODE (type);
9114 unsignedp = TYPE_UNSIGNED (type);
9115
9116 treeop0 = treeop1 = treeop2 = NULL_TREE;
9117 if (!VL_EXP_CLASS_P (exp))
9118 switch (TREE_CODE_LENGTH (code))
9119 {
9120 default:
9121 case 3: treeop2 = TREE_OPERAND (exp, 2);
9122 case 2: treeop1 = TREE_OPERAND (exp, 1);
9123 case 1: treeop0 = TREE_OPERAND (exp, 0);
9124 case 0: break;
9125 }
9126 ops.code = code;
9127 ops.type = type;
9128 ops.op0 = treeop0;
9129 ops.op1 = treeop1;
9130 ops.op2 = treeop2;
9131 ops.location = loc;
9132
9133 ignore = (target == const0_rtx
9134 || ((CONVERT_EXPR_CODE_P (code)
9135 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9136 && TREE_CODE (type) == VOID_TYPE));
9137
9138 /* An operation in what may be a bit-field type needs the
9139 result to be reduced to the precision of the bit-field type,
9140 which is narrower than that of the type's mode. */
9141 reduce_bit_field = (!ignore
9142 && INTEGRAL_TYPE_P (type)
9143 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9144
9145 /* If we are going to ignore this result, we need only do something
9146 if there is a side-effect somewhere in the expression. If there
9147 is, short-circuit the most common cases here. Note that we must
9148 not call expand_expr with anything but const0_rtx in case this
9149 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9150
9151 if (ignore)
9152 {
9153 if (! TREE_SIDE_EFFECTS (exp))
9154 return const0_rtx;
9155
9156 /* Ensure we reference a volatile object even if value is ignored, but
9157 don't do this if all we are doing is taking its address. */
9158 if (TREE_THIS_VOLATILE (exp)
9159 && TREE_CODE (exp) != FUNCTION_DECL
9160 && mode != VOIDmode && mode != BLKmode
9161 && modifier != EXPAND_CONST_ADDRESS)
9162 {
9163 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9164 if (MEM_P (temp))
9165 copy_to_reg (temp);
9166 return const0_rtx;
9167 }
9168
9169 if (TREE_CODE_CLASS (code) == tcc_unary
9170 || code == COMPONENT_REF || code == INDIRECT_REF)
9171 return expand_expr (treeop0, const0_rtx, VOIDmode,
9172 modifier);
9173
9174 else if (TREE_CODE_CLASS (code) == tcc_binary
9175 || TREE_CODE_CLASS (code) == tcc_comparison
9176 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9177 {
9178 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9179 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9180 return const0_rtx;
9181 }
9182 else if (code == BIT_FIELD_REF)
9183 {
9184 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9185 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9186 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
9187 return const0_rtx;
9188 }
9189
9190 target = 0;
9191 }
9192
9193 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9194 target = 0;
9195
9196 /* Use subtarget as the target for operand 0 of a binary operation. */
9197 subtarget = get_subtarget (target);
9198 original_target = target;
9199
9200 switch (code)
9201 {
9202 case LABEL_DECL:
9203 {
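	/* Return the label as a MEM in FUNCTION_MODE whose address is a
	   LABEL_REF; references to labels of enclosing functions are marked
	   as non-local.  */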
9204 tree function = decl_function_context (exp);
9205
9206 temp = label_rtx (exp);
9207 temp = gen_rtx_LABEL_REF (Pmode, temp);
9208
9209 if (function != current_function_decl
9210 && function != 0)
9211 LABEL_REF_NONLOCAL_P (temp) = 1;
9212
9213 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9214 return temp;
9215 }
9216
9217 case SSA_NAME:
9218 /* ??? ivopts calls expander, without any preparation from
9219 out-of-ssa. So fake instructions as if this was an access to the
9220 	 base variable.  This unnecessarily allocates a pseudo; see how we can
9221 	 reuse it if partition base vars have it set already.  */
9222 if (!currently_expanding_to_rtl)
9223 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
9224 NULL);
9225
9226 g = get_gimple_for_ssa_name (exp);
9227 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9228 if (g == NULL
9229 && modifier == EXPAND_INITIALIZER
9230 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9231 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9232 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9233 g = SSA_NAME_DEF_STMT (exp);
9234 if (g)
9235 {
9236 rtx r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
9237 tmode, modifier, NULL);
9238 if (REG_P (r) && !REG_EXPR (r))
9239 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9240 return r;
9241 }
9242
9243 ssa_name = exp;
9244 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9245 exp = SSA_NAME_VAR (ssa_name);
9246 goto expand_decl_rtl;
9247
9248 case PARM_DECL:
9249 case VAR_DECL:
9250 /* If a static var's type was incomplete when the decl was written,
9251 but the type is complete now, lay out the decl now. */
9252 if (DECL_SIZE (exp) == 0
9253 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9254 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9255 layout_decl (exp, 0);
9256
9257 /* ... fall through ... */
9258
9259 case FUNCTION_DECL:
9260 case RESULT_DECL:
9261 decl_rtl = DECL_RTL (exp);
9262 expand_decl_rtl:
9263 gcc_assert (decl_rtl);
9264 decl_rtl = copy_rtx (decl_rtl);
9265 /* Record writes to register variables. */
9266 if (modifier == EXPAND_WRITE
9267 && REG_P (decl_rtl)
9268 && HARD_REGISTER_P (decl_rtl))
9269 add_to_hard_reg_set (&crtl->asm_clobbers,
9270 GET_MODE (decl_rtl), REGNO (decl_rtl));
9271
9272       /* Ensure the variable is marked as used even if it doesn't go through
9273 	 a parser.  If it hasn't been used yet, write out an external
9274 definition. */
9275 TREE_USED (exp) = 1;
9276
9277 /* Show we haven't gotten RTL for this yet. */
9278 temp = 0;
9279
9280 /* Variables inherited from containing functions should have
9281 been lowered by this point. */
9282 context = decl_function_context (exp);
9283 gcc_assert (!context
9284 || context == current_function_decl
9285 || TREE_STATIC (exp)
9286 || DECL_EXTERNAL (exp)
9287 /* ??? C++ creates functions that are not TREE_STATIC. */
9288 || TREE_CODE (exp) == FUNCTION_DECL);
9289
9290 /* This is the case of an array whose size is to be determined
9291 from its initializer, while the initializer is still being parsed.
9292 See expand_decl. */
9293
9294 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9295 temp = validize_mem (decl_rtl);
9296
9297       /* If DECL_RTL is memory, we are in the normal case; if the
9298 	 address is not valid, get the address into a register.  */
9299
9300 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9301 {
9302 if (alt_rtl)
9303 *alt_rtl = decl_rtl;
9304 decl_rtl = use_anchored_address (decl_rtl);
9305 if (modifier != EXPAND_CONST_ADDRESS
9306 && modifier != EXPAND_SUM
9307 && !memory_address_addr_space_p (DECL_MODE (exp),
9308 XEXP (decl_rtl, 0),
9309 MEM_ADDR_SPACE (decl_rtl)))
9310 temp = replace_equiv_address (decl_rtl,
9311 copy_rtx (XEXP (decl_rtl, 0)));
9312 }
9313
9314 /* If we got something, return it. But first, set the alignment
9315 if the address is a register. */
9316 if (temp != 0)
9317 {
9318 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9319 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9320
9321 return temp;
9322 }
9323
9324 /* If the mode of DECL_RTL does not match that of the decl,
9325 there are two cases: we are dealing with a BLKmode value
9326 that is returned in a register, or we are dealing with
9327 a promoted value. In the latter case, return a SUBREG
9328 of the wanted mode, but mark it so that we know that it
9329 was already extended. */
9330 if (REG_P (decl_rtl)
9331 && DECL_MODE (exp) != BLKmode
9332 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9333 {
9334 enum machine_mode pmode;
9335
9336 /* Get the signedness to be used for this variable. Ensure we get
9337 the same mode we got when the variable was declared. */
9338 if (code == SSA_NAME
9339 && (g = SSA_NAME_DEF_STMT (ssa_name))
9340 && gimple_code (g) == GIMPLE_CALL)
9341 {
9342 gcc_assert (!gimple_call_internal_p (g));
9343 pmode = promote_function_mode (type, mode, &unsignedp,
9344 gimple_call_fntype (g),
9345 2);
9346 }
9347 else
9348 pmode = promote_decl_mode (exp, &unsignedp);
9349 gcc_assert (GET_MODE (decl_rtl) == pmode);
9350
9351 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9352 SUBREG_PROMOTED_VAR_P (temp) = 1;
9353 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9354 return temp;
9355 }
9356
9357 return decl_rtl;
9358
9359 case INTEGER_CST:
9360 temp = immed_double_const (TREE_INT_CST_LOW (exp),
9361 TREE_INT_CST_HIGH (exp), mode);
9362
9363 return temp;
9364
9365 case VECTOR_CST:
9366 {
9367 tree tmp = NULL_TREE;
9368 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9369 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9370 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9371 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9372 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9373 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9374 return const_vector_from_tree (exp);
9375 if (GET_MODE_CLASS (mode) == MODE_INT)
9376 {
9377 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9378 if (type_for_mode)
9379 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9380 }
9381 if (!tmp)
9382 {
9383 VEC(constructor_elt,gc) *v;
9384 unsigned i;
9385 v = VEC_alloc (constructor_elt, gc, VECTOR_CST_NELTS (exp));
9386 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9387 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9388 tmp = build_constructor (type, v);
9389 }
9390 return expand_expr (tmp, ignore ? const0_rtx : target,
9391 tmode, modifier);
9392 }
9393
9394 case CONST_DECL:
9395 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9396
9397 case REAL_CST:
9398 /* If optimized, generate immediate CONST_DOUBLE
9399 which will be turned into memory by reload if necessary.
9400
9401 We used to force a register so that loop.c could see it. But
9402 this does not allow gen_* patterns to perform optimizations with
9403 the constants. It also produces two insns in cases like "x = 1.0;".
9404 On most machines, floating-point constants are not permitted in
9405 many insns, so we'd end up copying it to a register in any case.
9406
9407 Now, we do the copying in expand_binop, if appropriate. */
9408 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9409 TYPE_MODE (TREE_TYPE (exp)));
9410
9411 case FIXED_CST:
9412 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9413 TYPE_MODE (TREE_TYPE (exp)));
9414
9415 case COMPLEX_CST:
9416 /* Handle evaluating a complex constant in a CONCAT target. */
9417 if (original_target && GET_CODE (original_target) == CONCAT)
9418 {
9419 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9420 rtx rtarg, itarg;
9421
9422 rtarg = XEXP (original_target, 0);
9423 itarg = XEXP (original_target, 1);
9424
9425 /* Move the real and imaginary parts separately. */
9426 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9427 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9428
9429 if (op0 != rtarg)
9430 emit_move_insn (rtarg, op0);
9431 if (op1 != itarg)
9432 emit_move_insn (itarg, op1);
9433
9434 return original_target;
9435 }
9436
9437 /* ... fall through ... */
9438
9439 case STRING_CST:
9440 temp = expand_expr_constant (exp, 1, modifier);
9441
9442 /* temp contains a constant address.
9443 On RISC machines where a constant address isn't valid,
9444 make some insns to get that address into a register. */
9445 if (modifier != EXPAND_CONST_ADDRESS
9446 && modifier != EXPAND_INITIALIZER
9447 && modifier != EXPAND_SUM
9448 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9449 MEM_ADDR_SPACE (temp)))
9450 return replace_equiv_address (temp,
9451 copy_rtx (XEXP (temp, 0)));
9452 return temp;
9453
9454 case SAVE_EXPR:
9455 {
9456 tree val = treeop0;
9457 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
9458
9459 if (!SAVE_EXPR_RESOLVED_P (exp))
9460 {
9461 /* We can indeed still hit this case, typically via builtin
9462 expanders calling save_expr immediately before expanding
9463 something. Assume this means that we only have to deal
9464 with non-BLKmode values. */
9465 gcc_assert (GET_MODE (ret) != BLKmode);
9466
9467 val = build_decl (EXPR_LOCATION (exp),
9468 VAR_DECL, NULL, TREE_TYPE (exp));
9469 DECL_ARTIFICIAL (val) = 1;
9470 DECL_IGNORED_P (val) = 1;
9471 treeop0 = val;
9472 TREE_OPERAND (exp, 0) = treeop0;
9473 SAVE_EXPR_RESOLVED_P (exp) = 1;
9474
9475 if (!CONSTANT_P (ret))
9476 ret = copy_to_reg (ret);
9477 SET_DECL_RTL (val, ret);
9478 }
9479
9480 return ret;
9481 }
9482
9483
9484 case CONSTRUCTOR:
9485 /* If we don't need the result, just ensure we evaluate any
9486 subexpressions. */
9487 if (ignore)
9488 {
9489 unsigned HOST_WIDE_INT idx;
9490 tree value;
9491
9492 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9493 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9494
9495 return const0_rtx;
9496 }
9497
9498 return expand_constructor (exp, target, modifier, false);
9499
9500 case TARGET_MEM_REF:
9501 {
9502 addr_space_t as
9503 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9504 struct mem_address addr;
9505 enum insn_code icode;
9506 unsigned int align;
9507
9508 get_address_description (exp, &addr);
9509 op0 = addr_for_mem_ref (&addr, as, true);
9510 op0 = memory_address_addr_space (mode, op0, as);
9511 temp = gen_rtx_MEM (mode, op0);
9512 set_mem_attributes (temp, exp, 0);
9513 set_mem_addr_space (temp, as);
9514 align = get_object_or_type_alignment (exp);
9515 if (modifier != EXPAND_WRITE
9516 && mode != BLKmode
9517 && align < GET_MODE_ALIGNMENT (mode)
9518 /* If the target does not have special handling for unaligned
9519 loads of mode then it can use regular moves for them. */
9520 && ((icode = optab_handler (movmisalign_optab, mode))
9521 != CODE_FOR_nothing))
9522 {
9523 struct expand_operand ops[2];
9524
9525 /* We've already validated the memory, and we're creating a
9526 new pseudo destination. The predicates really can't fail,
9527 nor can the generator. */
9528 create_output_operand (&ops[0], NULL_RTX, mode);
9529 create_fixed_operand (&ops[1], temp);
9530 expand_insn (icode, 2, ops);
9531 return ops[0].value;
9532 }
9533 return temp;
9534 }
9535
9536 case MEM_REF:
9537 {
9538 addr_space_t as
9539 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9540 enum machine_mode address_mode;
9541 tree base = TREE_OPERAND (exp, 0);
9542 gimple def_stmt;
9543 enum insn_code icode;
9544 unsigned align;
9545 /* Handle expansion of non-aliased memory with non-BLKmode. That
9546 might end up in a register. */
9547 if (mem_ref_refers_to_non_mem_p (exp))
9548 {
9549 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9550 tree bit_offset;
9551 tree bftype;
9552 base = TREE_OPERAND (base, 0);
9553 if (offset == 0
9554 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
9555 && (GET_MODE_BITSIZE (DECL_MODE (base))
9556 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
9557 return expand_expr (build1 (VIEW_CONVERT_EXPR,
9558 TREE_TYPE (exp), base),
9559 target, tmode, modifier);
9560 bit_offset = bitsize_int (offset * BITS_PER_UNIT);
9561 bftype = TREE_TYPE (base);
9562 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
9563 bftype = TREE_TYPE (exp);
9564 else
9565 {
9566 temp = assign_stack_temp (DECL_MODE (base),
9567 GET_MODE_SIZE (DECL_MODE (base)),
9568 0);
9569 store_expr (base, temp, 0, false);
9570 temp = adjust_address (temp, BLKmode, offset);
9571 set_mem_size (temp, int_size_in_bytes (TREE_TYPE (exp)));
9572 return temp;
9573 }
9574 return expand_expr (build3 (BIT_FIELD_REF, bftype,
9575 base,
9576 TYPE_SIZE (TREE_TYPE (exp)),
9577 bit_offset),
9578 target, tmode, modifier);
9579 }
9580 address_mode = targetm.addr_space.address_mode (as);
9581 base = TREE_OPERAND (exp, 0);
9582 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9583 {
9584 tree mask = gimple_assign_rhs2 (def_stmt);
9585 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9586 gimple_assign_rhs1 (def_stmt), mask);
9587 TREE_OPERAND (exp, 0) = base;
9588 }
9589 align = get_object_or_type_alignment (exp);
9590 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9591 op0 = memory_address_addr_space (address_mode, op0, as);
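	/* Fold a nonzero constant offset (operand 1 of the MEM_REF) into
	   the address.  */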
9592 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9593 {
9594 rtx off
9595 = immed_double_int_const (mem_ref_offset (exp), address_mode);
9596 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9597 }
9598 op0 = memory_address_addr_space (mode, op0, as);
9599 temp = gen_rtx_MEM (mode, op0);
9600 set_mem_attributes (temp, exp, 0);
9601 set_mem_addr_space (temp, as);
9602 if (TREE_THIS_VOLATILE (exp))
9603 MEM_VOLATILE_P (temp) = 1;
9604 if (modifier != EXPAND_WRITE
9605 && mode != BLKmode
9606 && align < GET_MODE_ALIGNMENT (mode))
9607 {
9608 if ((icode = optab_handler (movmisalign_optab, mode))
9609 != CODE_FOR_nothing)
9610 {
9611 struct expand_operand ops[2];
9612
9613 /* We've already validated the memory, and we're creating a
9614 new pseudo destination. The predicates really can't fail,
9615 nor can the generator. */
9616 create_output_operand (&ops[0], NULL_RTX, mode);
9617 create_fixed_operand (&ops[1], temp);
9618 expand_insn (icode, 2, ops);
9619 return ops[0].value;
9620 }
9621 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9622 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9623 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9624 true, (modifier == EXPAND_STACK_PARM
9625 ? NULL_RTX : target),
9626 mode, mode);
9627 }
9628 return temp;
9629 }
9630
9631 case ARRAY_REF:
9632
9633 {
9634 tree array = treeop0;
9635 tree index = treeop1;
9636
9637 /* Fold an expression like: "foo"[2].
9638 This is not done in fold so it won't happen inside &.
9639 Don't fold if this is for wide characters since it's too
9640 difficult to do correctly and this is a very rare case. */
9641
9642 if (modifier != EXPAND_CONST_ADDRESS
9643 && modifier != EXPAND_INITIALIZER
9644 && modifier != EXPAND_MEMORY)
9645 {
9646 tree t = fold_read_from_constant_string (exp);
9647
9648 if (t)
9649 return expand_expr (t, target, tmode, modifier);
9650 }
9651
9652 /* If this is a constant index into a constant array,
9653 just get the value from the array. Handle both the cases when
9654 we have an explicit constructor and when our operand is a variable
9655 that was declared const. */
9656
9657 if (modifier != EXPAND_CONST_ADDRESS
9658 && modifier != EXPAND_INITIALIZER
9659 && modifier != EXPAND_MEMORY
9660 && TREE_CODE (array) == CONSTRUCTOR
9661 && ! TREE_SIDE_EFFECTS (array)
9662 && TREE_CODE (index) == INTEGER_CST)
9663 {
9664 unsigned HOST_WIDE_INT ix;
9665 tree field, value;
9666
9667 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9668 field, value)
9669 if (tree_int_cst_equal (field, index))
9670 {
9671 if (!TREE_SIDE_EFFECTS (value))
9672 return expand_expr (fold (value), target, tmode, modifier);
9673 break;
9674 }
9675 }
9676
9677 else if (optimize >= 1
9678 && modifier != EXPAND_CONST_ADDRESS
9679 && modifier != EXPAND_INITIALIZER
9680 && modifier != EXPAND_MEMORY
9681 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9682 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
9683 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
9684 && const_value_known_p (array))
9685 {
9686 if (TREE_CODE (index) == INTEGER_CST)
9687 {
9688 tree init = DECL_INITIAL (array);
9689
9690 if (TREE_CODE (init) == CONSTRUCTOR)
9691 {
9692 unsigned HOST_WIDE_INT ix;
9693 tree field, value;
9694
9695 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9696 field, value)
9697 if (tree_int_cst_equal (field, index))
9698 {
9699 if (TREE_SIDE_EFFECTS (value))
9700 break;
9701
9702 if (TREE_CODE (value) == CONSTRUCTOR)
9703 {
9704 /* If VALUE is a CONSTRUCTOR, this
9705 optimization is only useful if
9706 this doesn't store the CONSTRUCTOR
9707 into memory. If it does, it is more
9708 efficient to just load the data from
9709 the array directly. */
9710 rtx ret = expand_constructor (value, target,
9711 modifier, true);
9712 if (ret == NULL_RTX)
9713 break;
9714 }
9715
9716 return expand_expr (fold (value), target, tmode,
9717 modifier);
9718 }
9719 }
9720 	    else if (TREE_CODE (init) == STRING_CST)
9721 {
9722 tree index1 = index;
9723 tree low_bound = array_ref_low_bound (exp);
9724 index1 = fold_convert_loc (loc, sizetype,
9725 treeop1);
9726
9727 /* Optimize the special-case of a zero lower bound.
9728
9729 We convert the low_bound to sizetype to avoid some problems
9730 with constant folding. (E.g. suppose the lower bound is 1,
9731 		 and its mode is QI.  Without the conversion, (ARRAY
9732 		 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9733 		 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
9734
9735 if (! integer_zerop (low_bound))
9736 index1 = size_diffop_loc (loc, index1,
9737 fold_convert_loc (loc, sizetype,
9738 low_bound));
9739
9740 if (0 > compare_tree_int (index1,
9741 TREE_STRING_LENGTH (init)))
9742 {
9743 tree type = TREE_TYPE (TREE_TYPE (init));
9744 enum machine_mode mode = TYPE_MODE (type);
9745
9746 if (GET_MODE_CLASS (mode) == MODE_INT
9747 && GET_MODE_SIZE (mode) == 1)
9748 return gen_int_mode (TREE_STRING_POINTER (init)
9749 [TREE_INT_CST_LOW (index1)],
9750 mode);
9751 }
9752 }
9753 }
9754 }
9755 }
9756 goto normal_inner_ref;
9757
9758 case COMPONENT_REF:
9759 /* If the operand is a CONSTRUCTOR, we can just extract the
9760 appropriate field if it is present. */
9761 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9762 {
9763 unsigned HOST_WIDE_INT idx;
9764 tree field, value;
9765
9766 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9767 idx, field, value)
9768 if (field == treeop1
9769 /* We can normally use the value of the field in the
9770 CONSTRUCTOR. However, if this is a bitfield in
9771 an integral mode that we can fit in a HOST_WIDE_INT,
9772 we must mask only the number of bits in the bitfield,
9773 since this is done implicitly by the constructor. If
9774 the bitfield does not meet either of those conditions,
9775 we can't do this optimization. */
9776 && (! DECL_BIT_FIELD (field)
9777 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9778 && (GET_MODE_PRECISION (DECL_MODE (field))
9779 <= HOST_BITS_PER_WIDE_INT))))
9780 {
9781 if (DECL_BIT_FIELD (field)
9782 && modifier == EXPAND_STACK_PARM)
9783 target = 0;
9784 op0 = expand_expr (value, target, tmode, modifier);
9785 if (DECL_BIT_FIELD (field))
9786 {
9787 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9788 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9789
9790 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9791 {
9792 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
9793 op0 = expand_and (imode, op0, op1, target);
9794 }
9795 else
9796 {
9797 int count = GET_MODE_PRECISION (imode) - bitsize;
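		    /* Sign-extend the BITSIZE-bit field: shift it to the top
		       of IMODE, then arithmetic-shift it back down.  */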
9798
9799 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9800 target, 0);
9801 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9802 target, 0);
9803 }
9804 }
9805
9806 return op0;
9807 }
9808 }
9809 goto normal_inner_ref;
9810
9811 case BIT_FIELD_REF:
9812 case ARRAY_RANGE_REF:
9813 normal_inner_ref:
9814 {
9815 enum machine_mode mode1, mode2;
9816 HOST_WIDE_INT bitsize, bitpos;
9817 tree offset;
9818 int volatilep = 0, must_force_mem;
9819 bool packedp = false;
9820 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9821 &mode1, &unsignedp, &volatilep, true);
9822 rtx orig_op0, memloc;
9823 bool mem_attrs_from_type = false;
9824
9825 /* If we got back the original object, something is wrong. Perhaps
9826 we are evaluating an expression too early. In any event, don't
9827 infinitely recurse. */
9828 gcc_assert (tem != exp);
9829
9830 if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
9831 || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
9832 && DECL_PACKED (TREE_OPERAND (exp, 1))))
9833 packedp = true;
9834
9835 /* If TEM's type is a union of variable size, pass TARGET to the inner
9836 	   computation, since it will need a temporary and TARGET is known
9837 	   to be able to serve as one.  This occurs in unchecked conversion in Ada.  */
9838 orig_op0 = op0
9839 = expand_expr (tem,
9840 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9841 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9842 != INTEGER_CST)
9843 && modifier != EXPAND_STACK_PARM
9844 ? target : NULL_RTX),
9845 VOIDmode,
9846 (modifier == EXPAND_INITIALIZER
9847 || modifier == EXPAND_CONST_ADDRESS
9848 || modifier == EXPAND_STACK_PARM)
9849 ? modifier : EXPAND_NORMAL);
9850
9851
9852 /* If the bitfield is volatile, we want to access it in the
9853 field's mode, not the computed mode.
9854 If a MEM has VOIDmode (external with incomplete type),
9855 use BLKmode for it instead. */
9856 if (MEM_P (op0))
9857 {
9858 if (volatilep && flag_strict_volatile_bitfields > 0)
9859 op0 = adjust_address (op0, mode1, 0);
9860 else if (GET_MODE (op0) == VOIDmode)
9861 op0 = adjust_address (op0, BLKmode, 0);
9862 }
9863
9864 mode2
9865 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9866
9867 /* If we have either an offset, a BLKmode result, or a reference
9868 outside the underlying object, we must force it to memory.
9869 Such a case can occur in Ada if we have unchecked conversion
9870 of an expression from a scalar type to an aggregate type or
9871 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9872 passed a partially uninitialized object or a view-conversion
9873 to a larger size. */
9874 must_force_mem = (offset
9875 || mode1 == BLKmode
9876 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9877
9878 /* Handle CONCAT first. */
9879 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9880 {
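	    /* A reference that covers the whole CONCAT or exactly one of its
	       two halves can be satisfied directly; anything else is forced
	       into memory below.  */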
9881 if (bitpos == 0
9882 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9883 return op0;
9884 if (bitpos == 0
9885 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9886 && bitsize)
9887 {
9888 op0 = XEXP (op0, 0);
9889 mode2 = GET_MODE (op0);
9890 }
9891 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9892 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9893 && bitpos
9894 && bitsize)
9895 {
9896 op0 = XEXP (op0, 1);
9897 bitpos = 0;
9898 mode2 = GET_MODE (op0);
9899 }
9900 else
9901 /* Otherwise force into memory. */
9902 must_force_mem = 1;
9903 }
9904
9905 /* If this is a constant, put it in a register if it is a legitimate
9906 constant and we don't need a memory reference. */
9907 if (CONSTANT_P (op0)
9908 && mode2 != BLKmode
9909 && targetm.legitimate_constant_p (mode2, op0)
9910 && !must_force_mem)
9911 op0 = force_reg (mode2, op0);
9912
9913 /* Otherwise, if this is a constant, try to force it to the constant
9914 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9915 is a legitimate constant. */
9916 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9917 op0 = validize_mem (memloc);
9918
9919 /* Otherwise, if this is a constant or the object is not in memory
9920 	 and needs to be, put it there.  */
9921 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9922 {
9923 tree nt = build_qualified_type (TREE_TYPE (tem),
9924 (TYPE_QUALS (TREE_TYPE (tem))
9925 | TYPE_QUAL_CONST));
9926 memloc = assign_temp (nt, 1, 1, 1);
9927 emit_move_insn (memloc, op0);
9928 op0 = memloc;
9929 mem_attrs_from_type = true;
9930 }
9931
9932 if (offset)
9933 {
9934 enum machine_mode address_mode;
9935 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9936 EXPAND_SUM);
9937
9938 gcc_assert (MEM_P (op0));
9939
9940 address_mode
9941 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9942 if (GET_MODE (offset_rtx) != address_mode)
9943 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9944
9945 if (GET_MODE (op0) == BLKmode
9946 /* A constant address in OP0 can have VOIDmode, we must
9947 not try to call force_reg in that case. */
9948 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9949 && bitsize != 0
9950 && (bitpos % bitsize) == 0
9951 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9952 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9953 {
9954 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9955 bitpos = 0;
9956 }
9957
9958 op0 = offset_address (op0, offset_rtx,
9959 highest_pow2_factor (offset));
9960 }
9961
9962 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9963 record its alignment as BIGGEST_ALIGNMENT. */
9964 if (MEM_P (op0) && bitpos == 0 && offset != 0
9965 && is_aligning_offset (offset, tem))
9966 set_mem_align (op0, BIGGEST_ALIGNMENT);
9967
9968 /* Don't forget about volatility even if this is a bitfield. */
9969 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9970 {
9971 if (op0 == orig_op0)
9972 op0 = copy_rtx (op0);
9973
9974 MEM_VOLATILE_P (op0) = 1;
9975 }
9976
9977 /* In cases where an aligned union has an unaligned object
9978 as a field, we might be extracting a BLKmode value from
9979 an integer-mode (e.g., SImode) object. Handle this case
9980 by doing the extract into an object as wide as the field
9981 (which we know to be the width of a basic mode), then
9982 storing into memory, and changing the mode to BLKmode. */
9983 if (mode1 == VOIDmode
9984 || REG_P (op0) || GET_CODE (op0) == SUBREG
9985 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9986 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9987 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9988 && modifier != EXPAND_CONST_ADDRESS
9989 && modifier != EXPAND_INITIALIZER)
9990 /* If the field is volatile, we always want an aligned
9991 	     access.  Do this in the following two situations:
9992 1. the access is not already naturally
9993 aligned, otherwise "normal" (non-bitfield) volatile fields
9994 become non-addressable.
9995 2. the bitsize is narrower than the access size. Need
9996 to extract bitfields from the access. */
9997 || (volatilep && flag_strict_volatile_bitfields > 0
9998 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0
9999 || (mode1 != BLKmode
10000 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
10001 /* If the field isn't aligned enough to fetch as a memref,
10002 fetch it as a bit field. */
10003 || (mode1 != BLKmode
10004 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10005 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10006 || (MEM_P (op0)
10007 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10008 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10009 && ((modifier == EXPAND_CONST_ADDRESS
10010 || modifier == EXPAND_INITIALIZER)
10011 ? STRICT_ALIGNMENT
10012 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10013 || (bitpos % BITS_PER_UNIT != 0)))
10014 /* If the type and the field are a constant size and the
10015 size of the type isn't the same size as the bitfield,
10016 we must use bitfield operations. */
10017 || (bitsize >= 0
10018 && TYPE_SIZE (TREE_TYPE (exp))
10019 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10020 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10021 bitsize)))
10022 {
10023 enum machine_mode ext_mode = mode;
10024
10025 if (ext_mode == BLKmode
10026 && ! (target != 0 && MEM_P (op0)
10027 && MEM_P (target)
10028 && bitpos % BITS_PER_UNIT == 0))
10029 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10030
10031 if (ext_mode == BLKmode)
10032 {
10033 if (target == 0)
10034 target = assign_temp (type, 0, 1, 1);
10035
10036 if (bitsize == 0)
10037 return target;
10038
10039 /* In this case, BITPOS must start at a byte boundary and
10040 TARGET, if specified, must be a MEM. */
10041 gcc_assert (MEM_P (op0)
10042 && (!target || MEM_P (target))
10043 && !(bitpos % BITS_PER_UNIT));
10044
10045 emit_block_move (target,
10046 adjust_address (op0, VOIDmode,
10047 bitpos / BITS_PER_UNIT),
10048 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10049 / BITS_PER_UNIT),
10050 (modifier == EXPAND_STACK_PARM
10051 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10052
10053 return target;
10054 }
10055
10056 op0 = validize_mem (op0);
10057
10058 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10059 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10060
10061 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
10062 (modifier == EXPAND_STACK_PARM
10063 ? NULL_RTX : target),
10064 ext_mode, ext_mode);
10065
10066 /* If the result is a record type and BITSIZE is narrower than
10067 the mode of OP0, an integral mode, and this is a big endian
10068 machine, we must put the field into the high-order bits. */
10069 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10070 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10071 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10072 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10073 GET_MODE_BITSIZE (GET_MODE (op0))
10074 - bitsize, op0, 1);
10075
10076 /* If the result type is BLKmode, store the data into a temporary
10077 of the appropriate type, but with the mode corresponding to the
10078 mode for the data we have (op0's mode). It's tempting to make
10079 this a constant type, since we know it's only being stored once,
10080 but that can cause problems if we are taking the address of this
10081 COMPONENT_REF because the MEM of any reference via that address
10082 will have flags corresponding to the type, which will not
10083 necessarily be constant. */
10084 if (mode == BLKmode)
10085 {
10086 rtx new_rtx;
10087
10088 new_rtx = assign_stack_temp_for_type (ext_mode,
10089 GET_MODE_BITSIZE (ext_mode),
10090 0, type);
10091 emit_move_insn (new_rtx, op0);
10092 op0 = copy_rtx (new_rtx);
10093 PUT_MODE (op0, BLKmode);
10094 }
10095
10096 return op0;
10097 }
10098
10099 /* If the result is BLKmode, use that to access the object
10100 now as well. */
10101 if (mode == BLKmode)
10102 mode1 = BLKmode;
10103
10104 /* Get a reference to just this component. */
10105 if (modifier == EXPAND_CONST_ADDRESS
10106 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10107 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10108 else
10109 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10110
10111 if (op0 == orig_op0)
10112 op0 = copy_rtx (op0);
10113
10114 /* If op0 is a temporary because of forcing to memory, pass only the
10115 type to set_mem_attributes so that the original expression is never
10116 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10117 if (mem_attrs_from_type)
10118 set_mem_attributes (op0, type, 0);
10119 else
10120 set_mem_attributes (op0, exp, 0);
10121
10122 if (REG_P (XEXP (op0, 0)))
10123 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10124
10125 MEM_VOLATILE_P (op0) |= volatilep;
10126 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10127 || modifier == EXPAND_CONST_ADDRESS
10128 || modifier == EXPAND_INITIALIZER)
10129 return op0;
10130 else if (target == 0)
10131 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10132
10133 convert_move (target, op0, unsignedp);
10134 return target;
10135 }
10136
10137 case OBJ_TYPE_REF:
10138 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10139
10140 case CALL_EXPR:
10141 /* All valid uses of __builtin_va_arg_pack () are removed during
10142 inlining. */
10143 if (CALL_EXPR_VA_ARG_PACK (exp))
10144 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10145 {
10146 tree fndecl = get_callee_fndecl (exp), attr;
10147
10148 if (fndecl
10149 && (attr = lookup_attribute ("error",
10150 DECL_ATTRIBUTES (fndecl))) != NULL)
10151 error ("%Kcall to %qs declared with attribute error: %s",
10152 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10153 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10154 if (fndecl
10155 && (attr = lookup_attribute ("warning",
10156 DECL_ATTRIBUTES (fndecl))) != NULL)
10157 warning_at (tree_nonartificial_location (exp),
10158 0, "%Kcall to %qs declared with attribute warning: %s",
10159 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10160 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10161
10162 /* Check for a built-in function. */
10163 if (fndecl && DECL_BUILT_IN (fndecl))
10164 {
10165 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10166 return expand_builtin (exp, target, subtarget, tmode, ignore);
10167 }
10168 }
10169 return expand_call (exp, target, ignore);
10170
10171 case VIEW_CONVERT_EXPR:
10172 op0 = NULL_RTX;
10173
10174 /* If we are converting to BLKmode, try to avoid an intermediate
10175 temporary by fetching an inner memory reference. */
10176 if (mode == BLKmode
10177 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10178 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10179 && handled_component_p (treeop0))
10180 {
10181 enum machine_mode mode1;
10182 HOST_WIDE_INT bitsize, bitpos;
10183 tree offset;
10184 int unsignedp;
10185 int volatilep = 0;
10186 tree tem
10187 = get_inner_reference (treeop0, &bitsize, &bitpos,
10188 &offset, &mode1, &unsignedp, &volatilep,
10189 true);
10190 rtx orig_op0;
10191
10192 /* ??? We should work harder and deal with non-zero offsets. */
10193 if (!offset
10194 && (bitpos % BITS_PER_UNIT) == 0
10195 && bitsize >= 0
10196 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
10197 {
10198 /* See the normal_inner_ref case for the rationale. */
10199 orig_op0
10200 = expand_expr (tem,
10201 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10202 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10203 != INTEGER_CST)
10204 && modifier != EXPAND_STACK_PARM
10205 ? target : NULL_RTX),
10206 VOIDmode,
10207 (modifier == EXPAND_INITIALIZER
10208 || modifier == EXPAND_CONST_ADDRESS
10209 || modifier == EXPAND_STACK_PARM)
10210 ? modifier : EXPAND_NORMAL);
10211
10212 if (MEM_P (orig_op0))
10213 {
10214 op0 = orig_op0;
10215
10216 /* Get a reference to just this component. */
10217 if (modifier == EXPAND_CONST_ADDRESS
10218 || modifier == EXPAND_SUM
10219 || modifier == EXPAND_INITIALIZER)
10220 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10221 else
10222 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10223
10224 if (op0 == orig_op0)
10225 op0 = copy_rtx (op0);
10226
10227 set_mem_attributes (op0, treeop0, 0);
10228 if (REG_P (XEXP (op0, 0)))
10229 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10230
10231 MEM_VOLATILE_P (op0) |= volatilep;
10232 }
10233 }
10234 }
10235
10236 if (!op0)
10237 op0 = expand_expr (treeop0,
10238 NULL_RTX, VOIDmode, modifier);
10239
10240 /* If the input and output modes are both the same, we are done. */
10241 if (mode == GET_MODE (op0))
10242 ;
10243 /* If neither mode is BLKmode, and both modes are the same size
10244 then we can use gen_lowpart. */
10245 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10246 && (GET_MODE_PRECISION (mode)
10247 == GET_MODE_PRECISION (GET_MODE (op0)))
10248 && !COMPLEX_MODE_P (GET_MODE (op0)))
10249 {
10250 if (GET_CODE (op0) == SUBREG)
10251 op0 = force_reg (GET_MODE (op0), op0);
10252 temp = gen_lowpart_common (mode, op0);
10253 if (temp)
10254 op0 = temp;
10255 else
10256 {
10257 if (!REG_P (op0) && !MEM_P (op0))
10258 op0 = force_reg (GET_MODE (op0), op0);
10259 op0 = gen_lowpart (mode, op0);
10260 }
10261 }
10262 /* If both types are integral, convert from one mode to the other. */
10263 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10264 op0 = convert_modes (mode, GET_MODE (op0), op0,
10265 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10266 /* As a last resort, spill op0 to memory, and reload it in a
10267 different mode. */
10268 else if (!MEM_P (op0))
10269 {
10270 /* If the operand is not a MEM, force it into memory. Since we
10271 are going to be changing the mode of the MEM, don't call
10272 force_const_mem for constants because we don't allow pool
10273 constants to change mode. */
10274 tree inner_type = TREE_TYPE (treeop0);
10275
10276 gcc_assert (!TREE_ADDRESSABLE (exp));
10277
10278 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10279 target
10280 = assign_stack_temp_for_type
10281 (TYPE_MODE (inner_type),
10282 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
10283
10284 emit_move_insn (target, op0);
10285 op0 = target;
10286 }
10287
10288 /* At this point, OP0 is in the correct mode. If the output type is
10289 such that the operand is known to be aligned, indicate that it is.
10290 Otherwise, we need only be concerned about alignment for non-BLKmode
10291 results. */
10292 if (MEM_P (op0))
10293 {
10294 enum insn_code icode;
10295
10296 op0 = copy_rtx (op0);
10297
10298 if (TYPE_ALIGN_OK (type))
10299 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10300 else if (mode != BLKmode
10301 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
10302 /* If the target does have special handling for unaligned
10303 loads of mode then use them. */
10304 && ((icode = optab_handler (movmisalign_optab, mode))
10305 != CODE_FOR_nothing))
10306 {
10307 rtx reg, insn;
10308
10309 op0 = adjust_address (op0, mode, 0);
10310 /* We've already validated the memory, and we're creating a
10311 new pseudo destination. The predicates really can't
10312 fail. */
10313 reg = gen_reg_rtx (mode);
10314
10315 /* Nor can the insn generator. */
10316 insn = GEN_FCN (icode) (reg, op0);
10317 emit_insn (insn);
10318 return reg;
10319 }
10320 else if (STRICT_ALIGNMENT
10321 && mode != BLKmode
10322 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10323 {
10324 tree inner_type = TREE_TYPE (treeop0);
10325 HOST_WIDE_INT temp_size
10326 = MAX (int_size_in_bytes (inner_type),
10327 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10328 rtx new_rtx
10329 = assign_stack_temp_for_type (mode, temp_size, 0, type);
10330 rtx new_with_op0_mode
10331 = adjust_address (new_rtx, GET_MODE (op0), 0);
10332
10333 gcc_assert (!TREE_ADDRESSABLE (exp));
10334
10335 if (GET_MODE (op0) == BLKmode)
10336 emit_block_move (new_with_op0_mode, op0,
10337 GEN_INT (GET_MODE_SIZE (mode)),
10338 (modifier == EXPAND_STACK_PARM
10339 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10340 else
10341 emit_move_insn (new_with_op0_mode, op0);
10342
10343 op0 = new_rtx;
10344 }
10345
10346 op0 = adjust_address (op0, mode, 0);
10347 }
10348
10349 return op0;
10350
10351 case MODIFY_EXPR:
10352 {
10353 tree lhs = treeop0;
10354 tree rhs = treeop1;
10355 gcc_assert (ignore);
10356
10357 /* Check for |= or &= of a bitfield of size one into another bitfield
10358 of size 1. In this case, (unless we need the result of the
10359 assignment) we can do this more efficiently with a
10360 test followed by an assignment, if necessary.
10361
10362 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10363 things change so we do, this code should be enhanced to
10364 support it. */
10365 if (TREE_CODE (lhs) == COMPONENT_REF
10366 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10367 || TREE_CODE (rhs) == BIT_AND_EXPR)
10368 && TREE_OPERAND (rhs, 0) == lhs
10369 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10370 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10371 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10372 {
10373 rtx label = gen_label_rtx ();
10374 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10375 do_jump (TREE_OPERAND (rhs, 1),
10376 value ? label : 0,
10377 value ? 0 : label, -1);
10378 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10379 MOVE_NONTEMPORAL (exp));
10380 do_pending_stack_adjust ();
10381 emit_label (label);
10382 return const0_rtx;
10383 }
10384
10385 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
10386 return const0_rtx;
10387 }
10388
10389 case ADDR_EXPR:
10390 return expand_expr_addr_expr (exp, target, tmode, modifier);
10391
10392 case REALPART_EXPR:
10393 op0 = expand_normal (treeop0);
10394 return read_complex_part (op0, false);
10395
10396 case IMAGPART_EXPR:
10397 op0 = expand_normal (treeop0);
10398 return read_complex_part (op0, true);
10399
10400 case RETURN_EXPR:
10401 case LABEL_EXPR:
10402 case GOTO_EXPR:
10403 case SWITCH_EXPR:
10404 case ASM_EXPR:
10405 /* Expanded in cfgexpand.c. */
10406 gcc_unreachable ();
10407
10408 case TRY_CATCH_EXPR:
10409 case CATCH_EXPR:
10410 case EH_FILTER_EXPR:
10411 case TRY_FINALLY_EXPR:
10412 /* Lowered by tree-eh.c. */
10413 gcc_unreachable ();
10414
10415 case WITH_CLEANUP_EXPR:
10416 case CLEANUP_POINT_EXPR:
10417 case TARGET_EXPR:
10418 case CASE_LABEL_EXPR:
10419 case VA_ARG_EXPR:
10420 case BIND_EXPR:
10421 case INIT_EXPR:
10422 case CONJ_EXPR:
10423 case COMPOUND_EXPR:
10424 case PREINCREMENT_EXPR:
10425 case PREDECREMENT_EXPR:
10426 case POSTINCREMENT_EXPR:
10427 case POSTDECREMENT_EXPR:
10428 case LOOP_EXPR:
10429 case EXIT_EXPR:
10430 /* Lowered by gimplify.c. */
10431 gcc_unreachable ();
10432
10433 case FDESC_EXPR:
10434 /* Function descriptors are not valid except for as
10435 initialization constants, and should not be expanded. */
10436 gcc_unreachable ();
10437
10438 case WITH_SIZE_EXPR:
10439 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10440 have pulled out the size to use in whatever context it needed. */
10441 return expand_expr_real (treeop0, original_target, tmode,
10442 modifier, alt_rtl);
10443
10444 case COMPOUND_LITERAL_EXPR:
10445 {
10446 /* Initialize the anonymous variable declared in the compound
10447 literal, then return the variable. */
10448 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
10449
10450 /* Create RTL for this variable. */
10451 if (!DECL_RTL_SET_P (decl))
10452 {
10453 if (DECL_HARD_REGISTER (decl))
10454 /* The user specified an assembler name for this variable.
10455 Set that up now. */
10456 rest_of_decl_compilation (decl, 0, 0);
10457 else
10458 expand_decl (decl);
10459 }
10460
10461 return expand_expr_real (decl, original_target, tmode,
10462 modifier, alt_rtl);
10463 }
10464
10465 default:
10466 return expand_expr_real_2 (&ops, target, tmode, modifier);
10467 }
10468 }
10469 \f
10470 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10471 signedness of TYPE), possibly returning the result in TARGET. */
10472 static rtx
10473 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10474 {
10475 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10476 if (target && GET_MODE (target) != GET_MODE (exp))
10477 target = 0;
10478 /* For constant values, reduce using build_int_cst_type. */
10479 if (CONST_INT_P (exp))
10480 {
10481 HOST_WIDE_INT value = INTVAL (exp);
10482 tree t = build_int_cst_type (type, value);
10483 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10484 }
10485 else if (TYPE_UNSIGNED (type))
10486 {
10487 rtx mask = immed_double_int_const (double_int_mask (prec),
10488 GET_MODE (exp));
10489 return expand_and (GET_MODE (exp), exp, mask, target);
10490 }
10491 else
10492 {
10493 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10494 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10495 exp, count, target, 0);
10496 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10497 exp, count, target, 0);
10498 }
10499 }
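/* As a rough worked example of the reduction above: for a 5-bit
   bit-field in SImode, the unsigned branch masks with
   (1 << 5) - 1 == 0x1f, while the signed branch shifts left by
   GET_MODE_PRECISION (SImode) - 5 == 27 and arithmetic-shifts right by
   27 again, so an input of 0x1f comes back as -1 and 0x0f stays 15. */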
10500 \f
10501 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
10502 when applied to the address of EXP, produces an address known to be
10503 aligned to more than BIGGEST_ALIGNMENT. */
10504
10505 static int
10506 is_aligning_offset (const_tree offset, const_tree exp)
10507 {
10508 /* Strip off any conversions. */
10509 while (CONVERT_EXPR_P (offset))
10510 offset = TREE_OPERAND (offset, 0);
10511
10512 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10513 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10514 if (TREE_CODE (offset) != BIT_AND_EXPR
10515 || !host_integerp (TREE_OPERAND (offset, 1), 1)
10516 || compare_tree_int (TREE_OPERAND (offset, 1),
10517 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10518 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
10519 return 0;
10520
10521 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10522 It must be NEGATE_EXPR. Then strip any more conversions. */
10523 offset = TREE_OPERAND (offset, 0);
10524 while (CONVERT_EXPR_P (offset))
10525 offset = TREE_OPERAND (offset, 0);
10526
10527 if (TREE_CODE (offset) != NEGATE_EXPR)
10528 return 0;
10529
10530 offset = TREE_OPERAND (offset, 0);
10531 while (CONVERT_EXPR_P (offset))
10532 offset = TREE_OPERAND (offset, 0);
10533
10534 /* This must now be the address of EXP. */
10535 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10536 }
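/* Roughly, the shape recognized above is the usual idiom for rounding an
   address up to an alignment boundary ALIGN (a power of two whose byte
   count exceeds BIGGEST_ALIGNMENT / BITS_PER_UNIT):

     aligned_addr = &EXP + ((- (uintptr_t) &EXP) & (ALIGN - 1));

   so OFFSET is a BIT_AND_EXPR of a NEGATE_EXPR of EXP's address with the
   constant ALIGN - 1, possibly with conversions in between. */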
10537 \f
10538 /* Return the tree node if ARG corresponds to a string constant, or zero
10539 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10540 in bytes within the string that ARG is accessing. The type of the
10541 offset will be `sizetype'. */
10542
10543 tree
10544 string_constant (tree arg, tree *ptr_offset)
10545 {
10546 tree array, offset, lower_bound;
10547 STRIP_NOPS (arg);
10548
10549 if (TREE_CODE (arg) == ADDR_EXPR)
10550 {
10551 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10552 {
10553 *ptr_offset = size_zero_node;
10554 return TREE_OPERAND (arg, 0);
10555 }
10556 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10557 {
10558 array = TREE_OPERAND (arg, 0);
10559 offset = size_zero_node;
10560 }
10561 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10562 {
10563 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10564 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10565 if (TREE_CODE (array) != STRING_CST
10566 && TREE_CODE (array) != VAR_DECL)
10567 return 0;
10568
10569 /* Check if the array has a nonzero lower bound. */
10570 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10571 if (!integer_zerop (lower_bound))
10572 {
10573 /* If the offset and base aren't both constants, return 0. */
10574 if (TREE_CODE (lower_bound) != INTEGER_CST)
10575 return 0;
10576 if (TREE_CODE (offset) != INTEGER_CST)
10577 return 0;
10578 /* Adjust offset by the lower bound. */
10579 offset = size_diffop (fold_convert (sizetype, offset),
10580 fold_convert (sizetype, lower_bound));
10581 }
10582 }
10583 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10584 {
10585 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10586 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10587 if (TREE_CODE (array) != ADDR_EXPR)
10588 return 0;
10589 array = TREE_OPERAND (array, 0);
10590 if (TREE_CODE (array) != STRING_CST
10591 && TREE_CODE (array) != VAR_DECL)
10592 return 0;
10593 }
10594 else
10595 return 0;
10596 }
10597 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10598 {
10599 tree arg0 = TREE_OPERAND (arg, 0);
10600 tree arg1 = TREE_OPERAND (arg, 1);
10601
10602 STRIP_NOPS (arg0);
10603 STRIP_NOPS (arg1);
10604
10605 if (TREE_CODE (arg0) == ADDR_EXPR
10606 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10607 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10608 {
10609 array = TREE_OPERAND (arg0, 0);
10610 offset = arg1;
10611 }
10612 else if (TREE_CODE (arg1) == ADDR_EXPR
10613 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10614 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10615 {
10616 array = TREE_OPERAND (arg1, 0);
10617 offset = arg0;
10618 }
10619 else
10620 return 0;
10621 }
10622 else
10623 return 0;
10624
10625 if (TREE_CODE (array) == STRING_CST)
10626 {
10627 *ptr_offset = fold_convert (sizetype, offset);
10628 return array;
10629 }
10630 else if (TREE_CODE (array) == VAR_DECL
10631 || TREE_CODE (array) == CONST_DECL)
10632 {
10633 int length;
10634
10635 /* Variables initialized to string literals can be handled too. */
10636 if (!const_value_known_p (array)
10637 || !DECL_INITIAL (array)
10638 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
10639 return 0;
10640
10641 /* Avoid const char foo[4] = "abcde"; */
10642 if (DECL_SIZE_UNIT (array) == NULL_TREE
10643 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10644 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
10645 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10646 return 0;
10647
10648 /* If the variable is bigger than the string literal, OFFSET must be
10649 constant and within the bounds of the string literal. */
10650 offset = fold_convert (sizetype, offset);
10651 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10652 && (! host_integerp (offset, 1)
10653 || compare_tree_int (offset, length) >= 0))
10654 return 0;
10655
10656 *ptr_offset = offset;
10657 return DECL_INITIAL (array);
10658 }
10659
10660 return 0;
10661 }
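/* For example, for the argument of a call such as

     strlen (&"hello"[2])

   the function above returns the STRING_CST "hello" with *PTR_OFFSET set
   to 2, and for

     strlen (buf + 4)

   where buf is a const array whose initializer is a string literal, it
   returns DECL_INITIAL (buf) with *PTR_OFFSET set to 4, provided the
   offset stays within the literal. */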
10662 \f
10663 /* Generate code to calculate OPS, an exploded expression,
10664 using a store-flag instruction, and return an rtx for the result.
10665 OPS reflects a comparison.
10666
10667 If TARGET is nonzero, store the result there if convenient.
10668
10669 Return zero if there is no suitable set-flag instruction
10670 available on this machine.
10671
10672 Once expand_expr has been called on the arguments of the comparison,
10673 we are committed to doing the store flag, since it is not safe to
10674 re-evaluate the expression. We emit the store-flag insn by calling
10675 emit_store_flag, but only expand the arguments if we have a reason
10676 to believe that emit_store_flag will be successful. If we think that
10677 it will, but it isn't, we have to simulate the store-flag with a
10678 set/jump/set sequence. */
10679
10680 static rtx
10681 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10682 {
10683 enum rtx_code code;
10684 tree arg0, arg1, type;
10685 tree tem;
10686 enum machine_mode operand_mode;
10687 int unsignedp;
10688 rtx op0, op1;
10689 rtx subtarget = target;
10690 location_t loc = ops->location;
10691
10692 arg0 = ops->op0;
10693 arg1 = ops->op1;
10694
10695 /* Don't crash if the comparison was erroneous. */
10696 if (arg0 == error_mark_node || arg1 == error_mark_node)
10697 return const0_rtx;
10698
10699 type = TREE_TYPE (arg0);
10700 operand_mode = TYPE_MODE (type);
10701 unsignedp = TYPE_UNSIGNED (type);
10702
10703 /* We won't bother with BLKmode store-flag operations because it would mean
10704 passing a lot of information to emit_store_flag. */
10705 if (operand_mode == BLKmode)
10706 return 0;
10707
10708 /* We won't bother with store-flag operations involving function pointers
10709 when function pointers must be canonicalized before comparisons. */
10710 #ifdef HAVE_canonicalize_funcptr_for_compare
10711 if (HAVE_canonicalize_funcptr_for_compare
10712 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10713 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10714 == FUNCTION_TYPE))
10715 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10716 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10717 == FUNCTION_TYPE))))
10718 return 0;
10719 #endif
10720
10721 STRIP_NOPS (arg0);
10722 STRIP_NOPS (arg1);
10723
10724 /* For vector typed comparisons emit code to generate the desired
10725 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10726 expander for this. */
10727 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10728 {
10729 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10730 tree if_true = constant_boolean_node (true, ops->type);
10731 tree if_false = constant_boolean_node (false, ops->type);
10732 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10733 }
10734
10746 /* Get the rtx comparison code to use. We know that EXP is a comparison
10747 operation of some type. Some comparisons against 1 and -1 can be
10748 converted to comparisons with zero; e.g. X < 1 becomes X <= 0 and,
10749 for signed X, X > -1 becomes X >= 0. Do so here so that the tests
10750 below will be aware that we have a comparison with zero. These tests
10751 will not catch constants in the first operand, but such constants are rarely passed first. */
10752
10753 switch (ops->code)
10754 {
10755 case EQ_EXPR:
10756 code = EQ;
10757 break;
10758 case NE_EXPR:
10759 code = NE;
10760 break;
10761 case LT_EXPR:
10762 if (integer_onep (arg1))
10763 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10764 else
10765 code = unsignedp ? LTU : LT;
10766 break;
10767 case LE_EXPR:
10768 if (! unsignedp && integer_all_onesp (arg1))
10769 arg1 = integer_zero_node, code = LT;
10770 else
10771 code = unsignedp ? LEU : LE;
10772 break;
10773 case GT_EXPR:
10774 if (! unsignedp && integer_all_onesp (arg1))
10775 arg1 = integer_zero_node, code = GE;
10776 else
10777 code = unsignedp ? GTU : GT;
10778 break;
10779 case GE_EXPR:
10780 if (integer_onep (arg1))
10781 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10782 else
10783 code = unsignedp ? GEU : GE;
10784 break;
10785
10786 case UNORDERED_EXPR:
10787 code = UNORDERED;
10788 break;
10789 case ORDERED_EXPR:
10790 code = ORDERED;
10791 break;
10792 case UNLT_EXPR:
10793 code = UNLT;
10794 break;
10795 case UNLE_EXPR:
10796 code = UNLE;
10797 break;
10798 case UNGT_EXPR:
10799 code = UNGT;
10800 break;
10801 case UNGE_EXPR:
10802 code = UNGE;
10803 break;
10804 case UNEQ_EXPR:
10805 code = UNEQ;
10806 break;
10807 case LTGT_EXPR:
10808 code = LTGT;
10809 break;
10810
10811 default:
10812 gcc_unreachable ();
10813 }
10814
10815 /* Put a constant second. */
10816 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10817 || TREE_CODE (arg0) == FIXED_CST)
10818 {
10819 tem = arg0; arg0 = arg1; arg1 = tem;
10820 code = swap_condition (code);
10821 }
10822
10823 /* If this is an equality or inequality test of a single bit, we can
10824 do this by shifting the bit being tested to the low-order bit and
10825 masking the result with the constant 1. If the condition was EQ,
10826 we xor it with 1. This does not require an scc insn and is faster
10827 than an scc insn even if we have it.
10828
10829 The code to make this transformation was moved into fold_single_bit_test,
10830 so we just call into the folder and expand its result. */
10831
10832 if ((code == NE || code == EQ)
10833 && integer_zerop (arg1)
10834 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10835 {
10836 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
10837 if (srcstmt
10838 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
10839 {
10840 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
10841 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10842 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
10843 gimple_assign_rhs1 (srcstmt),
10844 gimple_assign_rhs2 (srcstmt));
10845 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
10846 if (temp)
10847 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
10848 }
10849 }
10850
10851 if (! get_subtarget (target)
10852 || GET_MODE (subtarget) != operand_mode)
10853 subtarget = 0;
10854
10855 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10856
10857 if (target == 0)
10858 target = gen_reg_rtx (mode);
10859
10860 /* Try a cstore if possible. */
10861 return emit_store_flag_force (target, code, op0, op1,
10862 operand_mode, unsignedp,
10863 (TYPE_PRECISION (ops->type) == 1
10864 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10865 }
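/* A small worked example of the single-bit transformation performed
   above through fold_single_bit_test: the comparison (x & 0x10) != 0
   expands as (x >> 4) & 1, and (x & 0x10) == 0 expands as
   ((x >> 4) & 1) ^ 1, so neither an scc instruction nor a conditional
   jump is needed. */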
10866 \f
10867
10868 /* Stubs in case we haven't got a casesi insn. */
10869 #ifndef HAVE_casesi
10870 # define HAVE_casesi 0
10871 # define gen_casesi(a, b, c, d, e) (0)
10872 # define CODE_FOR_casesi CODE_FOR_nothing
10873 #endif
10874
10875 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10876 0 otherwise (i.e. if there is no casesi instruction). */
10877 int
10878 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10879 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10880 rtx fallback_label ATTRIBUTE_UNUSED)
10881 {
10882 struct expand_operand ops[5];
10883 enum machine_mode index_mode = SImode;
10884 rtx op1, op2, index;
10885
10886 if (! HAVE_casesi)
10887 return 0;
10888
10889 /* Convert the index to SImode. */
10890 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10891 {
10892 enum machine_mode omode = TYPE_MODE (index_type);
10893 rtx rangertx = expand_normal (range);
10894
10895 /* We must handle the endpoints in the original mode. */
10896 index_expr = build2 (MINUS_EXPR, index_type,
10897 index_expr, minval);
10898 minval = integer_zero_node;
10899 index = expand_normal (index_expr);
10900 if (default_label)
10901 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10902 omode, 1, default_label);
10903 /* Now we can safely truncate. */
10904 index = convert_to_mode (index_mode, index, 0);
10905 }
10906 else
10907 {
10908 if (TYPE_MODE (index_type) != index_mode)
10909 {
10910 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
10911 index_expr = fold_convert (index_type, index_expr);
10912 }
10913
10914 index = expand_normal (index_expr);
10915 }
10916
10917 do_pending_stack_adjust ();
10918
10919 op1 = expand_normal (minval);
10920 op2 = expand_normal (range);
10921
10922 create_input_operand (&ops[0], index, index_mode);
10923 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
10924 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
10925 create_fixed_operand (&ops[3], table_label);
10926 create_fixed_operand (&ops[4], (default_label
10927 ? default_label
10928 : fallback_label));
10929 expand_jump_insn (CODE_FOR_casesi, 5, ops);
10930 return 1;
10931 }
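/* Note on the widening path above: for, say, a DImode switch index the
   subtraction of MINVAL and the comparison against RANGE are done in
   DImode first, jumping to DEFAULT_LABEL when the biased index exceeds
   RANGE; only then is the index truncated to the SImode operand used by
   the casesi expansion, which is safe because an in-range value
   necessarily fits. */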
10932
10933 /* Attempt to generate a tablejump instruction; same concept as try_casesi above. */
10934 #ifndef HAVE_tablejump
10935 #define HAVE_tablejump 0
10936 #define gen_tablejump(x, y) (0)
10937 #endif
10938
10939 /* Subroutine of the next function.
10940
10941 INDEX is the value being switched on, with the lowest value
10942 in the table already subtracted.
10943 MODE is its expected mode (needed if INDEX is constant).
10944 RANGE is the length of the jump table.
10945 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10946
10947 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10948 index value is out of range. */
10949
10950 static void
10951 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10952 rtx default_label)
10953 {
10954 rtx temp, vector;
10955
10956 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10957 cfun->cfg->max_jumptable_ents = INTVAL (range);
10958
10959 /* Do an unsigned comparison (in the proper mode) between the index
10960 expression and the value which represents the length of the range.
10961 Since we just finished subtracting the lower bound of the range
10962 from the index expression, this comparison allows us to simultaneously
10963 check that the original index expression value is both greater than
10964 or equal to the minimum value of the range and less than or equal to
10965 the maximum value of the range. */
10966
10967 if (default_label)
10968 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10969 default_label);
10970
10971 /* If index is in range, it must fit in Pmode.
10972 Convert to Pmode so we can index with it. */
10973 if (mode != Pmode)
10974 index = convert_to_mode (Pmode, index, 1);
10975
10976 /* Don't let a MEM slip through, because then INDEX that comes
10977 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10978 and break_out_memory_refs will go to work on it and mess it up. */
10979 #ifdef PIC_CASE_VECTOR_ADDRESS
10980 if (flag_pic && !REG_P (index))
10981 index = copy_to_mode_reg (Pmode, index);
10982 #endif
10983
10984 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10985 GET_MODE_SIZE, because this indicates how large insns are. The other
10986 uses should all be Pmode, because they are addresses. This code
10987 could fail if addresses and insns are not the same size. */
10988 index = gen_rtx_PLUS (Pmode,
10989 gen_rtx_MULT (Pmode, index,
10990 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10991 gen_rtx_LABEL_REF (Pmode, table_label));
10992 #ifdef PIC_CASE_VECTOR_ADDRESS
10993 if (flag_pic)
10994 index = PIC_CASE_VECTOR_ADDRESS (index);
10995 else
10996 #endif
10997 index = memory_address (CASE_VECTOR_MODE, index);
10998 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10999 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11000 convert_move (temp, vector, 0);
11001
11002 emit_jump_insn (gen_tablejump (temp, table_label));
11003
11004 /* If we are generating PIC code or if the table is PC-relative, the
11005 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11006 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11007 emit_barrier ();
11008 }
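/* A concrete instance of the single-comparison range check described
   above: for case values 3 .. 10 the caller passes INDEX already biased
   by -3 and RANGE == 7, so the unsigned test INDEX > 7 sends both an
   original value of 2 (which wrapped around to a huge unsigned number)
   and an original value of 11 (biased to 8) to DEFAULT_LABEL with a
   single jump. */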
11009
11010 int
11011 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11012 rtx table_label, rtx default_label)
11013 {
11014 rtx index;
11015
11016 if (! HAVE_tablejump)
11017 return 0;
11018
11019 index_expr = fold_build2 (MINUS_EXPR, index_type,
11020 fold_convert (index_type, index_expr),
11021 fold_convert (index_type, minval));
11022 index = expand_normal (index_expr);
11023 do_pending_stack_adjust ();
11024
11025 do_tablejump (index, TYPE_MODE (index_type),
11026 convert_modes (TYPE_MODE (index_type),
11027 TYPE_MODE (TREE_TYPE (range)),
11028 expand_normal (range),
11029 TYPE_UNSIGNED (TREE_TYPE (range))),
11030 table_label, default_label);
11031 return 1;
11032 }
11033
11034 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11035 static rtx
11036 const_vector_from_tree (tree exp)
11037 {
11038 rtvec v;
11039 unsigned i;
11040 int units;
11041 tree elt;
11042 enum machine_mode inner, mode;
11043
11044 mode = TYPE_MODE (TREE_TYPE (exp));
11045
11046 if (initializer_zerop (exp))
11047 return CONST0_RTX (mode);
11048
11049 units = GET_MODE_NUNITS (mode);
11050 inner = GET_MODE_INNER (mode);
11051
11052 v = rtvec_alloc (units);
11053
11054 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11055 {
11056 elt = VECTOR_CST_ELT (exp, i);
11057
11058 if (TREE_CODE (elt) == REAL_CST)
11059 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11060 inner);
11061 else if (TREE_CODE (elt) == FIXED_CST)
11062 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11063 inner);
11064 else
11065 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
11066 inner);
11067 }
11068
11069 return gen_rtx_CONST_VECTOR (mode, v);
11070 }
11071
11072 /* Build a decl for a personality function given a language prefix. */
11073
11074 tree
11075 build_personality_function (const char *lang)
11076 {
11077 const char *unwind_and_version;
11078 tree decl, type;
11079 char *name;
11080
11081 switch (targetm_common.except_unwind_info (&global_options))
11082 {
11083 case UI_NONE:
11084 return NULL;
11085 case UI_SJLJ:
11086 unwind_and_version = "_sj0";
11087 break;
11088 case UI_DWARF2:
11089 case UI_TARGET:
11090 unwind_and_version = "_v0";
11091 break;
11092 default:
11093 gcc_unreachable ();
11094 }
11095
11096 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11097
11098 type = build_function_type_list (integer_type_node, integer_type_node,
11099 long_long_unsigned_type_node,
11100 ptr_type_node, ptr_type_node, NULL_TREE);
11101 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11102 get_identifier (name), type);
11103 DECL_ARTIFICIAL (decl) = 1;
11104 DECL_EXTERNAL (decl) = 1;
11105 TREE_PUBLIC (decl) = 1;
11106
11107 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11108 are the flags assigned by targetm.encode_section_info. */
11109 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11110
11111 return decl;
11112 }
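/* For instance, with DWARF-2 unwind info and the prefix "gxx" used by
   the C++ front end this declares "__gxx_personality_v0", while SJLJ
   exceptions would instead yield "__gxx_personality_sj0". */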
11113
11114 /* Extracts the personality function of DECL and returns the corresponding
11115 libfunc. */
11116
11117 rtx
11118 get_personality_function (tree decl)
11119 {
11120 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11121 enum eh_personality_kind pk;
11122
11123 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11124 if (pk == eh_personality_none)
11125 return NULL;
11126
11127 if (!personality
11128 && pk == eh_personality_any)
11129 personality = lang_hooks.eh_personality ();
11130
11131 if (pk == eh_personality_lang)
11132 gcc_assert (personality != NULL_TREE);
11133
11134 return XEXP (DECL_RTL (personality), 0);
11135 }
11136
11137 #include "gt-expr.h"