1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "reload.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "langhooks.h"
45 #include "intl.h"
46 #include "tm_p.h"
47 #include "tree-iterator.h"
48 #include "tree-flow.h"
49 #include "target.h"
50 #include "common/common-target.h"
51 #include "timevar.h"
52 #include "df.h"
53 #include "diagnostic.h"
54 #include "ssaexpand.h"
55 #include "target-globals.h"
56 #include "params.h"
57
58 /* Decide whether a function's arguments should be processed
59 from first to last or from last to first.
60
 61 	   They should be processed from last to first if the stack and args grow
 62 	   in opposite directions, but only if we have push insns. */
63
64 #ifdef PUSH_ROUNDING
65
66 #ifndef PUSH_ARGS_REVERSED
67 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
68 #define PUSH_ARGS_REVERSED /* If it's last to first. */
69 #endif
70 #endif
71
72 #endif
73
74 #ifndef STACK_PUSH_CODE
75 #ifdef STACK_GROWS_DOWNWARD
76 #define STACK_PUSH_CODE PRE_DEC
77 #else
78 #define STACK_PUSH_CODE PRE_INC
79 #endif
80 #endif
81
82
83 /* If this is nonzero, we do not bother generating VOLATILE
84 around volatile memory references, and we are willing to
85 output indirect addresses. If cse is to follow, we reject
86 indirect addresses so a useful potential cse is generated;
87 if it is used only once, instruction combination will produce
88 the same indirect address eventually. */
89 int cse_not_expected;
90
91 /* This structure is used by move_by_pieces to describe the move to
92 be performed. */
93 struct move_by_pieces_d
94 {
95 rtx to;
96 rtx to_addr;
97 int autinc_to;
98 int explicit_inc_to;
99 rtx from;
100 rtx from_addr;
101 int autinc_from;
102 int explicit_inc_from;
103 unsigned HOST_WIDE_INT len;
104 HOST_WIDE_INT offset;
105 int reverse;
106 };
107
108 /* This structure is used by store_by_pieces to describe the clear to
109 be performed. */
110
111 struct store_by_pieces_d
112 {
113 rtx to;
114 rtx to_addr;
115 int autinc_to;
116 int explicit_inc_to;
117 unsigned HOST_WIDE_INT len;
118 HOST_WIDE_INT offset;
119 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
120 void *constfundata;
121 int reverse;
122 };
123
124 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
125 struct move_by_pieces_d *);
126 static bool block_move_libcall_safe_for_call_parm (void);
127 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
128 static tree emit_block_move_libcall_fn (int);
129 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
130 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
131 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
132 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
133 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
134 struct store_by_pieces_d *);
135 static tree clear_storage_libcall_fn (int);
136 static rtx compress_float_constant (rtx, rtx);
137 static rtx get_subtarget (rtx);
138 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
139 HOST_WIDE_INT, enum machine_mode,
140 tree, int, alias_set_type);
141 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
142 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
143 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
144 enum machine_mode, tree, alias_set_type, bool);
145
146 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
147
148 static int is_aligning_offset (const_tree, const_tree);
149 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
150 enum expand_modifier);
151 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
152 static rtx do_store_flag (sepops, rtx, enum machine_mode);
153 #ifdef PUSH_ROUNDING
154 static void emit_single_push_insn (enum machine_mode, rtx, tree);
155 #endif
156 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
157 static rtx const_vector_from_tree (tree);
158 static void write_complex_part (rtx, rtx, bool);
159
160 /* This macro is used to determine whether move_by_pieces should be called
161 to perform a structure copy. */
162 #ifndef MOVE_BY_PIECES_P
163 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
164 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
165 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
166 #endif
167
168 /* This macro is used to determine whether clear_by_pieces should be
169 called to clear storage. */
170 #ifndef CLEAR_BY_PIECES_P
171 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
172 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
173 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
174 #endif
175
176 /* This macro is used to determine whether store_by_pieces should be
177 called to "memset" storage with byte values other than zero. */
178 #ifndef SET_BY_PIECES_P
179 #define SET_BY_PIECES_P(SIZE, ALIGN) \
180 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
181 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
182 #endif
183
184 /* This macro is used to determine whether store_by_pieces should be
185 called to "memcpy" storage when the source is a constant string. */
186 #ifndef STORE_BY_PIECES_P
187 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
188 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
189 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
190 #endif
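
/* Illustrative sketch (hypothetical, not part of this file): the by-pieces
   predicates above are what a caller consults before choosing an inline
   piecewise expansion over a movmem pattern or a libcall.  The helper name
   below is made up for illustration only.  */
#if 0
static bool
prefer_piecewise_copy_example (unsigned HOST_WIDE_INT size,
			       unsigned int align)
{
  /* Inline the copy only when the estimated number of move insns is
     below the target's MOVE_RATIO for the current speed/size goal.  */
  return MOVE_BY_PIECES_P (size, align);
}
#endif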
191 \f
192 /* This is run to set up which modes can be used
193 directly in memory and to initialize the block move optab. It is run
194 at the beginning of compilation and when the target is reinitialized. */
195
196 void
197 init_expr_target (void)
198 {
199 rtx insn, pat;
200 enum machine_mode mode;
201 int num_clobbers;
202 rtx mem, mem1;
203 rtx reg;
204
205 /* Try indexing by frame ptr and try by stack ptr.
206 It is known that on the Convex the stack ptr isn't a valid index.
207 With luck, one or the other is valid on any machine. */
208 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
209 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
210
211 /* A scratch register we can modify in-place below to avoid
212 useless RTL allocations. */
213 reg = gen_rtx_REG (VOIDmode, -1);
214
215 insn = rtx_alloc (INSN);
216 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
217 PATTERN (insn) = pat;
218
219 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
220 mode = (enum machine_mode) ((int) mode + 1))
221 {
222 int regno;
223
224 direct_load[(int) mode] = direct_store[(int) mode] = 0;
225 PUT_MODE (mem, mode);
226 PUT_MODE (mem1, mode);
227 PUT_MODE (reg, mode);
228
229 /* See if there is some register that can be used in this mode and
230 directly loaded or stored from memory. */
231
232 if (mode != VOIDmode && mode != BLKmode)
233 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
234 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
235 regno++)
236 {
237 if (! HARD_REGNO_MODE_OK (regno, mode))
238 continue;
239
240 SET_REGNO (reg, regno);
241
242 SET_SRC (pat) = mem;
243 SET_DEST (pat) = reg;
244 if (recog (pat, insn, &num_clobbers) >= 0)
245 direct_load[(int) mode] = 1;
246
247 SET_SRC (pat) = mem1;
248 SET_DEST (pat) = reg;
249 if (recog (pat, insn, &num_clobbers) >= 0)
250 direct_load[(int) mode] = 1;
251
252 SET_SRC (pat) = reg;
253 SET_DEST (pat) = mem;
254 if (recog (pat, insn, &num_clobbers) >= 0)
255 direct_store[(int) mode] = 1;
256
257 SET_SRC (pat) = reg;
258 SET_DEST (pat) = mem1;
259 if (recog (pat, insn, &num_clobbers) >= 0)
260 direct_store[(int) mode] = 1;
261 }
262 }
263
264 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
265
266 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
267 mode = GET_MODE_WIDER_MODE (mode))
268 {
269 enum machine_mode srcmode;
270 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
271 srcmode = GET_MODE_WIDER_MODE (srcmode))
272 {
273 enum insn_code ic;
274
275 ic = can_extend_p (mode, srcmode, 0);
276 if (ic == CODE_FOR_nothing)
277 continue;
278
279 PUT_MODE (mem, srcmode);
280
281 if (insn_operand_matches (ic, 1, mem))
282 float_extend_from_mem[mode][srcmode] = true;
283 }
284 }
285 }
286
287 /* This is run at the start of compiling a function. */
288
289 void
290 init_expr (void)
291 {
292 memset (&crtl->expr, 0, sizeof (crtl->expr));
293 }
294 \f
295 /* Copy data from FROM to TO, where the machine modes are not the same.
296 Both modes may be integer, or both may be floating, or both may be
297 fixed-point.
298 UNSIGNEDP should be nonzero if FROM is an unsigned type.
299 This causes zero-extension instead of sign-extension. */
300
301 void
302 convert_move (rtx to, rtx from, int unsignedp)
303 {
304 enum machine_mode to_mode = GET_MODE (to);
305 enum machine_mode from_mode = GET_MODE (from);
306 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
307 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
308 enum insn_code code;
309 rtx libcall;
310
311 /* rtx code for making an equivalent value. */
312 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
313 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
314
315
316 gcc_assert (to_real == from_real);
317 gcc_assert (to_mode != BLKmode);
318 gcc_assert (from_mode != BLKmode);
319
320 /* If the source and destination are already the same, then there's
321 nothing to do. */
322 if (to == from)
323 return;
324
325 /* If FROM is a SUBREG that indicates that we have already done at least
326 the required extension, strip it. We don't handle such SUBREGs as
327 TO here. */
328
329 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
330 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
331 >= GET_MODE_PRECISION (to_mode))
332 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
333 from = gen_lowpart (to_mode, from), from_mode = to_mode;
334
335 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
336
337 if (to_mode == from_mode
338 || (from_mode == VOIDmode && CONSTANT_P (from)))
339 {
340 emit_move_insn (to, from);
341 return;
342 }
343
344 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
345 {
346 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
347
348 if (VECTOR_MODE_P (to_mode))
349 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
350 else
351 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
352
353 emit_move_insn (to, from);
354 return;
355 }
356
357 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
358 {
359 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
360 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
361 return;
362 }
363
364 if (to_real)
365 {
366 rtx value, insns;
367 convert_optab tab;
368
369 gcc_assert ((GET_MODE_PRECISION (from_mode)
370 != GET_MODE_PRECISION (to_mode))
371 || (DECIMAL_FLOAT_MODE_P (from_mode)
372 != DECIMAL_FLOAT_MODE_P (to_mode)));
373
374 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
375 /* Conversion between decimal float and binary float, same size. */
376 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
377 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
378 tab = sext_optab;
379 else
380 tab = trunc_optab;
381
382 /* Try converting directly if the insn is supported. */
383
384 code = convert_optab_handler (tab, to_mode, from_mode);
385 if (code != CODE_FOR_nothing)
386 {
387 emit_unop_insn (code, to, from,
388 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
389 return;
390 }
391
392 /* Otherwise use a libcall. */
393 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
394
395 /* Is this conversion implemented yet? */
396 gcc_assert (libcall);
397
398 start_sequence ();
399 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
400 1, from, from_mode);
401 insns = get_insns ();
402 end_sequence ();
403 emit_libcall_block (insns, to, value,
404 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
405 from)
406 : gen_rtx_FLOAT_EXTEND (to_mode, from));
407 return;
408 }
409
410 /* Handle pointer conversion. */ /* SPEE 900220. */
411 /* Targets are expected to provide conversion insns between PxImode and
412 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
413 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
414 {
415 enum machine_mode full_mode
416 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
417
418 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
419 != CODE_FOR_nothing);
420
421 if (full_mode != from_mode)
422 from = convert_to_mode (full_mode, from, unsignedp);
423 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
424 to, from, UNKNOWN);
425 return;
426 }
427 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
428 {
429 rtx new_from;
430 enum machine_mode full_mode
431 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
432 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
433 enum insn_code icode;
434
435 icode = convert_optab_handler (ctab, full_mode, from_mode);
436 gcc_assert (icode != CODE_FOR_nothing);
437
438 if (to_mode == full_mode)
439 {
440 emit_unop_insn (icode, to, from, UNKNOWN);
441 return;
442 }
443
444 new_from = gen_reg_rtx (full_mode);
445 emit_unop_insn (icode, new_from, from, UNKNOWN);
446
447 /* else proceed to integer conversions below. */
448 from_mode = full_mode;
449 from = new_from;
450 }
451
452 /* Make sure both are fixed-point modes or both are not. */
453 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
454 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
455 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
456 {
457 /* If we widen from_mode to to_mode and they are in the same class,
458 we won't saturate the result.
459 Otherwise, always saturate the result to play safe. */
460 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
461 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
462 expand_fixed_convert (to, from, 0, 0);
463 else
464 expand_fixed_convert (to, from, 0, 1);
465 return;
466 }
467
468 /* Now both modes are integers. */
469
470 /* Handle expanding beyond a word. */
471 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
472 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
473 {
474 rtx insns;
475 rtx lowpart;
476 rtx fill_value;
477 rtx lowfrom;
478 int i;
479 enum machine_mode lowpart_mode;
480 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
481
482 /* Try converting directly if the insn is supported. */
483 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
484 != CODE_FOR_nothing)
485 {
486 /* If FROM is a SUBREG, put it into a register. Do this
487 so that we always generate the same set of insns for
488 better cse'ing; if an intermediate assignment occurred,
489 we won't be doing the operation directly on the SUBREG. */
490 if (optimize > 0 && GET_CODE (from) == SUBREG)
491 from = force_reg (from_mode, from);
492 emit_unop_insn (code, to, from, equiv_code);
493 return;
494 }
495 /* Next, try converting via full word. */
496 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
497 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
498 != CODE_FOR_nothing))
499 {
500 rtx word_to = gen_reg_rtx (word_mode);
501 if (REG_P (to))
502 {
503 if (reg_overlap_mentioned_p (to, from))
504 from = force_reg (from_mode, from);
505 emit_clobber (to);
506 }
507 convert_move (word_to, from, unsignedp);
508 emit_unop_insn (code, to, word_to, equiv_code);
509 return;
510 }
511
512 /* No special multiword conversion insn; do it by hand. */
513 start_sequence ();
514
 515 	  /* Since we will turn this into a no conflict block, we must ensure that
 516 	     the source does not overlap the target, so force it into an isolated
 517 	     register when it might.  Likewise for any MEM input, since the
518 conversion sequence might require several references to it and we
519 must ensure we're getting the same value every time. */
520
521 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
522 from = force_reg (from_mode, from);
523
524 /* Get a copy of FROM widened to a word, if necessary. */
525 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
526 lowpart_mode = word_mode;
527 else
528 lowpart_mode = from_mode;
529
530 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
531
532 lowpart = gen_lowpart (lowpart_mode, to);
533 emit_move_insn (lowpart, lowfrom);
534
535 /* Compute the value to put in each remaining word. */
536 if (unsignedp)
537 fill_value = const0_rtx;
538 else
539 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
540 LT, lowfrom, const0_rtx,
541 VOIDmode, 0, -1);
542
543 /* Fill the remaining words. */
544 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
545 {
546 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
547 rtx subword = operand_subword (to, index, 1, to_mode);
548
549 gcc_assert (subword);
550
551 if (fill_value != subword)
552 emit_move_insn (subword, fill_value);
553 }
554
555 insns = get_insns ();
556 end_sequence ();
557
558 emit_insn (insns);
559 return;
560 }
561
562 /* Truncating multi-word to a word or less. */
563 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
564 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
565 {
566 if (!((MEM_P (from)
567 && ! MEM_VOLATILE_P (from)
568 && direct_load[(int) to_mode]
569 && ! mode_dependent_address_p (XEXP (from, 0),
570 MEM_ADDR_SPACE (from)))
571 || REG_P (from)
572 || GET_CODE (from) == SUBREG))
573 from = force_reg (from_mode, from);
574 convert_move (to, gen_lowpart (word_mode, from), 0);
575 return;
576 }
577
578 /* Now follow all the conversions between integers
579 no more than a word long. */
580
581 /* For truncation, usually we can just refer to FROM in a narrower mode. */
582 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
583 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
584 {
585 if (!((MEM_P (from)
586 && ! MEM_VOLATILE_P (from)
587 && direct_load[(int) to_mode]
588 && ! mode_dependent_address_p (XEXP (from, 0),
589 MEM_ADDR_SPACE (from)))
590 || REG_P (from)
591 || GET_CODE (from) == SUBREG))
592 from = force_reg (from_mode, from);
593 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
594 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
595 from = copy_to_reg (from);
596 emit_move_insn (to, gen_lowpart (to_mode, from));
597 return;
598 }
599
600 /* Handle extension. */
601 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
602 {
603 /* Convert directly if that works. */
604 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
605 != CODE_FOR_nothing)
606 {
607 emit_unop_insn (code, to, from, equiv_code);
608 return;
609 }
610 else
611 {
612 enum machine_mode intermediate;
613 rtx tmp;
614 int shift_amount;
615
616 /* Search for a mode to convert via. */
617 for (intermediate = from_mode; intermediate != VOIDmode;
618 intermediate = GET_MODE_WIDER_MODE (intermediate))
619 if (((can_extend_p (to_mode, intermediate, unsignedp)
620 != CODE_FOR_nothing)
621 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
622 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
623 && (can_extend_p (intermediate, from_mode, unsignedp)
624 != CODE_FOR_nothing))
625 {
626 convert_move (to, convert_to_mode (intermediate, from,
627 unsignedp), unsignedp);
628 return;
629 }
630
631 /* No suitable intermediate mode.
632 Generate what we need with shifts. */
633 shift_amount = (GET_MODE_PRECISION (to_mode)
634 - GET_MODE_PRECISION (from_mode));
635 from = gen_lowpart (to_mode, force_reg (from_mode, from));
636 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
637 to, unsignedp);
638 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
639 to, unsignedp);
640 if (tmp != to)
641 emit_move_insn (to, tmp);
642 return;
643 }
644 }
645
646 /* Support special truncate insns for certain modes. */
647 if (convert_optab_handler (trunc_optab, to_mode,
648 from_mode) != CODE_FOR_nothing)
649 {
650 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
651 to, from, UNKNOWN);
652 return;
653 }
654
655 /* Handle truncation of volatile memrefs, and so on;
656 the things that couldn't be truncated directly,
657 and for which there was no special instruction.
658
659 ??? Code above formerly short-circuited this, for most integer
660 mode pairs, with a force_reg in from_mode followed by a recursive
661 call to this routine. Appears always to have been wrong. */
662 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
663 {
664 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
665 emit_move_insn (to, temp);
666 return;
667 }
668
669 /* Mode combination is not recognized. */
670 gcc_unreachable ();
671 }
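
/* Illustrative sketch (hypothetical, not part of this file): a typical
   caller of convert_move allocates a pseudo in the destination mode and
   lets the routine pick an extension, truncation, or libcall strategy.
   The helper name below is made up for illustration only.  */
#if 0
static rtx
widen_si_to_di_example (rtx val /* an SImode value */)
{
  rtx wide = gen_reg_rtx (DImode);
  convert_move (wide, val, /*unsignedp=*/0);	/* sign-extend */
  return wide;
}
#endif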
672
673 /* Return an rtx for a value that would result
674 from converting X to mode MODE.
675 Both X and MODE may be floating, or both integer.
676 UNSIGNEDP is nonzero if X is an unsigned value.
677 This can be done by referring to a part of X in place
678 or by copying to a new temporary with conversion. */
679
680 rtx
681 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
682 {
683 return convert_modes (mode, VOIDmode, x, unsignedp);
684 }
685
686 /* Return an rtx for a value that would result
687 from converting X from mode OLDMODE to mode MODE.
688 Both modes may be floating, or both integer.
689 UNSIGNEDP is nonzero if X is an unsigned value.
690
691 This can be done by referring to a part of X in place
692 or by copying to a new temporary with conversion.
693
694 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
695
696 rtx
697 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
698 {
699 rtx temp;
700
701 /* If FROM is a SUBREG that indicates that we have already done at least
702 the required extension, strip it. */
703
704 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
705 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
706 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
707 x = gen_lowpart (mode, x);
708
709 if (GET_MODE (x) != VOIDmode)
710 oldmode = GET_MODE (x);
711
712 if (mode == oldmode)
713 return x;
714
715 /* There is one case that we must handle specially: If we are converting
716 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
717 we are to interpret the constant as unsigned, gen_lowpart will do
 718 	     the wrong thing if the constant appears negative.  What we want to do is
719 make the high-order word of the constant zero, not all ones. */
720
721 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
722 && GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT
723 && CONST_INT_P (x) && INTVAL (x) < 0)
724 {
725 double_int val = double_int::from_uhwi (INTVAL (x));
726
727 /* We need to zero extend VAL. */
728 if (oldmode != VOIDmode)
729 val = val.zext (GET_MODE_BITSIZE (oldmode));
730
731 return immed_double_int_const (val, mode);
732 }
733
734 /* We can do this with a gen_lowpart if both desired and current modes
735 are integer, and this is either a constant integer, a register, or a
736 non-volatile MEM. Except for the constant case where MODE is no
737 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
738
739 if ((CONST_INT_P (x)
740 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
741 || (GET_MODE_CLASS (mode) == MODE_INT
742 && GET_MODE_CLASS (oldmode) == MODE_INT
743 && (CONST_DOUBLE_AS_INT_P (x)
744 || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
745 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
746 && direct_load[(int) mode])
747 || (REG_P (x)
748 && (! HARD_REGISTER_P (x)
749 || HARD_REGNO_MODE_OK (REGNO (x), mode))
750 && TRULY_NOOP_TRUNCATION_MODES_P (mode,
751 GET_MODE (x))))))))
752 {
 753 	      /* ??? If we don't know OLDMODE, we have to assume here that
754 X does not need sign- or zero-extension. This may not be
755 the case, but it's the best we can do. */
756 if (CONST_INT_P (x) && oldmode != VOIDmode
757 && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
758 {
759 HOST_WIDE_INT val = INTVAL (x);
760
761 /* We must sign or zero-extend in this case. Start by
762 zero-extending, then sign extend if we need to. */
763 val &= GET_MODE_MASK (oldmode);
764 if (! unsignedp
765 && val_signbit_known_set_p (oldmode, val))
766 val |= ~GET_MODE_MASK (oldmode);
767
768 return gen_int_mode (val, mode);
769 }
770
771 return gen_lowpart (mode, x);
772 }
773
 774 	  /* Converting from an integer constant into MODE is always equivalent to a
 775 	     subreg operation.  */
776 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
777 {
778 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
779 return simplify_gen_subreg (mode, x, oldmode, 0);
780 }
781
782 temp = gen_reg_rtx (mode);
783 convert_move (temp, x, unsignedp);
784 return temp;
785 }
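
/* Illustrative sketch (hypothetical, not part of this file): for a
   CONST_INT, convert_modes masks or sign-extends according to OLDMODE and
   UNSIGNEDP, so no conversion insns are emitted.  The helper name below is
   made up for illustration only.  */
#if 0
static rtx
reinterpret_qi_const_example (void)
{
  /* -1 carrying QImode semantics, reinterpreted as unsigned, becomes the
     SImode constant 255.  */
  return convert_modes (SImode, QImode, constm1_rtx, /*unsignedp=*/1);
}
#endif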
786 \f
787 /* Return the largest alignment we can use for doing a move (or store)
788 of MAX_PIECES. ALIGN is the largest alignment we could use. */
789
790 static unsigned int
791 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
792 {
793 enum machine_mode tmode;
794
795 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
796 if (align >= GET_MODE_ALIGNMENT (tmode))
797 align = GET_MODE_ALIGNMENT (tmode);
798 else
799 {
800 enum machine_mode tmode, xmode;
801
802 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
803 tmode != VOIDmode;
804 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
805 if (GET_MODE_SIZE (tmode) > max_pieces
806 || SLOW_UNALIGNED_ACCESS (tmode, align))
807 break;
808
809 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
810 }
811
812 return align;
813 }
814
815 /* Return the widest integer mode no wider than SIZE. If no such mode
816 can be found, return VOIDmode. */
817
818 static enum machine_mode
819 widest_int_mode_for_size (unsigned int size)
820 {
821 enum machine_mode tmode, mode = VOIDmode;
822
823 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
824 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
825 if (GET_MODE_SIZE (tmode) < size)
826 mode = tmode;
827
828 return mode;
829 }
830
831 /* STORE_MAX_PIECES is the number of bytes at a time that we can
832 store efficiently. Due to internal GCC limitations, this is
833 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
834 for an immediate constant. */
835
836 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
837
838 /* Determine whether the LEN bytes can be moved by using several move
839 instructions. Return nonzero if a call to move_by_pieces should
840 succeed. */
841
842 int
843 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
844 unsigned int align ATTRIBUTE_UNUSED)
845 {
846 return MOVE_BY_PIECES_P (len, align);
847 }
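
/* Illustrative sketch (hypothetical, not part of this file): callers
   typically test can_move_by_pieces before committing to move_by_pieces,
   falling back to another block-copy strategy otherwise.  The helper name
   below is made up for illustration only.  */
#if 0
static bool
try_piecewise_copy_example (rtx to, rtx from, unsigned HOST_WIDE_INT len,
			    unsigned int align)
{
  if (!can_move_by_pieces (len, align))
    return false;
  move_by_pieces (to, from, len, align, /*endp=*/0);
  return true;
}
#endif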
848
849 /* Generate several move instructions to copy LEN bytes from block FROM to
850 block TO. (These are MEM rtx's with BLKmode).
851
852 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
853 used to push FROM to the stack.
854
855 ALIGN is maximum stack alignment we can assume.
856
 857 	   If ENDP is 0 return TO; if ENDP is 1 return memory at the end, a la
 858 	   mempcpy; and if ENDP is 2 return memory at the end minus one byte, a la
 859 	   stpcpy.  */
860
861 rtx
862 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
863 unsigned int align, int endp)
864 {
865 struct move_by_pieces_d data;
866 enum machine_mode to_addr_mode;
867 enum machine_mode from_addr_mode = get_address_mode (from);
868 rtx to_addr, from_addr = XEXP (from, 0);
869 unsigned int max_size = MOVE_MAX_PIECES + 1;
870 enum insn_code icode;
871
872 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
873
874 data.offset = 0;
875 data.from_addr = from_addr;
876 if (to)
877 {
878 to_addr_mode = get_address_mode (to);
879 to_addr = XEXP (to, 0);
880 data.to = to;
881 data.autinc_to
882 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
883 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
884 data.reverse
885 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
886 }
887 else
888 {
889 to_addr_mode = VOIDmode;
890 to_addr = NULL_RTX;
891 data.to = NULL_RTX;
892 data.autinc_to = 1;
893 #ifdef STACK_GROWS_DOWNWARD
894 data.reverse = 1;
895 #else
896 data.reverse = 0;
897 #endif
898 }
899 data.to_addr = to_addr;
900 data.from = from;
901 data.autinc_from
902 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
903 || GET_CODE (from_addr) == POST_INC
904 || GET_CODE (from_addr) == POST_DEC);
905
906 data.explicit_inc_from = 0;
907 data.explicit_inc_to = 0;
908 if (data.reverse) data.offset = len;
909 data.len = len;
910
911 /* If copying requires more than two move insns,
912 copy addresses to registers (to make displacements shorter)
913 and use post-increment if available. */
914 if (!(data.autinc_from && data.autinc_to)
915 && move_by_pieces_ninsns (len, align, max_size) > 2)
916 {
917 /* Find the mode of the largest move...
918 MODE might not be used depending on the definitions of the
919 USE_* macros below. */
920 enum machine_mode mode ATTRIBUTE_UNUSED
921 = widest_int_mode_for_size (max_size);
922
923 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
924 {
925 data.from_addr = copy_to_mode_reg (from_addr_mode,
926 plus_constant (from_addr_mode,
927 from_addr, len));
928 data.autinc_from = 1;
929 data.explicit_inc_from = -1;
930 }
931 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
932 {
933 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
934 data.autinc_from = 1;
935 data.explicit_inc_from = 1;
936 }
937 if (!data.autinc_from && CONSTANT_P (from_addr))
938 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
939 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
940 {
941 data.to_addr = copy_to_mode_reg (to_addr_mode,
942 plus_constant (to_addr_mode,
943 to_addr, len));
944 data.autinc_to = 1;
945 data.explicit_inc_to = -1;
946 }
947 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
948 {
949 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
950 data.autinc_to = 1;
951 data.explicit_inc_to = 1;
952 }
953 if (!data.autinc_to && CONSTANT_P (to_addr))
954 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
955 }
956
957 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
958
959 /* First move what we can in the largest integer mode, then go to
960 successively smaller modes. */
961
962 while (max_size > 1 && data.len > 0)
963 {
964 enum machine_mode mode = widest_int_mode_for_size (max_size);
965
966 if (mode == VOIDmode)
967 break;
968
969 icode = optab_handler (mov_optab, mode);
970 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
971 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
972
973 max_size = GET_MODE_SIZE (mode);
974 }
975
976 /* The code above should have handled everything. */
977 gcc_assert (!data.len);
978
979 if (endp)
980 {
981 rtx to1;
982
983 gcc_assert (!data.reverse);
984 if (data.autinc_to)
985 {
986 if (endp == 2)
987 {
988 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
989 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
990 else
991 data.to_addr = copy_to_mode_reg (to_addr_mode,
992 plus_constant (to_addr_mode,
993 data.to_addr,
994 -1));
995 }
996 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
997 data.offset);
998 }
999 else
1000 {
1001 if (endp == 2)
1002 --data.offset;
1003 to1 = adjust_address (data.to, QImode, data.offset);
1004 }
1005 return to1;
1006 }
1007 else
1008 return data.to;
1009 }
1010
1011 /* Return number of insns required to move L bytes by pieces.
1012 ALIGN (in bits) is maximum alignment we can assume. */
1013
1014 unsigned HOST_WIDE_INT
1015 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1016 unsigned int max_size)
1017 {
1018 unsigned HOST_WIDE_INT n_insns = 0;
1019
1020 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1021
1022 while (max_size > 1 && l > 0)
1023 {
1024 enum machine_mode mode;
1025 enum insn_code icode;
1026
1027 mode = widest_int_mode_for_size (max_size);
1028
1029 if (mode == VOIDmode)
1030 break;
1031
1032 icode = optab_handler (mov_optab, mode);
1033 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1034 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1035
1036 max_size = GET_MODE_SIZE (mode);
1037 }
1038
1039 gcc_assert (!l);
1040 return n_insns;
1041 }
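
/* Illustrative sketch (hypothetical, not part of this file): on a target
   whose widest piece is 4 bytes, a sufficiently aligned 7-byte copy is
   broken into 4 + 2 + 1 byte moves, so the estimate below would be 3.
   The helper name and that figure hold only under those assumptions.  */
#if 0
static unsigned HOST_WIDE_INT
ninsns_example (void)
{
  return move_by_pieces_ninsns (7, 4 * BITS_PER_UNIT,
				MOVE_MAX_PIECES + 1);
}
#endif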
1042
1043 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1044 with move instructions for mode MODE. GENFUN is the gen_... function
1045 to make a move insn for that mode. DATA has all the other info. */
1046
1047 static void
1048 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1049 struct move_by_pieces_d *data)
1050 {
1051 unsigned int size = GET_MODE_SIZE (mode);
1052 rtx to1 = NULL_RTX, from1;
1053
1054 while (data->len >= size)
1055 {
1056 if (data->reverse)
1057 data->offset -= size;
1058
1059 if (data->to)
1060 {
1061 if (data->autinc_to)
1062 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1063 data->offset);
1064 else
1065 to1 = adjust_address (data->to, mode, data->offset);
1066 }
1067
1068 if (data->autinc_from)
1069 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1070 data->offset);
1071 else
1072 from1 = adjust_address (data->from, mode, data->offset);
1073
1074 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1075 emit_insn (gen_add2_insn (data->to_addr,
1076 GEN_INT (-(HOST_WIDE_INT)size)));
1077 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1078 emit_insn (gen_add2_insn (data->from_addr,
1079 GEN_INT (-(HOST_WIDE_INT)size)));
1080
1081 if (data->to)
1082 emit_insn ((*genfun) (to1, from1));
1083 else
1084 {
1085 #ifdef PUSH_ROUNDING
1086 emit_single_push_insn (mode, from1, NULL);
1087 #else
1088 gcc_unreachable ();
1089 #endif
1090 }
1091
1092 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1093 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1094 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1095 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1096
1097 if (! data->reverse)
1098 data->offset += size;
1099
1100 data->len -= size;
1101 }
1102 }
1103 \f
1104 /* Emit code to move a block Y to a block X. This may be done with
1105 string-move instructions, with multiple scalar move instructions,
1106 or with a library call.
1107
1108 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1109 SIZE is an rtx that says how long they are.
1110 ALIGN is the maximum alignment we can assume they have.
1111 METHOD describes what kind of copy this is, and what mechanisms may be used.
1112
1113 Return the address of the new block, if memcpy is called and returns it,
1114 0 otherwise. */
1115
1116 rtx
1117 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1118 unsigned int expected_align, HOST_WIDE_INT expected_size)
1119 {
1120 bool may_use_call;
1121 rtx retval = 0;
1122 unsigned int align;
1123
1124 gcc_assert (size);
1125 if (CONST_INT_P (size)
1126 && INTVAL (size) == 0)
1127 return 0;
1128
1129 switch (method)
1130 {
1131 case BLOCK_OP_NORMAL:
1132 case BLOCK_OP_TAILCALL:
1133 may_use_call = true;
1134 break;
1135
1136 case BLOCK_OP_CALL_PARM:
1137 may_use_call = block_move_libcall_safe_for_call_parm ();
1138
1139 /* Make inhibit_defer_pop nonzero around the library call
1140 to force it to pop the arguments right away. */
1141 NO_DEFER_POP;
1142 break;
1143
1144 case BLOCK_OP_NO_LIBCALL:
1145 may_use_call = false;
1146 break;
1147
1148 default:
1149 gcc_unreachable ();
1150 }
1151
1152 gcc_assert (MEM_P (x) && MEM_P (y));
1153 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1154 gcc_assert (align >= BITS_PER_UNIT);
1155
1156 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1157 block copy is more efficient for other large modes, e.g. DCmode. */
1158 x = adjust_address (x, BLKmode, 0);
1159 y = adjust_address (y, BLKmode, 0);
1160
1161 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1162 can be incorrect is coming from __builtin_memcpy. */
1163 if (CONST_INT_P (size))
1164 {
1165 x = shallow_copy_rtx (x);
1166 y = shallow_copy_rtx (y);
1167 set_mem_size (x, INTVAL (size));
1168 set_mem_size (y, INTVAL (size));
1169 }
1170
1171 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1172 move_by_pieces (x, y, INTVAL (size), align, 0);
1173 else if (emit_block_move_via_movmem (x, y, size, align,
1174 expected_align, expected_size))
1175 ;
1176 else if (may_use_call
1177 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1178 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1179 {
1180 /* Since x and y are passed to a libcall, mark the corresponding
1181 tree EXPR as addressable. */
1182 tree y_expr = MEM_EXPR (y);
1183 tree x_expr = MEM_EXPR (x);
1184 if (y_expr)
1185 mark_addressable (y_expr);
1186 if (x_expr)
1187 mark_addressable (x_expr);
1188 retval = emit_block_move_via_libcall (x, y, size,
1189 method == BLOCK_OP_TAILCALL);
1190 }
1191
1192 else
1193 emit_block_move_via_loop (x, y, size, align);
1194
1195 if (method == BLOCK_OP_CALL_PARM)
1196 OK_DEFER_POP;
1197
1198 return retval;
1199 }
1200
1201 rtx
1202 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1203 {
1204 return emit_block_move_hints (x, y, size, method, 0, -1);
1205 }
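
/* Illustrative sketch (hypothetical, not part of this file): a constant
   sized BLKmode copy routed through the normal strategy selection
   (by pieces, a movmem pattern, or a memcpy libcall).  The helper name
   below is made up for illustration only.  */
#if 0
static void
copy_block_example (rtx dst_mem, rtx src_mem, HOST_WIDE_INT len)
{
  emit_block_move (dst_mem, src_mem, GEN_INT (len), BLOCK_OP_NORMAL);
}
#endif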
1206
1207 /* A subroutine of emit_block_move. Returns true if calling the
1208 block move libcall will not clobber any parameters which may have
1209 already been placed on the stack. */
1210
1211 static bool
1212 block_move_libcall_safe_for_call_parm (void)
1213 {
1214 #if defined (REG_PARM_STACK_SPACE)
1215 tree fn;
1216 #endif
1217
1218 /* If arguments are pushed on the stack, then they're safe. */
1219 if (PUSH_ARGS)
1220 return true;
1221
1222 /* If registers go on the stack anyway, any argument is sure to clobber
1223 an outgoing argument. */
1224 #if defined (REG_PARM_STACK_SPACE)
1225 fn = emit_block_move_libcall_fn (false);
1226 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1227 depend on its argument. */
1228 (void) fn;
1229 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1230 && REG_PARM_STACK_SPACE (fn) != 0)
1231 return false;
1232 #endif
1233
1234 /* If any argument goes in memory, then it might clobber an outgoing
1235 argument. */
1236 {
1237 CUMULATIVE_ARGS args_so_far_v;
1238 cumulative_args_t args_so_far;
1239 tree fn, arg;
1240
1241 fn = emit_block_move_libcall_fn (false);
1242 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1243 args_so_far = pack_cumulative_args (&args_so_far_v);
1244
1245 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1246 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1247 {
1248 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1249 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1250 NULL_TREE, true);
1251 if (!tmp || !REG_P (tmp))
1252 return false;
1253 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1254 return false;
1255 targetm.calls.function_arg_advance (args_so_far, mode,
1256 NULL_TREE, true);
1257 }
1258 }
1259 return true;
1260 }
1261
1262 /* A subroutine of emit_block_move. Expand a movmem pattern;
1263 return true if successful. */
1264
1265 static bool
1266 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1267 unsigned int expected_align, HOST_WIDE_INT expected_size)
1268 {
1269 int save_volatile_ok = volatile_ok;
1270 enum machine_mode mode;
1271
1272 if (expected_align < align)
1273 expected_align = align;
1274
1275 /* Since this is a move insn, we don't care about volatility. */
1276 volatile_ok = 1;
1277
1278 /* Try the most limited insn first, because there's no point
1279 including more than one in the machine description unless
1280 the more limited one has some advantage. */
1281
1282 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1283 mode = GET_MODE_WIDER_MODE (mode))
1284 {
1285 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1286
1287 if (code != CODE_FOR_nothing
1288 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1289 here because if SIZE is less than the mode mask, as it is
1290 returned by the macro, it will definitely be less than the
1291 actual mode mask. */
1292 && ((CONST_INT_P (size)
1293 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1294 <= (GET_MODE_MASK (mode) >> 1)))
1295 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
1296 {
1297 struct expand_operand ops[6];
1298 unsigned int nops;
1299
1300 /* ??? When called via emit_block_move_for_call, it'd be
1301 nice if there were some way to inform the backend, so
1302 that it doesn't fail the expansion because it thinks
1303 emitting the libcall would be more efficient. */
1304 nops = insn_data[(int) code].n_generator_args;
1305 gcc_assert (nops == 4 || nops == 6);
1306
1307 create_fixed_operand (&ops[0], x);
1308 create_fixed_operand (&ops[1], y);
1309 /* The check above guarantees that this size conversion is valid. */
1310 create_convert_operand_to (&ops[2], size, mode, true);
1311 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1312 if (nops == 6)
1313 {
1314 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1315 create_integer_operand (&ops[5], expected_size);
1316 }
1317 if (maybe_expand_insn (code, nops, ops))
1318 {
1319 volatile_ok = save_volatile_ok;
1320 return true;
1321 }
1322 }
1323 }
1324
1325 volatile_ok = save_volatile_ok;
1326 return false;
1327 }
1328
1329 /* A subroutine of emit_block_move. Expand a call to memcpy.
1330 Return the return value from memcpy, 0 otherwise. */
1331
1332 rtx
1333 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1334 {
1335 rtx dst_addr, src_addr;
1336 tree call_expr, fn, src_tree, dst_tree, size_tree;
1337 enum machine_mode size_mode;
1338 rtx retval;
1339
1340 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1341 pseudos. We can then place those new pseudos into a VAR_DECL and
1342 use them later. */
1343
1344 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1345 src_addr = copy_addr_to_reg (XEXP (src, 0));
1346
1347 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1348 src_addr = convert_memory_address (ptr_mode, src_addr);
1349
1350 dst_tree = make_tree (ptr_type_node, dst_addr);
1351 src_tree = make_tree (ptr_type_node, src_addr);
1352
1353 size_mode = TYPE_MODE (sizetype);
1354
1355 size = convert_to_mode (size_mode, size, 1);
1356 size = copy_to_mode_reg (size_mode, size);
1357
1358 /* It is incorrect to use the libcall calling conventions to call
1359 memcpy in this context. This could be a user call to memcpy and
1360 the user may wish to examine the return value from memcpy. For
1361 targets where libcalls and normal calls have different conventions
1362 for returning pointers, we could end up generating incorrect code. */
1363
1364 size_tree = make_tree (sizetype, size);
1365
1366 fn = emit_block_move_libcall_fn (true);
1367 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1368 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1369
1370 retval = expand_normal (call_expr);
1371
1372 return retval;
1373 }
1374
1375 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1376 for the function we use for block copies. */
1377
1378 static GTY(()) tree block_move_fn;
1379
1380 void
1381 init_block_move_fn (const char *asmspec)
1382 {
1383 if (!block_move_fn)
1384 {
1385 tree args, fn, attrs, attr_args;
1386
1387 fn = get_identifier ("memcpy");
1388 args = build_function_type_list (ptr_type_node, ptr_type_node,
1389 const_ptr_type_node, sizetype,
1390 NULL_TREE);
1391
1392 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1393 DECL_EXTERNAL (fn) = 1;
1394 TREE_PUBLIC (fn) = 1;
1395 DECL_ARTIFICIAL (fn) = 1;
1396 TREE_NOTHROW (fn) = 1;
1397 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1398 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1399
1400 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1401 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1402
1403 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1404
1405 block_move_fn = fn;
1406 }
1407
1408 if (asmspec)
1409 set_user_assembler_name (block_move_fn, asmspec);
1410 }
1411
1412 static tree
1413 emit_block_move_libcall_fn (int for_call)
1414 {
1415 static bool emitted_extern;
1416
1417 if (!block_move_fn)
1418 init_block_move_fn (NULL);
1419
1420 if (for_call && !emitted_extern)
1421 {
1422 emitted_extern = true;
1423 make_decl_rtl (block_move_fn);
1424 }
1425
1426 return block_move_fn;
1427 }
1428
1429 /* A subroutine of emit_block_move. Copy the data via an explicit
1430 loop. This is used only when libcalls are forbidden. */
1431 /* ??? It'd be nice to copy in hunks larger than QImode. */
1432
1433 static void
1434 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1435 unsigned int align ATTRIBUTE_UNUSED)
1436 {
1437 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1438 enum machine_mode x_addr_mode = get_address_mode (x);
1439 enum machine_mode y_addr_mode = get_address_mode (y);
1440 enum machine_mode iter_mode;
1441
1442 iter_mode = GET_MODE (size);
1443 if (iter_mode == VOIDmode)
1444 iter_mode = word_mode;
1445
1446 top_label = gen_label_rtx ();
1447 cmp_label = gen_label_rtx ();
1448 iter = gen_reg_rtx (iter_mode);
1449
1450 emit_move_insn (iter, const0_rtx);
1451
1452 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1453 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1454 do_pending_stack_adjust ();
1455
1456 emit_jump (cmp_label);
1457 emit_label (top_label);
1458
1459 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1460 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1461
1462 if (x_addr_mode != y_addr_mode)
1463 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1464 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1465
1466 x = change_address (x, QImode, x_addr);
1467 y = change_address (y, QImode, y_addr);
1468
1469 emit_move_insn (x, y);
1470
1471 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1472 true, OPTAB_LIB_WIDEN);
1473 if (tmp != iter)
1474 emit_move_insn (iter, tmp);
1475
1476 emit_label (cmp_label);
1477
1478 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1479 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1480 }
1481 \f
1482 /* Copy all or part of a value X into registers starting at REGNO.
1483 The number of registers to be filled is NREGS. */
1484
1485 void
1486 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1487 {
1488 int i;
1489 #ifdef HAVE_load_multiple
1490 rtx pat;
1491 rtx last;
1492 #endif
1493
1494 if (nregs == 0)
1495 return;
1496
1497 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1498 x = validize_mem (force_const_mem (mode, x));
1499
1500 /* See if the machine can do this with a load multiple insn. */
1501 #ifdef HAVE_load_multiple
1502 if (HAVE_load_multiple)
1503 {
1504 last = get_last_insn ();
1505 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1506 GEN_INT (nregs));
1507 if (pat)
1508 {
1509 emit_insn (pat);
1510 return;
1511 }
1512 else
1513 delete_insns_since (last);
1514 }
1515 #endif
1516
1517 for (i = 0; i < nregs; i++)
1518 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1519 operand_subword_force (x, i, mode));
1520 }
1521
1522 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1523 The number of registers to be filled is NREGS. */
1524
1525 void
1526 move_block_from_reg (int regno, rtx x, int nregs)
1527 {
1528 int i;
1529
1530 if (nregs == 0)
1531 return;
1532
1533 /* See if the machine can do this with a store multiple insn. */
1534 #ifdef HAVE_store_multiple
1535 if (HAVE_store_multiple)
1536 {
1537 rtx last = get_last_insn ();
1538 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1539 GEN_INT (nregs));
1540 if (pat)
1541 {
1542 emit_insn (pat);
1543 return;
1544 }
1545 else
1546 delete_insns_since (last);
1547 }
1548 #endif
1549
1550 for (i = 0; i < nregs; i++)
1551 {
1552 rtx tem = operand_subword (x, i, 1, BLKmode);
1553
1554 gcc_assert (tem);
1555
1556 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1557 }
1558 }
1559
1560 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1561 ORIG, where ORIG is a non-consecutive group of registers represented by
1562 a PARALLEL. The clone is identical to the original except in that the
1563 original set of registers is replaced by a new set of pseudo registers.
1564 The new set has the same modes as the original set. */
1565
1566 rtx
1567 gen_group_rtx (rtx orig)
1568 {
1569 int i, length;
1570 rtx *tmps;
1571
1572 gcc_assert (GET_CODE (orig) == PARALLEL);
1573
1574 length = XVECLEN (orig, 0);
1575 tmps = XALLOCAVEC (rtx, length);
1576
1577 /* Skip a NULL entry in first slot. */
1578 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1579
1580 if (i)
1581 tmps[0] = 0;
1582
1583 for (; i < length; i++)
1584 {
1585 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1586 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1587
1588 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1589 }
1590
1591 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1592 }
1593
1594 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1595 except that values are placed in TMPS[i], and must later be moved
1596 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1597
1598 static void
1599 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1600 {
1601 rtx src;
1602 int start, i;
1603 enum machine_mode m = GET_MODE (orig_src);
1604
1605 gcc_assert (GET_CODE (dst) == PARALLEL);
1606
1607 if (m != VOIDmode
1608 && !SCALAR_INT_MODE_P (m)
1609 && !MEM_P (orig_src)
1610 && GET_CODE (orig_src) != CONCAT)
1611 {
1612 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1613 if (imode == BLKmode)
1614 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1615 else
1616 src = gen_reg_rtx (imode);
1617 if (imode != BLKmode)
1618 src = gen_lowpart (GET_MODE (orig_src), src);
1619 emit_move_insn (src, orig_src);
1620 /* ...and back again. */
1621 if (imode != BLKmode)
1622 src = gen_lowpart (imode, src);
1623 emit_group_load_1 (tmps, dst, src, type, ssize);
1624 return;
1625 }
1626
1627 /* Check for a NULL entry, used to indicate that the parameter goes
1628 both on the stack and in registers. */
1629 if (XEXP (XVECEXP (dst, 0, 0), 0))
1630 start = 0;
1631 else
1632 start = 1;
1633
1634 /* Process the pieces. */
1635 for (i = start; i < XVECLEN (dst, 0); i++)
1636 {
1637 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1638 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1639 unsigned int bytelen = GET_MODE_SIZE (mode);
1640 int shift = 0;
1641
1642 /* Handle trailing fragments that run over the size of the struct. */
1643 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1644 {
1645 /* Arrange to shift the fragment to where it belongs.
1646 extract_bit_field loads to the lsb of the reg. */
1647 if (
1648 #ifdef BLOCK_REG_PADDING
1649 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1650 == (BYTES_BIG_ENDIAN ? upward : downward)
1651 #else
1652 BYTES_BIG_ENDIAN
1653 #endif
1654 )
1655 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1656 bytelen = ssize - bytepos;
1657 gcc_assert (bytelen > 0);
1658 }
1659
1660 /* If we won't be loading directly from memory, protect the real source
1661 from strange tricks we might play; but make sure that the source can
1662 be loaded directly into the destination. */
1663 src = orig_src;
1664 if (!MEM_P (orig_src)
1665 && (!CONSTANT_P (orig_src)
1666 || (GET_MODE (orig_src) != mode
1667 && GET_MODE (orig_src) != VOIDmode)))
1668 {
1669 if (GET_MODE (orig_src) == VOIDmode)
1670 src = gen_reg_rtx (mode);
1671 else
1672 src = gen_reg_rtx (GET_MODE (orig_src));
1673
1674 emit_move_insn (src, orig_src);
1675 }
1676
1677 /* Optimize the access just a bit. */
1678 if (MEM_P (src)
1679 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1680 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1681 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1682 && bytelen == GET_MODE_SIZE (mode))
1683 {
1684 tmps[i] = gen_reg_rtx (mode);
1685 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1686 }
1687 else if (COMPLEX_MODE_P (mode)
1688 && GET_MODE (src) == mode
1689 && bytelen == GET_MODE_SIZE (mode))
1690 /* Let emit_move_complex do the bulk of the work. */
1691 tmps[i] = src;
1692 else if (GET_CODE (src) == CONCAT)
1693 {
1694 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1695 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1696
1697 if ((bytepos == 0 && bytelen == slen0)
1698 || (bytepos != 0 && bytepos + bytelen <= slen))
1699 {
1700 /* The following assumes that the concatenated objects all
1701 have the same size. In this case, a simple calculation
1702 can be used to determine the object and the bit field
1703 to be extracted. */
1704 tmps[i] = XEXP (src, bytepos / slen0);
1705 if (! CONSTANT_P (tmps[i])
1706 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1707 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1708 (bytepos % slen0) * BITS_PER_UNIT,
1709 1, false, NULL_RTX, mode, mode);
1710 }
1711 else
1712 {
1713 rtx mem;
1714
1715 gcc_assert (!bytepos);
1716 mem = assign_stack_temp (GET_MODE (src), slen);
1717 emit_move_insn (mem, src);
1718 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1719 0, 1, false, NULL_RTX, mode, mode);
1720 }
1721 }
1722 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
 1723 	 SIMD register, which is currently broken.  Until we get GCC
1724 to emit proper RTL for these cases, let's dump to memory. */
1725 else if (VECTOR_MODE_P (GET_MODE (dst))
1726 && REG_P (src))
1727 {
1728 int slen = GET_MODE_SIZE (GET_MODE (src));
1729 rtx mem;
1730
1731 mem = assign_stack_temp (GET_MODE (src), slen);
1732 emit_move_insn (mem, src);
1733 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1734 }
1735 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1736 && XVECLEN (dst, 0) > 1)
1737 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1738 else if (CONSTANT_P (src))
1739 {
1740 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1741
1742 if (len == ssize)
1743 tmps[i] = src;
1744 else
1745 {
1746 rtx first, second;
1747
1748 gcc_assert (2 * len == ssize);
1749 split_double (src, &first, &second);
1750 if (i)
1751 tmps[i] = second;
1752 else
1753 tmps[i] = first;
1754 }
1755 }
1756 else if (REG_P (src) && GET_MODE (src) == mode)
1757 tmps[i] = src;
1758 else
1759 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1760 bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
1761 mode, mode);
1762
1763 if (shift)
1764 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1765 shift, tmps[i], 0);
1766 }
1767 }
1768
1769 /* Emit code to move a block SRC of type TYPE to a block DST,
1770 where DST is non-consecutive registers represented by a PARALLEL.
1771 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1772 if not known. */
1773
1774 void
1775 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1776 {
1777 rtx *tmps;
1778 int i;
1779
1780 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1781 emit_group_load_1 (tmps, dst, src, type, ssize);
1782
1783 /* Copy the extracted pieces into the proper (probable) hard regs. */
1784 for (i = 0; i < XVECLEN (dst, 0); i++)
1785 {
1786 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1787 if (d == NULL)
1788 continue;
1789 emit_move_insn (d, tmps[i]);
1790 }
1791 }
1792
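/* Usage sketch (hypothetical caller; RETVAL, SLOT and TYPE are illustrative
   names, not identifiers from this file): a port that returns a small
   aggregate in several hard registers describes them with a PARALLEL, and
   an expander can spill or reload the aggregate through a stack slot with
   the group store/load routines.

     int size = int_size_in_bytes (type);
     rtx slot = assign_stack_temp (BLKmode, size);
     emit_group_store (slot, retval, type, size);   // registers -> memory
     emit_group_load (retval, slot, type, size);    // memory -> registers
*/
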
1793 /* Similar, but load SRC into new pseudos in a format that looks like
1794 PARALLEL. This can later be fed to emit_group_move to get things
1795 in the right place. */
1796
1797 rtx
1798 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1799 {
1800 rtvec vec;
1801 int i;
1802
1803 vec = rtvec_alloc (XVECLEN (parallel, 0));
1804 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1805
1806 /* Convert the vector to look just like the original PARALLEL, except
1807 with the computed values. */
1808 for (i = 0; i < XVECLEN (parallel, 0); i++)
1809 {
1810 rtx e = XVECEXP (parallel, 0, i);
1811 rtx d = XEXP (e, 0);
1812
1813 if (d)
1814 {
1815 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1816 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1817 }
1818 RTVEC_ELT (vec, i) = e;
1819 }
1820
1821 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1822 }
1823
1824 /* Emit code to move a block SRC to block DST, where SRC and DST are
1825 non-consecutive groups of registers, each represented by a PARALLEL. */
1826
1827 void
1828 emit_group_move (rtx dst, rtx src)
1829 {
1830 int i;
1831
1832 gcc_assert (GET_CODE (src) == PARALLEL
1833 && GET_CODE (dst) == PARALLEL
1834 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1835
1836 /* Skip first entry if NULL. */
1837 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1838 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1839 XEXP (XVECEXP (src, 0, i), 0));
1840 }
1841
1842 /* Move a group of registers represented by a PARALLEL into pseudos. */
1843
1844 rtx
1845 emit_group_move_into_temps (rtx src)
1846 {
1847 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1848 int i;
1849
1850 for (i = 0; i < XVECLEN (src, 0); i++)
1851 {
1852 rtx e = XVECEXP (src, 0, i);
1853 rtx d = XEXP (e, 0);
1854
1855 if (d)
1856 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1857 RTVEC_ELT (vec, i) = e;
1858 }
1859
1860 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1861 }
1862
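/* Usage sketch (hypothetical; INCOMING is an illustrative name for a
   PARALLEL of hard registers): a caller can park the group in pseudos
   before emitting code that might clobber the hard registers, and move
   it back into place afterwards.

     rtx saved = emit_group_move_into_temps (incoming);
     // ... code that may clobber the hard registers ...
     emit_group_move (incoming, saved);
*/
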
1863 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1864 where SRC is non-consecutive registers represented by a PARALLEL.
1865 SSIZE represents the total size of block ORIG_DST, or -1 if not
1866 known. */
1867
1868 void
1869 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1870 {
1871 rtx *tmps, dst;
1872 int start, finish, i;
1873 enum machine_mode m = GET_MODE (orig_dst);
1874
1875 gcc_assert (GET_CODE (src) == PARALLEL);
1876
1877 if (!SCALAR_INT_MODE_P (m)
1878 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1879 {
1880 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1881 if (imode == BLKmode)
1882 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1883 else
1884 dst = gen_reg_rtx (imode);
1885 emit_group_store (dst, src, type, ssize);
1886 if (imode != BLKmode)
1887 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1888 emit_move_insn (orig_dst, dst);
1889 return;
1890 }
1891
1892 /* Check for a NULL entry, used to indicate that the parameter goes
1893 both on the stack and in registers. */
1894 if (XEXP (XVECEXP (src, 0, 0), 0))
1895 start = 0;
1896 else
1897 start = 1;
1898 finish = XVECLEN (src, 0);
1899
1900 tmps = XALLOCAVEC (rtx, finish);
1901
1902 /* Copy the (probable) hard regs into pseudos. */
1903 for (i = start; i < finish; i++)
1904 {
1905 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1906 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1907 {
1908 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1909 emit_move_insn (tmps[i], reg);
1910 }
1911 else
1912 tmps[i] = reg;
1913 }
1914
1915 /* If we won't be storing directly into memory, protect the real destination
1916 from strange tricks we might play. */
1917 dst = orig_dst;
1918 if (GET_CODE (dst) == PARALLEL)
1919 {
1920 rtx temp;
1921
1922 /* We can get a PARALLEL dst if there is a conditional expression in
1923 a return statement. In that case, the dst and src are the same,
1924 so no action is necessary. */
1925 if (rtx_equal_p (dst, src))
1926 return;
1927
1928 /* It is unclear if we can ever reach here, but we may as well handle
1929 it. Allocate a temporary, and split this into a store/load to/from
1930 the temporary. */
1931
1932 temp = assign_stack_temp (GET_MODE (dst), ssize);
1933 emit_group_store (temp, src, type, ssize);
1934 emit_group_load (dst, temp, type, ssize);
1935 return;
1936 }
1937 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1938 {
1939 enum machine_mode outer = GET_MODE (dst);
1940 enum machine_mode inner;
1941 HOST_WIDE_INT bytepos;
1942 bool done = false;
1943 rtx temp;
1944
1945 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1946 dst = gen_reg_rtx (outer);
1947
1948 /* Make life a bit easier for combine. */
1949 /* If the first element of the vector is the low part
1950 of the destination mode, use a paradoxical subreg to
1951 initialize the destination. */
1952 if (start < finish)
1953 {
1954 inner = GET_MODE (tmps[start]);
1955 bytepos = subreg_lowpart_offset (inner, outer);
1956 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1957 {
1958 temp = simplify_gen_subreg (outer, tmps[start],
1959 inner, 0);
1960 if (temp)
1961 {
1962 emit_move_insn (dst, temp);
1963 done = true;
1964 start++;
1965 }
1966 }
1967 }
1968
1969 /* If the first element wasn't the low part, try the last. */
1970 if (!done
1971 && start < finish - 1)
1972 {
1973 inner = GET_MODE (tmps[finish - 1]);
1974 bytepos = subreg_lowpart_offset (inner, outer);
1975 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1976 {
1977 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1978 inner, 0);
1979 if (temp)
1980 {
1981 emit_move_insn (dst, temp);
1982 done = true;
1983 finish--;
1984 }
1985 }
1986 }
1987
1988 /* Otherwise, simply initialize the result to zero. */
1989 if (!done)
1990 emit_move_insn (dst, CONST0_RTX (outer));
1991 }
1992
1993 /* Process the pieces. */
1994 for (i = start; i < finish; i++)
1995 {
1996 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1997 enum machine_mode mode = GET_MODE (tmps[i]);
1998 unsigned int bytelen = GET_MODE_SIZE (mode);
1999 unsigned int adj_bytelen = bytelen;
2000 rtx dest = dst;
2001
2002 /* Handle trailing fragments that run over the size of the struct. */
2003 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2004 adj_bytelen = ssize - bytepos;
2005
2006 if (GET_CODE (dst) == CONCAT)
2007 {
2008 if (bytepos + adj_bytelen
2009 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2010 dest = XEXP (dst, 0);
2011 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2012 {
2013 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2014 dest = XEXP (dst, 1);
2015 }
2016 else
2017 {
2018 enum machine_mode dest_mode = GET_MODE (dest);
2019 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2020
2021 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2022
2023 if (GET_MODE_ALIGNMENT (dest_mode)
2024 >= GET_MODE_ALIGNMENT (tmp_mode))
2025 {
2026 dest = assign_stack_temp (dest_mode,
2027 GET_MODE_SIZE (dest_mode));
2028 emit_move_insn (adjust_address (dest,
2029 tmp_mode,
2030 bytepos),
2031 tmps[i]);
2032 dst = dest;
2033 }
2034 else
2035 {
2036 dest = assign_stack_temp (tmp_mode,
2037 GET_MODE_SIZE (tmp_mode));
2038 emit_move_insn (dest, tmps[i]);
2039 dst = adjust_address (dest, dest_mode, bytepos);
2040 }
2041 break;
2042 }
2043 }
2044
2045 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2046 {
2047 /* store_bit_field always takes its value from the lsb.
2048 Move the fragment to the lsb if it's not already there. */
2049 if (
2050 #ifdef BLOCK_REG_PADDING
2051 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2052 == (BYTES_BIG_ENDIAN ? upward : downward)
2053 #else
2054 BYTES_BIG_ENDIAN
2055 #endif
2056 )
2057 {
2058 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2059 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2060 shift, tmps[i], 0);
2061 }
2062 bytelen = adj_bytelen;
2063 }
2064
2065 /* Optimize the access just a bit. */
2066 if (MEM_P (dest)
2067 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2068 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2069 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2070 && bytelen == GET_MODE_SIZE (mode))
2071 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2072 else
2073 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2074 0, 0, mode, tmps[i]);
2075 }
2076
2077 /* Copy from the pseudo into the (probable) hard reg. */
2078 if (orig_dst != dst)
2079 emit_move_insn (orig_dst, dst);
2080 }
2081
2082 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2083
2084 This is used on targets that return BLKmode values in registers. */
2085
2086 void
2087 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2088 {
2089 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2090 rtx src = NULL, dst = NULL;
2091 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2092 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2093 enum machine_mode mode = GET_MODE (srcreg);
2094 enum machine_mode tmode = GET_MODE (target);
2095 enum machine_mode copy_mode;
2096
2097 /* BLKmode registers created in the back-end shouldn't have survived. */
2098 gcc_assert (mode != BLKmode);
2099
2100 /* If the structure doesn't take up a whole number of words, see whether
2101 SRCREG is padded on the left or on the right. If it's on the left,
2102 set PADDING_CORRECTION to the number of bits to skip.
2103
2104 In most ABIs, the structure will be returned at the least significant end of
2105 the register, which translates to right padding on little-endian
2106 targets and left padding on big-endian targets. The opposite
2107 holds if the structure is returned at the most significant
2108 end of the register. */
2109 if (bytes % UNITS_PER_WORD != 0
2110 && (targetm.calls.return_in_msb (type)
2111 ? !BYTES_BIG_ENDIAN
2112 : BYTES_BIG_ENDIAN))
2113 padding_correction
2114 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2115
2116 /* We can use a single move if we have an exact mode for the size. */
2117 else if (MEM_P (target)
2118 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2119 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2120 && bytes == GET_MODE_SIZE (mode))
2121 {
2122 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2123 return;
2124 }
2125
2126 /* And if we additionally have the same mode for a register. */
2127 else if (REG_P (target)
2128 && GET_MODE (target) == mode
2129 && bytes == GET_MODE_SIZE (mode))
2130 {
2131 emit_move_insn (target, srcreg);
2132 return;
2133 }
2134
2135 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2136 into a new pseudo which is a full word. */
2137 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2138 {
2139 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2140 mode = word_mode;
2141 }
2142
2143 /* Copy the structure BITSIZE bits at a time. If the target lives in
2144 memory, take care of not reading/writing past its end by selecting
2145 a copy mode suited to BITSIZE. This should always be possible given
2146 how it is computed.
2147
2148 If the target lives in a register, make sure not to select a copy mode
2149 larger than the mode of the register.
2150
2151 We could probably emit more efficient code for machines which do not use
2152 strict alignment, but it doesn't seem worth the effort at the current
2153 time. */
2154
2155 copy_mode = word_mode;
2156 if (MEM_P (target))
2157 {
2158 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2159 if (mem_mode != BLKmode)
2160 copy_mode = mem_mode;
2161 }
2162 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2163 copy_mode = tmode;
2164
2165 for (bitpos = 0, xbitpos = padding_correction;
2166 bitpos < bytes * BITS_PER_UNIT;
2167 bitpos += bitsize, xbitpos += bitsize)
2168 {
2169 /* We need a new source operand each time xbitpos is on a
2170 word boundary and when xbitpos == padding_correction
2171 (the first time through). */
2172 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2173 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2174
2175 /* We need a new destination operand each time bitpos is on
2176 a word boundary. */
2177 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2178 dst = target;
2179 else if (bitpos % BITS_PER_WORD == 0)
2180 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2181
2182 /* Use xbitpos for the source extraction (right justified) and
2183 bitpos for the destination store (left justified). */
2184 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2185 extract_bit_field (src, bitsize,
2186 xbitpos % BITS_PER_WORD, 1, false,
2187 NULL_RTX, copy_mode, copy_mode));
2188 }
2189 }
2190
2191 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2192 register if it contains any data, otherwise return null.
2193
2194 This is used on targets that return BLKmode values in registers. */
2195
2196 rtx
2197 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2198 {
2199 int i, n_regs;
2200 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2201 unsigned int bitsize;
2202 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2203 enum machine_mode dst_mode;
2204
2205 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2206
2207 x = expand_normal (src);
2208
2209 bytes = int_size_in_bytes (TREE_TYPE (src));
2210 if (bytes == 0)
2211 return NULL_RTX;
2212
2213 /* If the structure doesn't take up a whole number of words, see
2214 whether the register value should be padded on the left or on
2215 the right. Set PADDING_CORRECTION to the number of padding
2216 bits needed on the left side.
2217
2218 In most ABIs, the structure will be returned at the least significant end of
2219 the register, which translates to right padding on little-endian
2220 targets and left padding on big-endian targets. The opposite
2221 holds if the structure is returned at the most significant
2222 end of the register. */
2223 if (bytes % UNITS_PER_WORD != 0
2224 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2225 ? !BYTES_BIG_ENDIAN
2226 : BYTES_BIG_ENDIAN))
2227 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2228 * BITS_PER_UNIT));
2229
2230 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2231 dst_words = XALLOCAVEC (rtx, n_regs);
2232 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2233
2234 /* Copy the structure BITSIZE bits at a time. */
2235 for (bitpos = 0, xbitpos = padding_correction;
2236 bitpos < bytes * BITS_PER_UNIT;
2237 bitpos += bitsize, xbitpos += bitsize)
2238 {
2239 /* We need a new destination pseudo each time xbitpos is
2240 on a word boundary and when xbitpos == padding_correction
2241 (the first time through). */
2242 if (xbitpos % BITS_PER_WORD == 0
2243 || xbitpos == padding_correction)
2244 {
2245 /* Generate an appropriate register. */
2246 dst_word = gen_reg_rtx (word_mode);
2247 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2248
2249 /* Clear the destination before we move anything into it. */
2250 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2251 }
2252
2253 /* We need a new source operand each time bitpos is on a word
2254 boundary. */
2255 if (bitpos % BITS_PER_WORD == 0)
2256 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2257
2258 /* Use bitpos for the source extraction (left justified) and
2259 xbitpos for the destination store (right justified). */
2260 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2261 0, 0, word_mode,
2262 extract_bit_field (src_word, bitsize,
2263 bitpos % BITS_PER_WORD, 1, false,
2264 NULL_RTX, word_mode, word_mode));
2265 }
2266
2267 if (mode == BLKmode)
2268 {
2269 /* Find the smallest integer mode large enough to hold the
2270 entire structure. */
2271 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2272 mode != VOIDmode;
2273 mode = GET_MODE_WIDER_MODE (mode))
2274 /* Have we found a large enough mode? */
2275 if (GET_MODE_SIZE (mode) >= bytes)
2276 break;
2277
2278 /* A suitable mode should have been found. */
2279 gcc_assert (mode != VOIDmode);
2280 }
2281
2282 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2283 dst_mode = word_mode;
2284 else
2285 dst_mode = mode;
2286 dst = gen_reg_rtx (dst_mode);
2287
2288 for (i = 0; i < n_regs; i++)
2289 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2290
2291 if (mode != dst_mode)
2292 dst = gen_lowpart (mode, dst);
2293
2294 return dst;
2295 }
2296
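/* Usage sketch (hypothetical; RESULT_RTL, RESULT_EXPR and TARGET_MEM are
   illustrative names): for a function returning a BLKmode value in
   registers, the callee side packs the value with copy_blkmode_to_reg
   while the caller side unpacks the returned register with
   copy_blkmode_from_reg.

     // Callee side: move the BLKmode result into the return register.
     rtx packed = copy_blkmode_to_reg (GET_MODE (result_rtl), result_expr);
     if (packed)
       emit_move_insn (result_rtl, packed);

     // Caller side: spill the returned register into the destination MEM.
     copy_blkmode_from_reg (target_mem, result_rtl, TREE_TYPE (result_expr));
*/
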
2297 /* Add a USE expression for REG to the (possibly empty) list pointed
2298 to by CALL_FUSAGE. REG must denote a hard register. */
2299
2300 void
2301 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2302 {
2303 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2304
2305 *call_fusage
2306 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2307 }
2308
2309 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2310 starting at REGNO. All of these registers must be hard registers. */
2311
2312 void
2313 use_regs (rtx *call_fusage, int regno, int nregs)
2314 {
2315 int i;
2316
2317 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2318
2319 for (i = 0; i < nregs; i++)
2320 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2321 }
2322
2323 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2324 PARALLEL REGS. This is for calls that pass values in multiple
2325 non-contiguous locations. The Irix 6 ABI has examples of this. */
2326
2327 void
2328 use_group_regs (rtx *call_fusage, rtx regs)
2329 {
2330 int i;
2331
2332 for (i = 0; i < XVECLEN (regs, 0); i++)
2333 {
2334 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2335
2336 /* A NULL entry means the parameter goes both on the stack and in
2337 registers. This can also be a MEM for targets that pass values
2338 partially on the stack and partially in registers. */
2339 if (reg != 0 && REG_P (reg))
2340 use_reg (call_fusage, reg);
2341 }
2342 }
2343
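/* Usage sketch (hypothetical; ARG_REG and ARG_PARALLEL are illustrative
   names): call expansion gathers USE expressions for the argument
   registers into a local list and attaches it to the call insn so the
   registers are seen as live at the call.

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, arg_reg);              // one hard register
     use_regs (&call_fusage, REGNO (arg_reg), 2);  // two consecutive registers
     use_group_regs (&call_fusage, arg_parallel);  // a PARALLEL of registers
*/
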
2344 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2345 assigment and the code of the expresion on the RHS is CODE. Return
2346 NULL otherwise. */
2347
2348 static gimple
2349 get_def_for_expr (tree name, enum tree_code code)
2350 {
2351 gimple def_stmt;
2352
2353 if (TREE_CODE (name) != SSA_NAME)
2354 return NULL;
2355
2356 def_stmt = get_gimple_for_ssa_name (name);
2357 if (!def_stmt
2358 || gimple_assign_rhs_code (def_stmt) != code)
2359 return NULL;
2360
2361 return def_stmt;
2362 }
2363
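/* Usage sketch (hypothetical operand name TREEOP0): an expander can look
   through an SSA name to see whether it is defined by a particular
   operation, for instance to detect a multiplication feeding an addition.

     gimple def = get_def_for_expr (treeop0, MULT_EXPR);
     if (def)
       {
         tree mul_op0 = gimple_assign_rhs1 (def);
         tree mul_op1 = gimple_assign_rhs2 (def);
         // ... expand as a fused or widening multiply ...
       }
*/
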
2364 #ifdef HAVE_conditional_move
2365 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2366 assignment and the class of the expression on the RHS is CLASS. Return
2367 NULL otherwise. */
2368
2369 static gimple
2370 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2371 {
2372 gimple def_stmt;
2373
2374 if (TREE_CODE (name) != SSA_NAME)
2375 return NULL;
2376
2377 def_stmt = get_gimple_for_ssa_name (name);
2378 if (!def_stmt
2379 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2380 return NULL;
2381
2382 return def_stmt;
2383 }
2384 #endif
2385 \f
2386
2387 /* Determine whether the LEN bytes generated by CONSTFUN can be
2388 stored to memory using several move instructions. CONSTFUNDATA is
2389 a pointer which will be passed as argument in every CONSTFUN call.
2390 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2391 a memset operation and false if it's a copy of a constant string.
2392 Return nonzero if a call to store_by_pieces should succeed. */
2393
2394 int
2395 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2396 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2397 void *constfundata, unsigned int align, bool memsetp)
2398 {
2399 unsigned HOST_WIDE_INT l;
2400 unsigned int max_size;
2401 HOST_WIDE_INT offset = 0;
2402 enum machine_mode mode;
2403 enum insn_code icode;
2404 int reverse;
2405 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2406 rtx cst ATTRIBUTE_UNUSED;
2407
2408 if (len == 0)
2409 return 1;
2410
2411 if (! (memsetp
2412 ? SET_BY_PIECES_P (len, align)
2413 : STORE_BY_PIECES_P (len, align)))
2414 return 0;
2415
2416 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2417
2418 /* We would first store what we can in the largest integer mode, then go to
2419 successively smaller modes. */
2420
2421 for (reverse = 0;
2422 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2423 reverse++)
2424 {
2425 l = len;
2426 max_size = STORE_MAX_PIECES + 1;
2427 while (max_size > 1 && l > 0)
2428 {
2429 mode = widest_int_mode_for_size (max_size);
2430
2431 if (mode == VOIDmode)
2432 break;
2433
2434 icode = optab_handler (mov_optab, mode);
2435 if (icode != CODE_FOR_nothing
2436 && align >= GET_MODE_ALIGNMENT (mode))
2437 {
2438 unsigned int size = GET_MODE_SIZE (mode);
2439
2440 while (l >= size)
2441 {
2442 if (reverse)
2443 offset -= size;
2444
2445 cst = (*constfun) (constfundata, offset, mode);
2446 if (!targetm.legitimate_constant_p (mode, cst))
2447 return 0;
2448
2449 if (!reverse)
2450 offset += size;
2451
2452 l -= size;
2453 }
2454 }
2455
2456 max_size = GET_MODE_SIZE (mode);
2457 }
2458
2459 /* The code above should have handled everything. */
2460 gcc_assert (!l);
2461 }
2462
2463 return 1;
2464 }
2465
2466 /* Generate several move instructions to store LEN bytes generated by
2467 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2468 pointer which will be passed as argument in every CONSTFUN call.
2469 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2470 a memset operation and false if it's a copy of a constant string.
2471 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2472 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
2473 stpcpy. */
2474
2475 rtx
2476 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2477 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2478 void *constfundata, unsigned int align, bool memsetp, int endp)
2479 {
2480 enum machine_mode to_addr_mode = get_address_mode (to);
2481 struct store_by_pieces_d data;
2482
2483 if (len == 0)
2484 {
2485 gcc_assert (endp != 2);
2486 return to;
2487 }
2488
2489 gcc_assert (memsetp
2490 ? SET_BY_PIECES_P (len, align)
2491 : STORE_BY_PIECES_P (len, align));
2492 data.constfun = constfun;
2493 data.constfundata = constfundata;
2494 data.len = len;
2495 data.to = to;
2496 store_by_pieces_1 (&data, align);
2497 if (endp)
2498 {
2499 rtx to1;
2500
2501 gcc_assert (!data.reverse);
2502 if (data.autinc_to)
2503 {
2504 if (endp == 2)
2505 {
2506 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2507 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2508 else
2509 data.to_addr = copy_to_mode_reg (to_addr_mode,
2510 plus_constant (to_addr_mode,
2511 data.to_addr,
2512 -1));
2513 }
2514 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2515 data.offset);
2516 }
2517 else
2518 {
2519 if (endp == 2)
2520 --data.offset;
2521 to1 = adjust_address (data.to, QImode, data.offset);
2522 }
2523 return to1;
2524 }
2525 else
2526 return data.to;
2527 }
2528
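/* Usage sketch (hypothetical; ZERO_GEN, DEST_MEM, LEN and ALIGN are
   illustrative): a caller first asks can_store_by_pieces whether LEN bytes
   produced by its callback can be written with plain moves, and only then
   calls store_by_pieces.  The callback here always yields zero, standing in
   for a real generator such as one reading bytes of a constant string.

     static rtx
     zero_gen (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
     {
       return CONST0_RTX (mode);
     }

     if (can_store_by_pieces (len, zero_gen, NULL, align, true))
       store_by_pieces (dest_mem, len, zero_gen, NULL, align, true, 0);
*/
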
2529 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2530 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2531
2532 static void
2533 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2534 {
2535 struct store_by_pieces_d data;
2536
2537 if (len == 0)
2538 return;
2539
2540 data.constfun = clear_by_pieces_1;
2541 data.constfundata = NULL;
2542 data.len = len;
2543 data.to = to;
2544 store_by_pieces_1 (&data, align);
2545 }
2546
2547 /* Callback routine for clear_by_pieces.
2548 Return const0_rtx unconditionally. */
2549
2550 static rtx
2551 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2552 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2553 enum machine_mode mode ATTRIBUTE_UNUSED)
2554 {
2555 return const0_rtx;
2556 }
2557
2558 /* Subroutine of clear_by_pieces and store_by_pieces.
2559 Generate several move instructions to store LEN bytes of block TO. (A MEM
2560 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2561
2562 static void
2563 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2564 unsigned int align ATTRIBUTE_UNUSED)
2565 {
2566 enum machine_mode to_addr_mode = get_address_mode (data->to);
2567 rtx to_addr = XEXP (data->to, 0);
2568 unsigned int max_size = STORE_MAX_PIECES + 1;
2569 enum insn_code icode;
2570
2571 data->offset = 0;
2572 data->to_addr = to_addr;
2573 data->autinc_to
2574 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2575 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2576
2577 data->explicit_inc_to = 0;
2578 data->reverse
2579 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2580 if (data->reverse)
2581 data->offset = data->len;
2582
2583 /* If storing requires more than two move insns,
2584 copy addresses to registers (to make displacements shorter)
2585 and use post-increment if available. */
2586 if (!data->autinc_to
2587 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2588 {
2589 /* Determine the main mode we'll be using.
2590 MODE might not be used depending on the definitions of the
2591 USE_* macros below. */
2592 enum machine_mode mode ATTRIBUTE_UNUSED
2593 = widest_int_mode_for_size (max_size);
2594
2595 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2596 {
2597 data->to_addr = copy_to_mode_reg (to_addr_mode,
2598 plus_constant (to_addr_mode,
2599 to_addr,
2600 data->len));
2601 data->autinc_to = 1;
2602 data->explicit_inc_to = -1;
2603 }
2604
2605 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2606 && ! data->autinc_to)
2607 {
2608 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2609 data->autinc_to = 1;
2610 data->explicit_inc_to = 1;
2611 }
2612
2613 if ( !data->autinc_to && CONSTANT_P (to_addr))
2614 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2615 }
2616
2617 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2618
2619 /* First store what we can in the largest integer mode, then go to
2620 successively smaller modes. */
2621
2622 while (max_size > 1 && data->len > 0)
2623 {
2624 enum machine_mode mode = widest_int_mode_for_size (max_size);
2625
2626 if (mode == VOIDmode)
2627 break;
2628
2629 icode = optab_handler (mov_optab, mode);
2630 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2631 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2632
2633 max_size = GET_MODE_SIZE (mode);
2634 }
2635
2636 /* The code above should have handled everything. */
2637 gcc_assert (!data->len);
2638 }
2639
2640 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2641 with move instructions for mode MODE. GENFUN is the gen_... function
2642 to make a move insn for that mode. DATA has all the other info. */
2643
2644 static void
2645 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2646 struct store_by_pieces_d *data)
2647 {
2648 unsigned int size = GET_MODE_SIZE (mode);
2649 rtx to1, cst;
2650
2651 while (data->len >= size)
2652 {
2653 if (data->reverse)
2654 data->offset -= size;
2655
2656 if (data->autinc_to)
2657 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2658 data->offset);
2659 else
2660 to1 = adjust_address (data->to, mode, data->offset);
2661
2662 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2663 emit_insn (gen_add2_insn (data->to_addr,
2664 GEN_INT (-(HOST_WIDE_INT) size)));
2665
2666 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2667 emit_insn ((*genfun) (to1, cst));
2668
2669 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2670 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2671
2672 if (! data->reverse)
2673 data->offset += size;
2674
2675 data->len -= size;
2676 }
2677 }
2678 \f
2679 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2680 its length in bytes. */
2681
2682 rtx
2683 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2684 unsigned int expected_align, HOST_WIDE_INT expected_size)
2685 {
2686 enum machine_mode mode = GET_MODE (object);
2687 unsigned int align;
2688
2689 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2690
2691 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2692 just move a zero. Otherwise, do this a piece at a time. */
2693 if (mode != BLKmode
2694 && CONST_INT_P (size)
2695 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2696 {
2697 rtx zero = CONST0_RTX (mode);
2698 if (zero != NULL)
2699 {
2700 emit_move_insn (object, zero);
2701 return NULL;
2702 }
2703
2704 if (COMPLEX_MODE_P (mode))
2705 {
2706 zero = CONST0_RTX (GET_MODE_INNER (mode));
2707 if (zero != NULL)
2708 {
2709 write_complex_part (object, zero, 0);
2710 write_complex_part (object, zero, 1);
2711 return NULL;
2712 }
2713 }
2714 }
2715
2716 if (size == const0_rtx)
2717 return NULL;
2718
2719 align = MEM_ALIGN (object);
2720
2721 if (CONST_INT_P (size)
2722 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2723 clear_by_pieces (object, INTVAL (size), align);
2724 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2725 expected_align, expected_size))
2726 ;
2727 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2728 return set_storage_via_libcall (object, size, const0_rtx,
2729 method == BLOCK_OP_TAILCALL);
2730 else
2731 gcc_unreachable ();
2732
2733 return NULL;
2734 }
2735
2736 rtx
2737 clear_storage (rtx object, rtx size, enum block_op_methods method)
2738 {
2739 return clear_storage_hints (object, size, method, 0, -1);
2740 }
2741
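/* Usage sketch (hypothetical; DEST_MEM and NBYTES are illustrative names):
   zeroing a BLKmode MEM of a known byte size.  The routine picks
   clear_by_pieces, a setmem pattern or a memset libcall as appropriate.

     clear_storage (dest_mem, GEN_INT (nbytes), BLOCK_OP_NORMAL);
*/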
2742
2743 /* A subroutine of clear_storage. Expand a call to memset.
2744 Return the return value of memset, 0 otherwise. */
2745
2746 rtx
2747 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2748 {
2749 tree call_expr, fn, object_tree, size_tree, val_tree;
2750 enum machine_mode size_mode;
2751 rtx retval;
2752
2753 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2754 place those pseudos into a VAR_DECL and use them later. */
2755
2756 object = copy_addr_to_reg (XEXP (object, 0));
2757
2758 size_mode = TYPE_MODE (sizetype);
2759 size = convert_to_mode (size_mode, size, 1);
2760 size = copy_to_mode_reg (size_mode, size);
2761
2762 /* It is incorrect to use the libcall calling conventions to call
2763 memset in this context. This could be a user call to memset and
2764 the user may wish to examine the return value from memset. For
2765 targets where libcalls and normal calls have different conventions
2766 for returning pointers, we could end up generating incorrect code. */
2767
2768 object_tree = make_tree (ptr_type_node, object);
2769 if (!CONST_INT_P (val))
2770 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2771 size_tree = make_tree (sizetype, size);
2772 val_tree = make_tree (integer_type_node, val);
2773
2774 fn = clear_storage_libcall_fn (true);
2775 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2776 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2777
2778 retval = expand_normal (call_expr);
2779
2780 return retval;
2781 }
2782
2783 /* A subroutine of set_storage_via_libcall. Create the tree node
2784 for the function we use for block clears. */
2785
2786 tree block_clear_fn;
2787
2788 void
2789 init_block_clear_fn (const char *asmspec)
2790 {
2791 if (!block_clear_fn)
2792 {
2793 tree fn, args;
2794
2795 fn = get_identifier ("memset");
2796 args = build_function_type_list (ptr_type_node, ptr_type_node,
2797 integer_type_node, sizetype,
2798 NULL_TREE);
2799
2800 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2801 DECL_EXTERNAL (fn) = 1;
2802 TREE_PUBLIC (fn) = 1;
2803 DECL_ARTIFICIAL (fn) = 1;
2804 TREE_NOTHROW (fn) = 1;
2805 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2806 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2807
2808 block_clear_fn = fn;
2809 }
2810
2811 if (asmspec)
2812 set_user_assembler_name (block_clear_fn, asmspec);
2813 }
2814
2815 static tree
2816 clear_storage_libcall_fn (int for_call)
2817 {
2818 static bool emitted_extern;
2819
2820 if (!block_clear_fn)
2821 init_block_clear_fn (NULL);
2822
2823 if (for_call && !emitted_extern)
2824 {
2825 emitted_extern = true;
2826 make_decl_rtl (block_clear_fn);
2827 }
2828
2829 return block_clear_fn;
2830 }
2831 \f
2832 /* Expand a setmem pattern; return true if successful. */
2833
2834 bool
2835 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2836 unsigned int expected_align, HOST_WIDE_INT expected_size)
2837 {
2838 /* Try the most limited insn first, because there's no point
2839 including more than one in the machine description unless
2840 the more limited one has some advantage. */
2841
2842 enum machine_mode mode;
2843
2844 if (expected_align < align)
2845 expected_align = align;
2846
2847 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2848 mode = GET_MODE_WIDER_MODE (mode))
2849 {
2850 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2851
2852 if (code != CODE_FOR_nothing
2853 /* We don't need MODE to be narrower than
2854 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2855 the mode mask, as it is returned by the macro, it will
2856 definitely be less than the actual mode mask. */
2857 && ((CONST_INT_P (size)
2858 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2859 <= (GET_MODE_MASK (mode) >> 1)))
2860 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
2861 {
2862 struct expand_operand ops[6];
2863 unsigned int nops;
2864
2865 nops = insn_data[(int) code].n_generator_args;
2866 gcc_assert (nops == 4 || nops == 6);
2867
2868 create_fixed_operand (&ops[0], object);
2869 /* The check above guarantees that this size conversion is valid. */
2870 create_convert_operand_to (&ops[1], size, mode, true);
2871 create_convert_operand_from (&ops[2], val, byte_mode, true);
2872 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2873 if (nops == 6)
2874 {
2875 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2876 create_integer_operand (&ops[5], expected_size);
2877 }
2878 if (maybe_expand_insn (code, nops, ops))
2879 return true;
2880 }
2881 }
2882
2883 return false;
2884 }
2885
2886 \f
2887 /* Write to one of the components of the complex value CPLX. Write VAL to
2888 the real part if IMAG_P is false, and the imaginary part if it's true. */
2889
2890 static void
2891 write_complex_part (rtx cplx, rtx val, bool imag_p)
2892 {
2893 enum machine_mode cmode;
2894 enum machine_mode imode;
2895 unsigned ibitsize;
2896
2897 if (GET_CODE (cplx) == CONCAT)
2898 {
2899 emit_move_insn (XEXP (cplx, imag_p), val);
2900 return;
2901 }
2902
2903 cmode = GET_MODE (cplx);
2904 imode = GET_MODE_INNER (cmode);
2905 ibitsize = GET_MODE_BITSIZE (imode);
2906
2907 /* For MEMs simplify_gen_subreg may generate an invalid new address
2908 because, e.g., the original address is considered mode-dependent
2909 by the target, which restricts simplify_subreg from invoking
2910 adjust_address_nv. Instead of preparing fallback support for an
2911 invalid address, we call adjust_address_nv directly. */
2912 if (MEM_P (cplx))
2913 {
2914 emit_move_insn (adjust_address_nv (cplx, imode,
2915 imag_p ? GET_MODE_SIZE (imode) : 0),
2916 val);
2917 return;
2918 }
2919
2920 /* If the sub-object is at least word sized, then we know that subregging
2921 will work. This special case is important, since store_bit_field
2922 wants to operate on integer modes, and there's rarely an OImode to
2923 correspond to TCmode. */
2924 if (ibitsize >= BITS_PER_WORD
2925 /* For hard regs we have exact predicates. Assume we can split
2926 the original object if it spans an even number of hard regs.
2927 This special case is important for SCmode on 64-bit platforms
2928 where the natural size of floating-point regs is 32-bit. */
2929 || (REG_P (cplx)
2930 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2931 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2932 {
2933 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2934 imag_p ? GET_MODE_SIZE (imode) : 0);
2935 if (part)
2936 {
2937 emit_move_insn (part, val);
2938 return;
2939 }
2940 else
2941 /* simplify_gen_subreg may fail for sub-word MEMs. */
2942 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2943 }
2944
2945 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
2946 }
2947
2948 /* Extract one of the components of the complex value CPLX. Extract the
2949 real part if IMAG_P is false, and the imaginary part if it's true. */
2950
2951 static rtx
2952 read_complex_part (rtx cplx, bool imag_p)
2953 {
2954 enum machine_mode cmode, imode;
2955 unsigned ibitsize;
2956
2957 if (GET_CODE (cplx) == CONCAT)
2958 return XEXP (cplx, imag_p);
2959
2960 cmode = GET_MODE (cplx);
2961 imode = GET_MODE_INNER (cmode);
2962 ibitsize = GET_MODE_BITSIZE (imode);
2963
2964 /* Special case reads from complex constants that got spilled to memory. */
2965 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2966 {
2967 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2968 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2969 {
2970 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2971 if (CONSTANT_CLASS_P (part))
2972 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2973 }
2974 }
2975
2976 /* For MEMs simplify_gen_subreg may generate an invalid new address
2977 because, e.g., the original address is considered mode-dependent
2978 by the target, which restricts simplify_subreg from invoking
2979 adjust_address_nv. Instead of preparing fallback support for an
2980 invalid address, we call adjust_address_nv directly. */
2981 if (MEM_P (cplx))
2982 return adjust_address_nv (cplx, imode,
2983 imag_p ? GET_MODE_SIZE (imode) : 0);
2984
2985 /* If the sub-object is at least word sized, then we know that subregging
2986 will work. This special case is important, since extract_bit_field
2987 wants to operate on integer modes, and there's rarely an OImode to
2988 correspond to TCmode. */
2989 if (ibitsize >= BITS_PER_WORD
2990 /* For hard regs we have exact predicates. Assume we can split
2991 the original object if it spans an even number of hard regs.
2992 This special case is important for SCmode on 64-bit platforms
2993 where the natural size of floating-point regs is 32-bit. */
2994 || (REG_P (cplx)
2995 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2996 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2997 {
2998 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2999 imag_p ? GET_MODE_SIZE (imode) : 0);
3000 if (ret)
3001 return ret;
3002 else
3003 /* simplify_gen_subreg may fail for sub-word MEMs. */
3004 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3005 }
3006
3007 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3008 true, false, NULL_RTX, imode, imode);
3009 }
3010 \f
3011 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3012 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3013 represented in NEW_MODE. If FORCE is true, this will never happen, as
3014 we'll force-create a SUBREG if needed. */
3015
3016 static rtx
3017 emit_move_change_mode (enum machine_mode new_mode,
3018 enum machine_mode old_mode, rtx x, bool force)
3019 {
3020 rtx ret;
3021
3022 if (push_operand (x, GET_MODE (x)))
3023 {
3024 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3025 MEM_COPY_ATTRIBUTES (ret, x);
3026 }
3027 else if (MEM_P (x))
3028 {
3029 /* We don't have to worry about changing the address since the
3030 size in bytes is supposed to be the same. */
3031 if (reload_in_progress)
3032 {
3033 /* Copy the MEM to change the mode and move any
3034 substitutions from the old MEM to the new one. */
3035 ret = adjust_address_nv (x, new_mode, 0);
3036 copy_replacements (x, ret);
3037 }
3038 else
3039 ret = adjust_address (x, new_mode, 0);
3040 }
3041 else
3042 {
3043 /* Note that we do want simplify_subreg's behavior of validating
3044 that the new mode is ok for a hard register. If we were to use
3045 simplify_gen_subreg, we would create the subreg, but would
3046 probably run into the target not being able to implement it. */
3047 /* Except, of course, when FORCE is true, when this is exactly what
3048 we want. Which is needed for CCmodes on some targets. */
3049 if (force)
3050 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3051 else
3052 ret = simplify_subreg (new_mode, x, old_mode, 0);
3053 }
3054
3055 return ret;
3056 }
3057
3058 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3059 an integer mode of the same size as MODE. Returns the instruction
3060 emitted, or NULL if such a move could not be generated. */
3061
3062 static rtx
3063 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3064 {
3065 enum machine_mode imode;
3066 enum insn_code code;
3067
3068 /* There must exist a mode of the exact size we require. */
3069 imode = int_mode_for_mode (mode);
3070 if (imode == BLKmode)
3071 return NULL_RTX;
3072
3073 /* The target must support moves in this mode. */
3074 code = optab_handler (mov_optab, imode);
3075 if (code == CODE_FOR_nothing)
3076 return NULL_RTX;
3077
3078 x = emit_move_change_mode (imode, mode, x, force);
3079 if (x == NULL_RTX)
3080 return NULL_RTX;
3081 y = emit_move_change_mode (imode, mode, y, force);
3082 if (y == NULL_RTX)
3083 return NULL_RTX;
3084 return emit_insn (GEN_FCN (code) (x, y));
3085 }
3086
3087 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3088 Return an equivalent MEM that does not use an auto-increment. */
3089
3090 static rtx
3091 emit_move_resolve_push (enum machine_mode mode, rtx x)
3092 {
3093 enum rtx_code code = GET_CODE (XEXP (x, 0));
3094 HOST_WIDE_INT adjust;
3095 rtx temp;
3096
3097 adjust = GET_MODE_SIZE (mode);
3098 #ifdef PUSH_ROUNDING
3099 adjust = PUSH_ROUNDING (adjust);
3100 #endif
3101 if (code == PRE_DEC || code == POST_DEC)
3102 adjust = -adjust;
3103 else if (code == PRE_MODIFY || code == POST_MODIFY)
3104 {
3105 rtx expr = XEXP (XEXP (x, 0), 1);
3106 HOST_WIDE_INT val;
3107
3108 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3109 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3110 val = INTVAL (XEXP (expr, 1));
3111 if (GET_CODE (expr) == MINUS)
3112 val = -val;
3113 gcc_assert (adjust == val || adjust == -val);
3114 adjust = val;
3115 }
3116
3117 /* Do not use anti_adjust_stack, since we don't want to update
3118 stack_pointer_delta. */
3119 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3120 GEN_INT (adjust), stack_pointer_rtx,
3121 0, OPTAB_LIB_WIDEN);
3122 if (temp != stack_pointer_rtx)
3123 emit_move_insn (stack_pointer_rtx, temp);
3124
3125 switch (code)
3126 {
3127 case PRE_INC:
3128 case PRE_DEC:
3129 case PRE_MODIFY:
3130 temp = stack_pointer_rtx;
3131 break;
3132 case POST_INC:
3133 case POST_DEC:
3134 case POST_MODIFY:
3135 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3136 break;
3137 default:
3138 gcc_unreachable ();
3139 }
3140
3141 return replace_equiv_address (x, temp);
3142 }
3143
3144 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3145 X is known to satisfy push_operand, and MODE is known to be complex.
3146 Returns the last instruction emitted. */
3147
3148 rtx
3149 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3150 {
3151 enum machine_mode submode = GET_MODE_INNER (mode);
3152 bool imag_first;
3153
3154 #ifdef PUSH_ROUNDING
3155 unsigned int submodesize = GET_MODE_SIZE (submode);
3156
3157 /* If we are pushing to the stack, but the machine cannot push this exact
3158 size (PUSH_ROUNDING alters it), we need to use move instructions. */
3159 if (PUSH_ROUNDING (submodesize) != submodesize)
3160 {
3161 x = emit_move_resolve_push (mode, x);
3162 return emit_move_insn (x, y);
3163 }
3164 #endif
3165
3166 /* Note that the real part always precedes the imag part in memory
3167 regardless of the machine's endianness. */
3168 switch (GET_CODE (XEXP (x, 0)))
3169 {
3170 case PRE_DEC:
3171 case POST_DEC:
3172 imag_first = true;
3173 break;
3174 case PRE_INC:
3175 case POST_INC:
3176 imag_first = false;
3177 break;
3178 default:
3179 gcc_unreachable ();
3180 }
3181
3182 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3183 read_complex_part (y, imag_first));
3184 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3185 read_complex_part (y, !imag_first));
3186 }
3187
3188 /* A subroutine of emit_move_complex. Perform the move from Y to X
3189 via two moves of the parts. Returns the last instruction emitted. */
3190
3191 rtx
3192 emit_move_complex_parts (rtx x, rtx y)
3193 {
3194 /* Show the output dies here. This is necessary for SUBREGs
3195 of pseudos since we cannot track their lifetimes correctly;
3196 hard regs shouldn't appear here except as return values. */
3197 if (!reload_completed && !reload_in_progress
3198 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3199 emit_clobber (x);
3200
3201 write_complex_part (x, read_complex_part (y, false), false);
3202 write_complex_part (x, read_complex_part (y, true), true);
3203
3204 return get_last_insn ();
3205 }
3206
3207 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3208 MODE is known to be complex. Returns the last instruction emitted. */
3209
3210 static rtx
3211 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3212 {
3213 bool try_int;
3214
3215 /* Need to take special care for pushes, to maintain proper ordering
3216 of the data, and possibly extra padding. */
3217 if (push_operand (x, mode))
3218 return emit_move_complex_push (mode, x, y);
3219
3220 /* See if we can coerce the target into moving both values at once. */
3221
3222 /* Move floating point as parts. */
3223 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3224 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3225 try_int = false;
3226 /* Not possible if the values are inherently not adjacent. */
3227 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3228 try_int = false;
3229 /* Is possible if both are registers (or subregs of registers). */
3230 else if (register_operand (x, mode) && register_operand (y, mode))
3231 try_int = true;
3232 /* If one of the operands is a memory, and alignment constraints
3233 are friendly enough, we may be able to do combined memory operations.
3234 We do not attempt this if Y is a constant because that combination is
3235 usually better with the by-parts thing below. */
3236 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3237 && (!STRICT_ALIGNMENT
3238 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3239 try_int = true;
3240 else
3241 try_int = false;
3242
3243 if (try_int)
3244 {
3245 rtx ret;
3246
3247 /* For memory to memory moves, optimal behavior can be had with the
3248 existing block move logic. */
3249 if (MEM_P (x) && MEM_P (y))
3250 {
3251 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3252 BLOCK_OP_NO_LIBCALL);
3253 return get_last_insn ();
3254 }
3255
3256 ret = emit_move_via_integer (mode, x, y, true);
3257 if (ret)
3258 return ret;
3259 }
3260
3261 return emit_move_complex_parts (x, y);
3262 }
3263
3264 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3265 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3266
3267 static rtx
3268 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3269 {
3270 rtx ret;
3271
3272 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3273 if (mode != CCmode)
3274 {
3275 enum insn_code code = optab_handler (mov_optab, CCmode);
3276 if (code != CODE_FOR_nothing)
3277 {
3278 x = emit_move_change_mode (CCmode, mode, x, true);
3279 y = emit_move_change_mode (CCmode, mode, y, true);
3280 return emit_insn (GEN_FCN (code) (x, y));
3281 }
3282 }
3283
3284 /* Otherwise, find the MODE_INT mode of the same width. */
3285 ret = emit_move_via_integer (mode, x, y, false);
3286 gcc_assert (ret != NULL);
3287 return ret;
3288 }
3289
3290 /* Return true if word I of OP lies entirely in the
3291 undefined bits of a paradoxical subreg. */
3292
3293 static bool
3294 undefined_operand_subword_p (const_rtx op, int i)
3295 {
3296 enum machine_mode innermode, innermostmode;
3297 int offset;
3298 if (GET_CODE (op) != SUBREG)
3299 return false;
3300 innermode = GET_MODE (op);
3301 innermostmode = GET_MODE (SUBREG_REG (op));
3302 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3303 /* The SUBREG_BYTE represents offset, as if the value were stored in
3304 memory, except for a paradoxical subreg where we define
3305 SUBREG_BYTE to be 0; undo this exception as in
3306 simplify_subreg. */
3307 if (SUBREG_BYTE (op) == 0
3308 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3309 {
3310 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3311 if (WORDS_BIG_ENDIAN)
3312 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3313 if (BYTES_BIG_ENDIAN)
3314 offset += difference % UNITS_PER_WORD;
3315 }
3316 if (offset >= GET_MODE_SIZE (innermostmode)
3317 || offset <= -GET_MODE_SIZE (word_mode))
3318 return true;
3319 return false;
3320 }
3321
3322 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3323 MODE is any multi-word or full-word mode that lacks a move_insn
3324 pattern. Note that you will get better code if you define such
3325 patterns, even if they must turn into multiple assembler instructions. */
3326
3327 static rtx
3328 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3329 {
3330 rtx last_insn = 0;
3331 rtx seq, inner;
3332 bool need_clobber;
3333 int i;
3334
3335 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3336
3337 /* If X is a push on the stack, do the push now and replace
3338 X with a reference to the stack pointer. */
3339 if (push_operand (x, mode))
3340 x = emit_move_resolve_push (mode, x);
3341
3342 /* If we are in reload, see if either operand is a MEM whose address
3343 is scheduled for replacement. */
3344 if (reload_in_progress && MEM_P (x)
3345 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3346 x = replace_equiv_address_nv (x, inner);
3347 if (reload_in_progress && MEM_P (y)
3348 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3349 y = replace_equiv_address_nv (y, inner);
3350
3351 start_sequence ();
3352
3353 need_clobber = false;
3354 for (i = 0;
3355 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3356 i++)
3357 {
3358 rtx xpart = operand_subword (x, i, 1, mode);
3359 rtx ypart;
3360
3361 /* Do not generate code for a move if it would come entirely
3362 from the undefined bits of a paradoxical subreg. */
3363 if (undefined_operand_subword_p (y, i))
3364 continue;
3365
3366 ypart = operand_subword (y, i, 1, mode);
3367
3368 /* If we can't get a part of Y, put Y into memory if it is a
3369 constant. Otherwise, force it into a register. Then we must
3370 be able to get a part of Y. */
3371 if (ypart == 0 && CONSTANT_P (y))
3372 {
3373 y = use_anchored_address (force_const_mem (mode, y));
3374 ypart = operand_subword (y, i, 1, mode);
3375 }
3376 else if (ypart == 0)
3377 ypart = operand_subword_force (y, i, mode);
3378
3379 gcc_assert (xpart && ypart);
3380
3381 need_clobber |= (GET_CODE (xpart) == SUBREG);
3382
3383 last_insn = emit_move_insn (xpart, ypart);
3384 }
3385
3386 seq = get_insns ();
3387 end_sequence ();
3388
3389 /* Show the output dies here. This is necessary for SUBREGs
3390 of pseudos since we cannot track their lifetimes correctly;
3391 hard regs shouldn't appear here except as return values.
3392 We never want to emit such a clobber after reload. */
3393 if (x != y
3394 && ! (reload_in_progress || reload_completed)
3395 && need_clobber != 0)
3396 emit_clobber (x);
3397
3398 emit_insn (seq);
3399
3400 return last_insn;
3401 }
3402
3403 /* Low level part of emit_move_insn.
3404 Called just like emit_move_insn, but assumes X and Y
3405 are basically valid. */
3406
3407 rtx
3408 emit_move_insn_1 (rtx x, rtx y)
3409 {
3410 enum machine_mode mode = GET_MODE (x);
3411 enum insn_code code;
3412
3413 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3414
3415 code = optab_handler (mov_optab, mode);
3416 if (code != CODE_FOR_nothing)
3417 return emit_insn (GEN_FCN (code) (x, y));
3418
3419 /* Expand complex moves by moving real part and imag part. */
3420 if (COMPLEX_MODE_P (mode))
3421 return emit_move_complex (mode, x, y);
3422
3423 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3424 || ALL_FIXED_POINT_MODE_P (mode))
3425 {
3426 rtx result = emit_move_via_integer (mode, x, y, true);
3427
3428 /* If we can't find an integer mode, fall back to a multi-word move. */
3429 if (result)
3430 return result;
3431 else
3432 return emit_move_multi_word (mode, x, y);
3433 }
3434
3435 if (GET_MODE_CLASS (mode) == MODE_CC)
3436 return emit_move_ccmode (mode, x, y);
3437
3438 /* Try using a move pattern for the corresponding integer mode. This is
3439 only safe when simplify_subreg can convert MODE constants into integer
3440 constants. At present, it can only do this reliably if the value
3441 fits within a HOST_WIDE_INT. */
3442 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3443 {
3444 rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3445
3446 if (ret)
3447 {
3448 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3449 return ret;
3450 }
3451 }
3452
3453 return emit_move_multi_word (mode, x, y);
3454 }
3455
3456 /* Generate code to copy Y into X.
3457 Both Y and X must have the same mode, except that
3458 Y can be a constant with VOIDmode.
3459 This mode cannot be BLKmode; use emit_block_move for that.
3460
3461 Return the last instruction emitted. */
3462
3463 rtx
3464 emit_move_insn (rtx x, rtx y)
3465 {
3466 enum machine_mode mode = GET_MODE (x);
3467 rtx y_cst = NULL_RTX;
3468 rtx last_insn, set;
3469
3470 gcc_assert (mode != BLKmode
3471 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3472
3473 if (CONSTANT_P (y))
3474 {
3475 if (optimize
3476 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3477 && (last_insn = compress_float_constant (x, y)))
3478 return last_insn;
3479
3480 y_cst = y;
3481
3482 if (!targetm.legitimate_constant_p (mode, y))
3483 {
3484 y = force_const_mem (mode, y);
3485
3486 /* If the target's cannot_force_const_mem prevented the spill,
3487 assume that the target's move expanders will also take care
3488 of the non-legitimate constant. */
3489 if (!y)
3490 y = y_cst;
3491 else
3492 y = use_anchored_address (y);
3493 }
3494 }
3495
3496 /* If X or Y are memory references, verify that their addresses are valid
3497 for the machine. */
3498 if (MEM_P (x)
3499 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3500 MEM_ADDR_SPACE (x))
3501 && ! push_operand (x, GET_MODE (x))))
3502 x = validize_mem (x);
3503
3504 if (MEM_P (y)
3505 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3506 MEM_ADDR_SPACE (y)))
3507 y = validize_mem (y);
3508
3509 gcc_assert (mode != BLKmode);
3510
3511 last_insn = emit_move_insn_1 (x, y);
3512
3513 if (y_cst && REG_P (x)
3514 && (set = single_set (last_insn)) != NULL_RTX
3515 && SET_DEST (set) == x
3516 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3517 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3518
3519 return last_insn;
3520 }
3521
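/* Usage sketch: loading a constant into a fresh pseudo.  If the target
   cannot move the constant directly, it is forced into the constant pool
   or handled by the multi-word path automatically.

     rtx reg = gen_reg_rtx (DImode);
     emit_move_insn (reg, GEN_INT (42));
*/
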
3522 /* If Y is representable exactly in a narrower mode, and the target can
3523 perform the extension directly from constant or memory, then emit the
3524 move as an extension. */
3525
3526 static rtx
3527 compress_float_constant (rtx x, rtx y)
3528 {
3529 enum machine_mode dstmode = GET_MODE (x);
3530 enum machine_mode orig_srcmode = GET_MODE (y);
3531 enum machine_mode srcmode;
3532 REAL_VALUE_TYPE r;
3533 int oldcost, newcost;
3534 bool speed = optimize_insn_for_speed_p ();
3535
3536 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3537
3538 if (targetm.legitimate_constant_p (dstmode, y))
3539 oldcost = set_src_cost (y, speed);
3540 else
3541 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3542
3543 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3544 srcmode != orig_srcmode;
3545 srcmode = GET_MODE_WIDER_MODE (srcmode))
3546 {
3547 enum insn_code ic;
3548 rtx trunc_y, last_insn;
3549
3550 /* Skip if the target can't extend this way. */
3551 ic = can_extend_p (dstmode, srcmode, 0);
3552 if (ic == CODE_FOR_nothing)
3553 continue;
3554
3555 /* Skip if the narrowed value isn't exact. */
3556 if (! exact_real_truncate (srcmode, &r))
3557 continue;
3558
3559 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3560
3561 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3562 {
3563 /* Skip if the target needs extra instructions to perform
3564 the extension. */
3565 if (!insn_operand_matches (ic, 1, trunc_y))
3566 continue;
3567 /* This is valid, but may not be cheaper than the original. */
3568 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3569 speed);
3570 if (oldcost < newcost)
3571 continue;
3572 }
3573 else if (float_extend_from_mem[dstmode][srcmode])
3574 {
3575 trunc_y = force_const_mem (srcmode, trunc_y);
3576 /* This is valid, but may not be cheaper than the original. */
3577 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3578 speed);
3579 if (oldcost < newcost)
3580 continue;
3581 trunc_y = validize_mem (trunc_y);
3582 }
3583 else
3584 continue;
3585
3586 /* For CSE's benefit, force the compressed constant pool entry
3587 into a new pseudo. This constant may be used in different modes,
3588 and if not, combine will put things back together for us. */
3589 trunc_y = force_reg (srcmode, trunc_y);
3590 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3591 last_insn = get_last_insn ();
3592
3593 if (REG_P (x))
3594 set_unique_reg_note (last_insn, REG_EQUAL, y);
3595
3596 return last_insn;
3597 }
3598
3599 return NULL_RTX;
3600 }
3601 \f
3602 /* Pushing data onto the stack. */
3603
3604 /* Push a block of length SIZE (perhaps variable)
3605 and return an rtx to address the beginning of the block.
3606 The value may be virtual_outgoing_args_rtx.
3607
3608 EXTRA is the number of bytes of padding to push in addition to SIZE.
3609 BELOW nonzero means this padding comes at low addresses;
3610 otherwise, the padding comes at high addresses. */
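/* Hedged example call (not part of the original sources): on a
   STACK_GROWS_DOWNWARD target,

     rtx addr = push_block (GEN_INT (16), 0, 0);

   emits an anti_adjust_stack of 16 bytes and returns an address based
   on virtual_outgoing_args_rtx, suitable as the destination of a block
   move for a BLKmode argument.  */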
3611
3612 rtx
3613 push_block (rtx size, int extra, int below)
3614 {
3615 rtx temp;
3616
3617 size = convert_modes (Pmode, ptr_mode, size, 1);
3618 if (CONSTANT_P (size))
3619 anti_adjust_stack (plus_constant (Pmode, size, extra));
3620 else if (REG_P (size) && extra == 0)
3621 anti_adjust_stack (size);
3622 else
3623 {
3624 temp = copy_to_mode_reg (Pmode, size);
3625 if (extra != 0)
3626 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3627 temp, 0, OPTAB_LIB_WIDEN);
3628 anti_adjust_stack (temp);
3629 }
3630
3631 #ifndef STACK_GROWS_DOWNWARD
3632 if (0)
3633 #else
3634 if (1)
3635 #endif
3636 {
3637 temp = virtual_outgoing_args_rtx;
3638 if (extra != 0 && below)
3639 temp = plus_constant (Pmode, temp, extra);
3640 }
3641 else
3642 {
3643 if (CONST_INT_P (size))
3644 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3645 -INTVAL (size) - (below ? 0 : extra));
3646 else if (extra != 0 && !below)
3647 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3648 negate_rtx (Pmode, plus_constant (Pmode, size,
3649 extra)));
3650 else
3651 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3652 negate_rtx (Pmode, size));
3653 }
3654
3655 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3656 }
3657
3658 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
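/* Hedged illustration (not from the original comments): for a push such
   as (mem:SI (pre_dec (reg sp))) this returns (reg sp); for a plain
   (mem:SI (reg sp)) or for a non-MEM operand it returns NULL.  */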
3659
3660 static rtx
3661 mem_autoinc_base (rtx mem)
3662 {
3663 if (MEM_P (mem))
3664 {
3665 rtx addr = XEXP (mem, 0);
3666 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3667 return XEXP (addr, 0);
3668 }
3669 return NULL;
3670 }
3671
3672 /* A utility routine used here, in reload, and in try_split. The insns
3673 after PREV up to and including LAST are known to adjust the stack,
3674 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3675 placing notes as appropriate. PREV may be NULL, indicating the
3676 entire insn sequence prior to LAST should be scanned.
3677
3678 The set of allowed stack pointer modifications is small:
3679 (1) One or more auto-inc style memory references (aka pushes),
3680 (2) One or more addition/subtraction with the SP as destination,
3681 (3) A single move insn with the SP as destination,
3682 (4) A call_pop insn,
3683 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3684
3685 Insns in the sequence that do not modify the SP are ignored,
3686 except for noreturn calls.
3687
3688 The return value is the amount of adjustment that can be trivially
3689 verified, via immediate operand or auto-inc. If the adjustment
3690 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
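/* Hedged examples of what the code below recognizes (illustrative only):
     (set (reg sp) (plus (reg sp) (const_int -16)))  -> returns -16
     a push through (mem:SI (pre_dec (reg sp)))      -> returns -4
     an insn copying some other register into sp     -> returns
       HOST_WIDE_INT_MIN, since no constant adjustment can be read off.  */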
3691
3692 HOST_WIDE_INT
3693 find_args_size_adjust (rtx insn)
3694 {
3695 rtx dest, set, pat;
3696 int i;
3697
3698 pat = PATTERN (insn);
3699 set = NULL;
3700
3701 /* Look for a call_pop pattern. */
3702 if (CALL_P (insn))
3703 {
3704 /* We have to allow non-call_pop patterns for the case
3705 of emit_single_push_insn of a TLS address. */
3706 if (GET_CODE (pat) != PARALLEL)
3707 return 0;
3708
3709 /* All call_pop have a stack pointer adjust in the parallel.
3710 The call itself is always first, and the stack adjust is
3711 usually last, so search from the end. */
3712 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3713 {
3714 set = XVECEXP (pat, 0, i);
3715 if (GET_CODE (set) != SET)
3716 continue;
3717 dest = SET_DEST (set);
3718 if (dest == stack_pointer_rtx)
3719 break;
3720 }
3721 /* We'd better have found the stack pointer adjust. */
3722 if (i == 0)
3723 return 0;
3724 /* Fall through to process the extracted SET and DEST
3725 as if it were a standalone insn. */
3726 }
3727 else if (GET_CODE (pat) == SET)
3728 set = pat;
3729 else if ((set = single_set (insn)) != NULL)
3730 ;
3731 else if (GET_CODE (pat) == PARALLEL)
3732 {
3733 /* ??? Some older ports use a parallel with a stack adjust
3734 and a store for a PUSH_ROUNDING pattern, rather than a
3735 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3736 /* ??? See h8300 and m68k, pushqi1. */
3737 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3738 {
3739 set = XVECEXP (pat, 0, i);
3740 if (GET_CODE (set) != SET)
3741 continue;
3742 dest = SET_DEST (set);
3743 if (dest == stack_pointer_rtx)
3744 break;
3745
3746 /* We do not expect an auto-inc of the sp in the parallel. */
3747 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3748 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3749 != stack_pointer_rtx);
3750 }
3751 if (i < 0)
3752 return 0;
3753 }
3754 else
3755 return 0;
3756
3757 dest = SET_DEST (set);
3758
3759 /* Look for direct modifications of the stack pointer. */
3760 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3761 {
3762 /* Look for a trivial adjustment, otherwise assume nothing. */
3763 /* Note that the SPU restore_stack_block pattern refers to
3764 the stack pointer in V4SImode. Consider that non-trivial. */
3765 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3766 && GET_CODE (SET_SRC (set)) == PLUS
3767 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3768 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3769 return INTVAL (XEXP (SET_SRC (set), 1));
3770 /* ??? Reload can generate no-op moves, which will be cleaned
3771 up later. Recognize it and continue searching. */
3772 else if (rtx_equal_p (dest, SET_SRC (set)))
3773 return 0;
3774 else
3775 return HOST_WIDE_INT_MIN;
3776 }
3777 else
3778 {
3779 rtx mem, addr;
3780
3781 /* Otherwise only think about autoinc patterns. */
3782 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3783 {
3784 mem = dest;
3785 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3786 != stack_pointer_rtx);
3787 }
3788 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3789 mem = SET_SRC (set);
3790 else
3791 return 0;
3792
3793 addr = XEXP (mem, 0);
3794 switch (GET_CODE (addr))
3795 {
3796 case PRE_INC:
3797 case POST_INC:
3798 return GET_MODE_SIZE (GET_MODE (mem));
3799 case PRE_DEC:
3800 case POST_DEC:
3801 return -GET_MODE_SIZE (GET_MODE (mem));
3802 case PRE_MODIFY:
3803 case POST_MODIFY:
3804 addr = XEXP (addr, 1);
3805 gcc_assert (GET_CODE (addr) == PLUS);
3806 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3807 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3808 return INTVAL (XEXP (addr, 1));
3809 default:
3810 gcc_unreachable ();
3811 }
3812 }
3813 }
3814
3815 int
3816 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3817 {
3818 int args_size = end_args_size;
3819 bool saw_unknown = false;
3820 rtx insn;
3821
3822 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3823 {
3824 HOST_WIDE_INT this_delta;
3825
3826 if (!NONDEBUG_INSN_P (insn))
3827 continue;
3828
3829 this_delta = find_args_size_adjust (insn);
3830 if (this_delta == 0)
3831 {
3832 if (!CALL_P (insn)
3833 || ACCUMULATE_OUTGOING_ARGS
3834 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3835 continue;
3836 }
3837
3838 gcc_assert (!saw_unknown);
3839 if (this_delta == HOST_WIDE_INT_MIN)
3840 saw_unknown = true;
3841
3842 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3843 #ifdef STACK_GROWS_DOWNWARD
3844 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3845 #endif
3846 args_size -= this_delta;
3847 }
3848
3849 return saw_unknown ? INT_MIN : args_size;
3850 }
3851
3852 #ifdef PUSH_ROUNDING
3853 /* Emit single push insn. */
3854
3855 static void
3856 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3857 {
3858 rtx dest_addr;
3859 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3860 rtx dest;
3861 enum insn_code icode;
3862
3863 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3864 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3865 a MEM representing the push operation to the move expander. */
3866 icode = optab_handler (push_optab, mode);
3867 if (icode != CODE_FOR_nothing)
3868 {
3869 struct expand_operand ops[1];
3870
3871 create_input_operand (&ops[0], x, mode);
3872 if (maybe_expand_insn (icode, 1, ops))
3873 return;
3874 }
3875 if (GET_MODE_SIZE (mode) == rounded_size)
3876 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3877 /* If we are to pad downward, adjust the stack pointer first and
3878 then store X into the stack location using an offset. This is
3879 because emit_move_insn does not know how to pad; it does not have
3880 access to type. */
3881 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3882 {
3883 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3884 HOST_WIDE_INT offset;
3885
3886 emit_move_insn (stack_pointer_rtx,
3887 expand_binop (Pmode,
3888 #ifdef STACK_GROWS_DOWNWARD
3889 sub_optab,
3890 #else
3891 add_optab,
3892 #endif
3893 stack_pointer_rtx,
3894 GEN_INT (rounded_size),
3895 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3896
3897 offset = (HOST_WIDE_INT) padding_size;
3898 #ifdef STACK_GROWS_DOWNWARD
3899 if (STACK_PUSH_CODE == POST_DEC)
3900 /* We have already decremented the stack pointer, so get the
3901 previous value. */
3902 offset += (HOST_WIDE_INT) rounded_size;
3903 #else
3904 if (STACK_PUSH_CODE == POST_INC)
3905 /* We have already incremented the stack pointer, so get the
3906 previous value. */
3907 offset -= (HOST_WIDE_INT) rounded_size;
3908 #endif
3909 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3910 }
3911 else
3912 {
3913 #ifdef STACK_GROWS_DOWNWARD
3914 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3915 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3916 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3917 #else
3918 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3919 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3920 GEN_INT (rounded_size));
3921 #endif
3922 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3923 }
3924
3925 dest = gen_rtx_MEM (mode, dest_addr);
3926
3927 if (type != 0)
3928 {
3929 set_mem_attributes (dest, type, 1);
3930
3931 if (flag_optimize_sibling_calls)
3932 /* Function incoming arguments may overlap with sibling call
3933 outgoing arguments and we cannot allow reordering of reads
3934 from function arguments with stores to outgoing arguments
3935 of sibling calls. */
3936 set_mem_alias_set (dest, 0);
3937 }
3938 emit_move_insn (dest, x);
3939 }
3940
3941 /* Emit and annotate a single push insn. */
3942
3943 static void
3944 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3945 {
3946 int delta, old_delta = stack_pointer_delta;
3947 rtx prev = get_last_insn ();
3948 rtx last;
3949
3950 emit_single_push_insn_1 (mode, x, type);
3951
3952 last = get_last_insn ();
3953
3954 /* Notice the common case where we emitted exactly one insn. */
3955 if (PREV_INSN (last) == prev)
3956 {
3957 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
3958 return;
3959 }
3960
3961 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
3962 gcc_assert (delta == INT_MIN || delta == old_delta);
3963 }
3964 #endif
3965
3966 /* Generate code to push X onto the stack, assuming it has mode MODE and
3967 type TYPE.
3968 MODE is redundant except when X is a CONST_INT (since they don't
3969 carry mode info).
3970 SIZE is an rtx for the size of data to be copied (in bytes),
3971 needed only if X is BLKmode.
3972
3973 ALIGN (in bits) is maximum alignment we can assume.
3974
3975 If PARTIAL and REG are both nonzero, then copy that many of the first
3976 bytes of X into registers starting with REG, and push the rest of X.
3977 The amount of space pushed is decreased by PARTIAL bytes.
3978 REG must be a hard register in this case.
3979 If REG is zero but PARTIAL is not, take all other actions for an
3980 argument partially in registers, but do not actually load any
3981 registers.
3982
3983 EXTRA is the amount in bytes of extra space to leave next to this arg.
3984 This is ignored if an argument block has already been allocated.
3985
3986 On a machine that lacks real push insns, ARGS_ADDR is the address of
3987 the bottom of the argument block for this call. We use indexing off there
3988 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3989 argument block has not been preallocated.
3990
3991 ARGS_SO_FAR is the size of args previously pushed for this call.
3992
3993 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3994 for arguments passed in registers. If nonzero, it will be the number
3995 of bytes required. */
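/* Hedged usage sketch (not part of the original sources): pushing an
   ordinary SImode argument on a PUSH_ROUNDING target with no
   preallocated argument block might look like

     emit_push_insn (x, SImode, integer_type_node, NULL_RTX,
                     GET_MODE_ALIGNMENT (SImode), 0, NULL_RTX,
                     0, NULL_RTX, const0_rtx, 0, NULL_RTX);

   which reaches emit_single_push_insn and emits one push insn.  */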
3996
3997 void
3998 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3999 unsigned int align, int partial, rtx reg, int extra,
4000 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4001 rtx alignment_pad)
4002 {
4003 rtx xinner;
4004 enum direction stack_direction
4005 #ifdef STACK_GROWS_DOWNWARD
4006 = downward;
4007 #else
4008 = upward;
4009 #endif
4010
4011 /* Decide where to pad the argument: `downward' for below,
4012 `upward' for above, or `none' for don't pad it.
4013 Default is below for small data on big-endian machines; else above. */
4014 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4015
4016 /* Invert direction if stack is post-decrement.
4017 FIXME: why? */
4018 if (STACK_PUSH_CODE == POST_DEC)
4019 if (where_pad != none)
4020 where_pad = (where_pad == downward ? upward : downward);
4021
4022 xinner = x;
4023
4024 if (mode == BLKmode
4025 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4026 {
4027 /* Copy a block into the stack, entirely or partially. */
4028
4029 rtx temp;
4030 int used;
4031 int offset;
4032 int skip;
4033
4034 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4035 used = partial - offset;
4036
4037 if (mode != BLKmode)
4038 {
4039 /* A value is to be stored in an insufficiently aligned
4040 stack slot; copy via a suitably aligned slot if
4041 necessary. */
4042 size = GEN_INT (GET_MODE_SIZE (mode));
4043 if (!MEM_P (xinner))
4044 {
4045 temp = assign_temp (type, 1, 1);
4046 emit_move_insn (temp, xinner);
4047 xinner = temp;
4048 }
4049 }
4050
4051 gcc_assert (size);
4052
4053 /* USED is now the # of bytes we need not copy to the stack
4054 because registers will take care of them. */
4055
4056 if (partial != 0)
4057 xinner = adjust_address (xinner, BLKmode, used);
4058
4059 /* If the partial register-part of the arg counts in its stack size,
4060 skip the part of stack space corresponding to the registers.
4061 Otherwise, start copying to the beginning of the stack space,
4062 by setting SKIP to 0. */
4063 skip = (reg_parm_stack_space == 0) ? 0 : used;
4064
4065 #ifdef PUSH_ROUNDING
4066 /* Do it with several push insns if that doesn't take lots of insns
4067 and if there is no difficulty with push insns that skip bytes
4068 on the stack for alignment purposes. */
4069 if (args_addr == 0
4070 && PUSH_ARGS
4071 && CONST_INT_P (size)
4072 && skip == 0
4073 && MEM_ALIGN (xinner) >= align
4074 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4075 /* Here we avoid the case of a structure whose weak alignment
4076 forces many pushes of a small amount of data,
4077 and such small pushes do rounding that causes trouble. */
4078 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4079 || align >= BIGGEST_ALIGNMENT
4080 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4081 == (align / BITS_PER_UNIT)))
4082 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4083 {
4084 /* Push padding now if padding above and stack grows down,
4085 or if padding below and stack grows up.
4086 But if space already allocated, this has already been done. */
4087 if (extra && args_addr == 0
4088 && where_pad != none && where_pad != stack_direction)
4089 anti_adjust_stack (GEN_INT (extra));
4090
4091 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4092 }
4093 else
4094 #endif /* PUSH_ROUNDING */
4095 {
4096 rtx target;
4097
4098 /* Otherwise make space on the stack and copy the data
4099 to the address of that space. */
4100
4101 /* Deduct words put into registers from the size we must copy. */
4102 if (partial != 0)
4103 {
4104 if (CONST_INT_P (size))
4105 size = GEN_INT (INTVAL (size) - used);
4106 else
4107 size = expand_binop (GET_MODE (size), sub_optab, size,
4108 GEN_INT (used), NULL_RTX, 0,
4109 OPTAB_LIB_WIDEN);
4110 }
4111
4112 /* Get the address of the stack space.
4113 In this case, we do not deal with EXTRA separately.
4114 A single stack adjust will do. */
4115 if (! args_addr)
4116 {
4117 temp = push_block (size, extra, where_pad == downward);
4118 extra = 0;
4119 }
4120 else if (CONST_INT_P (args_so_far))
4121 temp = memory_address (BLKmode,
4122 plus_constant (Pmode, args_addr,
4123 skip + INTVAL (args_so_far)));
4124 else
4125 temp = memory_address (BLKmode,
4126 plus_constant (Pmode,
4127 gen_rtx_PLUS (Pmode,
4128 args_addr,
4129 args_so_far),
4130 skip));
4131
4132 if (!ACCUMULATE_OUTGOING_ARGS)
4133 {
4134 /* If the source is referenced relative to the stack pointer,
4135 copy it to another register to stabilize it. We do not need
4136 to do this if we know that we won't be changing sp. */
4137
4138 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4139 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4140 temp = copy_to_reg (temp);
4141 }
4142
4143 target = gen_rtx_MEM (BLKmode, temp);
4144
4145 /* We do *not* set_mem_attributes here, because incoming arguments
4146 may overlap with sibling call outgoing arguments and we cannot
4147 allow reordering of reads from function arguments with stores
4148 to outgoing arguments of sibling calls. We do, however, want
4149 to record the alignment of the stack slot. */
4150 /* ALIGN may well be better aligned than TYPE, e.g. due to
4151 PARM_BOUNDARY. Assume the caller isn't lying. */
4152 set_mem_align (target, align);
4153
4154 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4155 }
4156 }
4157 else if (partial > 0)
4158 {
4159 /* Scalar partly in registers. */
4160
4161 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4162 int i;
4163 int not_stack;
4164 /* # bytes of start of argument
4165 that we must make space for but need not store. */
4166 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4167 int args_offset = INTVAL (args_so_far);
4168 int skip;
4169
4170 /* Push padding now if padding above and stack grows down,
4171 or if padding below and stack grows up.
4172 But if space already allocated, this has already been done. */
4173 if (extra && args_addr == 0
4174 && where_pad != none && where_pad != stack_direction)
4175 anti_adjust_stack (GEN_INT (extra));
4176
4177 /* If we make space by pushing it, we might as well push
4178 the real data. Otherwise, we can leave OFFSET nonzero
4179 and leave the space uninitialized. */
4180 if (args_addr == 0)
4181 offset = 0;
4182
4183 /* Now NOT_STACK gets the number of words that we don't need to
4184 allocate on the stack. Convert OFFSET to words too. */
4185 not_stack = (partial - offset) / UNITS_PER_WORD;
4186 offset /= UNITS_PER_WORD;
4187
4188 /* If the partial register-part of the arg counts in its stack size,
4189 skip the part of stack space corresponding to the registers.
4190 Otherwise, start copying to the beginning of the stack space,
4191 by setting SKIP to 0. */
4192 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4193
4194 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4195 x = validize_mem (force_const_mem (mode, x));
4196
4197 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4198 SUBREGs of such registers are not allowed. */
4199 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4200 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4201 x = copy_to_reg (x);
4202
4203 /* Loop over all the words allocated on the stack for this arg. */
4204 /* We can do it by words, because any scalar bigger than a word
4205 has a size that is a multiple of a word. */
4206 #ifndef PUSH_ARGS_REVERSED
4207 for (i = not_stack; i < size; i++)
4208 #else
4209 for (i = size - 1; i >= not_stack; i--)
4210 #endif
4211 if (i >= not_stack + offset)
4212 emit_push_insn (operand_subword_force (x, i, mode),
4213 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4214 0, args_addr,
4215 GEN_INT (args_offset + ((i - not_stack + skip)
4216 * UNITS_PER_WORD)),
4217 reg_parm_stack_space, alignment_pad);
4218 }
4219 else
4220 {
4221 rtx addr;
4222 rtx dest;
4223
4224 /* Push padding now if padding above and stack grows down,
4225 or if padding below and stack grows up.
4226 But if space already allocated, this has already been done. */
4227 if (extra && args_addr == 0
4228 && where_pad != none && where_pad != stack_direction)
4229 anti_adjust_stack (GEN_INT (extra));
4230
4231 #ifdef PUSH_ROUNDING
4232 if (args_addr == 0 && PUSH_ARGS)
4233 emit_single_push_insn (mode, x, type);
4234 else
4235 #endif
4236 {
4237 if (CONST_INT_P (args_so_far))
4238 addr
4239 = memory_address (mode,
4240 plus_constant (Pmode, args_addr,
4241 INTVAL (args_so_far)));
4242 else
4243 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4244 args_so_far));
4245 dest = gen_rtx_MEM (mode, addr);
4246
4247 /* We do *not* set_mem_attributes here, because incoming arguments
4248 may overlap with sibling call outgoing arguments and we cannot
4249 allow reordering of reads from function arguments with stores
4250 to outgoing arguments of sibling calls. We do, however, want
4251 to record the alignment of the stack slot. */
4252 /* ALIGN may well be better aligned than TYPE, e.g. due to
4253 PARM_BOUNDARY. Assume the caller isn't lying. */
4254 set_mem_align (dest, align);
4255
4256 emit_move_insn (dest, x);
4257 }
4258 }
4259
4260 /* If part should go in registers, copy that part
4261 into the appropriate registers. Do this now, at the end,
4262 since mem-to-mem copies above may do function calls. */
4263 if (partial > 0 && reg != 0)
4264 {
4265 /* Handle calls that pass values in multiple non-contiguous locations.
4266 The Irix 6 ABI has examples of this. */
4267 if (GET_CODE (reg) == PARALLEL)
4268 emit_group_load (reg, x, type, -1);
4269 else
4270 {
4271 gcc_assert (partial % UNITS_PER_WORD == 0);
4272 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4273 }
4274 }
4275
4276 if (extra && args_addr == 0 && where_pad == stack_direction)
4277 anti_adjust_stack (GEN_INT (extra));
4278
4279 if (alignment_pad && args_addr == 0)
4280 anti_adjust_stack (alignment_pad);
4281 }
4282 \f
4283 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4284 operations. */
4285
4286 static rtx
4287 get_subtarget (rtx x)
4288 {
4289 return (optimize
4290 || x == 0
4291 /* Only registers can be subtargets. */
4292 || !REG_P (x)
4293 /* Don't use hard regs to avoid extending their life. */
4294 || REGNO (x) < FIRST_PSEUDO_REGISTER
4295 ? 0 : x);
4296 }
4297
4298 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4299 FIELD is a bitfield. Returns true if the optimization was successful,
4300 and there's nothing else to do. */
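/* Hedged example of the source pattern this targets (illustrative only):
   for

     struct S { unsigned int mode : 3; unsigned int flags : 8; } s;
     s.flags |= 0x10;

   the BIT_IOR_EXPR case below ORs a shifted, masked constant directly
   into the word containing the field instead of extracting the
   bitfield, operating on it, and re-inserting it.  */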
4301
4302 static bool
4303 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4304 unsigned HOST_WIDE_INT bitpos,
4305 unsigned HOST_WIDE_INT bitregion_start,
4306 unsigned HOST_WIDE_INT bitregion_end,
4307 enum machine_mode mode1, rtx str_rtx,
4308 tree to, tree src)
4309 {
4310 enum machine_mode str_mode = GET_MODE (str_rtx);
4311 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4312 tree op0, op1;
4313 rtx value, result;
4314 optab binop;
4315 gimple srcstmt;
4316 enum tree_code code;
4317
4318 if (mode1 != VOIDmode
4319 || bitsize >= BITS_PER_WORD
4320 || str_bitsize > BITS_PER_WORD
4321 || TREE_SIDE_EFFECTS (to)
4322 || TREE_THIS_VOLATILE (to))
4323 return false;
4324
4325 STRIP_NOPS (src);
4326 if (TREE_CODE (src) != SSA_NAME)
4327 return false;
4328 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4329 return false;
4330
4331 srcstmt = get_gimple_for_ssa_name (src);
4332 if (!srcstmt
4333 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4334 return false;
4335
4336 code = gimple_assign_rhs_code (srcstmt);
4337
4338 op0 = gimple_assign_rhs1 (srcstmt);
4339
4340 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4341 to find its initialization. Hopefully the initialization will
4342 be from a bitfield load. */
4343 if (TREE_CODE (op0) == SSA_NAME)
4344 {
4345 gimple op0stmt = get_gimple_for_ssa_name (op0);
4346
4347 /* We want to eventually have OP0 be the same as TO, which
4348 should be a bitfield. */
4349 if (!op0stmt
4350 || !is_gimple_assign (op0stmt)
4351 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4352 return false;
4353 op0 = gimple_assign_rhs1 (op0stmt);
4354 }
4355
4356 op1 = gimple_assign_rhs2 (srcstmt);
4357
4358 if (!operand_equal_p (to, op0, 0))
4359 return false;
4360
4361 if (MEM_P (str_rtx))
4362 {
4363 unsigned HOST_WIDE_INT offset1;
4364
4365 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4366 str_mode = word_mode;
4367 str_mode = get_best_mode (bitsize, bitpos,
4368 bitregion_start, bitregion_end,
4369 MEM_ALIGN (str_rtx), str_mode, 0);
4370 if (str_mode == VOIDmode)
4371 return false;
4372 str_bitsize = GET_MODE_BITSIZE (str_mode);
4373
4374 offset1 = bitpos;
4375 bitpos %= str_bitsize;
4376 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4377 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4378 }
4379 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4380 return false;
4381
4382 /* If the bit field covers the whole REG/MEM, store_field
4383 will likely generate better code. */
4384 if (bitsize >= str_bitsize)
4385 return false;
4386
4387 /* We can't handle fields split across multiple entities. */
4388 if (bitpos + bitsize > str_bitsize)
4389 return false;
4390
4391 if (BYTES_BIG_ENDIAN)
4392 bitpos = str_bitsize - bitpos - bitsize;
4393
4394 switch (code)
4395 {
4396 case PLUS_EXPR:
4397 case MINUS_EXPR:
4398 /* For now, just optimize the case of the topmost bitfield
4399 where we don't need to do any masking and also
4400 1 bit bitfields where xor can be used.
4401 We might win by one instruction for the other bitfields
4402 too if insv/extv instructions aren't used, so that
4403 can be added later. */
4404 if (bitpos + bitsize != str_bitsize
4405 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4406 break;
4407
4408 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4409 value = convert_modes (str_mode,
4410 TYPE_MODE (TREE_TYPE (op1)), value,
4411 TYPE_UNSIGNED (TREE_TYPE (op1)));
4412
4413 /* We may be accessing data outside the field, which means
4414 we can alias adjacent data. */
4415 if (MEM_P (str_rtx))
4416 {
4417 str_rtx = shallow_copy_rtx (str_rtx);
4418 set_mem_alias_set (str_rtx, 0);
4419 set_mem_expr (str_rtx, 0);
4420 }
4421
4422 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4423 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4424 {
4425 value = expand_and (str_mode, value, const1_rtx, NULL);
4426 binop = xor_optab;
4427 }
4428 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4429 result = expand_binop (str_mode, binop, str_rtx,
4430 value, str_rtx, 1, OPTAB_WIDEN);
4431 if (result != str_rtx)
4432 emit_move_insn (str_rtx, result);
4433 return true;
4434
4435 case BIT_IOR_EXPR:
4436 case BIT_XOR_EXPR:
4437 if (TREE_CODE (op1) != INTEGER_CST)
4438 break;
4439 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4440 value = convert_modes (str_mode,
4441 TYPE_MODE (TREE_TYPE (op1)), value,
4442 TYPE_UNSIGNED (TREE_TYPE (op1)));
4443
4444 /* We may be accessing data outside the field, which means
4445 we can alias adjacent data. */
4446 if (MEM_P (str_rtx))
4447 {
4448 str_rtx = shallow_copy_rtx (str_rtx);
4449 set_mem_alias_set (str_rtx, 0);
4450 set_mem_expr (str_rtx, 0);
4451 }
4452
4453 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4454 if (bitpos + bitsize != str_bitsize)
4455 {
4456 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1);
4457 value = expand_and (str_mode, value, mask, NULL_RTX);
4458 }
4459 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4460 result = expand_binop (str_mode, binop, str_rtx,
4461 value, str_rtx, 1, OPTAB_WIDEN);
4462 if (result != str_rtx)
4463 emit_move_insn (str_rtx, result);
4464 return true;
4465
4466 default:
4467 break;
4468 }
4469
4470 return false;
4471 }
4472
4473 /* In the C++ memory model, consecutive bit fields in a structure are
4474 considered one memory location.
4475
4476 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4477 returns the bit range of consecutive bits in which this COMPONENT_REF
4478 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4479 and *OFFSET may be adjusted in the process.
4480
4481 If the access does not need to be restricted, 0 is returned in both
4482 *BITSTART and *BITEND. */
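/* Hedged illustration (not from the original comments): given

     struct S { char c; int a : 3; int b : 5; };

   the fields A and B share one DECL_BIT_FIELD_REPRESENTATIVE, so a
   store to either of them typically gets a bit range covering the
   8-bit region after C, and must not touch bits outside it, as the
   C++11 memory model requires.  */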
4483
4484 static void
4485 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4486 unsigned HOST_WIDE_INT *bitend,
4487 tree exp,
4488 HOST_WIDE_INT *bitpos,
4489 tree *offset)
4490 {
4491 HOST_WIDE_INT bitoffset;
4492 tree field, repr;
4493
4494 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4495
4496 field = TREE_OPERAND (exp, 1);
4497 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4498 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4499 need to limit the range we can access. */
4500 if (!repr)
4501 {
4502 *bitstart = *bitend = 0;
4503 return;
4504 }
4505
4506 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4507 part of a larger bit field, then the representative does not serve any
4508 useful purpose. This can occur in Ada. */
4509 if (handled_component_p (TREE_OPERAND (exp, 0)))
4510 {
4511 enum machine_mode rmode;
4512 HOST_WIDE_INT rbitsize, rbitpos;
4513 tree roffset;
4514 int unsignedp;
4515 int volatilep = 0;
4516 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4517 &roffset, &rmode, &unsignedp, &volatilep, false);
4518 if ((rbitpos % BITS_PER_UNIT) != 0)
4519 {
4520 *bitstart = *bitend = 0;
4521 return;
4522 }
4523 }
4524
4525 /* Compute the adjustment to bitpos from the offset of the field
4526 relative to the representative. DECL_FIELD_OFFSET of field and
4527 repr are the same by construction if they are not constants,
4528 see finish_bitfield_layout. */
4529 if (host_integerp (DECL_FIELD_OFFSET (field), 1)
4530 && host_integerp (DECL_FIELD_OFFSET (repr), 1))
4531 bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
4532 - tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
4533 else
4534 bitoffset = 0;
4535 bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
4536 - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
4537
4538 /* If the adjustment is larger than bitpos, we would have a negative bit
4539 position for the lower bound and this may wreak havoc later. This can
4540 occur only if we have a non-null offset, so adjust offset and bitpos
4541 to make the lower bound non-negative. */
4542 if (bitoffset > *bitpos)
4543 {
4544 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4545
4546 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4547 gcc_assert (*offset != NULL_TREE);
4548
4549 *bitpos += adjust;
4550 *offset
4551 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4552 *bitstart = 0;
4553 }
4554 else
4555 *bitstart = *bitpos - bitoffset;
4556
4557 *bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
4558 }
4559
4560 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4561 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4562 DECL_RTL was not set yet, return NORTL. */
4563
4564 static inline bool
4565 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4566 {
4567 if (TREE_CODE (addr) != ADDR_EXPR)
4568 return false;
4569
4570 tree base = TREE_OPERAND (addr, 0);
4571
4572 if (!DECL_P (base)
4573 || TREE_ADDRESSABLE (base)
4574 || DECL_MODE (base) == BLKmode)
4575 return false;
4576
4577 if (!DECL_RTL_SET_P (base))
4578 return nortl;
4579
4580 return (!MEM_P (DECL_RTL (base)));
4581 }
4582
4583 /* Returns true if the MEM_REF REF refers to an object that does not
4584 reside in memory and has non-BLKmode. */
4585
4586 static inline bool
4587 mem_ref_refers_to_non_mem_p (tree ref)
4588 {
4589 tree base = TREE_OPERAND (ref, 0);
4590 return addr_expr_of_non_mem_decl_p_1 (base, false);
4591 }
4592
4593 /* Return TRUE iff OP is an ADDR_EXPR of a DECL that's not
4594 addressable. This is very much like mem_ref_refers_to_non_mem_p,
4595 but instead of the MEM_REF, it takes its base, and it doesn't
4596 assume a DECL is in memory just because its RTL is not set yet. */
4597
4598 bool
4599 addr_expr_of_non_mem_decl_p (tree op)
4600 {
4601 return addr_expr_of_non_mem_decl_p_1 (op, true);
4602 }
4603
4604 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4605 is true, try generating a nontemporal store. */
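/* Hedged example of one case handled below (illustrative only): with

     typedef int unaligned_int __attribute__ ((aligned (1)));
     void f (unaligned_int *p, int v) { *p = v; }

   the store through P is under-aligned for SImode, so on a target with
   a movmisalign<mode> pattern (or where SLOW_UNALIGNED_ACCESS holds)
   the "Handle misaligned stores" path below uses that pattern or
   store_bit_field rather than a plain move.  */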
4606
4607 void
4608 expand_assignment (tree to, tree from, bool nontemporal)
4609 {
4610 rtx to_rtx = 0;
4611 rtx result;
4612 enum machine_mode mode;
4613 unsigned int align;
4614 enum insn_code icode;
4615
4616 /* Don't crash if the lhs of the assignment was erroneous. */
4617 if (TREE_CODE (to) == ERROR_MARK)
4618 {
4619 expand_normal (from);
4620 return;
4621 }
4622
4623 /* Optimize away no-op moves without side-effects. */
4624 if (operand_equal_p (to, from, 0))
4625 return;
4626
4627 /* Handle misaligned stores. */
4628 mode = TYPE_MODE (TREE_TYPE (to));
4629 if ((TREE_CODE (to) == MEM_REF
4630 || TREE_CODE (to) == TARGET_MEM_REF)
4631 && mode != BLKmode
4632 && !mem_ref_refers_to_non_mem_p (to)
4633 && ((align = get_object_alignment (to))
4634 < GET_MODE_ALIGNMENT (mode))
4635 && (((icode = optab_handler (movmisalign_optab, mode))
4636 != CODE_FOR_nothing)
4637 || SLOW_UNALIGNED_ACCESS (mode, align)))
4638 {
4639 rtx reg, mem;
4640
4641 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4642 reg = force_not_mem (reg);
4643 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4644
4645 if (icode != CODE_FOR_nothing)
4646 {
4647 struct expand_operand ops[2];
4648
4649 create_fixed_operand (&ops[0], mem);
4650 create_input_operand (&ops[1], reg, mode);
4651 /* The movmisalign<mode> pattern cannot fail, else the assignment
4652 would silently be omitted. */
4653 expand_insn (icode, 2, ops);
4654 }
4655 else
4656 store_bit_field (mem, GET_MODE_BITSIZE (mode),
4657 0, 0, 0, mode, reg);
4658 return;
4659 }
4660
4661 /* Assignment of a structure component needs special treatment
4662 if the structure component's rtx is not simply a MEM.
4663 Assignment of an array element at a constant index, and assignment of
4664 an array element in an unaligned packed structure field, has the same
4665 problem. Same for (partially) storing into a non-memory object. */
4666 if (handled_component_p (to)
4667 || (TREE_CODE (to) == MEM_REF
4668 && mem_ref_refers_to_non_mem_p (to))
4669 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4670 {
4671 enum machine_mode mode1;
4672 HOST_WIDE_INT bitsize, bitpos;
4673 unsigned HOST_WIDE_INT bitregion_start = 0;
4674 unsigned HOST_WIDE_INT bitregion_end = 0;
4675 tree offset;
4676 int unsignedp;
4677 int volatilep = 0;
4678 tree tem;
4679 bool misalignp;
4680 rtx mem = NULL_RTX;
4681
4682 push_temp_slots ();
4683 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4684 &unsignedp, &volatilep, true);
4685
4686 if (TREE_CODE (to) == COMPONENT_REF
4687 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4688 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4689
4690 /* If we are going to use store_bit_field and extract_bit_field,
4691 make sure to_rtx will be safe for multiple use. */
4692 mode = TYPE_MODE (TREE_TYPE (tem));
4693 if (TREE_CODE (tem) == MEM_REF
4694 && mode != BLKmode
4695 && ((align = get_object_alignment (tem))
4696 < GET_MODE_ALIGNMENT (mode))
4697 && ((icode = optab_handler (movmisalign_optab, mode))
4698 != CODE_FOR_nothing))
4699 {
4700 struct expand_operand ops[2];
4701
4702 misalignp = true;
4703 to_rtx = gen_reg_rtx (mode);
4704 mem = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4705
4706 /* If the misaligned store doesn't overwrite all bits, perform
4707 rmw cycle on MEM. */
4708 if (bitsize != GET_MODE_BITSIZE (mode))
4709 {
4710 create_input_operand (&ops[0], to_rtx, mode);
4711 create_fixed_operand (&ops[1], mem);
4712 /* The movmisalign<mode> pattern cannot fail, else the assignment
4713 would silently be omitted. */
4714 expand_insn (icode, 2, ops);
4715
4716 mem = copy_rtx (mem);
4717 }
4718 }
4719 else
4720 {
4721 misalignp = false;
4722 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4723 }
4724
4725 /* If the bitfield is volatile, we want to access it in the
4726 field's mode, not the computed mode.
4727 If a MEM has VOIDmode (external with incomplete type),
4728 use BLKmode for it instead. */
4729 if (MEM_P (to_rtx))
4730 {
4731 if (volatilep && flag_strict_volatile_bitfields > 0)
4732 to_rtx = adjust_address (to_rtx, mode1, 0);
4733 else if (GET_MODE (to_rtx) == VOIDmode)
4734 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4735 }
4736
4737 if (offset != 0)
4738 {
4739 enum machine_mode address_mode;
4740 rtx offset_rtx;
4741
4742 if (!MEM_P (to_rtx))
4743 {
4744 /* We can get constant negative offsets into arrays with broken
4745 user code. Translate this to a trap instead of ICEing. */
4746 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4747 expand_builtin_trap ();
4748 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4749 }
4750
4751 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4752 address_mode = get_address_mode (to_rtx);
4753 if (GET_MODE (offset_rtx) != address_mode)
4754 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4755
4756 /* A constant address in TO_RTX can have VOIDmode, we must not try
4757 to call force_reg for that case. Avoid that case. */
4758 if (MEM_P (to_rtx)
4759 && GET_MODE (to_rtx) == BLKmode
4760 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4761 && bitsize > 0
4762 && (bitpos % bitsize) == 0
4763 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4764 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4765 {
4766 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4767 bitpos = 0;
4768 }
4769
4770 to_rtx = offset_address (to_rtx, offset_rtx,
4771 highest_pow2_factor_for_target (to,
4772 offset));
4773 }
4774
4775 /* No action is needed if the target is not a memory and the field
4776 lies completely outside that target. This can occur if the source
4777 code contains an out-of-bounds access to a small array. */
4778 if (!MEM_P (to_rtx)
4779 && GET_MODE (to_rtx) != BLKmode
4780 && (unsigned HOST_WIDE_INT) bitpos
4781 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4782 {
4783 expand_normal (from);
4784 result = NULL;
4785 }
4786 /* Handle expand_expr of a complex value returning a CONCAT. */
4787 else if (GET_CODE (to_rtx) == CONCAT)
4788 {
4789 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4790 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4791 && bitpos == 0
4792 && bitsize == mode_bitsize)
4793 result = store_expr (from, to_rtx, false, nontemporal);
4794 else if (bitsize == mode_bitsize / 2
4795 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4796 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4797 nontemporal);
4798 else if (bitpos + bitsize <= mode_bitsize / 2)
4799 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4800 bitregion_start, bitregion_end,
4801 mode1, from,
4802 get_alias_set (to), nontemporal);
4803 else if (bitpos >= mode_bitsize / 2)
4804 result = store_field (XEXP (to_rtx, 1), bitsize,
4805 bitpos - mode_bitsize / 2,
4806 bitregion_start, bitregion_end,
4807 mode1, from,
4808 get_alias_set (to), nontemporal);
4809 else if (bitpos == 0 && bitsize == mode_bitsize)
4810 {
4811 rtx from_rtx;
4812 result = expand_normal (from);
4813 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4814 TYPE_MODE (TREE_TYPE (from)), 0);
4815 emit_move_insn (XEXP (to_rtx, 0),
4816 read_complex_part (from_rtx, false));
4817 emit_move_insn (XEXP (to_rtx, 1),
4818 read_complex_part (from_rtx, true));
4819 }
4820 else
4821 {
4822 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4823 GET_MODE_SIZE (GET_MODE (to_rtx)));
4824 write_complex_part (temp, XEXP (to_rtx, 0), false);
4825 write_complex_part (temp, XEXP (to_rtx, 1), true);
4826 result = store_field (temp, bitsize, bitpos,
4827 bitregion_start, bitregion_end,
4828 mode1, from,
4829 get_alias_set (to), nontemporal);
4830 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4831 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4832 }
4833 }
4834 else
4835 {
4836 if (MEM_P (to_rtx))
4837 {
4838 /* If the field is at offset zero, we could have been given the
4839 DECL_RTX of the parent struct. Don't munge it. */
4840 to_rtx = shallow_copy_rtx (to_rtx);
4841
4842 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4843
4844 /* Deal with volatile and readonly fields. The former is only
4845 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4846 if (volatilep)
4847 MEM_VOLATILE_P (to_rtx) = 1;
4848 }
4849
4850 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4851 bitregion_start, bitregion_end,
4852 mode1,
4853 to_rtx, to, from))
4854 result = NULL;
4855 else
4856 result = store_field (to_rtx, bitsize, bitpos,
4857 bitregion_start, bitregion_end,
4858 mode1, from,
4859 get_alias_set (to), nontemporal);
4860 }
4861
4862 if (misalignp)
4863 {
4864 struct expand_operand ops[2];
4865
4866 create_fixed_operand (&ops[0], mem);
4867 create_input_operand (&ops[1], to_rtx, mode);
4868 /* The movmisalign<mode> pattern cannot fail, else the assignment
4869 would silently be omitted. */
4870 expand_insn (icode, 2, ops);
4871 }
4872
4873 if (result)
4874 preserve_temp_slots (result);
4875 pop_temp_slots ();
4876 return;
4877 }
4878
4879 /* If the rhs is a function call and its value is not an aggregate,
4880 call the function before we start to compute the lhs.
4881 This is needed for correct code for cases such as
4882 val = setjmp (buf) on machines where reference to val
4883 requires loading up part of an address in a separate insn.
4884
4885 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4886 since it might be a promoted variable where the zero- or sign- extension
4887 needs to be done. Handling this in the normal way is safe because no
4888 computation is done before the call. The same is true for SSA names. */
4889 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4890 && COMPLETE_TYPE_P (TREE_TYPE (from))
4891 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4892 && ! (((TREE_CODE (to) == VAR_DECL
4893 || TREE_CODE (to) == PARM_DECL
4894 || TREE_CODE (to) == RESULT_DECL)
4895 && REG_P (DECL_RTL (to)))
4896 || TREE_CODE (to) == SSA_NAME))
4897 {
4898 rtx value;
4899
4900 push_temp_slots ();
4901 value = expand_normal (from);
4902 if (to_rtx == 0)
4903 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4904
4905 /* Handle calls that return values in multiple non-contiguous locations.
4906 The Irix 6 ABI has examples of this. */
4907 if (GET_CODE (to_rtx) == PARALLEL)
4908 {
4909 if (GET_CODE (value) == PARALLEL)
4910 emit_group_move (to_rtx, value);
4911 else
4912 emit_group_load (to_rtx, value, TREE_TYPE (from),
4913 int_size_in_bytes (TREE_TYPE (from)));
4914 }
4915 else if (GET_CODE (value) == PARALLEL)
4916 emit_group_store (to_rtx, value, TREE_TYPE (from),
4917 int_size_in_bytes (TREE_TYPE (from)));
4918 else if (GET_MODE (to_rtx) == BLKmode)
4919 {
4920 /* Handle calls that return BLKmode values in registers. */
4921 if (REG_P (value))
4922 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
4923 else
4924 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4925 }
4926 else
4927 {
4928 if (POINTER_TYPE_P (TREE_TYPE (to)))
4929 value = convert_memory_address_addr_space
4930 (GET_MODE (to_rtx), value,
4931 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4932
4933 emit_move_insn (to_rtx, value);
4934 }
4935 preserve_temp_slots (to_rtx);
4936 pop_temp_slots ();
4937 return;
4938 }
4939
4940 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
4941 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4942
4943 /* Don't move directly into a return register. */
4944 if (TREE_CODE (to) == RESULT_DECL
4945 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4946 {
4947 rtx temp;
4948
4949 push_temp_slots ();
4950
4951 /* If the source is itself a return value, it still is in a pseudo at
4952 this point so we can move it back to the return register directly. */
4953 if (REG_P (to_rtx)
4954 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
4955 && TREE_CODE (from) != CALL_EXPR)
4956 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
4957 else
4958 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4959
4960 /* Handle calls that return values in multiple non-contiguous locations.
4961 The Irix 6 ABI has examples of this. */
4962 if (GET_CODE (to_rtx) == PARALLEL)
4963 {
4964 if (GET_CODE (temp) == PARALLEL)
4965 emit_group_move (to_rtx, temp);
4966 else
4967 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4968 int_size_in_bytes (TREE_TYPE (from)));
4969 }
4970 else if (temp)
4971 emit_move_insn (to_rtx, temp);
4972
4973 preserve_temp_slots (to_rtx);
4974 pop_temp_slots ();
4975 return;
4976 }
4977
4978 /* In case we are returning the contents of an object which overlaps
4979 the place the value is being stored, use a safe function when copying
4980 a value through a pointer into a structure value return block. */
4981 if (TREE_CODE (to) == RESULT_DECL
4982 && TREE_CODE (from) == INDIRECT_REF
4983 && ADDR_SPACE_GENERIC_P
4984 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4985 && refs_may_alias_p (to, from)
4986 && cfun->returns_struct
4987 && !cfun->returns_pcc_struct)
4988 {
4989 rtx from_rtx, size;
4990
4991 push_temp_slots ();
4992 size = expr_size (from);
4993 from_rtx = expand_normal (from);
4994
4995 emit_library_call (memmove_libfunc, LCT_NORMAL,
4996 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4997 XEXP (from_rtx, 0), Pmode,
4998 convert_to_mode (TYPE_MODE (sizetype),
4999 size, TYPE_UNSIGNED (sizetype)),
5000 TYPE_MODE (sizetype));
5001
5002 preserve_temp_slots (to_rtx);
5003 pop_temp_slots ();
5004 return;
5005 }
5006
5007 /* Compute FROM and store the value in the rtx we got. */
5008
5009 push_temp_slots ();
5010 result = store_expr (from, to_rtx, 0, nontemporal);
5011 preserve_temp_slots (result);
5012 pop_temp_slots ();
5013 return;
5014 }
5015
5016 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5017 succeeded, false otherwise. */
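/* Hedged note (not part of the original sources): this succeeds only on
   targets providing a storent<mode> pattern, typically built around
   non-temporal store instructions such as x86's movnti; elsewhere it
   returns false and the caller falls back to an ordinary store.  */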
5018
5019 bool
5020 emit_storent_insn (rtx to, rtx from)
5021 {
5022 struct expand_operand ops[2];
5023 enum machine_mode mode = GET_MODE (to);
5024 enum insn_code code = optab_handler (storent_optab, mode);
5025
5026 if (code == CODE_FOR_nothing)
5027 return false;
5028
5029 create_fixed_operand (&ops[0], to);
5030 create_input_operand (&ops[1], from, mode);
5031 return maybe_expand_insn (code, 2, ops);
5032 }
5033
5034 /* Generate code for computing expression EXP,
5035 and storing the value into TARGET.
5036
5037 If the mode is BLKmode then we may return TARGET itself.
5038 It turns out that in BLKmode it doesn't cause a problem,
5039 because C has no operators that could combine two different
5040 assignments into the same BLKmode object with different values
5041 with no sequence point. Will other languages need this to
5042 be more thorough?
5043
5044 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5045 stack, and block moves may need to be treated specially.
5046
5047 If NONTEMPORAL is true, try using a nontemporal store instruction. */
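/* Hedged example of the STRING_CST fast path below (illustrative only):
   for

     char buf[16] = "abc";

   store_expr can copy the string data with store_by_pieces and then
   clear_storage the remaining bytes of BUF, rather than emitting a
   block copy from a full-size constant-pool object.  */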
5048
5049 rtx
5050 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5051 {
5052 rtx temp;
5053 rtx alt_rtl = NULL_RTX;
5054 location_t loc = curr_insn_location ();
5055
5056 if (VOID_TYPE_P (TREE_TYPE (exp)))
5057 {
5058 /* C++ can generate ?: expressions with a throw expression in one
5059 branch and an rvalue in the other. Here, we resolve attempts to
5060 store the throw expression's nonexistent result. */
5061 gcc_assert (!call_param_p);
5062 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5063 return NULL_RTX;
5064 }
5065 if (TREE_CODE (exp) == COMPOUND_EXPR)
5066 {
5067 /* Perform first part of compound expression, then assign from second
5068 part. */
5069 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5070 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5071 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5072 nontemporal);
5073 }
5074 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5075 {
5076 /* For conditional expression, get safe form of the target. Then
5077 test the condition, doing the appropriate assignment on either
5078 side. This avoids the creation of unnecessary temporaries.
5079 For non-BLKmode, it is more efficient not to do this. */
5080
5081 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5082
5083 do_pending_stack_adjust ();
5084 NO_DEFER_POP;
5085 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5086 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5087 nontemporal);
5088 emit_jump_insn (gen_jump (lab2));
5089 emit_barrier ();
5090 emit_label (lab1);
5091 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5092 nontemporal);
5093 emit_label (lab2);
5094 OK_DEFER_POP;
5095
5096 return NULL_RTX;
5097 }
5098 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5099 /* If this is a scalar in a register that is stored in a wider mode
5100 than the declared mode, compute the result into its declared mode
5101 and then convert to the wider mode. Our value is the computed
5102 expression. */
5103 {
5104 rtx inner_target = 0;
5105
5106 /* We can do the conversion inside EXP, which will often result
5107 in some optimizations. Do the conversion in two steps: first
5108 change the signedness, if needed, then the extend. But don't
5109 do this if the type of EXP is a subtype of something else
5110 since then the conversion might involve more than just
5111 converting modes. */
5112 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5113 && TREE_TYPE (TREE_TYPE (exp)) == 0
5114 && GET_MODE_PRECISION (GET_MODE (target))
5115 == TYPE_PRECISION (TREE_TYPE (exp)))
5116 {
5117 if (TYPE_UNSIGNED (TREE_TYPE (exp))
5118 != SUBREG_PROMOTED_UNSIGNED_P (target))
5119 {
5120 /* Some types, e.g. Fortran's logical*4, won't have a signed
5121 version, so use the mode instead. */
5122 tree ntype
5123 = (signed_or_unsigned_type_for
5124 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5125 if (ntype == NULL)
5126 ntype = lang_hooks.types.type_for_mode
5127 (TYPE_MODE (TREE_TYPE (exp)),
5128 SUBREG_PROMOTED_UNSIGNED_P (target));
5129
5130 exp = fold_convert_loc (loc, ntype, exp);
5131 }
5132
5133 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5134 (GET_MODE (SUBREG_REG (target)),
5135 SUBREG_PROMOTED_UNSIGNED_P (target)),
5136 exp);
5137
5138 inner_target = SUBREG_REG (target);
5139 }
5140
5141 temp = expand_expr (exp, inner_target, VOIDmode,
5142 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5143
5144 /* If TEMP is a VOIDmode constant, use convert_modes to make
5145 sure that we properly convert it. */
5146 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5147 {
5148 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5149 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5150 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5151 GET_MODE (target), temp,
5152 SUBREG_PROMOTED_UNSIGNED_P (target));
5153 }
5154
5155 convert_move (SUBREG_REG (target), temp,
5156 SUBREG_PROMOTED_UNSIGNED_P (target));
5157
5158 return NULL_RTX;
5159 }
5160 else if ((TREE_CODE (exp) == STRING_CST
5161 || (TREE_CODE (exp) == MEM_REF
5162 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5163 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5164 == STRING_CST
5165 && integer_zerop (TREE_OPERAND (exp, 1))))
5166 && !nontemporal && !call_param_p
5167 && MEM_P (target))
5168 {
5169 /* Optimize initialization of an array with a STRING_CST. */
5170 HOST_WIDE_INT exp_len, str_copy_len;
5171 rtx dest_mem;
5172 tree str = TREE_CODE (exp) == STRING_CST
5173 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5174
5175 exp_len = int_expr_size (exp);
5176 if (exp_len <= 0)
5177 goto normal_expr;
5178
5179 if (TREE_STRING_LENGTH (str) <= 0)
5180 goto normal_expr;
5181
5182 str_copy_len = strlen (TREE_STRING_POINTER (str));
5183 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5184 goto normal_expr;
5185
5186 str_copy_len = TREE_STRING_LENGTH (str);
5187 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5188 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5189 {
5190 str_copy_len += STORE_MAX_PIECES - 1;
5191 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5192 }
5193 str_copy_len = MIN (str_copy_len, exp_len);
5194 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5195 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5196 MEM_ALIGN (target), false))
5197 goto normal_expr;
5198
5199 dest_mem = target;
5200
5201 dest_mem = store_by_pieces (dest_mem,
5202 str_copy_len, builtin_strncpy_read_str,
5203 CONST_CAST (char *,
5204 TREE_STRING_POINTER (str)),
5205 MEM_ALIGN (target), false,
5206 exp_len > str_copy_len ? 1 : 0);
5207 if (exp_len > str_copy_len)
5208 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5209 GEN_INT (exp_len - str_copy_len),
5210 BLOCK_OP_NORMAL);
5211 return NULL_RTX;
5212 }
5213 else
5214 {
5215 rtx tmp_target;
5216
5217 normal_expr:
5218 /* If we want to use a nontemporal store, force the value to
5219 register first. */
5220 tmp_target = nontemporal ? NULL_RTX : target;
5221 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5222 (call_param_p
5223 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5224 &alt_rtl);
5225 }
5226
5227 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5228 the same as that of TARGET, adjust the constant. This is needed, for
5229 example, in case it is a CONST_DOUBLE and we want only a word-sized
5230 value. */
5231 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5232 && TREE_CODE (exp) != ERROR_MARK
5233 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5234 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5235 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5236
5237 /* If value was not generated in the target, store it there.
5238 Convert the value to TARGET's type first if necessary and emit the
5239 pending incrementations that have been queued when expanding EXP.
5240 Note that we cannot emit the whole queue blindly because this will
5241 effectively disable the POST_INC optimization later.
5242
5243 If TEMP and TARGET compare equal according to rtx_equal_p, but
5244 one or both of them are volatile memory refs, we have to distinguish
5245 two cases:
5246 - expand_expr has used TARGET. In this case, we must not generate
5247 another copy. This can be detected by TARGET being equal according
5248 to == .
5249 - expand_expr has not used TARGET - that means that the source just
5250 happens to have the same RTX form. Since temp will have been created
5251 by expand_expr, it will compare unequal according to == .
5252 We must generate a copy in this case, to reach the correct number
5253 of volatile memory references. */
5254
5255 if ((! rtx_equal_p (temp, target)
5256 || (temp != target && (side_effects_p (temp)
5257 || side_effects_p (target))))
5258 && TREE_CODE (exp) != ERROR_MARK
5259 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5260 but TARGET is not a valid memory reference, TEMP will differ
5261 from TARGET although it is really the same location. */
5262 && !(alt_rtl
5263 && rtx_equal_p (alt_rtl, target)
5264 && !side_effects_p (alt_rtl)
5265 && !side_effects_p (target))
5266 /* If there's nothing to copy, don't bother. Don't call
5267 expr_size unless necessary, because the expr_size hook of some
5268 front ends (C++) must not be given objects that are not
5269 supposed to be bit-copied or bit-initialized. */
5270 && expr_size (exp) != const0_rtx)
5271 {
5272 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5273 {
5274 if (GET_MODE (target) == BLKmode)
5275 {
5276 /* Handle calls that return BLKmode values in registers. */
5277 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5278 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5279 else
5280 store_bit_field (target,
5281 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5282 0, 0, 0, GET_MODE (temp), temp);
5283 }
5284 else
5285 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5286 }
5287
5288 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5289 {
5290 /* Handle copying a string constant into an array. The string
5291 constant may be shorter than the array. So copy just the string's
5292 actual length, and clear the rest. First get the size of the data
5293 type of the string, which is actually the size of the target. */
5294 rtx size = expr_size (exp);
5295
5296 if (CONST_INT_P (size)
5297 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5298 emit_block_move (target, temp, size,
5299 (call_param_p
5300 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5301 else
5302 {
5303 enum machine_mode pointer_mode
5304 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5305 enum machine_mode address_mode = get_address_mode (target);
5306
5307 /* Compute the size of the data to copy from the string. */
5308 tree copy_size
5309 = size_binop_loc (loc, MIN_EXPR,
5310 make_tree (sizetype, size),
5311 size_int (TREE_STRING_LENGTH (exp)));
5312 rtx copy_size_rtx
5313 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5314 (call_param_p
5315 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5316 rtx label = 0;
5317
5318 /* Copy that much. */
5319 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5320 TYPE_UNSIGNED (sizetype));
5321 emit_block_move (target, temp, copy_size_rtx,
5322 (call_param_p
5323 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5324
5325 /* Figure out how much is left in TARGET that we have to clear.
5326 Do all calculations in pointer_mode. */
5327 if (CONST_INT_P (copy_size_rtx))
5328 {
5329 size = plus_constant (address_mode, size,
5330 -INTVAL (copy_size_rtx));
5331 target = adjust_address (target, BLKmode,
5332 INTVAL (copy_size_rtx));
5333 }
5334 else
5335 {
5336 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5337 copy_size_rtx, NULL_RTX, 0,
5338 OPTAB_LIB_WIDEN);
5339
5340 if (GET_MODE (copy_size_rtx) != address_mode)
5341 copy_size_rtx = convert_to_mode (address_mode,
5342 copy_size_rtx,
5343 TYPE_UNSIGNED (sizetype));
5344
5345 target = offset_address (target, copy_size_rtx,
5346 highest_pow2_factor (copy_size));
5347 label = gen_label_rtx ();
5348 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5349 GET_MODE (size), 0, label);
5350 }
5351
5352 if (size != const0_rtx)
5353 clear_storage (target, size, BLOCK_OP_NORMAL);
5354
5355 if (label)
5356 emit_label (label);
5357 }
5358 }
5359 /* Handle calls that return values in multiple non-contiguous locations.
5360 The Irix 6 ABI has examples of this. */
5361 else if (GET_CODE (target) == PARALLEL)
5362 {
5363 if (GET_CODE (temp) == PARALLEL)
5364 emit_group_move (target, temp);
5365 else
5366 emit_group_load (target, temp, TREE_TYPE (exp),
5367 int_size_in_bytes (TREE_TYPE (exp)));
5368 }
5369 else if (GET_CODE (temp) == PARALLEL)
5370 emit_group_store (target, temp, TREE_TYPE (exp),
5371 int_size_in_bytes (TREE_TYPE (exp)));
5372 else if (GET_MODE (temp) == BLKmode)
5373 emit_block_move (target, temp, expr_size (exp),
5374 (call_param_p
5375 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5376 /* If we emit a nontemporal store, there is nothing else to do. */
5377 else if (nontemporal && emit_storent_insn (target, temp))
5378 ;
5379 else
5380 {
5381 temp = force_operand (temp, target);
5382 if (temp != target)
5383 emit_move_insn (target, temp);
5384 }
5385 }
5386
5387 return NULL_RTX;
5388 }
5389 \f
5390 /* Return true if field F of structure TYPE is a flexible array. */
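/* Editorial illustration (not part of the original sources): a C99
   flexible array member is a trailing incomplete array, e.g.

     struct packet
     {
       int len;
       char payload[];
     };

   flexible_array_member_p detects "payload" above: it is the last
   field, an ARRAY_TYPE with a zero lower bound and no upper bound,
   inside a structure whose own size is otherwise known.  */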
5391
5392 static bool
5393 flexible_array_member_p (const_tree f, const_tree type)
5394 {
5395 const_tree tf;
5396
5397 tf = TREE_TYPE (f);
5398 return (DECL_CHAIN (f) == NULL
5399 && TREE_CODE (tf) == ARRAY_TYPE
5400 && TYPE_DOMAIN (tf)
5401 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5402 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5403 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5404 && int_size_in_bytes (type) >= 0);
5405 }
5406
5407 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5408 must have in order for it to completely initialize a value of type TYPE.
5409 Return -1 if the number isn't known.
5410
5411 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
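/* Editorial example (illustrative only): for

     struct s { int a; double b[3]; };

   count_type_elements (TYPE, true) returns 2, the number of top-level
   initializers a complete constructor needs, while
   count_type_elements (TYPE, false) returns 4, an estimate of the
   scalars in the type (1 for "a" plus 3 array elements).  */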
5412
5413 static HOST_WIDE_INT
5414 count_type_elements (const_tree type, bool for_ctor_p)
5415 {
5416 switch (TREE_CODE (type))
5417 {
5418 case ARRAY_TYPE:
5419 {
5420 tree nelts;
5421
5422 nelts = array_type_nelts (type);
5423 if (nelts && host_integerp (nelts, 1))
5424 {
5425 unsigned HOST_WIDE_INT n;
5426
5427 n = tree_low_cst (nelts, 1) + 1;
5428 if (n == 0 || for_ctor_p)
5429 return n;
5430 else
5431 return n * count_type_elements (TREE_TYPE (type), false);
5432 }
5433 return for_ctor_p ? -1 : 1;
5434 }
5435
5436 case RECORD_TYPE:
5437 {
5438 unsigned HOST_WIDE_INT n;
5439 tree f;
5440
5441 n = 0;
5442 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5443 if (TREE_CODE (f) == FIELD_DECL)
5444 {
5445 if (!for_ctor_p)
5446 n += count_type_elements (TREE_TYPE (f), false);
5447 else if (!flexible_array_member_p (f, type))
5448 /* Don't count flexible arrays, which are not supposed
5449 to be initialized. */
5450 n += 1;
5451 }
5452
5453 return n;
5454 }
5455
5456 case UNION_TYPE:
5457 case QUAL_UNION_TYPE:
5458 {
5459 tree f;
5460 HOST_WIDE_INT n, m;
5461
5462 gcc_assert (!for_ctor_p);
5463 /* Estimate the number of scalars in each field and pick the
5464 maximum. Other estimates would do instead; the idea is simply
5465 to make sure that the estimate is not sensitive to the ordering
5466 of the fields. */
5467 n = 1;
5468 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5469 if (TREE_CODE (f) == FIELD_DECL)
5470 {
5471 m = count_type_elements (TREE_TYPE (f), false);
5472 /* If the field doesn't span the whole union, add an extra
5473 scalar for the rest. */
5474 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5475 TYPE_SIZE (type)) != 1)
5476 m++;
5477 if (n < m)
5478 n = m;
5479 }
5480 return n;
5481 }
5482
5483 case COMPLEX_TYPE:
5484 return 2;
5485
5486 case VECTOR_TYPE:
5487 return TYPE_VECTOR_SUBPARTS (type);
5488
5489 case INTEGER_TYPE:
5490 case REAL_TYPE:
5491 case FIXED_POINT_TYPE:
5492 case ENUMERAL_TYPE:
5493 case BOOLEAN_TYPE:
5494 case POINTER_TYPE:
5495 case OFFSET_TYPE:
5496 case REFERENCE_TYPE:
5497 case NULLPTR_TYPE:
5498 return 1;
5499
5500 case ERROR_MARK:
5501 return 0;
5502
5503 case VOID_TYPE:
5504 case METHOD_TYPE:
5505 case FUNCTION_TYPE:
5506 case LANG_TYPE:
5507 default:
5508 gcc_unreachable ();
5509 }
5510 }
5511
5512 /* Helper for categorize_ctor_elements. Identical interface. */
5513
5514 static bool
5515 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5516 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5517 {
5518 unsigned HOST_WIDE_INT idx;
5519 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5520 tree value, purpose, elt_type;
5521
5522 /* Whether CTOR is a valid constant initializer, in accordance with what
5523 initializer_constant_valid_p does. If inferred from the constructor
5524 elements, true until proven otherwise. */
5525 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5526 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5527
5528 nz_elts = 0;
5529 init_elts = 0;
5530 num_fields = 0;
5531 elt_type = NULL_TREE;
5532
5533 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5534 {
5535 HOST_WIDE_INT mult = 1;
5536
5537 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5538 {
5539 tree lo_index = TREE_OPERAND (purpose, 0);
5540 tree hi_index = TREE_OPERAND (purpose, 1);
5541
5542 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
5543 mult = (tree_low_cst (hi_index, 1)
5544 - tree_low_cst (lo_index, 1) + 1);
5545 }
5546 num_fields += mult;
5547 elt_type = TREE_TYPE (value);
5548
5549 switch (TREE_CODE (value))
5550 {
5551 case CONSTRUCTOR:
5552 {
5553 HOST_WIDE_INT nz = 0, ic = 0;
5554
5555 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5556 p_complete);
5557
5558 nz_elts += mult * nz;
5559 init_elts += mult * ic;
5560
5561 if (const_from_elts_p && const_p)
5562 const_p = const_elt_p;
5563 }
5564 break;
5565
5566 case INTEGER_CST:
5567 case REAL_CST:
5568 case FIXED_CST:
5569 if (!initializer_zerop (value))
5570 nz_elts += mult;
5571 init_elts += mult;
5572 break;
5573
5574 case STRING_CST:
5575 nz_elts += mult * TREE_STRING_LENGTH (value);
5576 init_elts += mult * TREE_STRING_LENGTH (value);
5577 break;
5578
5579 case COMPLEX_CST:
5580 if (!initializer_zerop (TREE_REALPART (value)))
5581 nz_elts += mult;
5582 if (!initializer_zerop (TREE_IMAGPART (value)))
5583 nz_elts += mult;
5584 init_elts += mult;
5585 break;
5586
5587 case VECTOR_CST:
5588 {
5589 unsigned i;
5590 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5591 {
5592 tree v = VECTOR_CST_ELT (value, i);
5593 if (!initializer_zerop (v))
5594 nz_elts += mult;
5595 init_elts += mult;
5596 }
5597 }
5598 break;
5599
5600 default:
5601 {
5602 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5603 nz_elts += mult * tc;
5604 init_elts += mult * tc;
5605
5606 if (const_from_elts_p && const_p)
5607 const_p = initializer_constant_valid_p (value, elt_type)
5608 != NULL_TREE;
5609 }
5610 break;
5611 }
5612 }
5613
5614 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5615 num_fields, elt_type))
5616 *p_complete = false;
5617
5618 *p_nz_elts += nz_elts;
5619 *p_init_elts += init_elts;
5620
5621 return const_p;
5622 }
5623
5624 /* Examine CTOR to discover:
5625 * how many scalar fields are set to nonzero values,
5626 and place it in *P_NZ_ELTS;
5627 * how many scalar fields in total are in CTOR,
5628 and place it in *P_INIT_ELTS;
5629 * whether the constructor is complete -- in the sense that every
5630 meaningful byte is explicitly given a value --
5631 and place it in *P_COMPLETE.
5632
5633 Return whether or not CTOR is a valid static constant initializer, the same
5634 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5635
5636 bool
5637 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5638 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5639 {
5640 *p_nz_elts = 0;
5641 *p_init_elts = 0;
5642 *p_complete = true;
5643
5644 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5645 }
5646
5647 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5648 of which had type LAST_TYPE. Each element was itself a complete
5649 initializer, in the sense that every meaningful byte was explicitly
5650 given a value. Return true if the same is true for the constructor
5651 as a whole. */
5652
5653 bool
5654 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5655 const_tree last_type)
5656 {
5657 if (TREE_CODE (type) == UNION_TYPE
5658 || TREE_CODE (type) == QUAL_UNION_TYPE)
5659 {
5660 if (num_elts == 0)
5661 return false;
5662
5663 gcc_assert (num_elts == 1 && last_type);
5664
5665 /* ??? We could look at each element of the union, and find the
5666 largest element. Which would avoid comparing the size of the
5667 initialized element against any tail padding in the union.
5668 Doesn't seem worth the effort... */
5669 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5670 }
5671
5672 return count_type_elements (type, true) == num_elts;
5673 }
5674
5675 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
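/* Editorial example (illustrative only): a fully covered 16-element
   constructor with 3 nonzero entries counts as mostly zeros here
   (3 < 16 / 4), and so does any constructor that does not completely
   cover its type, since the uncovered part is implicitly zero.  */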
5676
5677 static int
5678 mostly_zeros_p (const_tree exp)
5679 {
5680 if (TREE_CODE (exp) == CONSTRUCTOR)
5681 {
5682 HOST_WIDE_INT nz_elts, init_elts;
5683 bool complete_p;
5684
5685 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5686 return !complete_p || nz_elts < init_elts / 4;
5687 }
5688
5689 return initializer_zerop (exp);
5690 }
5691
5692 /* Return 1 if EXP contains all zeros. */
5693
5694 static int
5695 all_zeros_p (const_tree exp)
5696 {
5697 if (TREE_CODE (exp) == CONSTRUCTOR)
5698 {
5699 HOST_WIDE_INT nz_elts, init_elts;
5700 bool complete_p;
5701
5702 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5703 return nz_elts == 0;
5704 }
5705
5706 return initializer_zerop (exp);
5707 }
5708 \f
5709 /* Helper function for store_constructor.
5710 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5711 CLEARED is as for store_constructor.
5712 ALIAS_SET is the alias set to use for any stores.
5713
5714 This provides a recursive shortcut back to store_constructor when it isn't
5715 necessary to go through store_field. This is so that we can pass through
5716 the cleared field to let store_constructor know that we may not have to
5717 clear a substructure if the outer structure has already been cleared. */
5718
5719 static void
5720 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5721 HOST_WIDE_INT bitpos, enum machine_mode mode,
5722 tree exp, int cleared, alias_set_type alias_set)
5723 {
5724 if (TREE_CODE (exp) == CONSTRUCTOR
5725 /* We can only call store_constructor recursively if the size and
5726 bit position are on a byte boundary. */
5727 && bitpos % BITS_PER_UNIT == 0
5728 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5729 /* If we have a nonzero bitpos for a register target, then we just
5730 let store_field do the bitfield handling. This is unlikely to
5731 generate unnecessary clear instructions anyways. */
5732 && (bitpos == 0 || MEM_P (target)))
5733 {
5734 if (MEM_P (target))
5735 target
5736 = adjust_address (target,
5737 GET_MODE (target) == BLKmode
5738 || 0 != (bitpos
5739 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5740 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5741
5742
5743 /* Update the alias set, if required. */
5744 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5745 && MEM_ALIAS_SET (target) != 0)
5746 {
5747 target = copy_rtx (target);
5748 set_mem_alias_set (target, alias_set);
5749 }
5750
5751 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5752 }
5753 else
5754 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5755 }
5756
5757 /* Store the value of constructor EXP into the rtx TARGET.
5758 TARGET is either a REG or a MEM; we know it cannot conflict, since
5759 safe_from_p has been called.
5760 CLEARED is true if TARGET is known to have been zeroed.
5761 SIZE is the number of bytes of TARGET we are allowed to modify: this
5762 may not be the same as the size of EXP if we are assigning to a field
5763 which has been packed to exclude padding bits. */
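/* Editorial note (an illustrative sketch, not from the original
   sources): for an aggregate such as

     struct s { int a, b, c; } x = { .a = 1 };

   the constructor names fewer fields than the structure has, so the
   code below typically clears the whole of X first (clear_storage)
   and then stores only the explicitly initialized field, instead of
   emitting a separate zero store for every missing member.  */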
5764
5765 static void
5766 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5767 {
5768 tree type = TREE_TYPE (exp);
5769 #ifdef WORD_REGISTER_OPERATIONS
5770 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5771 #endif
5772
5773 switch (TREE_CODE (type))
5774 {
5775 case RECORD_TYPE:
5776 case UNION_TYPE:
5777 case QUAL_UNION_TYPE:
5778 {
5779 unsigned HOST_WIDE_INT idx;
5780 tree field, value;
5781
5782 /* If size is zero or the target is already cleared, do nothing. */
5783 if (size == 0 || cleared)
5784 cleared = 1;
5785 /* We either clear the aggregate or indicate the value is dead. */
5786 else if ((TREE_CODE (type) == UNION_TYPE
5787 || TREE_CODE (type) == QUAL_UNION_TYPE)
5788 && ! CONSTRUCTOR_ELTS (exp))
5789 /* If the constructor is empty, clear the union. */
5790 {
5791 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5792 cleared = 1;
5793 }
5794
5795 /* If we are building a static constructor into a register,
5796 set the initial value as zero so we can fold the value into
5797 a constant. But if more than one register is involved,
5798 this probably loses. */
5799 else if (REG_P (target) && TREE_STATIC (exp)
5800 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5801 {
5802 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5803 cleared = 1;
5804 }
5805
5806 /* If the constructor has fewer fields than the structure or
5807 if we are initializing the structure to mostly zeros, clear
5808 the whole structure first. Don't do this if TARGET is a
5809 register whose mode size isn't equal to SIZE since
5810 clear_storage can't handle this case. */
5811 else if (size > 0
5812 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5813 != fields_length (type))
5814 || mostly_zeros_p (exp))
5815 && (!REG_P (target)
5816 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5817 == size)))
5818 {
5819 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5820 cleared = 1;
5821 }
5822
5823 if (REG_P (target) && !cleared)
5824 emit_clobber (target);
5825
5826 /* Store each element of the constructor into the
5827 corresponding field of TARGET. */
5828 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5829 {
5830 enum machine_mode mode;
5831 HOST_WIDE_INT bitsize;
5832 HOST_WIDE_INT bitpos = 0;
5833 tree offset;
5834 rtx to_rtx = target;
5835
5836 /* Just ignore missing fields. We cleared the whole
5837 structure, above, if any fields are missing. */
5838 if (field == 0)
5839 continue;
5840
5841 if (cleared && initializer_zerop (value))
5842 continue;
5843
5844 if (host_integerp (DECL_SIZE (field), 1))
5845 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5846 else
5847 bitsize = -1;
5848
5849 mode = DECL_MODE (field);
5850 if (DECL_BIT_FIELD (field))
5851 mode = VOIDmode;
5852
5853 offset = DECL_FIELD_OFFSET (field);
5854 if (host_integerp (offset, 0)
5855 && host_integerp (bit_position (field), 0))
5856 {
5857 bitpos = int_bit_position (field);
5858 offset = 0;
5859 }
5860 else
5861 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5862
5863 if (offset)
5864 {
5865 enum machine_mode address_mode;
5866 rtx offset_rtx;
5867
5868 offset
5869 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5870 make_tree (TREE_TYPE (exp),
5871 target));
5872
5873 offset_rtx = expand_normal (offset);
5874 gcc_assert (MEM_P (to_rtx));
5875
5876 address_mode = get_address_mode (to_rtx);
5877 if (GET_MODE (offset_rtx) != address_mode)
5878 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5879
5880 to_rtx = offset_address (to_rtx, offset_rtx,
5881 highest_pow2_factor (offset));
5882 }
5883
5884 #ifdef WORD_REGISTER_OPERATIONS
5885 /* If this initializes a field that is smaller than a
5886 word, at the start of a word, try to widen it to a full
5887 word. This special case allows us to output C++ member
5888 function initializations in a form that the optimizers
5889 can understand. */
5890 if (REG_P (target)
5891 && bitsize < BITS_PER_WORD
5892 && bitpos % BITS_PER_WORD == 0
5893 && GET_MODE_CLASS (mode) == MODE_INT
5894 && TREE_CODE (value) == INTEGER_CST
5895 && exp_size >= 0
5896 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5897 {
5898 tree type = TREE_TYPE (value);
5899
5900 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5901 {
5902 type = lang_hooks.types.type_for_mode
5903 (word_mode, TYPE_UNSIGNED (type));
5904 value = fold_convert (type, value);
5905 }
5906
5907 if (BYTES_BIG_ENDIAN)
5908 value
5909 = fold_build2 (LSHIFT_EXPR, type, value,
5910 build_int_cst (type,
5911 BITS_PER_WORD - bitsize));
5912 bitsize = BITS_PER_WORD;
5913 mode = word_mode;
5914 }
5915 #endif
5916
5917 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5918 && DECL_NONADDRESSABLE_P (field))
5919 {
5920 to_rtx = copy_rtx (to_rtx);
5921 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5922 }
5923
5924 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5925 value, cleared,
5926 get_alias_set (TREE_TYPE (field)));
5927 }
5928 break;
5929 }
5930 case ARRAY_TYPE:
5931 {
5932 tree value, index;
5933 unsigned HOST_WIDE_INT i;
5934 int need_to_clear;
5935 tree domain;
5936 tree elttype = TREE_TYPE (type);
5937 int const_bounds_p;
5938 HOST_WIDE_INT minelt = 0;
5939 HOST_WIDE_INT maxelt = 0;
5940
5941 domain = TYPE_DOMAIN (type);
5942 const_bounds_p = (TYPE_MIN_VALUE (domain)
5943 && TYPE_MAX_VALUE (domain)
5944 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5945 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5946
5947 /* If we have constant bounds for the range of the type, get them. */
5948 if (const_bounds_p)
5949 {
5950 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5951 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5952 }
5953
5954 /* If the constructor has fewer elements than the array, clear
5955 the whole array first. Similarly if this is a static
5956 constructor of a non-BLKmode object. */
5957 if (cleared)
5958 need_to_clear = 0;
5959 else if (REG_P (target) && TREE_STATIC (exp))
5960 need_to_clear = 1;
5961 else
5962 {
5963 unsigned HOST_WIDE_INT idx;
5964 tree index, value;
5965 HOST_WIDE_INT count = 0, zero_count = 0;
5966 need_to_clear = ! const_bounds_p;
5967
5968 /* This loop is a more accurate version of the loop in
5969 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5970 is also needed to check for missing elements. */
5971 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5972 {
5973 HOST_WIDE_INT this_node_count;
5974
5975 if (need_to_clear)
5976 break;
5977
5978 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5979 {
5980 tree lo_index = TREE_OPERAND (index, 0);
5981 tree hi_index = TREE_OPERAND (index, 1);
5982
5983 if (! host_integerp (lo_index, 1)
5984 || ! host_integerp (hi_index, 1))
5985 {
5986 need_to_clear = 1;
5987 break;
5988 }
5989
5990 this_node_count = (tree_low_cst (hi_index, 1)
5991 - tree_low_cst (lo_index, 1) + 1);
5992 }
5993 else
5994 this_node_count = 1;
5995
5996 count += this_node_count;
5997 if (mostly_zeros_p (value))
5998 zero_count += this_node_count;
5999 }
6000
6001 /* Clear the entire array first if there are any missing
6002 elements, or if the incidence of zero elements is >=
6003 75%. */
6004 if (! need_to_clear
6005 && (count < maxelt - minelt + 1
6006 || 4 * zero_count >= 3 * count))
6007 need_to_clear = 1;
6008 }
6009
6010 if (need_to_clear && size > 0)
6011 {
6012 if (REG_P (target))
6013 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6014 else
6015 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6016 cleared = 1;
6017 }
6018
6019 if (!cleared && REG_P (target))
6020 /* Inform later passes that the old value is dead. */
6021 emit_clobber (target);
6022
6023 /* Store each element of the constructor into the
6024 corresponding element of TARGET, determined by counting the
6025 elements. */
6026 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6027 {
6028 enum machine_mode mode;
6029 HOST_WIDE_INT bitsize;
6030 HOST_WIDE_INT bitpos;
6031 rtx xtarget = target;
6032
6033 if (cleared && initializer_zerop (value))
6034 continue;
6035
6036 mode = TYPE_MODE (elttype);
6037 if (mode == BLKmode)
6038 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
6039 ? tree_low_cst (TYPE_SIZE (elttype), 1)
6040 : -1);
6041 else
6042 bitsize = GET_MODE_BITSIZE (mode);
6043
6044 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6045 {
6046 tree lo_index = TREE_OPERAND (index, 0);
6047 tree hi_index = TREE_OPERAND (index, 1);
6048 rtx index_r, pos_rtx;
6049 HOST_WIDE_INT lo, hi, count;
6050 tree position;
6051
6052 /* If the range is constant and "small", unroll the loop. */
6053 if (const_bounds_p
6054 && host_integerp (lo_index, 0)
6055 && host_integerp (hi_index, 0)
6056 && (lo = tree_low_cst (lo_index, 0),
6057 hi = tree_low_cst (hi_index, 0),
6058 count = hi - lo + 1,
6059 (!MEM_P (target)
6060 || count <= 2
6061 || (host_integerp (TYPE_SIZE (elttype), 1)
6062 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
6063 <= 40 * 8)))))
6064 {
6065 lo -= minelt; hi -= minelt;
6066 for (; lo <= hi; lo++)
6067 {
6068 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
6069
6070 if (MEM_P (target)
6071 && !MEM_KEEP_ALIAS_SET_P (target)
6072 && TREE_CODE (type) == ARRAY_TYPE
6073 && TYPE_NONALIASED_COMPONENT (type))
6074 {
6075 target = copy_rtx (target);
6076 MEM_KEEP_ALIAS_SET_P (target) = 1;
6077 }
6078
6079 store_constructor_field
6080 (target, bitsize, bitpos, mode, value, cleared,
6081 get_alias_set (elttype));
6082 }
6083 }
6084 else
6085 {
6086 rtx loop_start = gen_label_rtx ();
6087 rtx loop_end = gen_label_rtx ();
6088 tree exit_cond;
6089
6090 expand_normal (hi_index);
6091
6092 index = build_decl (EXPR_LOCATION (exp),
6093 VAR_DECL, NULL_TREE, domain);
6094 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6095 SET_DECL_RTL (index, index_r);
6096 store_expr (lo_index, index_r, 0, false);
6097
6098 /* Build the head of the loop. */
6099 do_pending_stack_adjust ();
6100 emit_label (loop_start);
6101
6102 /* Assign value to element index. */
6103 position =
6104 fold_convert (ssizetype,
6105 fold_build2 (MINUS_EXPR,
6106 TREE_TYPE (index),
6107 index,
6108 TYPE_MIN_VALUE (domain)));
6109
6110 position =
6111 size_binop (MULT_EXPR, position,
6112 fold_convert (ssizetype,
6113 TYPE_SIZE_UNIT (elttype)));
6114
6115 pos_rtx = expand_normal (position);
6116 xtarget = offset_address (target, pos_rtx,
6117 highest_pow2_factor (position));
6118 xtarget = adjust_address (xtarget, mode, 0);
6119 if (TREE_CODE (value) == CONSTRUCTOR)
6120 store_constructor (value, xtarget, cleared,
6121 bitsize / BITS_PER_UNIT);
6122 else
6123 store_expr (value, xtarget, 0, false);
6124
6125 /* Generate a conditional jump to exit the loop. */
6126 exit_cond = build2 (LT_EXPR, integer_type_node,
6127 index, hi_index);
6128 jumpif (exit_cond, loop_end, -1);
6129
6130 /* Update the loop counter, and jump to the head of
6131 the loop. */
6132 expand_assignment (index,
6133 build2 (PLUS_EXPR, TREE_TYPE (index),
6134 index, integer_one_node),
6135 false);
6136
6137 emit_jump (loop_start);
6138
6139 /* Build the end of the loop. */
6140 emit_label (loop_end);
6141 }
6142 }
6143 else if ((index != 0 && ! host_integerp (index, 0))
6144 || ! host_integerp (TYPE_SIZE (elttype), 1))
6145 {
6146 tree position;
6147
6148 if (index == 0)
6149 index = ssize_int (1);
6150
6151 if (minelt)
6152 index = fold_convert (ssizetype,
6153 fold_build2 (MINUS_EXPR,
6154 TREE_TYPE (index),
6155 index,
6156 TYPE_MIN_VALUE (domain)));
6157
6158 position =
6159 size_binop (MULT_EXPR, index,
6160 fold_convert (ssizetype,
6161 TYPE_SIZE_UNIT (elttype)));
6162 xtarget = offset_address (target,
6163 expand_normal (position),
6164 highest_pow2_factor (position));
6165 xtarget = adjust_address (xtarget, mode, 0);
6166 store_expr (value, xtarget, 0, false);
6167 }
6168 else
6169 {
6170 if (index != 0)
6171 bitpos = ((tree_low_cst (index, 0) - minelt)
6172 * tree_low_cst (TYPE_SIZE (elttype), 1));
6173 else
6174 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
6175
6176 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6177 && TREE_CODE (type) == ARRAY_TYPE
6178 && TYPE_NONALIASED_COMPONENT (type))
6179 {
6180 target = copy_rtx (target);
6181 MEM_KEEP_ALIAS_SET_P (target) = 1;
6182 }
6183 store_constructor_field (target, bitsize, bitpos, mode, value,
6184 cleared, get_alias_set (elttype));
6185 }
6186 }
6187 break;
6188 }
6189
6190 case VECTOR_TYPE:
6191 {
6192 unsigned HOST_WIDE_INT idx;
6193 constructor_elt *ce;
6194 int i;
6195 int need_to_clear;
6196 int icode = CODE_FOR_nothing;
6197 tree elttype = TREE_TYPE (type);
6198 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
6199 enum machine_mode eltmode = TYPE_MODE (elttype);
6200 HOST_WIDE_INT bitsize;
6201 HOST_WIDE_INT bitpos;
6202 rtvec vector = NULL;
6203 unsigned n_elts;
6204 alias_set_type alias;
6205
6206 gcc_assert (eltmode != BLKmode);
6207
6208 n_elts = TYPE_VECTOR_SUBPARTS (type);
6209 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6210 {
6211 enum machine_mode mode = GET_MODE (target);
6212
6213 icode = (int) optab_handler (vec_init_optab, mode);
6214 if (icode != CODE_FOR_nothing)
6215 {
6216 unsigned int i;
6217
6218 vector = rtvec_alloc (n_elts);
6219 for (i = 0; i < n_elts; i++)
6220 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6221 }
6222 }
6223
6224 /* If the constructor has fewer elements than the vector,
6225 clear the whole vector first. Similarly if this is a static
6226 constructor of a non-BLKmode object. */
6227 if (cleared)
6228 need_to_clear = 0;
6229 else if (REG_P (target) && TREE_STATIC (exp))
6230 need_to_clear = 1;
6231 else
6232 {
6233 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6234 tree value;
6235
6236 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6237 {
6238 int n_elts_here = tree_low_cst
6239 (int_const_binop (TRUNC_DIV_EXPR,
6240 TYPE_SIZE (TREE_TYPE (value)),
6241 TYPE_SIZE (elttype)), 1);
6242
6243 count += n_elts_here;
6244 if (mostly_zeros_p (value))
6245 zero_count += n_elts_here;
6246 }
6247
6248 /* Clear the entire vector first if there are any missing elements,
6249 or if the incidence of zero elements is >= 75%. */
6250 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6251 }
6252
6253 if (need_to_clear && size > 0 && !vector)
6254 {
6255 if (REG_P (target))
6256 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6257 else
6258 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6259 cleared = 1;
6260 }
6261
6262 /* Inform later passes that the old value is dead. */
6263 if (!cleared && !vector && REG_P (target))
6264 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6265
6266 if (MEM_P (target))
6267 alias = MEM_ALIAS_SET (target);
6268 else
6269 alias = get_alias_set (elttype);
6270
6271 /* Store each element of the constructor into the corresponding
6272 element of TARGET, determined by counting the elements. */
6273 for (idx = 0, i = 0;
6274 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6275 idx++, i += bitsize / elt_size)
6276 {
6277 HOST_WIDE_INT eltpos;
6278 tree value = ce->value;
6279
6280 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
6281 if (cleared && initializer_zerop (value))
6282 continue;
6283
6284 if (ce->index)
6285 eltpos = tree_low_cst (ce->index, 1);
6286 else
6287 eltpos = i;
6288
6289 if (vector)
6290 {
6291 /* Vector CONSTRUCTORs should only be built from smaller
6292 vectors in the case of BLKmode vectors. */
6293 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6294 RTVEC_ELT (vector, eltpos)
6295 = expand_normal (value);
6296 }
6297 else
6298 {
6299 enum machine_mode value_mode =
6300 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6301 ? TYPE_MODE (TREE_TYPE (value))
6302 : eltmode;
6303 bitpos = eltpos * elt_size;
6304 store_constructor_field (target, bitsize, bitpos, value_mode,
6305 value, cleared, alias);
6306 }
6307 }
6308
6309 if (vector)
6310 emit_insn (GEN_FCN (icode)
6311 (target,
6312 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6313 break;
6314 }
6315
6316 default:
6317 gcc_unreachable ();
6318 }
6319 }
6320
6321 /* Store the value of EXP (an expression tree)
6322 into a subfield of TARGET which has mode MODE and occupies
6323 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6324 If MODE is VOIDmode, it means that we are storing into a bit-field.
6325
6326 BITREGION_START is bitpos of the first bitfield in this region.
6327 BITREGION_END is the bitpos of the ending bitfield in this region.
6328 These two fields are 0, if the C++ memory model does not apply,
6329 or we are not interested in keeping track of bitfield regions.
6330
6331 Always return const0_rtx unless we have something particular to
6332 return.
6333
6334 ALIAS_SET is the alias set for the destination. This value will
6335 (in general) be different from that for TARGET, since TARGET is a
6336 reference to the containing structure.
6337
6338 If NONTEMPORAL is true, try generating a nontemporal store. */
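/* Editorial example (illustrative only): a bit-field assignment such as

     struct s { unsigned x : 3; } s;
     s.x = v;

   reaches store_field with MODE == VOIDmode and BITSIZE == 3, so the
   value is expanded and then inserted with store_bit_field rather than
   through an ordinary memory store.  */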
6339
6340 static rtx
6341 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6342 unsigned HOST_WIDE_INT bitregion_start,
6343 unsigned HOST_WIDE_INT bitregion_end,
6344 enum machine_mode mode, tree exp,
6345 alias_set_type alias_set, bool nontemporal)
6346 {
6347 if (TREE_CODE (exp) == ERROR_MARK)
6348 return const0_rtx;
6349
6350 /* If we have nothing to store, do nothing unless the expression has
6351 side-effects. */
6352 if (bitsize == 0)
6353 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6354
6355 if (GET_CODE (target) == CONCAT)
6356 {
6357 /* We're storing into a struct containing a single __complex. */
6358
6359 gcc_assert (!bitpos);
6360 return store_expr (exp, target, 0, nontemporal);
6361 }
6362
6363 /* If the structure is in a register or if the component
6364 is a bit field, we cannot use addressing to access it.
6365 Use bit-field techniques or SUBREG to store in it. */
6366
6367 if (mode == VOIDmode
6368 || (mode != BLKmode && ! direct_store[(int) mode]
6369 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6370 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6371 || REG_P (target)
6372 || GET_CODE (target) == SUBREG
6373 /* If the field isn't aligned enough to store as an ordinary memref,
6374 store it as a bit field. */
6375 || (mode != BLKmode
6376 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6377 || bitpos % GET_MODE_ALIGNMENT (mode))
6378 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6379 || (bitpos % BITS_PER_UNIT != 0)))
6380 || (bitsize >= 0 && mode != BLKmode
6381 && GET_MODE_BITSIZE (mode) > bitsize)
6382 /* If the RHS and field are a constant size and the size of the
6383 RHS isn't the same size as the bitfield, we must use bitfield
6384 operations. */
6385 || (bitsize >= 0
6386 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6387 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6388 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6389 decl we must use bitfield operations. */
6390 || (bitsize >= 0
6391 && TREE_CODE (exp) == MEM_REF
6392 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6393 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6394 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6395 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6396 {
6397 rtx temp;
6398 gimple nop_def;
6399
6400 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6401 implies a mask operation. If the precision is the same size as
6402 the field we're storing into, that mask is redundant. This is
6403 particularly common with bit field assignments generated by the
6404 C front end. */
6405 nop_def = get_def_for_expr (exp, NOP_EXPR);
6406 if (nop_def)
6407 {
6408 tree type = TREE_TYPE (exp);
6409 if (INTEGRAL_TYPE_P (type)
6410 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6411 && bitsize == TYPE_PRECISION (type))
6412 {
6413 tree op = gimple_assign_rhs1 (nop_def);
6414 type = TREE_TYPE (op);
6415 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6416 exp = op;
6417 }
6418 }
6419
6420 temp = expand_normal (exp);
6421
6422 /* If BITSIZE is narrower than the size of the type of EXP
6423 we will be narrowing TEMP. Normally, what's wanted are the
6424 low-order bits. However, if EXP's type is a record and this is a
6425 big-endian machine, we want the upper BITSIZE bits. */
6426 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6427 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6428 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6429 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6430 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6431 NULL_RTX, 1);
6432
6433 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6434 if (mode != VOIDmode && mode != BLKmode
6435 && mode != TYPE_MODE (TREE_TYPE (exp)))
6436 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6437
6438 /* If the modes of TEMP and TARGET are both BLKmode, both
6439 must be in memory and BITPOS must be aligned on a byte
6440 boundary. If so, we simply do a block copy. Likewise
6441 for a BLKmode-like TARGET. */
6442 if (GET_MODE (temp) == BLKmode
6443 && (GET_MODE (target) == BLKmode
6444 || (MEM_P (target)
6445 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6446 && (bitpos % BITS_PER_UNIT) == 0
6447 && (bitsize % BITS_PER_UNIT) == 0)))
6448 {
6449 gcc_assert (MEM_P (target) && MEM_P (temp)
6450 && (bitpos % BITS_PER_UNIT) == 0);
6451
6452 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6453 emit_block_move (target, temp,
6454 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6455 / BITS_PER_UNIT),
6456 BLOCK_OP_NORMAL);
6457
6458 return const0_rtx;
6459 }
6460
6461 /* Handle calls that return values in multiple non-contiguous locations.
6462 The Irix 6 ABI has examples of this. */
6463 if (GET_CODE (temp) == PARALLEL)
6464 {
6465 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6466 rtx temp_target;
6467 if (mode == BLKmode)
6468 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6469 temp_target = gen_reg_rtx (mode);
6470 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6471 temp = temp_target;
6472 }
6473 else if (mode == BLKmode)
6474 {
6475 /* Handle calls that return BLKmode values in registers. */
6476 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6477 {
6478 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6479 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6480 temp = temp_target;
6481 }
6482 else
6483 {
6484 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6485 rtx temp_target;
6486 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6487 temp_target = gen_reg_rtx (mode);
6488 temp_target
6489 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6490 false, temp_target, mode, mode);
6491 temp = temp_target;
6492 }
6493 }
6494
6495 /* Store the value in the bitfield. */
6496 store_bit_field (target, bitsize, bitpos,
6497 bitregion_start, bitregion_end,
6498 mode, temp);
6499
6500 return const0_rtx;
6501 }
6502 else
6503 {
6504 /* Now build a reference to just the desired component. */
6505 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6506
6507 if (to_rtx == target)
6508 to_rtx = copy_rtx (to_rtx);
6509
6510 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6511 set_mem_alias_set (to_rtx, alias_set);
6512
6513 return store_expr (exp, to_rtx, 0, nontemporal);
6514 }
6515 }
6516 \f
6517 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6518 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6519 codes and find the ultimate containing object, which we return.
6520
6521 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6522 bit position, and *PUNSIGNEDP to the signedness of the field.
6523 If the position of the field is variable, we store a tree
6524 giving the variable offset (in units) in *POFFSET.
6525 This offset is in addition to the bit position.
6526 If the position is not variable, we store 0 in *POFFSET.
6527
6528 If any of the extraction expressions is volatile,
6529 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6530
6531 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6532 Otherwise, it is a mode that can be used to access the field.
6533
6534 If the field describes a variable-sized object, *PMODE is set to
6535 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6536 this case, but the address of the object can be found.
6537
6538 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6539 look through nodes that serve as markers of a greater alignment than
6540 the one that can be deduced from the expression. These nodes make it
6541 possible for front-ends to prevent temporaries from being created by
6542 the middle-end on alignment considerations. For that purpose, the
6543 normal operating mode at high-level is to always pass FALSE so that
6544 the ultimate containing object is really returned; moreover, the
6545 associated predicate handled_component_p will always return TRUE
6546 on these nodes, thus indicating that they are essentially handled
6547 by get_inner_reference. TRUE should only be passed when the caller
6548 is scanning the expression in order to build another representation
6549 and specifically knows how to handle these nodes; as such, this is
6550 the normal operating mode in the RTL expanders. */
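/* Editorial example (illustrative only): for a reference such as

     struct s { int pad; int a[10]; } x;
     ... x.a[i] ...

   get_inner_reference peels the ARRAY_REF and COMPONENT_REF and
   returns the VAR_DECL for X as the ultimate containing object, with
   *PBITSIZE set to the width of int, *PBITPOS set to the constant bit
   offset of field "a" (the bits occupied by "pad"), and the variable
   byte offset i * sizeof (int) returned in *POFFSET.  */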
6551
6552 tree
6553 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6554 HOST_WIDE_INT *pbitpos, tree *poffset,
6555 enum machine_mode *pmode, int *punsignedp,
6556 int *pvolatilep, bool keep_aligning)
6557 {
6558 tree size_tree = 0;
6559 enum machine_mode mode = VOIDmode;
6560 bool blkmode_bitfield = false;
6561 tree offset = size_zero_node;
6562 double_int bit_offset = double_int_zero;
6563
6564 /* First get the mode, signedness, and size. We do this from just the
6565 outermost expression. */
6566 *pbitsize = -1;
6567 if (TREE_CODE (exp) == COMPONENT_REF)
6568 {
6569 tree field = TREE_OPERAND (exp, 1);
6570 size_tree = DECL_SIZE (field);
6571 if (!DECL_BIT_FIELD (field))
6572 mode = DECL_MODE (field);
6573 else if (DECL_MODE (field) == BLKmode)
6574 blkmode_bitfield = true;
6575 else if (TREE_THIS_VOLATILE (exp)
6576 && flag_strict_volatile_bitfields > 0)
6577 /* Volatile bitfields should be accessed in the mode of the
6578 field's type, not the mode computed based on the bit
6579 size. */
6580 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6581
6582 *punsignedp = DECL_UNSIGNED (field);
6583 }
6584 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6585 {
6586 size_tree = TREE_OPERAND (exp, 1);
6587 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6588 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6589
6590 /* For vector types, with the correct size of access, use the mode of
6591 the inner type. */
6592 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6593 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6594 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6595 mode = TYPE_MODE (TREE_TYPE (exp));
6596 }
6597 else
6598 {
6599 mode = TYPE_MODE (TREE_TYPE (exp));
6600 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6601
6602 if (mode == BLKmode)
6603 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6604 else
6605 *pbitsize = GET_MODE_BITSIZE (mode);
6606 }
6607
6608 if (size_tree != 0)
6609 {
6610 if (! host_integerp (size_tree, 1))
6611 mode = BLKmode, *pbitsize = -1;
6612 else
6613 *pbitsize = tree_low_cst (size_tree, 1);
6614 }
6615
6616 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6617 and find the ultimate containing object. */
6618 while (1)
6619 {
6620 switch (TREE_CODE (exp))
6621 {
6622 case BIT_FIELD_REF:
6623 bit_offset += tree_to_double_int (TREE_OPERAND (exp, 2));
6624 break;
6625
6626 case COMPONENT_REF:
6627 {
6628 tree field = TREE_OPERAND (exp, 1);
6629 tree this_offset = component_ref_field_offset (exp);
6630
6631 /* If this field hasn't been filled in yet, don't go past it.
6632 This should only happen when folding expressions made during
6633 type construction. */
6634 if (this_offset == 0)
6635 break;
6636
6637 offset = size_binop (PLUS_EXPR, offset, this_offset);
6638 bit_offset += tree_to_double_int (DECL_FIELD_BIT_OFFSET (field));
6639
6640 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6641 }
6642 break;
6643
6644 case ARRAY_REF:
6645 case ARRAY_RANGE_REF:
6646 {
6647 tree index = TREE_OPERAND (exp, 1);
6648 tree low_bound = array_ref_low_bound (exp);
6649 tree unit_size = array_ref_element_size (exp);
6650
6651 /* We assume all arrays have sizes that are a multiple of a byte.
6652 First subtract the lower bound, if any, in the type of the
6653 index, then convert to sizetype and multiply by the size of
6654 the array element. */
6655 if (! integer_zerop (low_bound))
6656 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6657 index, low_bound);
6658
6659 offset = size_binop (PLUS_EXPR, offset,
6660 size_binop (MULT_EXPR,
6661 fold_convert (sizetype, index),
6662 unit_size));
6663 }
6664 break;
6665
6666 case REALPART_EXPR:
6667 break;
6668
6669 case IMAGPART_EXPR:
6670 bit_offset += double_int::from_uhwi (*pbitsize);
6671 break;
6672
6673 case VIEW_CONVERT_EXPR:
6674 if (keep_aligning && STRICT_ALIGNMENT
6675 && (TYPE_ALIGN (TREE_TYPE (exp))
6676 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6677 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6678 < BIGGEST_ALIGNMENT)
6679 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6680 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6681 goto done;
6682 break;
6683
6684 case MEM_REF:
6685 /* Hand back the decl for MEM[&decl, off]. */
6686 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6687 {
6688 tree off = TREE_OPERAND (exp, 1);
6689 if (!integer_zerop (off))
6690 {
6691 double_int boff, coff = mem_ref_offset (exp);
6692 boff = coff.alshift (BITS_PER_UNIT == 8
6693 ? 3 : exact_log2 (BITS_PER_UNIT),
6694 HOST_BITS_PER_DOUBLE_INT);
6695 bit_offset += boff;
6696 }
6697 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6698 }
6699 goto done;
6700
6701 default:
6702 goto done;
6703 }
6704
6705 /* If any reference in the chain is volatile, the effect is volatile. */
6706 if (TREE_THIS_VOLATILE (exp))
6707 *pvolatilep = 1;
6708
6709 exp = TREE_OPERAND (exp, 0);
6710 }
6711 done:
6712
6713 /* If OFFSET is constant, see if we can return the whole thing as a
6714 constant bit position. Make sure to handle overflow during
6715 this conversion. */
6716 if (TREE_CODE (offset) == INTEGER_CST)
6717 {
6718 double_int tem = tree_to_double_int (offset);
6719 tem = tem.sext (TYPE_PRECISION (sizetype));
6720 tem = tem.alshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT),
6721 HOST_BITS_PER_DOUBLE_INT);
6722 tem += bit_offset;
6723 if (tem.fits_shwi ())
6724 {
6725 *pbitpos = tem.to_shwi ();
6726 *poffset = offset = NULL_TREE;
6727 }
6728 }
6729
6730 /* Otherwise, split it up. */
6731 if (offset)
6732 {
6733 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6734 if (bit_offset.is_negative ())
6735 {
6736 double_int mask
6737 = double_int::mask (BITS_PER_UNIT == 8
6738 ? 3 : exact_log2 (BITS_PER_UNIT));
6739 double_int tem = bit_offset.and_not (mask);
6740 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6741 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6742 bit_offset -= tem;
6743 tem = tem.arshift (BITS_PER_UNIT == 8
6744 ? 3 : exact_log2 (BITS_PER_UNIT),
6745 HOST_BITS_PER_DOUBLE_INT);
6746 offset = size_binop (PLUS_EXPR, offset,
6747 double_int_to_tree (sizetype, tem));
6748 }
6749
6750 *pbitpos = bit_offset.to_shwi ();
6751 *poffset = offset;
6752 }
6753
6754 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6755 if (mode == VOIDmode
6756 && blkmode_bitfield
6757 && (*pbitpos % BITS_PER_UNIT) == 0
6758 && (*pbitsize % BITS_PER_UNIT) == 0)
6759 *pmode = BLKmode;
6760 else
6761 *pmode = mode;
6762
6763 return exp;
6764 }
6765
6766 /* Return a tree of sizetype representing the size, in bytes, of the element
6767 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6768
6769 tree
6770 array_ref_element_size (tree exp)
6771 {
6772 tree aligned_size = TREE_OPERAND (exp, 3);
6773 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6774 location_t loc = EXPR_LOCATION (exp);
6775
6776 /* If a size was specified in the ARRAY_REF, it's the size measured
6777 in alignment units of the element type. So multiply by that value. */
6778 if (aligned_size)
6779 {
6780 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6781 sizetype from another type of the same width and signedness. */
6782 if (TREE_TYPE (aligned_size) != sizetype)
6783 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6784 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6785 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6786 }
6787
6788 /* Otherwise, take the size from that of the element type. Substitute
6789 any PLACEHOLDER_EXPR that we have. */
6790 else
6791 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6792 }
6793
6794 /* Return a tree representing the lower bound of the array mentioned in
6795 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6796
6797 tree
6798 array_ref_low_bound (tree exp)
6799 {
6800 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6801
6802 /* If a lower bound is specified in EXP, use it. */
6803 if (TREE_OPERAND (exp, 2))
6804 return TREE_OPERAND (exp, 2);
6805
6806 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6807 substituting for a PLACEHOLDER_EXPR as needed. */
6808 if (domain_type && TYPE_MIN_VALUE (domain_type))
6809 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6810
6811 /* Otherwise, return a zero of the appropriate type. */
6812 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6813 }
6814
6815 /* Returns true if REF is an array reference to an array at the end of
6816 a structure. If this is the case, the array may be allocated larger
6817 than its upper bound implies. */
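/* Editorial example (illustrative only): the classic "struct hack"

     struct str { int len; char contents[1]; };
     struct str *s = malloc (sizeof (struct str) + extra);

   makes accesses like s->contents[5] plausible even though the
   declared bound is 1; array_at_struct_end_p returns true for such
   trailing-array references, but false when the reference is rooted
   in a declared object whose size is fixed by its type.  */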
6818
6819 bool
6820 array_at_struct_end_p (tree ref)
6821 {
6822 if (TREE_CODE (ref) != ARRAY_REF
6823 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6824 return false;
6825
6826 while (handled_component_p (ref))
6827 {
6828 /* If the reference chain contains a component reference to a
6829 non-union type and another field follows it, the reference
6830 is not at the end of a structure. */
6831 if (TREE_CODE (ref) == COMPONENT_REF
6832 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6833 {
6834 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6835 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6836 nextf = DECL_CHAIN (nextf);
6837 if (nextf)
6838 return false;
6839 }
6840
6841 ref = TREE_OPERAND (ref, 0);
6842 }
6843
6844 /* If the reference is based on a declared entity, the size of the array
6845 is constrained by its given domain. */
6846 if (DECL_P (ref))
6847 return false;
6848
6849 return true;
6850 }
6851
6852 /* Return a tree representing the upper bound of the array mentioned in
6853 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6854
6855 tree
6856 array_ref_up_bound (tree exp)
6857 {
6858 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6859
6860 /* If there is a domain type and it has an upper bound, use it, substituting
6861 for a PLACEHOLDER_EXPR as needed. */
6862 if (domain_type && TYPE_MAX_VALUE (domain_type))
6863 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6864
6865 /* Otherwise fail. */
6866 return NULL_TREE;
6867 }
6868
6869 /* Return a tree representing the offset, in bytes, of the field referenced
6870 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6871
6872 tree
6873 component_ref_field_offset (tree exp)
6874 {
6875 tree aligned_offset = TREE_OPERAND (exp, 2);
6876 tree field = TREE_OPERAND (exp, 1);
6877 location_t loc = EXPR_LOCATION (exp);
6878
6879 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6880 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6881 value. */
6882 if (aligned_offset)
6883 {
6884 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6885 sizetype from another type of the same width and signedness. */
6886 if (TREE_TYPE (aligned_offset) != sizetype)
6887 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6888 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6889 size_int (DECL_OFFSET_ALIGN (field)
6890 / BITS_PER_UNIT));
6891 }
6892
6893 /* Otherwise, take the offset from that of the field. Substitute
6894 any PLACEHOLDER_EXPR that we have. */
6895 else
6896 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6897 }
6898
6899 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6900
6901 static unsigned HOST_WIDE_INT
6902 target_align (const_tree target)
6903 {
6904 /* We might have a chain of nested references with intermediate misaligning
6905 bitfield components, so we need to recurse to find out. */
6906
6907 unsigned HOST_WIDE_INT this_align, outer_align;
6908
6909 switch (TREE_CODE (target))
6910 {
6911 case BIT_FIELD_REF:
6912 return 1;
6913
6914 case COMPONENT_REF:
6915 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6916 outer_align = target_align (TREE_OPERAND (target, 0));
6917 return MIN (this_align, outer_align);
6918
6919 case ARRAY_REF:
6920 case ARRAY_RANGE_REF:
6921 this_align = TYPE_ALIGN (TREE_TYPE (target));
6922 outer_align = target_align (TREE_OPERAND (target, 0));
6923 return MIN (this_align, outer_align);
6924
6925 CASE_CONVERT:
6926 case NON_LVALUE_EXPR:
6927 case VIEW_CONVERT_EXPR:
6928 this_align = TYPE_ALIGN (TREE_TYPE (target));
6929 outer_align = target_align (TREE_OPERAND (target, 0));
6930 return MAX (this_align, outer_align);
6931
6932 default:
6933 return TYPE_ALIGN (TREE_TYPE (target));
6934 }
6935 }
6936
6937 \f
6938 /* Given an rtx VALUE that may contain additions and multiplications, return
6939 an equivalent value that just refers to a register, memory, or constant.
6940 This is done by generating instructions to perform the arithmetic and
6941 returning a pseudo-register containing the value.
6942
6943 The returned value may be a REG, SUBREG, MEM or constant. */
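/* Editorial example (illustrative only): given
   VALUE == (plus (reg A) (mult (reg B) (const_int 4))), force_operand
   emits the multiply and the addition as real insns and returns a
   pseudo register holding the sum, so the caller can use the result
   anywhere a general operand is required.  */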
6944
6945 rtx
6946 force_operand (rtx value, rtx target)
6947 {
6948 rtx op1, op2;
6949 /* Use subtarget as the target for operand 0 of a binary operation. */
6950 rtx subtarget = get_subtarget (target);
6951 enum rtx_code code = GET_CODE (value);
6952
6953 /* Check for a subreg applied to an expression produced by the loop optimizer. */
6954 if (code == SUBREG
6955 && !REG_P (SUBREG_REG (value))
6956 && !MEM_P (SUBREG_REG (value)))
6957 {
6958 value
6959 = simplify_gen_subreg (GET_MODE (value),
6960 force_reg (GET_MODE (SUBREG_REG (value)),
6961 force_operand (SUBREG_REG (value),
6962 NULL_RTX)),
6963 GET_MODE (SUBREG_REG (value)),
6964 SUBREG_BYTE (value));
6965 code = GET_CODE (value);
6966 }
6967
6968 /* Check for a PIC address load. */
6969 if ((code == PLUS || code == MINUS)
6970 && XEXP (value, 0) == pic_offset_table_rtx
6971 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6972 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6973 || GET_CODE (XEXP (value, 1)) == CONST))
6974 {
6975 if (!subtarget)
6976 subtarget = gen_reg_rtx (GET_MODE (value));
6977 emit_move_insn (subtarget, value);
6978 return subtarget;
6979 }
6980
6981 if (ARITHMETIC_P (value))
6982 {
6983 op2 = XEXP (value, 1);
6984 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6985 subtarget = 0;
6986 if (code == MINUS && CONST_INT_P (op2))
6987 {
6988 code = PLUS;
6989 op2 = negate_rtx (GET_MODE (value), op2);
6990 }
6991
6992 /* Check for an addition with OP2 a constant integer and our first
6993 operand a PLUS of a virtual register and something else. In that
6994 case, we want to emit the sum of the virtual register and the
6995 constant first and then add the other value. This allows virtual
6996 register instantiation to simply modify the constant rather than
6997 creating another one around this addition. */
6998 if (code == PLUS && CONST_INT_P (op2)
6999 && GET_CODE (XEXP (value, 0)) == PLUS
7000 && REG_P (XEXP (XEXP (value, 0), 0))
7001 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7002 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7003 {
7004 rtx temp = expand_simple_binop (GET_MODE (value), code,
7005 XEXP (XEXP (value, 0), 0), op2,
7006 subtarget, 0, OPTAB_LIB_WIDEN);
7007 return expand_simple_binop (GET_MODE (value), code, temp,
7008 force_operand (XEXP (XEXP (value,
7009 0), 1), 0),
7010 target, 0, OPTAB_LIB_WIDEN);
7011 }
7012
7013 op1 = force_operand (XEXP (value, 0), subtarget);
7014 op2 = force_operand (op2, NULL_RTX);
7015 switch (code)
7016 {
7017 case MULT:
7018 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7019 case DIV:
7020 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7021 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7022 target, 1, OPTAB_LIB_WIDEN);
7023 else
7024 return expand_divmod (0,
7025 FLOAT_MODE_P (GET_MODE (value))
7026 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7027 GET_MODE (value), op1, op2, target, 0);
7028 case MOD:
7029 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7030 target, 0);
7031 case UDIV:
7032 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7033 target, 1);
7034 case UMOD:
7035 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7036 target, 1);
7037 case ASHIFTRT:
7038 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7039 target, 0, OPTAB_LIB_WIDEN);
7040 default:
7041 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7042 target, 1, OPTAB_LIB_WIDEN);
7043 }
7044 }
7045 if (UNARY_P (value))
7046 {
7047 if (!target)
7048 target = gen_reg_rtx (GET_MODE (value));
7049 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7050 switch (code)
7051 {
7052 case ZERO_EXTEND:
7053 case SIGN_EXTEND:
7054 case TRUNCATE:
7055 case FLOAT_EXTEND:
7056 case FLOAT_TRUNCATE:
7057 convert_move (target, op1, code == ZERO_EXTEND);
7058 return target;
7059
7060 case FIX:
7061 case UNSIGNED_FIX:
7062 expand_fix (target, op1, code == UNSIGNED_FIX);
7063 return target;
7064
7065 case FLOAT:
7066 case UNSIGNED_FLOAT:
7067 expand_float (target, op1, code == UNSIGNED_FLOAT);
7068 return target;
7069
7070 default:
7071 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7072 }
7073 }
7074
7075 #ifdef INSN_SCHEDULING
7076   /* On machines that have insn scheduling, we want all memory references to be
7077 explicit, so we need to deal with such paradoxical SUBREGs. */
7078 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7079 value
7080 = simplify_gen_subreg (GET_MODE (value),
7081 force_reg (GET_MODE (SUBREG_REG (value)),
7082 force_operand (SUBREG_REG (value),
7083 NULL_RTX)),
7084 GET_MODE (SUBREG_REG (value)),
7085 SUBREG_BYTE (value));
7086 #endif
7087
7088 return value;
7089 }
7090 \f
7091 /* Subroutine of expand_expr: return nonzero iff there is no way that
7092 EXP can reference X, which is being modified. TOP_P is nonzero if this
7093 call is going to be used to determine whether we need a temporary
7094 for EXP, as opposed to a recursive call to this function.
7095
7096 It is always safe for this routine to return zero since it merely
7097 searches for optimization opportunities. */
7098
7099 int
7100 safe_from_p (const_rtx x, tree exp, int top_p)
7101 {
7102 rtx exp_rtl = 0;
7103 int i, nops;
7104
7105 if (x == 0
7106 /* If EXP has varying size, we MUST use a target since we currently
7107 have no way of allocating temporaries of variable size
7108 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7109 So we assume here that something at a higher level has prevented a
7110 clash. This is somewhat bogus, but the best we can do. Only
7111 do this when X is BLKmode and when we are at the top level. */
7112 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7113 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7114 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7115 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7116 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7117 != INTEGER_CST)
7118 && GET_MODE (x) == BLKmode)
7119 /* If X is in the outgoing argument area, it is always safe. */
7120 || (MEM_P (x)
7121 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7122 || (GET_CODE (XEXP (x, 0)) == PLUS
7123 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7124 return 1;
7125
7126 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7127 find the underlying pseudo. */
7128 if (GET_CODE (x) == SUBREG)
7129 {
7130 x = SUBREG_REG (x);
7131 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7132 return 0;
7133 }
7134
7135 /* Now look at our tree code and possibly recurse. */
7136 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7137 {
7138 case tcc_declaration:
7139 exp_rtl = DECL_RTL_IF_SET (exp);
7140 break;
7141
7142 case tcc_constant:
7143 return 1;
7144
7145 case tcc_exceptional:
7146 if (TREE_CODE (exp) == TREE_LIST)
7147 {
7148 while (1)
7149 {
7150 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7151 return 0;
7152 exp = TREE_CHAIN (exp);
7153 if (!exp)
7154 return 1;
7155 if (TREE_CODE (exp) != TREE_LIST)
7156 return safe_from_p (x, exp, 0);
7157 }
7158 }
7159 else if (TREE_CODE (exp) == CONSTRUCTOR)
7160 {
7161 constructor_elt *ce;
7162 unsigned HOST_WIDE_INT idx;
7163
7164 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7165 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7166 || !safe_from_p (x, ce->value, 0))
7167 return 0;
7168 return 1;
7169 }
7170 else if (TREE_CODE (exp) == ERROR_MARK)
7171 return 1; /* An already-visited SAVE_EXPR? */
7172 else
7173 return 0;
7174
7175 case tcc_statement:
7176 /* The only case we look at here is the DECL_INITIAL inside a
7177 DECL_EXPR. */
7178 return (TREE_CODE (exp) != DECL_EXPR
7179 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7180 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7181 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7182
7183 case tcc_binary:
7184 case tcc_comparison:
7185 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7186 return 0;
7187 /* Fall through. */
7188
7189 case tcc_unary:
7190 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7191
7192 case tcc_expression:
7193 case tcc_reference:
7194 case tcc_vl_exp:
7195 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7196 the expression. If it is set, we conflict iff we are that rtx or
7197 both are in memory. Otherwise, we check all operands of the
7198 expression recursively. */
7199
7200 switch (TREE_CODE (exp))
7201 {
7202 case ADDR_EXPR:
7203 /* If the operand is static or we are static, we can't conflict.
7204 Likewise if we don't conflict with the operand at all. */
7205 if (staticp (TREE_OPERAND (exp, 0))
7206 || TREE_STATIC (exp)
7207 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7208 return 1;
7209
7210 /* Otherwise, the only way this can conflict is if we are taking
7211 	     the address of a DECL whose address is part of X, which is
7212 very rare. */
7213 exp = TREE_OPERAND (exp, 0);
7214 if (DECL_P (exp))
7215 {
7216 if (!DECL_RTL_SET_P (exp)
7217 || !MEM_P (DECL_RTL (exp)))
7218 return 0;
7219 else
7220 exp_rtl = XEXP (DECL_RTL (exp), 0);
7221 }
7222 break;
7223
7224 case MEM_REF:
7225 if (MEM_P (x)
7226 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7227 get_alias_set (exp)))
7228 return 0;
7229 break;
7230
7231 case CALL_EXPR:
7232 /* Assume that the call will clobber all hard registers and
7233 all of memory. */
7234 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7235 || MEM_P (x))
7236 return 0;
7237 break;
7238
7239 case WITH_CLEANUP_EXPR:
7240 case CLEANUP_POINT_EXPR:
7241 /* Lowered by gimplify.c. */
7242 gcc_unreachable ();
7243
7244 case SAVE_EXPR:
7245 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7246
7247 default:
7248 break;
7249 }
7250
7251 /* If we have an rtx, we do not need to scan our operands. */
7252 if (exp_rtl)
7253 break;
7254
7255 nops = TREE_OPERAND_LENGTH (exp);
7256 for (i = 0; i < nops; i++)
7257 if (TREE_OPERAND (exp, i) != 0
7258 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7259 return 0;
7260
7261 break;
7262
7263 case tcc_type:
7264 /* Should never get a type here. */
7265 gcc_unreachable ();
7266 }
7267
7268 /* If we have an rtl, find any enclosed object. Then see if we conflict
7269 with it. */
7270 if (exp_rtl)
7271 {
7272 if (GET_CODE (exp_rtl) == SUBREG)
7273 {
7274 exp_rtl = SUBREG_REG (exp_rtl);
7275 if (REG_P (exp_rtl)
7276 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7277 return 0;
7278 }
7279
7280 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7281 are memory and they conflict. */
7282 return ! (rtx_equal_p (x, exp_rtl)
7283 || (MEM_P (x) && MEM_P (exp_rtl)
7284 && true_dependence (exp_rtl, VOIDmode, x)));
7285 }
7286
7287 /* If we reach here, it is safe. */
7288 return 1;
7289 }
7290
7291 \f
7292 /* Return the highest power of two that EXP is known to be a multiple of.
7293 This is used in updating alignment of MEMs in array references. */
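   /* Worked example (illustrative): for the tree (x * 12) + 8, with nothing
      known about x, the MULT_EXPR case gives 1 * 4 = 4 and the PLUS_EXPR
      case gives MIN (4, 8) = 4, so the value is known to be a multiple
      of 4.  */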
7294
7295 unsigned HOST_WIDE_INT
7296 highest_pow2_factor (const_tree exp)
7297 {
7298 unsigned HOST_WIDE_INT c0, c1;
7299
7300 switch (TREE_CODE (exp))
7301 {
7302 case INTEGER_CST:
7303 /* We can find the lowest bit that's a one. If the low
7304 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
7305 We need to handle this case since we can find it in a COND_EXPR,
7306 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
7307 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
7308 later ICE. */
7309 if (TREE_OVERFLOW (exp))
7310 return BIGGEST_ALIGNMENT;
7311 else
7312 {
7313 /* Note: tree_low_cst is intentionally not used here,
7314 we don't care about the upper bits. */
7315 c0 = TREE_INT_CST_LOW (exp);
7316 c0 &= -c0;
7317 return c0 ? c0 : BIGGEST_ALIGNMENT;
7318 }
7319 break;
7320
7321 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
7322 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7323 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7324 return MIN (c0, c1);
7325
7326 case MULT_EXPR:
7327 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7328 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7329 return c0 * c1;
7330
7331 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
7332 case CEIL_DIV_EXPR:
7333 if (integer_pow2p (TREE_OPERAND (exp, 1))
7334 && host_integerp (TREE_OPERAND (exp, 1), 1))
7335 {
7336 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7337 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
7338 return MAX (1, c0 / c1);
7339 }
7340 break;
7341
7342 case BIT_AND_EXPR:
7343 /* The highest power of two of a bit-and expression is the maximum of
7344 that of its operands. We typically get here for a complex LHS and
7345 a constant negative power of two on the RHS to force an explicit
7346 alignment, so don't bother looking at the LHS. */
7347 return highest_pow2_factor (TREE_OPERAND (exp, 1));
7348
7349 CASE_CONVERT:
7350 case SAVE_EXPR:
7351 return highest_pow2_factor (TREE_OPERAND (exp, 0));
7352
7353 case COMPOUND_EXPR:
7354 return highest_pow2_factor (TREE_OPERAND (exp, 1));
7355
7356 case COND_EXPR:
7357 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7358 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
7359 return MIN (c0, c1);
7360
7361 default:
7362 break;
7363 }
7364
7365 return 1;
7366 }
7367
7368 /* Similar, except that the alignment requirements of TARGET are
7369 taken into account. Assume it is at least as aligned as its
7370 type, unless it is a COMPONENT_REF in which case the layout of
7371 the structure gives the alignment. */
7372
7373 static unsigned HOST_WIDE_INT
7374 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7375 {
7376 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7377 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7378
7379 return MAX (factor, talign);
7380 }
7381 \f
7382 #ifdef HAVE_conditional_move
7383 /* Convert the tree comparison code TCODE to the rtl one where the
7384 signedness is UNSIGNEDP. */
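   /* For example, LT_EXPR maps to LT when UNSIGNEDP is zero and to LTU when
      it is nonzero; the unordered variants (UNLT_EXPR and friends) do not
      depend on UNSIGNEDP.  */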
7385
7386 static enum rtx_code
7387 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7388 {
7389 enum rtx_code code;
7390 switch (tcode)
7391 {
7392 case EQ_EXPR:
7393 code = EQ;
7394 break;
7395 case NE_EXPR:
7396 code = NE;
7397 break;
7398 case LT_EXPR:
7399 code = unsignedp ? LTU : LT;
7400 break;
7401 case LE_EXPR:
7402 code = unsignedp ? LEU : LE;
7403 break;
7404 case GT_EXPR:
7405 code = unsignedp ? GTU : GT;
7406 break;
7407 case GE_EXPR:
7408 code = unsignedp ? GEU : GE;
7409 break;
7410 case UNORDERED_EXPR:
7411 code = UNORDERED;
7412 break;
7413 case ORDERED_EXPR:
7414 code = ORDERED;
7415 break;
7416 case UNLT_EXPR:
7417 code = UNLT;
7418 break;
7419 case UNLE_EXPR:
7420 code = UNLE;
7421 break;
7422 case UNGT_EXPR:
7423 code = UNGT;
7424 break;
7425 case UNGE_EXPR:
7426 code = UNGE;
7427 break;
7428 case UNEQ_EXPR:
7429 code = UNEQ;
7430 break;
7431 case LTGT_EXPR:
7432 code = LTGT;
7433 break;
7434
7435 default:
7436 gcc_unreachable ();
7437 }
7438 return code;
7439 }
7440 #endif
7441
7442 /* Subroutine of expand_expr. Expand the two operands of a binary
7443 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7444 The value may be stored in TARGET if TARGET is nonzero. The
7445 MODIFIER argument is as documented by expand_expr. */
7446
7447 static void
7448 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7449 enum expand_modifier modifier)
7450 {
7451 if (! safe_from_p (target, exp1, 1))
7452 target = 0;
7453 if (operand_equal_p (exp0, exp1, 0))
7454 {
7455 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7456 *op1 = copy_rtx (*op0);
7457 }
7458 else
7459 {
7460 /* If we need to preserve evaluation order, copy exp0 into its own
7461 temporary variable so that it can't be clobbered by exp1. */
7462 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7463 exp0 = save_expr (exp0);
7464 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7465 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7466 }
7467 }
7468
7469 \f
7470 /* Return a MEM that contains constant EXP. DEFER is as for
7471 output_constant_def and MODIFIER is as for expand_expr. */
7472
7473 static rtx
7474 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7475 {
7476 rtx mem;
7477
7478 mem = output_constant_def (exp, defer);
7479 if (modifier != EXPAND_INITIALIZER)
7480 mem = use_anchored_address (mem);
7481 return mem;
7482 }
7483
7484 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7485 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7486
7487 static rtx
7488 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7489 enum expand_modifier modifier, addr_space_t as)
7490 {
7491 rtx result, subtarget;
7492 tree inner, offset;
7493 HOST_WIDE_INT bitsize, bitpos;
7494 int volatilep, unsignedp;
7495 enum machine_mode mode1;
7496
7497 /* If we are taking the address of a constant and are at the top level,
7498 we have to use output_constant_def since we can't call force_const_mem
7499 at top level. */
7500 /* ??? This should be considered a front-end bug. We should not be
7501 generating ADDR_EXPR of something that isn't an LVALUE. The only
7502 exception here is STRING_CST. */
7503 if (CONSTANT_CLASS_P (exp))
7504 {
7505 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7506 if (modifier < EXPAND_SUM)
7507 result = force_operand (result, target);
7508 return result;
7509 }
7510
7511 /* Everything must be something allowed by is_gimple_addressable. */
7512 switch (TREE_CODE (exp))
7513 {
7514 case INDIRECT_REF:
7515 /* This case will happen via recursion for &a->b. */
7516 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7517
7518 case MEM_REF:
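       /* The address of MEM_REF <p, off> is just P advanced by OFF bytes;
	  e.g. taking the address of *(p + 4) simply re-expands p + 4.  */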
7519 {
7520 tree tem = TREE_OPERAND (exp, 0);
7521 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7522 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7523 return expand_expr (tem, target, tmode, modifier);
7524 }
7525
7526 case CONST_DECL:
7527 /* Expand the initializer like constants above. */
7528 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7529 0, modifier), 0);
7530 if (modifier < EXPAND_SUM)
7531 result = force_operand (result, target);
7532 return result;
7533
7534 case REALPART_EXPR:
7535 /* The real part of the complex number is always first, therefore
7536 the address is the same as the address of the parent object. */
7537 offset = 0;
7538 bitpos = 0;
7539 inner = TREE_OPERAND (exp, 0);
7540 break;
7541
7542 case IMAGPART_EXPR:
7543 /* The imaginary part of the complex number is always second.
7544 The expression is therefore always offset by the size of the
7545 scalar type. */
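	 /* For example, for a _Complex double object on a target where
	    DFmode is 64 bits wide, the imaginary part lives at byte offset 8
	    from the start of the object.  */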
7546 offset = 0;
7547 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7548 inner = TREE_OPERAND (exp, 0);
7549 break;
7550
7551 default:
7552 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7553 expand_expr, as that can have various side effects; LABEL_DECLs for
7554 example, may not have their DECL_RTL set yet. Expand the rtl of
7555 CONSTRUCTORs too, which should yield a memory reference for the
7556 constructor's contents. Assume language specific tree nodes can
7557 be expanded in some interesting way. */
7558 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7559 if (DECL_P (exp)
7560 || TREE_CODE (exp) == CONSTRUCTOR
7561 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7562 {
7563 result = expand_expr (exp, target, tmode,
7564 modifier == EXPAND_INITIALIZER
7565 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7566
7567 /* If the DECL isn't in memory, then the DECL wasn't properly
7568 marked TREE_ADDRESSABLE, which will be either a front-end
7569 or a tree optimizer bug. */
7570
7571 if (TREE_ADDRESSABLE (exp)
7572 && ! MEM_P (result)
7573 	  && ! targetm.calls.allocate_stack_slots_for_args ())
7574 {
7575 error ("local frame unavailable (naked function?)");
7576 return result;
7577 }
7578 else
7579 gcc_assert (MEM_P (result));
7580 result = XEXP (result, 0);
7581
7582 /* ??? Is this needed anymore? */
7583 if (DECL_P (exp))
7584 TREE_USED (exp) = 1;
7585
7586 if (modifier != EXPAND_INITIALIZER
7587 && modifier != EXPAND_CONST_ADDRESS
7588 && modifier != EXPAND_SUM)
7589 result = force_operand (result, target);
7590 return result;
7591 }
7592
7593 /* Pass FALSE as the last argument to get_inner_reference although
7594 we are expanding to RTL. The rationale is that we know how to
7595 handle "aligning nodes" here: we can just bypass them because
7596 they won't change the final object whose address will be returned
7597 (they actually exist only for that purpose). */
7598 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7599 &mode1, &unsignedp, &volatilep, false);
7600 break;
7601 }
7602
7603 /* We must have made progress. */
7604 gcc_assert (inner != exp);
7605
7606 subtarget = offset || bitpos ? NULL_RTX : target;
7607 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7608 inner alignment, force the inner to be sufficiently aligned. */
7609 if (CONSTANT_CLASS_P (inner)
7610 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7611 {
7612 inner = copy_node (inner);
7613 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7614 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7615 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7616 }
7617 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7618
7619 if (offset)
7620 {
7621 rtx tmp;
7622
7623 if (modifier != EXPAND_NORMAL)
7624 result = force_operand (result, NULL);
7625 tmp = expand_expr (offset, NULL_RTX, tmode,
7626 modifier == EXPAND_INITIALIZER
7627 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7628
7629 result = convert_memory_address_addr_space (tmode, result, as);
7630 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7631
7632 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7633 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7634 else
7635 {
7636 subtarget = bitpos ? NULL_RTX : target;
7637 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7638 1, OPTAB_LIB_WIDEN);
7639 }
7640 }
7641
7642 if (bitpos)
7643 {
7644 /* Someone beforehand should have rejected taking the address
7645 of such an object. */
7646 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7647
7648 result = convert_memory_address_addr_space (tmode, result, as);
7649 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7650 if (modifier < EXPAND_SUM)
7651 result = force_operand (result, target);
7652 }
7653
7654 return result;
7655 }
7656
7657 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7658 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7659
7660 static rtx
7661 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7662 enum expand_modifier modifier)
7663 {
7664 addr_space_t as = ADDR_SPACE_GENERIC;
7665 enum machine_mode address_mode = Pmode;
7666 enum machine_mode pointer_mode = ptr_mode;
7667 enum machine_mode rmode;
7668 rtx result;
7669
7670 /* Target mode of VOIDmode says "whatever's natural". */
7671 if (tmode == VOIDmode)
7672 tmode = TYPE_MODE (TREE_TYPE (exp));
7673
7674 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7675 {
7676 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7677 address_mode = targetm.addr_space.address_mode (as);
7678 pointer_mode = targetm.addr_space.pointer_mode (as);
7679 }
7680
7681 /* We can get called with some Weird Things if the user does silliness
7682 like "(short) &a". In that case, convert_memory_address won't do
7683 the right thing, so ignore the given target mode. */
7684 if (tmode != address_mode && tmode != pointer_mode)
7685 tmode = address_mode;
7686
7687 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7688 tmode, modifier, as);
7689
7690   /* Despite expand_expr's claims about ignoring TMODE when not
7691      strictly convenient, things break if we don't honor it.  Note
7692 that combined with the above, we only do this for pointer modes. */
7693 rmode = GET_MODE (result);
7694 if (rmode == VOIDmode)
7695 rmode = tmode;
7696 if (rmode != tmode)
7697 result = convert_memory_address_addr_space (tmode, result, as);
7698
7699 return result;
7700 }
7701
7702 /* Generate code for computing CONSTRUCTOR EXP.
7703 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7704    is TRUE, instead of creating a temporary variable in memory,
7705 NULL is returned and the caller needs to handle it differently. */
7706
7707 static rtx
7708 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7709 bool avoid_temp_mem)
7710 {
7711 tree type = TREE_TYPE (exp);
7712 enum machine_mode mode = TYPE_MODE (type);
7713
7714 /* Try to avoid creating a temporary at all. This is possible
7715 if all of the initializer is zero.
7716 FIXME: try to handle all [0..255] initializers we can handle
7717 with memset. */
7718 if (TREE_STATIC (exp)
7719 && !TREE_ADDRESSABLE (exp)
7720 && target != 0 && mode == BLKmode
7721 && all_zeros_p (exp))
7722 {
7723 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7724 return target;
7725 }
7726
7727 /* All elts simple constants => refer to a constant in memory. But
7728 if this is a non-BLKmode mode, let it store a field at a time
7729 since that should make a CONST_INT or CONST_DOUBLE when we
7730 fold. Likewise, if we have a target we can use, it is best to
7731 store directly into the target unless the type is large enough
7732 that memcpy will be used. If we are making an initializer and
7733 all operands are constant, put it in memory as well.
7734
7735 FIXME: Avoid trying to fill vector constructors piece-meal.
7736 Output them with output_constant_def below unless we're sure
7737 they're zeros. This should go away when vector initializers
7738 are treated like VECTOR_CST instead of arrays. */
7739 if ((TREE_STATIC (exp)
7740 && ((mode == BLKmode
7741 && ! (target != 0 && safe_from_p (target, exp, 1)))
7742 || TREE_ADDRESSABLE (exp)
7743 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7744 && (! MOVE_BY_PIECES_P
7745 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7746 TYPE_ALIGN (type)))
7747 && ! mostly_zeros_p (exp))))
7748 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7749 && TREE_CONSTANT (exp)))
7750 {
7751 rtx constructor;
7752
7753 if (avoid_temp_mem)
7754 return NULL_RTX;
7755
7756 constructor = expand_expr_constant (exp, 1, modifier);
7757
7758 if (modifier != EXPAND_CONST_ADDRESS
7759 && modifier != EXPAND_INITIALIZER
7760 && modifier != EXPAND_SUM)
7761 constructor = validize_mem (constructor);
7762
7763 return constructor;
7764 }
7765
7766 /* Handle calls that pass values in multiple non-contiguous
7767 locations. The Irix 6 ABI has examples of this. */
7768 if (target == 0 || ! safe_from_p (target, exp, 1)
7769 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7770 {
7771 if (avoid_temp_mem)
7772 return NULL_RTX;
7773
7774 target
7775 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7776 | (TREE_READONLY (exp)
7777 * TYPE_QUAL_CONST))),
7778 TREE_ADDRESSABLE (exp), 1);
7779 }
7780
7781 store_constructor (exp, target, 0, int_expr_size (exp));
7782 return target;
7783 }
7784
7785
7786 /* expand_expr: generate code for computing expression EXP.
7787 An rtx for the computed value is returned. The value is never null.
7788 In the case of a void EXP, const0_rtx is returned.
7789
7790 The value may be stored in TARGET if TARGET is nonzero.
7791 TARGET is just a suggestion; callers must assume that
7792 the rtx returned may not be the same as TARGET.
7793
7794 If TARGET is CONST0_RTX, it means that the value will be ignored.
7795
7796 If TMODE is not VOIDmode, it suggests generating the
7797 result in mode TMODE. But this is done only when convenient.
7798    Otherwise, TMODE is ignored and the value is generated in its natural mode.
7799 TMODE is just a suggestion; callers must assume that
7800 the rtx returned may not have mode TMODE.
7801
7802 Note that TARGET may have neither TMODE nor MODE. In that case, it
7803 probably will not be used.
7804
7805 If MODIFIER is EXPAND_SUM then when EXP is an addition
7806 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7807 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7808 products as above, or REG or MEM, or constant.
7809 Ordinarily in such cases we would output mul or add instructions
7810 and then return a pseudo reg containing the sum.
7811
7812 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7813 it also marks a label as absolutely required (it can't be dead).
7814 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7815 This is used for outputting expressions used in initializers.
7816
7817 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7818 with a constant address even if that address is not normally legitimate.
7819 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7820
7821 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7822 a call parameter. Such targets require special care as we haven't yet
7823 marked TARGET so that it's safe from being trashed by libcalls. We
7824 don't want to use TARGET for anything but the final result;
7825    intermediate values must go elsewhere.  Additionally, calls to
7826 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7827
7828 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7829 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7830 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7831 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7832 recursively. */
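   /* Illustration of EXPAND_SUM (not part of the original comment): when
      expanding P + 4 for a pointer P under EXPAND_SUM, the caller may get
      back the rtx (plus (reg P') (const_int 4)) rather than an insn
      sequence computing the sum into a fresh pseudo.  */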
7833
7834 rtx
7835 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7836 enum expand_modifier modifier, rtx *alt_rtl)
7837 {
7838 rtx ret;
7839
7840 /* Handle ERROR_MARK before anybody tries to access its type. */
7841 if (TREE_CODE (exp) == ERROR_MARK
7842 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7843 {
7844 ret = CONST0_RTX (tmode);
7845 return ret ? ret : const0_rtx;
7846 }
7847
7848 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7849 return ret;
7850 }
7851
7852 /* Try to expand the conditional expression which is represented by
7853    TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds,
7854    return the RTL register that represents the result.  Otherwise return
7855    NULL_RTX.  */
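   /* Sketch of the intent: for x = a < b ? c : d on a target with
      conditional moves, this emits roughly a compare of a and b followed by
      a conditional move of c or d into a temporary, avoiding a branch.  */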
7856
7857 static rtx
7858 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7859 tree treeop1 ATTRIBUTE_UNUSED,
7860 tree treeop2 ATTRIBUTE_UNUSED)
7861 {
7862 #ifdef HAVE_conditional_move
7863 rtx insn;
7864 rtx op00, op01, op1, op2;
7865 enum rtx_code comparison_code;
7866 enum machine_mode comparison_mode;
7867 gimple srcstmt;
7868 rtx temp;
7869 tree type = TREE_TYPE (treeop1);
7870 int unsignedp = TYPE_UNSIGNED (type);
7871 enum machine_mode mode = TYPE_MODE (type);
7872
7873 /* If we cannot do a conditional move on the mode, try doing it
7874 with the promoted mode. */
7875 if (!can_conditionally_move_p (mode))
7876 {
7877 mode = promote_mode (type, mode, &unsignedp);
7878 if (!can_conditionally_move_p (mode))
7879 return NULL_RTX;
7880 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7881 }
7882 else
7883 temp = assign_temp (type, 0, 1);
7884
7885 start_sequence ();
7886 expand_operands (treeop1, treeop2,
7887 temp, &op1, &op2, EXPAND_NORMAL);
7888
7889 if (TREE_CODE (treeop0) == SSA_NAME
7890 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7891 {
7892 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7893 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7894 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7895 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7896 comparison_mode = TYPE_MODE (type);
7897 unsignedp = TYPE_UNSIGNED (type);
7898 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7899 }
7900 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7901 {
7902 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7903 enum tree_code cmpcode = TREE_CODE (treeop0);
7904 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7905 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7906 unsignedp = TYPE_UNSIGNED (type);
7907 comparison_mode = TYPE_MODE (type);
7908 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7909 }
7910 else
7911 {
7912 op00 = expand_normal (treeop0);
7913 op01 = const0_rtx;
7914 comparison_code = NE;
7915 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7916 }
7917
7918 if (GET_MODE (op1) != mode)
7919 op1 = gen_lowpart (mode, op1);
7920
7921 if (GET_MODE (op2) != mode)
7922 op2 = gen_lowpart (mode, op2);
7923
7924 /* Try to emit the conditional move. */
7925 insn = emit_conditional_move (temp, comparison_code,
7926 op00, op01, comparison_mode,
7927 op1, op2, mode,
7928 unsignedp);
7929
7930 /* If we could do the conditional move, emit the sequence,
7931 and return. */
7932 if (insn)
7933 {
7934 rtx seq = get_insns ();
7935 end_sequence ();
7936 emit_insn (seq);
7937 return temp;
7938 }
7939
7940 /* Otherwise discard the sequence and fall back to code with
7941 branches. */
7942 end_sequence ();
7943 #endif
7944 return NULL_RTX;
7945 }
7946
7947 rtx
7948 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7949 enum expand_modifier modifier)
7950 {
7951 rtx op0, op1, op2, temp;
7952 tree type;
7953 int unsignedp;
7954 enum machine_mode mode;
7955 enum tree_code code = ops->code;
7956 optab this_optab;
7957 rtx subtarget, original_target;
7958 int ignore;
7959 bool reduce_bit_field;
7960 location_t loc = ops->location;
7961 tree treeop0, treeop1, treeop2;
7962 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7963 ? reduce_to_bit_field_precision ((expr), \
7964 target, \
7965 type) \
7966 : (expr))
7967
7968 type = ops->type;
7969 mode = TYPE_MODE (type);
7970 unsignedp = TYPE_UNSIGNED (type);
7971
7972 treeop0 = ops->op0;
7973 treeop1 = ops->op1;
7974 treeop2 = ops->op2;
7975
7976 /* We should be called only on simple (binary or unary) expressions,
7977 exactly those that are valid in gimple expressions that aren't
7978 GIMPLE_SINGLE_RHS (or invalid). */
7979 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7980 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7981 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7982
7983 ignore = (target == const0_rtx
7984 || ((CONVERT_EXPR_CODE_P (code)
7985 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7986 && TREE_CODE (type) == VOID_TYPE));
7987
7988 /* We should be called only if we need the result. */
7989 gcc_assert (!ignore);
7990
7991 /* An operation in what may be a bit-field type needs the
7992 result to be reduced to the precision of the bit-field type,
7993 which is narrower than that of the type's mode. */
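   /* For example, for a 3-bit unsigned bit-field computed in SImode, a sum
      whose mathematical value is 9 must be reduced to 9 & 7 = 1 before it
      is used.  */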
7994 reduce_bit_field = (INTEGRAL_TYPE_P (type)
7995 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7996
7997 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7998 target = 0;
7999
8000 /* Use subtarget as the target for operand 0 of a binary operation. */
8001 subtarget = get_subtarget (target);
8002 original_target = target;
8003
8004 switch (code)
8005 {
8006 case NON_LVALUE_EXPR:
8007 case PAREN_EXPR:
8008 CASE_CONVERT:
8009 if (treeop0 == error_mark_node)
8010 return const0_rtx;
8011
8012 if (TREE_CODE (type) == UNION_TYPE)
8013 {
8014 tree valtype = TREE_TYPE (treeop0);
8015
8016 /* If both input and output are BLKmode, this conversion isn't doing
8017 anything except possibly changing memory attribute. */
8018 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8019 {
8020 rtx result = expand_expr (treeop0, target, tmode,
8021 modifier);
8022
8023 result = copy_rtx (result);
8024 set_mem_attributes (result, type, 0);
8025 return result;
8026 }
8027
8028 if (target == 0)
8029 {
8030 if (TYPE_MODE (type) != BLKmode)
8031 target = gen_reg_rtx (TYPE_MODE (type));
8032 else
8033 target = assign_temp (type, 1, 1);
8034 }
8035
8036 if (MEM_P (target))
8037 /* Store data into beginning of memory target. */
8038 store_expr (treeop0,
8039 adjust_address (target, TYPE_MODE (valtype), 0),
8040 modifier == EXPAND_STACK_PARM,
8041 false);
8042
8043 else
8044 {
8045 gcc_assert (REG_P (target));
8046
8047 /* Store this field into a union of the proper type. */
8048 store_field (target,
8049 MIN ((int_size_in_bytes (TREE_TYPE
8050 (treeop0))
8051 * BITS_PER_UNIT),
8052 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8053 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8054 }
8055
8056 /* Return the entire union. */
8057 return target;
8058 }
8059
8060 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8061 {
8062 op0 = expand_expr (treeop0, target, VOIDmode,
8063 modifier);
8064
8065 /* If the signedness of the conversion differs and OP0 is
8066 a promoted SUBREG, clear that indication since we now
8067 have to do the proper extension. */
8068 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8069 && GET_CODE (op0) == SUBREG)
8070 SUBREG_PROMOTED_VAR_P (op0) = 0;
8071
8072 return REDUCE_BIT_FIELD (op0);
8073 }
8074
8075 op0 = expand_expr (treeop0, NULL_RTX, mode,
8076 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8077 if (GET_MODE (op0) == mode)
8078 ;
8079
8080 /* If OP0 is a constant, just convert it into the proper mode. */
8081 else if (CONSTANT_P (op0))
8082 {
8083 tree inner_type = TREE_TYPE (treeop0);
8084 enum machine_mode inner_mode = GET_MODE (op0);
8085
8086 if (inner_mode == VOIDmode)
8087 inner_mode = TYPE_MODE (inner_type);
8088
8089 if (modifier == EXPAND_INITIALIZER)
8090 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8091 subreg_lowpart_offset (mode,
8092 inner_mode));
8093 else
8094 	    op0 = convert_modes (mode, inner_mode, op0,
8095 TYPE_UNSIGNED (inner_type));
8096 }
8097
8098 else if (modifier == EXPAND_INITIALIZER)
8099 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8100
8101 else if (target == 0)
8102 op0 = convert_to_mode (mode, op0,
8103 TYPE_UNSIGNED (TREE_TYPE
8104 (treeop0)));
8105 else
8106 {
8107 convert_move (target, op0,
8108 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8109 op0 = target;
8110 }
8111
8112 return REDUCE_BIT_FIELD (op0);
8113
8114 case ADDR_SPACE_CONVERT_EXPR:
8115 {
8116 tree treeop0_type = TREE_TYPE (treeop0);
8117 addr_space_t as_to;
8118 addr_space_t as_from;
8119
8120 gcc_assert (POINTER_TYPE_P (type));
8121 gcc_assert (POINTER_TYPE_P (treeop0_type));
8122
8123 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8124 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8125
8126 /* Conversions between pointers to the same address space should
8127 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8128 gcc_assert (as_to != as_from);
8129
8130 /* Ask target code to handle conversion between pointers
8131 to overlapping address spaces. */
8132 if (targetm.addr_space.subset_p (as_to, as_from)
8133 || targetm.addr_space.subset_p (as_from, as_to))
8134 {
8135 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8136 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8137 gcc_assert (op0);
8138 return op0;
8139 }
8140
8141 /* For disjoint address spaces, converting anything but
8142 a null pointer invokes undefined behaviour. We simply
8143 always return a null pointer here. */
8144 return CONST0_RTX (mode);
8145 }
8146
8147 case POINTER_PLUS_EXPR:
8148 /* Even though the sizetype mode and the pointer's mode can be different
8149 expand is able to handle this correctly and get the correct result out
8150 of the PLUS_EXPR code. */
8151 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8152 if sizetype precision is smaller than pointer precision. */
8153 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8154 treeop1 = fold_convert_loc (loc, type,
8155 fold_convert_loc (loc, ssizetype,
8156 treeop1));
8157 /* If sizetype precision is larger than pointer precision, truncate the
8158 offset to have matching modes. */
8159 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8160 treeop1 = fold_convert_loc (loc, type, treeop1);
8161
8162 case PLUS_EXPR:
8163 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8164 something else, make sure we add the register to the constant and
8165 then to the other thing. This case can occur during strength
8166 reduction and doing it this way will produce better code if the
8167 frame pointer or argument pointer is eliminated.
8168
8169 fold-const.c will ensure that the constant is always in the inner
8170 PLUS_EXPR, so the only case we need to do anything about is if
8171 sp, ap, or fp is our second argument, in which case we must swap
8172 the innermost first argument and our second argument. */
8173
8174 if (TREE_CODE (treeop0) == PLUS_EXPR
8175 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8176 && TREE_CODE (treeop1) == VAR_DECL
8177 && (DECL_RTL (treeop1) == frame_pointer_rtx
8178 || DECL_RTL (treeop1) == stack_pointer_rtx
8179 || DECL_RTL (treeop1) == arg_pointer_rtx))
8180 {
8181 gcc_unreachable ();
8182 }
8183
8184 /* If the result is to be ptr_mode and we are adding an integer to
8185 something, we might be forming a constant. So try to use
8186 plus_constant. If it produces a sum and we can't accept it,
8187 use force_operand. This allows P = &ARR[const] to generate
8188 efficient code on machines where a SYMBOL_REF is not a valid
8189 address.
8190
8191 If this is an EXPAND_SUM call, always return the sum. */
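      /* Illustration (values hypothetical): for P = &ARR[4] with 4-byte
	 elements, the sum may fold to
	 (const (plus (symbol_ref "ARR") (const_int 16))), which the caller
	 can legitimize as needed.  */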
8192 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8193 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8194 {
8195 if (modifier == EXPAND_STACK_PARM)
8196 target = 0;
8197 if (TREE_CODE (treeop0) == INTEGER_CST
8198 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8199 && TREE_CONSTANT (treeop1))
8200 {
8201 rtx constant_part;
8202
8203 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8204 EXPAND_SUM);
8205 /* Use immed_double_const to ensure that the constant is
8206 truncated according to the mode of OP1, then sign extended
8207 to a HOST_WIDE_INT. Using the constant directly can result
8208 in non-canonical RTL in a 64x32 cross compile. */
8209 constant_part
8210 = immed_double_const (TREE_INT_CST_LOW (treeop0),
8211 (HOST_WIDE_INT) 0,
8212 TYPE_MODE (TREE_TYPE (treeop1)));
8213 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8214 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8215 op1 = force_operand (op1, target);
8216 return REDUCE_BIT_FIELD (op1);
8217 }
8218
8219 else if (TREE_CODE (treeop1) == INTEGER_CST
8220 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8221 && TREE_CONSTANT (treeop0))
8222 {
8223 rtx constant_part;
8224
8225 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8226 (modifier == EXPAND_INITIALIZER
8227 ? EXPAND_INITIALIZER : EXPAND_SUM));
8228 if (! CONSTANT_P (op0))
8229 {
8230 op1 = expand_expr (treeop1, NULL_RTX,
8231 VOIDmode, modifier);
8232 /* Return a PLUS if modifier says it's OK. */
8233 if (modifier == EXPAND_SUM
8234 || modifier == EXPAND_INITIALIZER)
8235 return simplify_gen_binary (PLUS, mode, op0, op1);
8236 goto binop2;
8237 }
8238 /* Use immed_double_const to ensure that the constant is
8239 truncated according to the mode of OP1, then sign extended
8240 to a HOST_WIDE_INT. Using the constant directly can result
8241 in non-canonical RTL in a 64x32 cross compile. */
8242 constant_part
8243 = immed_double_const (TREE_INT_CST_LOW (treeop1),
8244 (HOST_WIDE_INT) 0,
8245 TYPE_MODE (TREE_TYPE (treeop0)));
8246 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8247 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8248 op0 = force_operand (op0, target);
8249 return REDUCE_BIT_FIELD (op0);
8250 }
8251 }
8252
8253 /* Use TER to expand pointer addition of a negated value
8254 as pointer subtraction. */
8255 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8256 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8257 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8258 && TREE_CODE (treeop1) == SSA_NAME
8259 && TYPE_MODE (TREE_TYPE (treeop0))
8260 == TYPE_MODE (TREE_TYPE (treeop1)))
8261 {
8262 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8263 if (def)
8264 {
8265 treeop1 = gimple_assign_rhs1 (def);
8266 code = MINUS_EXPR;
8267 goto do_minus;
8268 }
8269 }
8270
8271 /* No sense saving up arithmetic to be done
8272 if it's all in the wrong mode to form part of an address.
8273 And force_operand won't know whether to sign-extend or
8274 zero-extend. */
8275 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8276 || mode != ptr_mode)
8277 {
8278 expand_operands (treeop0, treeop1,
8279 subtarget, &op0, &op1, EXPAND_NORMAL);
8280 if (op0 == const0_rtx)
8281 return op1;
8282 if (op1 == const0_rtx)
8283 return op0;
8284 goto binop2;
8285 }
8286
8287 expand_operands (treeop0, treeop1,
8288 subtarget, &op0, &op1, modifier);
8289 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8290
8291 case MINUS_EXPR:
8292 do_minus:
8293 /* For initializers, we are allowed to return a MINUS of two
8294 symbolic constants. Here we handle all cases when both operands
8295 are constant. */
8296 /* Handle difference of two symbolic constants,
8297 for the sake of an initializer. */
8298 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8299 && really_constant_p (treeop0)
8300 && really_constant_p (treeop1))
8301 {
8302 expand_operands (treeop0, treeop1,
8303 NULL_RTX, &op0, &op1, modifier);
8304
8305 /* If the last operand is a CONST_INT, use plus_constant of
8306 the negated constant. Else make the MINUS. */
8307 if (CONST_INT_P (op1))
8308 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8309 -INTVAL (op1)));
8310 else
8311 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8312 }
8313
8314 /* No sense saving up arithmetic to be done
8315 if it's all in the wrong mode to form part of an address.
8316 And force_operand won't know whether to sign-extend or
8317 zero-extend. */
8318 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8319 || mode != ptr_mode)
8320 goto binop;
8321
8322 expand_operands (treeop0, treeop1,
8323 subtarget, &op0, &op1, modifier);
8324
8325 /* Convert A - const to A + (-const). */
8326 if (CONST_INT_P (op1))
8327 {
8328 op1 = negate_rtx (mode, op1);
8329 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8330 }
8331
8332 goto binop2;
8333
8334 case WIDEN_MULT_PLUS_EXPR:
8335 case WIDEN_MULT_MINUS_EXPR:
8336 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8337 op2 = expand_normal (treeop2);
8338 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8339 target, unsignedp);
8340 return target;
8341
8342 case WIDEN_MULT_EXPR:
8343 /* If first operand is constant, swap them.
8344 Thus the following special case checks need only
8345 check the second operand. */
8346 if (TREE_CODE (treeop0) == INTEGER_CST)
8347 {
8348 tree t1 = treeop0;
8349 treeop0 = treeop1;
8350 treeop1 = t1;
8351 }
8352
8353 /* First, check if we have a multiplication of one signed and one
8354 unsigned operand. */
8355 if (TREE_CODE (treeop1) != INTEGER_CST
8356 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8357 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8358 {
8359 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8360 this_optab = usmul_widen_optab;
8361 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8362 != CODE_FOR_nothing)
8363 {
8364 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8365 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8366 EXPAND_NORMAL);
8367 else
8368 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8369 EXPAND_NORMAL);
8370 goto binop3;
8371 }
8372 }
8373 /* Check for a multiplication with matching signedness. */
8374 else if ((TREE_CODE (treeop1) == INTEGER_CST
8375 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8376 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8377 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8378 {
8379 tree op0type = TREE_TYPE (treeop0);
8380 enum machine_mode innermode = TYPE_MODE (op0type);
8381 bool zextend_p = TYPE_UNSIGNED (op0type);
8382 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8383 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8384
8385 if (TREE_CODE (treeop0) != INTEGER_CST)
8386 {
8387 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8388 != CODE_FOR_nothing)
8389 {
8390 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8391 EXPAND_NORMAL);
8392 temp = expand_widening_mult (mode, op0, op1, target,
8393 unsignedp, this_optab);
8394 return REDUCE_BIT_FIELD (temp);
8395 }
8396 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8397 != CODE_FOR_nothing
8398 && innermode == word_mode)
8399 {
8400 rtx htem, hipart;
8401 op0 = expand_normal (treeop0);
8402 if (TREE_CODE (treeop1) == INTEGER_CST)
8403 op1 = convert_modes (innermode, mode,
8404 expand_normal (treeop1), unsignedp);
8405 else
8406 op1 = expand_normal (treeop1);
8407 temp = expand_binop (mode, other_optab, op0, op1, target,
8408 unsignedp, OPTAB_LIB_WIDEN);
8409 hipart = gen_highpart (innermode, temp);
8410 htem = expand_mult_highpart_adjust (innermode, hipart,
8411 op0, op1, hipart,
8412 zextend_p);
8413 if (htem != hipart)
8414 emit_move_insn (hipart, htem);
8415 return REDUCE_BIT_FIELD (temp);
8416 }
8417 }
8418 }
8419 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8420 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8421 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8422 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8423
8424 case FMA_EXPR:
8425 {
8426 optab opt = fma_optab;
8427 gimple def0, def2;
8428
8429 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8430 call. */
8431 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8432 {
8433 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8434 tree call_expr;
8435
8436 gcc_assert (fn != NULL_TREE);
8437 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8438 return expand_builtin (call_expr, target, subtarget, mode, false);
8439 }
8440
8441 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8442 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8443
8444 op0 = op2 = NULL;
8445
8446 if (def0 && def2
8447 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8448 {
8449 opt = fnms_optab;
8450 op0 = expand_normal (gimple_assign_rhs1 (def0));
8451 op2 = expand_normal (gimple_assign_rhs1 (def2));
8452 }
8453 else if (def0
8454 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8455 {
8456 opt = fnma_optab;
8457 op0 = expand_normal (gimple_assign_rhs1 (def0));
8458 }
8459 else if (def2
8460 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8461 {
8462 opt = fms_optab;
8463 op2 = expand_normal (gimple_assign_rhs1 (def2));
8464 }
8465
8466 if (op0 == NULL)
8467 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8468 if (op2 == NULL)
8469 op2 = expand_normal (treeop2);
8470 op1 = expand_normal (treeop1);
8471
8472 return expand_ternary_op (TYPE_MODE (type), opt,
8473 op0, op1, op2, target, 0);
8474 }
8475
8476 case MULT_EXPR:
8477 /* If this is a fixed-point operation, then we cannot use the code
8478 below because "expand_mult" doesn't support sat/no-sat fixed-point
8479 multiplications. */
8480 if (ALL_FIXED_POINT_MODE_P (mode))
8481 goto binop;
8482
8483 /* If first operand is constant, swap them.
8484 Thus the following special case checks need only
8485 check the second operand. */
8486 if (TREE_CODE (treeop0) == INTEGER_CST)
8487 {
8488 tree t1 = treeop0;
8489 treeop0 = treeop1;
8490 treeop1 = t1;
8491 }
8492
8493 /* Attempt to return something suitable for generating an
8494 indexed address, for machines that support that. */
8495
8496 if (modifier == EXPAND_SUM && mode == ptr_mode
8497 && host_integerp (treeop1, 0))
8498 {
8499 tree exp1 = treeop1;
8500
8501 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8502 EXPAND_SUM);
8503
8504 if (!REG_P (op0))
8505 op0 = force_operand (op0, NULL_RTX);
8506 if (!REG_P (op0))
8507 op0 = copy_to_mode_reg (mode, op0);
8508
8509 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8510 gen_int_mode (tree_low_cst (exp1, 0),
8511 TYPE_MODE (TREE_TYPE (exp1)))));
8512 }
8513
8514 if (modifier == EXPAND_STACK_PARM)
8515 target = 0;
8516
8517 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8518 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8519
8520 case TRUNC_DIV_EXPR:
8521 case FLOOR_DIV_EXPR:
8522 case CEIL_DIV_EXPR:
8523 case ROUND_DIV_EXPR:
8524 case EXACT_DIV_EXPR:
8525 /* If this is a fixed-point operation, then we cannot use the code
8526 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8527 divisions. */
8528 if (ALL_FIXED_POINT_MODE_P (mode))
8529 goto binop;
8530
8531 if (modifier == EXPAND_STACK_PARM)
8532 target = 0;
8533       /* Possible optimization: compute the dividend with EXPAND_SUM,
8534 	 then, if the divisor is constant, optimize the case
8535 	 where some terms of the dividend have coefficients divisible by it.  */
8536 expand_operands (treeop0, treeop1,
8537 subtarget, &op0, &op1, EXPAND_NORMAL);
8538 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8539
8540 case RDIV_EXPR:
8541 goto binop;
8542
8543 case MULT_HIGHPART_EXPR:
8544 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8545 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8546 gcc_assert (temp);
8547 return temp;
8548
8549 case TRUNC_MOD_EXPR:
8550 case FLOOR_MOD_EXPR:
8551 case CEIL_MOD_EXPR:
8552 case ROUND_MOD_EXPR:
8553 if (modifier == EXPAND_STACK_PARM)
8554 target = 0;
8555 expand_operands (treeop0, treeop1,
8556 subtarget, &op0, &op1, EXPAND_NORMAL);
8557 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8558
8559 case FIXED_CONVERT_EXPR:
8560 op0 = expand_normal (treeop0);
8561 if (target == 0 || modifier == EXPAND_STACK_PARM)
8562 target = gen_reg_rtx (mode);
8563
8564 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8565 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8566 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8567 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8568 else
8569 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8570 return target;
8571
8572 case FIX_TRUNC_EXPR:
8573 op0 = expand_normal (treeop0);
8574 if (target == 0 || modifier == EXPAND_STACK_PARM)
8575 target = gen_reg_rtx (mode);
8576 expand_fix (target, op0, unsignedp);
8577 return target;
8578
8579 case FLOAT_EXPR:
8580 op0 = expand_normal (treeop0);
8581 if (target == 0 || modifier == EXPAND_STACK_PARM)
8582 target = gen_reg_rtx (mode);
8583 /* expand_float can't figure out what to do if FROM has VOIDmode.
8584 So give it the correct mode. With -O, cse will optimize this. */
8585 if (GET_MODE (op0) == VOIDmode)
8586 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8587 op0);
8588 expand_float (target, op0,
8589 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8590 return target;
8591
8592 case NEGATE_EXPR:
8593 op0 = expand_expr (treeop0, subtarget,
8594 VOIDmode, EXPAND_NORMAL);
8595 if (modifier == EXPAND_STACK_PARM)
8596 target = 0;
8597 temp = expand_unop (mode,
8598 optab_for_tree_code (NEGATE_EXPR, type,
8599 optab_default),
8600 op0, target, 0);
8601 gcc_assert (temp);
8602 return REDUCE_BIT_FIELD (temp);
8603
8604 case ABS_EXPR:
8605 op0 = expand_expr (treeop0, subtarget,
8606 VOIDmode, EXPAND_NORMAL);
8607 if (modifier == EXPAND_STACK_PARM)
8608 target = 0;
8609
8610 /* ABS_EXPR is not valid for complex arguments. */
8611 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8612 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8613
8614 /* Unsigned abs is simply the operand. Testing here means we don't
8615 risk generating incorrect code below. */
8616 if (TYPE_UNSIGNED (type))
8617 return op0;
8618
8619 return expand_abs (mode, op0, target, unsignedp,
8620 safe_from_p (target, treeop0, 1));
8621
8622 case MAX_EXPR:
8623 case MIN_EXPR:
8624 target = original_target;
8625 if (target == 0
8626 || modifier == EXPAND_STACK_PARM
8627 || (MEM_P (target) && MEM_VOLATILE_P (target))
8628 || GET_MODE (target) != mode
8629 || (REG_P (target)
8630 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8631 target = gen_reg_rtx (mode);
8632 expand_operands (treeop0, treeop1,
8633 target, &op0, &op1, EXPAND_NORMAL);
8634
8635 /* First try to do it with a special MIN or MAX instruction.
8636 If that does not win, use a conditional jump to select the proper
8637 value. */
8638 this_optab = optab_for_tree_code (code, type, optab_default);
8639 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8640 OPTAB_WIDEN);
8641 if (temp != 0)
8642 return temp;
8643
8644 /* At this point, a MEM target is no longer useful; we will get better
8645 code without it. */
8646
8647 if (! REG_P (target))
8648 target = gen_reg_rtx (mode);
8649
8650 /* If op1 was placed in target, swap op0 and op1. */
8651 if (target != op0 && target == op1)
8652 {
8653 temp = op0;
8654 op0 = op1;
8655 op1 = temp;
8656 }
8657
8658 /* We generate better code and avoid problems with op1 mentioning
8659 target by forcing op1 into a pseudo if it isn't a constant. */
8660 if (! CONSTANT_P (op1))
8661 op1 = force_reg (mode, op1);
8662
8663 {
8664 enum rtx_code comparison_code;
8665 rtx cmpop1 = op1;
8666
8667 if (code == MAX_EXPR)
8668 comparison_code = unsignedp ? GEU : GE;
8669 else
8670 comparison_code = unsignedp ? LEU : LE;
8671
8672 /* Canonicalize to comparisons against 0. */
8673 if (op1 == const1_rtx)
8674 {
8675 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8676 or (a != 0 ? a : 1) for unsigned.
8677 For MIN we are safe converting (a <= 1 ? a : 1)
8678 into (a <= 0 ? a : 1) */
8679 cmpop1 = const0_rtx;
8680 if (code == MAX_EXPR)
8681 comparison_code = unsignedp ? NE : GT;
8682 }
8683 if (op1 == constm1_rtx && !unsignedp)
8684 {
8685 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8686 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8687 cmpop1 = const0_rtx;
8688 if (code == MIN_EXPR)
8689 comparison_code = LT;
8690 }
8691 #ifdef HAVE_conditional_move
8692 /* Use a conditional move if possible. */
8693 if (can_conditionally_move_p (mode))
8694 {
8695 rtx insn;
8696
8697 /* ??? Same problem as in expmed.c: emit_conditional_move
8698 forces a stack adjustment via compare_from_rtx, and we
8699 lose the stack adjustment if the sequence we are about
8700 to create is discarded. */
8701 do_pending_stack_adjust ();
8702
8703 start_sequence ();
8704
8705 /* Try to emit the conditional move. */
8706 insn = emit_conditional_move (target, comparison_code,
8707 op0, cmpop1, mode,
8708 op0, op1, mode,
8709 unsignedp);
8710
8711 /* If we could do the conditional move, emit the sequence,
8712 and return. */
8713 if (insn)
8714 {
8715 rtx seq = get_insns ();
8716 end_sequence ();
8717 emit_insn (seq);
8718 return target;
8719 }
8720
8721 /* Otherwise discard the sequence and fall back to code with
8722 branches. */
8723 end_sequence ();
8724 }
8725 #endif
8726 if (target != op0)
8727 emit_move_insn (target, op0);
8728
8729 temp = gen_label_rtx ();
8730 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8731 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8732 -1);
8733 }
8734 emit_move_insn (target, op1);
8735 emit_label (temp);
8736 return target;
8737
8738 case BIT_NOT_EXPR:
8739 op0 = expand_expr (treeop0, subtarget,
8740 VOIDmode, EXPAND_NORMAL);
8741 if (modifier == EXPAND_STACK_PARM)
8742 target = 0;
8743 	      /* In case we have to reduce the result to bitfield precision
8744 	         for an unsigned bitfield, expand this as XOR with a proper
8745 	         constant instead.  */
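	      /* For example, for a 3-bit unsigned bitfield type, ~x is expanded
	         here as x ^ 7, which complements and truncates to the field
	         precision in a single operation.  */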
8746 if (reduce_bit_field && TYPE_UNSIGNED (type))
8747 temp = expand_binop (mode, xor_optab, op0,
8748 immed_double_int_const
8749 (double_int::mask (TYPE_PRECISION (type)), mode),
8750 target, 1, OPTAB_LIB_WIDEN);
8751 else
8752 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8753 gcc_assert (temp);
8754 return temp;
8755
8756 /* ??? Can optimize bitwise operations with one arg constant.
8757 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8758 and (a bitwise1 b) bitwise2 b (etc)
8759 	     but that is probably not worthwhile.  */
8760
8761 case BIT_AND_EXPR:
8762 case BIT_IOR_EXPR:
8763 case BIT_XOR_EXPR:
8764 goto binop;
8765
8766 case LROTATE_EXPR:
8767 case RROTATE_EXPR:
8768 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8769 || (GET_MODE_PRECISION (TYPE_MODE (type))
8770 == TYPE_PRECISION (type)));
8771 /* fall through */
8772
8773 case LSHIFT_EXPR:
8774 case RSHIFT_EXPR:
8775 /* If this is a fixed-point operation, then we cannot use the code
8776 below because "expand_shift" doesn't support sat/no-sat fixed-point
8777 shifts. */
8778 if (ALL_FIXED_POINT_MODE_P (mode))
8779 goto binop;
8780
8781 if (! safe_from_p (subtarget, treeop1, 1))
8782 subtarget = 0;
8783 if (modifier == EXPAND_STACK_PARM)
8784 target = 0;
8785 op0 = expand_expr (treeop0, subtarget,
8786 VOIDmode, EXPAND_NORMAL);
8787 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8788 unsignedp);
8789 if (code == LSHIFT_EXPR)
8790 temp = REDUCE_BIT_FIELD (temp);
8791 return temp;
8792
8793 /* Could determine the answer when only additive constants differ. Also,
8794 the addition of one can be handled by changing the condition. */
8795 case LT_EXPR:
8796 case LE_EXPR:
8797 case GT_EXPR:
8798 case GE_EXPR:
8799 case EQ_EXPR:
8800 case NE_EXPR:
8801 case UNORDERED_EXPR:
8802 case ORDERED_EXPR:
8803 case UNLT_EXPR:
8804 case UNLE_EXPR:
8805 case UNGT_EXPR:
8806 case UNGE_EXPR:
8807 case UNEQ_EXPR:
8808 case LTGT_EXPR:
8809 temp = do_store_flag (ops,
8810 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8811 tmode != VOIDmode ? tmode : mode);
8812 if (temp)
8813 return temp;
8814
8815 /* Use a compare and a jump for BLKmode comparisons, or for function
8816 	       type comparisons if HAVE_canonicalize_funcptr_for_compare is defined.  */
8817
8818 if ((target == 0
8819 || modifier == EXPAND_STACK_PARM
8820 || ! safe_from_p (target, treeop0, 1)
8821 || ! safe_from_p (target, treeop1, 1)
8822 /* Make sure we don't have a hard reg (such as function's return
8823 value) live across basic blocks, if not optimizing. */
8824 || (!optimize && REG_P (target)
8825 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8826 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8827
8828 emit_move_insn (target, const0_rtx);
8829
8830 op1 = gen_label_rtx ();
8831 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8832
8833 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8834 emit_move_insn (target, constm1_rtx);
8835 else
8836 emit_move_insn (target, const1_rtx);
8837
8838 emit_label (op1);
8839 return target;
8840
8841 case COMPLEX_EXPR:
8842 /* Get the rtx code of the operands. */
8843 op0 = expand_normal (treeop0);
8844 op1 = expand_normal (treeop1);
8845
8846 if (!target)
8847 target = gen_reg_rtx (TYPE_MODE (type));
8848
8849 /* Move the real (op0) and imaginary (op1) parts to their location. */
8850 write_complex_part (target, op0, false);
8851 write_complex_part (target, op1, true);
8852
8853 return target;
8854
8855 case WIDEN_SUM_EXPR:
8856 {
8857 tree oprnd0 = treeop0;
8858 tree oprnd1 = treeop1;
8859
8860 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8861 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8862 target, unsignedp);
8863 return target;
8864 }
8865
8866 case REDUC_MAX_EXPR:
8867 case REDUC_MIN_EXPR:
8868 case REDUC_PLUS_EXPR:
8869 {
8870 op0 = expand_normal (treeop0);
8871 this_optab = optab_for_tree_code (code, type, optab_default);
8872 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8873 gcc_assert (temp);
8874 return temp;
8875 }
8876
8877 case VEC_LSHIFT_EXPR:
8878 case VEC_RSHIFT_EXPR:
8879 {
8880 target = expand_vec_shift_expr (ops, target);
8881 return target;
8882 }
8883
8884 case VEC_UNPACK_HI_EXPR:
8885 case VEC_UNPACK_LO_EXPR:
8886 {
8887 op0 = expand_normal (treeop0);
8888 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8889 target, unsignedp);
8890 gcc_assert (temp);
8891 return temp;
8892 }
8893
8894 case VEC_UNPACK_FLOAT_HI_EXPR:
8895 case VEC_UNPACK_FLOAT_LO_EXPR:
8896 {
8897 op0 = expand_normal (treeop0);
8898 	        /* The signedness is determined from the input operand.  */
8899 temp = expand_widen_pattern_expr
8900 (ops, op0, NULL_RTX, NULL_RTX,
8901 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8902
8903 gcc_assert (temp);
8904 return temp;
8905 }
8906
8907 case VEC_WIDEN_MULT_HI_EXPR:
8908 case VEC_WIDEN_MULT_LO_EXPR:
8909 case VEC_WIDEN_MULT_EVEN_EXPR:
8910 case VEC_WIDEN_MULT_ODD_EXPR:
8911 case VEC_WIDEN_LSHIFT_HI_EXPR:
8912 case VEC_WIDEN_LSHIFT_LO_EXPR:
8913 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8914 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8915 target, unsignedp);
8916 gcc_assert (target);
8917 return target;
8918
8919 case VEC_PACK_TRUNC_EXPR:
8920 case VEC_PACK_SAT_EXPR:
8921 case VEC_PACK_FIX_TRUNC_EXPR:
8922 mode = TYPE_MODE (TREE_TYPE (treeop0));
8923 goto binop;
8924
8925 case VEC_PERM_EXPR:
8926 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
8927 op2 = expand_normal (treeop2);
8928
8929 /* Careful here: if the target doesn't support integral vector modes,
8930 a constant selection vector could wind up smooshed into a normal
8931 integral constant. */
8932 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
8933 {
8934 tree sel_type = TREE_TYPE (treeop2);
8935 enum machine_mode vmode
8936 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
8937 TYPE_VECTOR_SUBPARTS (sel_type));
8938 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
8939 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
8940 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
8941 }
8942 else
8943 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
8944
8945 temp = expand_vec_perm (mode, op0, op1, op2, target);
8946 gcc_assert (temp);
8947 return temp;
8948
8949 case DOT_PROD_EXPR:
8950 {
8951 tree oprnd0 = treeop0;
8952 tree oprnd1 = treeop1;
8953 tree oprnd2 = treeop2;
8954 rtx op2;
8955
8956 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8957 op2 = expand_normal (oprnd2);
8958 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8959 target, unsignedp);
8960 return target;
8961 }
8962
8963 case REALIGN_LOAD_EXPR:
8964 {
8965 tree oprnd0 = treeop0;
8966 tree oprnd1 = treeop1;
8967 tree oprnd2 = treeop2;
8968 rtx op2;
8969
8970 this_optab = optab_for_tree_code (code, type, optab_default);
8971 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8972 op2 = expand_normal (oprnd2);
8973 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8974 target, unsignedp);
8975 gcc_assert (temp);
8976 return temp;
8977 }
8978
8979 case COND_EXPR:
8980 /* A COND_EXPR with its type being VOID_TYPE represents a
8981 conditional jump and is handled in
8982 expand_gimple_cond_expr. */
8983 gcc_assert (!VOID_TYPE_P (type));
8984
8985 /* Note that COND_EXPRs whose type is a structure or union
8986 are required to be constructed to contain assignments of
8987 a temporary variable, so that we can evaluate them here
8988 for side effect only. If type is void, we must do likewise. */
8989
8990 gcc_assert (!TREE_ADDRESSABLE (type)
8991 && !ignore
8992 && TREE_TYPE (treeop1) != void_type_node
8993 && TREE_TYPE (treeop2) != void_type_node);
8994
8995 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
8996 if (temp)
8997 return temp;
8998
8999 /* If we are not to produce a result, we have no target. Otherwise,
9000 if a target was specified use it; it will not be used as an
9001 intermediate target unless it is safe. If no target, use a
9002 temporary. */
9003
9004 if (modifier != EXPAND_STACK_PARM
9005 && original_target
9006 && safe_from_p (original_target, treeop0, 1)
9007 && GET_MODE (original_target) == mode
9008 && !MEM_P (original_target))
9009 temp = original_target;
9010 else
9011 temp = assign_temp (type, 0, 1);
9012
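    /* The code below emits, roughly:
	 if (!treeop0) goto op0;
	 temp = treeop1;  goto op1;
       op0: temp = treeop2;
       op1: ...
       so that TEMP holds the selected value at the join point.  */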
9013 do_pending_stack_adjust ();
9014 NO_DEFER_POP;
9015 op0 = gen_label_rtx ();
9016 op1 = gen_label_rtx ();
9017 jumpifnot (treeop0, op0, -1);
9018 store_expr (treeop1, temp,
9019 modifier == EXPAND_STACK_PARM,
9020 false);
9021
9022 emit_jump_insn (gen_jump (op1));
9023 emit_barrier ();
9024 emit_label (op0);
9025 store_expr (treeop2, temp,
9026 modifier == EXPAND_STACK_PARM,
9027 false);
9028
9029 emit_label (op1);
9030 OK_DEFER_POP;
9031 return temp;
9032
9033 case VEC_COND_EXPR:
9034 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9035 return target;
9036
9037 default:
9038 gcc_unreachable ();
9039 }
9040
9041 /* Here to do an ordinary binary operator. */
9042 binop:
9043 expand_operands (treeop0, treeop1,
9044 subtarget, &op0, &op1, EXPAND_NORMAL);
9045 binop2:
9046 this_optab = optab_for_tree_code (code, type, optab_default);
9047 binop3:
9048 if (modifier == EXPAND_STACK_PARM)
9049 target = 0;
9050 temp = expand_binop (mode, this_optab, op0, op1, target,
9051 unsignedp, OPTAB_LIB_WIDEN);
9052 gcc_assert (temp);
9053 	  /* Bitwise operations do not need bitfield reduction as we expect their
9054 	     operands to be properly truncated.  */
9055 if (code == BIT_XOR_EXPR
9056 || code == BIT_AND_EXPR
9057 || code == BIT_IOR_EXPR)
9058 return temp;
9059 return REDUCE_BIT_FIELD (temp);
9060 }
9061 #undef REDUCE_BIT_FIELD
9062
9063 rtx
9064 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9065 enum expand_modifier modifier, rtx *alt_rtl)
9066 {
9067 rtx op0, op1, temp, decl_rtl;
9068 tree type;
9069 int unsignedp;
9070 enum machine_mode mode;
9071 enum tree_code code = TREE_CODE (exp);
9072 rtx subtarget, original_target;
9073 int ignore;
9074 tree context;
9075 bool reduce_bit_field;
9076 location_t loc = EXPR_LOCATION (exp);
9077 struct separate_ops ops;
9078 tree treeop0, treeop1, treeop2;
9079 tree ssa_name = NULL_TREE;
9080 gimple g;
9081
9082 type = TREE_TYPE (exp);
9083 mode = TYPE_MODE (type);
9084 unsignedp = TYPE_UNSIGNED (type);
9085
9086 treeop0 = treeop1 = treeop2 = NULL_TREE;
9087 if (!VL_EXP_CLASS_P (exp))
9088 switch (TREE_CODE_LENGTH (code))
9089 {
9090 default:
9091 case 3: treeop2 = TREE_OPERAND (exp, 2);
9092 case 2: treeop1 = TREE_OPERAND (exp, 1);
9093 case 1: treeop0 = TREE_OPERAND (exp, 0);
9094 case 0: break;
9095 }
9096 ops.code = code;
9097 ops.type = type;
9098 ops.op0 = treeop0;
9099 ops.op1 = treeop1;
9100 ops.op2 = treeop2;
9101 ops.location = loc;
9102
9103 ignore = (target == const0_rtx
9104 || ((CONVERT_EXPR_CODE_P (code)
9105 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9106 && TREE_CODE (type) == VOID_TYPE));
9107
9108 /* An operation in what may be a bit-field type needs the
9109 result to be reduced to the precision of the bit-field type,
9110 which is narrower than that of the type's mode. */
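  /* For example, an addition in a 3-bit bit-field type is carried out in the
     full mode of the type (say SImode); the bits above bit 2 must then be
     masked off or sign-extended so that later uses see a valid 3-bit value.  */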
9111 reduce_bit_field = (!ignore
9112 && INTEGRAL_TYPE_P (type)
9113 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9114
9115 /* If we are going to ignore this result, we need only do something
9116 if there is a side-effect somewhere in the expression. If there
9117 is, short-circuit the most common cases here. Note that we must
9118 not call expand_expr with anything but const0_rtx in case this
9119 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9120
9121 if (ignore)
9122 {
9123 if (! TREE_SIDE_EFFECTS (exp))
9124 return const0_rtx;
9125
9126 	      /* Ensure we reference a volatile object even if the value is ignored,
9127 	         but don't do this if all we are doing is taking its address.  */
9128 if (TREE_THIS_VOLATILE (exp)
9129 && TREE_CODE (exp) != FUNCTION_DECL
9130 && mode != VOIDmode && mode != BLKmode
9131 && modifier != EXPAND_CONST_ADDRESS)
9132 {
9133 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9134 if (MEM_P (temp))
9135 copy_to_reg (temp);
9136 return const0_rtx;
9137 }
9138
9139 if (TREE_CODE_CLASS (code) == tcc_unary
9140 || code == BIT_FIELD_REF
9141 || code == COMPONENT_REF
9142 || code == INDIRECT_REF)
9143 return expand_expr (treeop0, const0_rtx, VOIDmode,
9144 modifier);
9145
9146 else if (TREE_CODE_CLASS (code) == tcc_binary
9147 || TREE_CODE_CLASS (code) == tcc_comparison
9148 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9149 {
9150 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9151 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9152 return const0_rtx;
9153 }
9154
9155 target = 0;
9156 }
9157
9158 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9159 target = 0;
9160
9161 /* Use subtarget as the target for operand 0 of a binary operation. */
9162 subtarget = get_subtarget (target);
9163 original_target = target;
9164
9165 switch (code)
9166 {
9167 case LABEL_DECL:
9168 {
9169 tree function = decl_function_context (exp);
9170
9171 temp = label_rtx (exp);
9172 temp = gen_rtx_LABEL_REF (Pmode, temp);
9173
9174 if (function != current_function_decl
9175 && function != 0)
9176 LABEL_REF_NONLOCAL_P (temp) = 1;
9177
9178 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9179 return temp;
9180 }
9181
9182 case SSA_NAME:
9183 	    /* ??? ivopts calls the expander without any preparation from
9184 	       out-of-ssa.  So fake instructions as if this were an access to the
9185 	       base variable.  This unnecessarily allocates a pseudo; see how we
9186 	       can reuse it if partition base vars have it set already.  */
9187 if (!currently_expanding_to_rtl)
9188 {
9189 tree var = SSA_NAME_VAR (exp);
9190 if (var && DECL_RTL_SET_P (var))
9191 return DECL_RTL (var);
9192 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9193 LAST_VIRTUAL_REGISTER + 1);
9194 }
9195
9196 g = get_gimple_for_ssa_name (exp);
9197 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9198 if (g == NULL
9199 && modifier == EXPAND_INITIALIZER
9200 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9201 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9202 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9203 g = SSA_NAME_DEF_STMT (exp);
9204 if (g)
9205 {
9206 rtx r;
9207 location_t saved_loc = curr_insn_location ();
9208
9209 set_curr_insn_location (gimple_location (g));
9210 r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
9211 tmode, modifier, NULL);
9212 set_curr_insn_location (saved_loc);
9213 if (REG_P (r) && !REG_EXPR (r))
9214 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9215 return r;
9216 }
9217
9218 ssa_name = exp;
9219 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9220 exp = SSA_NAME_VAR (ssa_name);
9221 goto expand_decl_rtl;
9222
9223 case PARM_DECL:
9224 case VAR_DECL:
9225 /* If a static var's type was incomplete when the decl was written,
9226 but the type is complete now, lay out the decl now. */
9227 if (DECL_SIZE (exp) == 0
9228 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9229 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9230 layout_decl (exp, 0);
9231
9232 /* ... fall through ... */
9233
9234 case FUNCTION_DECL:
9235 case RESULT_DECL:
9236 decl_rtl = DECL_RTL (exp);
9237 expand_decl_rtl:
9238 gcc_assert (decl_rtl);
9239 decl_rtl = copy_rtx (decl_rtl);
9240 /* Record writes to register variables. */
9241 if (modifier == EXPAND_WRITE
9242 && REG_P (decl_rtl)
9243 && HARD_REGISTER_P (decl_rtl))
9244 add_to_hard_reg_set (&crtl->asm_clobbers,
9245 GET_MODE (decl_rtl), REGNO (decl_rtl));
9246
9247 	    /* Ensure the variable is marked as used even if it doesn't go through
9248 	       a parser.  If it hasn't been used yet, write out an external
9249 definition. */
9250 TREE_USED (exp) = 1;
9251
9252 /* Show we haven't gotten RTL for this yet. */
9253 temp = 0;
9254
9255 /* Variables inherited from containing functions should have
9256 been lowered by this point. */
9257 context = decl_function_context (exp);
9258 gcc_assert (!context
9259 || context == current_function_decl
9260 || TREE_STATIC (exp)
9261 || DECL_EXTERNAL (exp)
9262 /* ??? C++ creates functions that are not TREE_STATIC. */
9263 || TREE_CODE (exp) == FUNCTION_DECL);
9264
9265 /* This is the case of an array whose size is to be determined
9266 from its initializer, while the initializer is still being parsed.
9267 ??? We aren't parsing while expanding anymore. */
9268
9269 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9270 temp = validize_mem (decl_rtl);
9271
9272 /* If DECL_RTL is memory, we are in the normal case and the
9273 address is not valid, get the address into a register. */
9274
9275 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9276 {
9277 if (alt_rtl)
9278 *alt_rtl = decl_rtl;
9279 decl_rtl = use_anchored_address (decl_rtl);
9280 if (modifier != EXPAND_CONST_ADDRESS
9281 && modifier != EXPAND_SUM
9282 && !memory_address_addr_space_p (DECL_MODE (exp),
9283 XEXP (decl_rtl, 0),
9284 MEM_ADDR_SPACE (decl_rtl)))
9285 temp = replace_equiv_address (decl_rtl,
9286 copy_rtx (XEXP (decl_rtl, 0)));
9287 }
9288
9289 /* If we got something, return it. But first, set the alignment
9290 if the address is a register. */
9291 if (temp != 0)
9292 {
9293 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9294 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9295
9296 return temp;
9297 }
9298
9299 /* If the mode of DECL_RTL does not match that of the decl,
9300 there are two cases: we are dealing with a BLKmode value
9301 that is returned in a register, or we are dealing with
9302 a promoted value. In the latter case, return a SUBREG
9303 of the wanted mode, but mark it so that we know that it
9304 was already extended. */
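      /* For example, on a target that promotes narrow variables to word mode,
	 a char decl may live in an SImode pseudo; the access is expanded as a
	 lowpart SUBREG of that pseudo with SUBREG_PROMOTED_VAR_P set so the
	 extension is not redone.  */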
9305 if (REG_P (decl_rtl)
9306 && DECL_MODE (exp) != BLKmode
9307 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9308 {
9309 enum machine_mode pmode;
9310
9311 /* Get the signedness to be used for this variable. Ensure we get
9312 the same mode we got when the variable was declared. */
9313 if (code == SSA_NAME
9314 && (g = SSA_NAME_DEF_STMT (ssa_name))
9315 && gimple_code (g) == GIMPLE_CALL)
9316 {
9317 gcc_assert (!gimple_call_internal_p (g));
9318 pmode = promote_function_mode (type, mode, &unsignedp,
9319 gimple_call_fntype (g),
9320 2);
9321 }
9322 else
9323 pmode = promote_decl_mode (exp, &unsignedp);
9324 gcc_assert (GET_MODE (decl_rtl) == pmode);
9325
9326 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9327 SUBREG_PROMOTED_VAR_P (temp) = 1;
9328 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9329 return temp;
9330 }
9331
9332 return decl_rtl;
9333
9334 case INTEGER_CST:
9335 temp = immed_double_const (TREE_INT_CST_LOW (exp),
9336 TREE_INT_CST_HIGH (exp), mode);
9337
9338 return temp;
9339
9340 case VECTOR_CST:
9341 {
9342 tree tmp = NULL_TREE;
9343 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9344 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9345 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9346 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9347 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9348 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9349 return const_vector_from_tree (exp);
9350 if (GET_MODE_CLASS (mode) == MODE_INT)
9351 {
9352 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9353 if (type_for_mode)
9354 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9355 }
9356 if (!tmp)
9357 {
9358 vec<constructor_elt, va_gc> *v;
9359 unsigned i;
9360 vec_alloc (v, VECTOR_CST_NELTS (exp));
9361 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9362 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9363 tmp = build_constructor (type, v);
9364 }
9365 return expand_expr (tmp, ignore ? const0_rtx : target,
9366 tmode, modifier);
9367 }
9368
9369 case CONST_DECL:
9370 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9371
9372 case REAL_CST:
9373 /* If optimized, generate immediate CONST_DOUBLE
9374 which will be turned into memory by reload if necessary.
9375
9376 We used to force a register so that loop.c could see it. But
9377 this does not allow gen_* patterns to perform optimizations with
9378 the constants. It also produces two insns in cases like "x = 1.0;".
9379 On most machines, floating-point constants are not permitted in
9380 many insns, so we'd end up copying it to a register in any case.
9381
9382 Now, we do the copying in expand_binop, if appropriate. */
9383 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9384 TYPE_MODE (TREE_TYPE (exp)));
9385
9386 case FIXED_CST:
9387 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9388 TYPE_MODE (TREE_TYPE (exp)));
9389
9390 case COMPLEX_CST:
9391 /* Handle evaluating a complex constant in a CONCAT target. */
9392 if (original_target && GET_CODE (original_target) == CONCAT)
9393 {
9394 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9395 rtx rtarg, itarg;
9396
9397 rtarg = XEXP (original_target, 0);
9398 itarg = XEXP (original_target, 1);
9399
9400 /* Move the real and imaginary parts separately. */
9401 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9402 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9403
9404 if (op0 != rtarg)
9405 emit_move_insn (rtarg, op0);
9406 if (op1 != itarg)
9407 emit_move_insn (itarg, op1);
9408
9409 return original_target;
9410 }
9411
9412 /* ... fall through ... */
9413
9414 case STRING_CST:
9415 temp = expand_expr_constant (exp, 1, modifier);
9416
9417 /* temp contains a constant address.
9418 On RISC machines where a constant address isn't valid,
9419 make some insns to get that address into a register. */
9420 if (modifier != EXPAND_CONST_ADDRESS
9421 && modifier != EXPAND_INITIALIZER
9422 && modifier != EXPAND_SUM
9423 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9424 MEM_ADDR_SPACE (temp)))
9425 return replace_equiv_address (temp,
9426 copy_rtx (XEXP (temp, 0)));
9427 return temp;
9428
9429 case SAVE_EXPR:
9430 {
9431 tree val = treeop0;
9432 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
9433
9434 if (!SAVE_EXPR_RESOLVED_P (exp))
9435 {
9436 /* We can indeed still hit this case, typically via builtin
9437 expanders calling save_expr immediately before expanding
9438 something. Assume this means that we only have to deal
9439 with non-BLKmode values. */
9440 gcc_assert (GET_MODE (ret) != BLKmode);
9441
9442 val = build_decl (curr_insn_location (),
9443 VAR_DECL, NULL, TREE_TYPE (exp));
9444 DECL_ARTIFICIAL (val) = 1;
9445 DECL_IGNORED_P (val) = 1;
9446 treeop0 = val;
9447 TREE_OPERAND (exp, 0) = treeop0;
9448 SAVE_EXPR_RESOLVED_P (exp) = 1;
9449
9450 if (!CONSTANT_P (ret))
9451 ret = copy_to_reg (ret);
9452 SET_DECL_RTL (val, ret);
9453 }
9454
9455 return ret;
9456 }
9457
9458
9459 case CONSTRUCTOR:
9460 /* If we don't need the result, just ensure we evaluate any
9461 subexpressions. */
9462 if (ignore)
9463 {
9464 unsigned HOST_WIDE_INT idx;
9465 tree value;
9466
9467 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9468 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9469
9470 return const0_rtx;
9471 }
9472
9473 return expand_constructor (exp, target, modifier, false);
9474
9475 case TARGET_MEM_REF:
9476 {
9477 addr_space_t as
9478 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9479 struct mem_address addr;
9480 enum insn_code icode;
9481 unsigned int align;
9482
9483 get_address_description (exp, &addr);
9484 op0 = addr_for_mem_ref (&addr, as, true);
9485 op0 = memory_address_addr_space (mode, op0, as);
9486 temp = gen_rtx_MEM (mode, op0);
9487 set_mem_attributes (temp, exp, 0);
9488 set_mem_addr_space (temp, as);
9489 align = get_object_alignment (exp);
9490 if (modifier != EXPAND_WRITE
9491 && mode != BLKmode
9492 && align < GET_MODE_ALIGNMENT (mode)
9493 	    /* If the target does not have special handling for unaligned
9494 	       loads of this mode, then it can use regular moves for them.  */
9495 && ((icode = optab_handler (movmisalign_optab, mode))
9496 != CODE_FOR_nothing))
9497 {
9498 struct expand_operand ops[2];
9499
9500 /* We've already validated the memory, and we're creating a
9501 new pseudo destination. The predicates really can't fail,
9502 nor can the generator. */
9503 create_output_operand (&ops[0], NULL_RTX, mode);
9504 create_fixed_operand (&ops[1], temp);
9505 expand_insn (icode, 2, ops);
9506 return ops[0].value;
9507 }
9508 return temp;
9509 }
9510
9511 case MEM_REF:
9512 {
9513 addr_space_t as
9514 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9515 enum machine_mode address_mode;
9516 tree base = TREE_OPERAND (exp, 0);
9517 gimple def_stmt;
9518 enum insn_code icode;
9519 unsigned align;
9520 /* Handle expansion of non-aliased memory with non-BLKmode. That
9521 might end up in a register. */
9522 if (mem_ref_refers_to_non_mem_p (exp))
9523 {
9524 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9525 tree bit_offset;
9526 tree bftype;
9527 base = TREE_OPERAND (base, 0);
9528 if (offset == 0
9529 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
9530 && (GET_MODE_BITSIZE (DECL_MODE (base))
9531 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
9532 return expand_expr (build1 (VIEW_CONVERT_EXPR,
9533 TREE_TYPE (exp), base),
9534 target, tmode, modifier);
9535 bit_offset = bitsize_int (offset * BITS_PER_UNIT);
9536 bftype = TREE_TYPE (base);
9537 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
9538 bftype = TREE_TYPE (exp);
9539 else
9540 {
9541 temp = assign_stack_temp (DECL_MODE (base),
9542 GET_MODE_SIZE (DECL_MODE (base)));
9543 store_expr (base, temp, 0, false);
9544 temp = adjust_address (temp, BLKmode, offset);
9545 set_mem_size (temp, int_size_in_bytes (TREE_TYPE (exp)));
9546 return temp;
9547 }
9548 return expand_expr (build3 (BIT_FIELD_REF, bftype,
9549 base,
9550 TYPE_SIZE (TREE_TYPE (exp)),
9551 bit_offset),
9552 target, tmode, modifier);
9553 }
9554 address_mode = targetm.addr_space.address_mode (as);
9555 base = TREE_OPERAND (exp, 0);
9556 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9557 {
9558 tree mask = gimple_assign_rhs2 (def_stmt);
9559 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9560 gimple_assign_rhs1 (def_stmt), mask);
9561 TREE_OPERAND (exp, 0) = base;
9562 }
9563 align = get_object_alignment (exp);
9564 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9565 op0 = memory_address_addr_space (address_mode, op0, as);
9566 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9567 {
9568 rtx off
9569 = immed_double_int_const (mem_ref_offset (exp), address_mode);
9570 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9571 }
9572 op0 = memory_address_addr_space (mode, op0, as);
9573 temp = gen_rtx_MEM (mode, op0);
9574 set_mem_attributes (temp, exp, 0);
9575 set_mem_addr_space (temp, as);
9576 if (TREE_THIS_VOLATILE (exp))
9577 MEM_VOLATILE_P (temp) = 1;
9578 if (modifier != EXPAND_WRITE
9579 && mode != BLKmode
9580 && align < GET_MODE_ALIGNMENT (mode))
9581 {
9582 if ((icode = optab_handler (movmisalign_optab, mode))
9583 != CODE_FOR_nothing)
9584 {
9585 struct expand_operand ops[2];
9586
9587 /* We've already validated the memory, and we're creating a
9588 new pseudo destination. The predicates really can't fail,
9589 nor can the generator. */
9590 create_output_operand (&ops[0], NULL_RTX, mode);
9591 create_fixed_operand (&ops[1], temp);
9592 expand_insn (icode, 2, ops);
9593 return ops[0].value;
9594 }
9595 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9596 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9597 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9598 true, (modifier == EXPAND_STACK_PARM
9599 ? NULL_RTX : target),
9600 mode, mode);
9601 }
9602 return temp;
9603 }
9604
9605 case ARRAY_REF:
9606
9607 {
9608 tree array = treeop0;
9609 tree index = treeop1;
9610
9611 /* Fold an expression like: "foo"[2].
9612 This is not done in fold so it won't happen inside &.
9613 Don't fold if this is for wide characters since it's too
9614 difficult to do correctly and this is a very rare case. */
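	/* For example, "foo"[2] is folded here to the character constant 'o'
	   before any address arithmetic is emitted.  */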
9615
9616 if (modifier != EXPAND_CONST_ADDRESS
9617 && modifier != EXPAND_INITIALIZER
9618 && modifier != EXPAND_MEMORY)
9619 {
9620 tree t = fold_read_from_constant_string (exp);
9621
9622 if (t)
9623 return expand_expr (t, target, tmode, modifier);
9624 }
9625
9626 /* If this is a constant index into a constant array,
9627 just get the value from the array. Handle both the cases when
9628 we have an explicit constructor and when our operand is a variable
9629 that was declared const. */
9630
9631 if (modifier != EXPAND_CONST_ADDRESS
9632 && modifier != EXPAND_INITIALIZER
9633 && modifier != EXPAND_MEMORY
9634 && TREE_CODE (array) == CONSTRUCTOR
9635 && ! TREE_SIDE_EFFECTS (array)
9636 && TREE_CODE (index) == INTEGER_CST)
9637 {
9638 unsigned HOST_WIDE_INT ix;
9639 tree field, value;
9640
9641 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9642 field, value)
9643 if (tree_int_cst_equal (field, index))
9644 {
9645 if (!TREE_SIDE_EFFECTS (value))
9646 return expand_expr (fold (value), target, tmode, modifier);
9647 break;
9648 }
9649 }
9650
9651 else if (optimize >= 1
9652 && modifier != EXPAND_CONST_ADDRESS
9653 && modifier != EXPAND_INITIALIZER
9654 && modifier != EXPAND_MEMORY
9655 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9656 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
9657 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
9658 && const_value_known_p (array))
9659 {
9660 if (TREE_CODE (index) == INTEGER_CST)
9661 {
9662 tree init = DECL_INITIAL (array);
9663
9664 if (TREE_CODE (init) == CONSTRUCTOR)
9665 {
9666 unsigned HOST_WIDE_INT ix;
9667 tree field, value;
9668
9669 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9670 field, value)
9671 if (tree_int_cst_equal (field, index))
9672 {
9673 if (TREE_SIDE_EFFECTS (value))
9674 break;
9675
9676 if (TREE_CODE (value) == CONSTRUCTOR)
9677 {
9678 /* If VALUE is a CONSTRUCTOR, this
9679 optimization is only useful if
9680 this doesn't store the CONSTRUCTOR
9681 into memory. If it does, it is more
9682 efficient to just load the data from
9683 the array directly. */
9684 rtx ret = expand_constructor (value, target,
9685 modifier, true);
9686 if (ret == NULL_RTX)
9687 break;
9688 }
9689
9690 return expand_expr (fold (value), target, tmode,
9691 modifier);
9692 }
9693 }
9694 	      else if (TREE_CODE (init) == STRING_CST)
9695 {
9696 tree index1 = index;
9697 tree low_bound = array_ref_low_bound (exp);
9698 index1 = fold_convert_loc (loc, sizetype,
9699 treeop1);
9700
9701 /* Optimize the special-case of a zero lower bound.
9702
9703 We convert the low_bound to sizetype to avoid some problems
9704 with constant folding. (E.g. suppose the lower bound is 1,
9705 		     and its mode is QI.  Without the conversion, (ARRAY
9706 		     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9707 		     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
9708
9709 if (! integer_zerop (low_bound))
9710 index1 = size_diffop_loc (loc, index1,
9711 fold_convert_loc (loc, sizetype,
9712 low_bound));
9713
9714 if (0 > compare_tree_int (index1,
9715 TREE_STRING_LENGTH (init)))
9716 {
9717 tree type = TREE_TYPE (TREE_TYPE (init));
9718 enum machine_mode mode = TYPE_MODE (type);
9719
9720 if (GET_MODE_CLASS (mode) == MODE_INT
9721 && GET_MODE_SIZE (mode) == 1)
9722 return gen_int_mode (TREE_STRING_POINTER (init)
9723 [TREE_INT_CST_LOW (index1)],
9724 mode);
9725 }
9726 }
9727 }
9728 }
9729 }
9730 goto normal_inner_ref;
9731
9732 case COMPONENT_REF:
9733 /* If the operand is a CONSTRUCTOR, we can just extract the
9734 appropriate field if it is present. */
9735 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9736 {
9737 unsigned HOST_WIDE_INT idx;
9738 tree field, value;
9739
9740 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9741 idx, field, value)
9742 if (field == treeop1
9743 /* We can normally use the value of the field in the
9744 CONSTRUCTOR. However, if this is a bitfield in
9745 an integral mode that we can fit in a HOST_WIDE_INT,
9746 we must mask only the number of bits in the bitfield,
9747 since this is done implicitly by the constructor. If
9748 the bitfield does not meet either of those conditions,
9749 we can't do this optimization. */
9750 && (! DECL_BIT_FIELD (field)
9751 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9752 && (GET_MODE_PRECISION (DECL_MODE (field))
9753 <= HOST_BITS_PER_WIDE_INT))))
9754 {
9755 if (DECL_BIT_FIELD (field)
9756 && modifier == EXPAND_STACK_PARM)
9757 target = 0;
9758 op0 = expand_expr (value, target, tmode, modifier);
9759 if (DECL_BIT_FIELD (field))
9760 {
9761 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9762 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9763
9764 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9765 {
9766 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
9767 op0 = expand_and (imode, op0, op1, target);
9768 }
9769 else
9770 {
9771 int count = GET_MODE_PRECISION (imode) - bitsize;
9772
9773 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9774 target, 0);
9775 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9776 target, 0);
9777 }
9778 }
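	      /* For example, a signed 3-bit field whose value was expanded in
		 SImode is sign-extended above by shifting left and then
		 arithmetically right by 32 - 3 = 29 bits; the unsigned case
		 simply masks with (1 << 3) - 1.  */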
9779
9780 return op0;
9781 }
9782 }
9783 goto normal_inner_ref;
9784
9785 case BIT_FIELD_REF:
9786 case ARRAY_RANGE_REF:
9787 normal_inner_ref:
9788 {
9789 enum machine_mode mode1, mode2;
9790 HOST_WIDE_INT bitsize, bitpos;
9791 tree offset;
9792 int volatilep = 0, must_force_mem;
9793 bool packedp = false;
9794 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9795 &mode1, &unsignedp, &volatilep, true);
9796 rtx orig_op0, memloc;
9797 bool mem_attrs_from_type = false;
9798
9799 /* If we got back the original object, something is wrong. Perhaps
9800 we are evaluating an expression too early. In any event, don't
9801 infinitely recurse. */
9802 gcc_assert (tem != exp);
9803
9804 if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
9805 || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
9806 && DECL_PACKED (TREE_OPERAND (exp, 1))))
9807 packedp = true;
9808
9809 /* If TEM's type is a union of variable size, pass TARGET to the inner
9810 computation, since it will need a temporary and TARGET is known
9811 	       to suffice.  This occurs in unchecked conversion in Ada.  */
9812 orig_op0 = op0
9813 = expand_expr (tem,
9814 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9815 && COMPLETE_TYPE_P (TREE_TYPE (tem))
9816 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9817 != INTEGER_CST)
9818 && modifier != EXPAND_STACK_PARM
9819 ? target : NULL_RTX),
9820 VOIDmode,
9821 (modifier == EXPAND_INITIALIZER
9822 || modifier == EXPAND_CONST_ADDRESS
9823 || modifier == EXPAND_STACK_PARM)
9824 ? modifier : EXPAND_NORMAL);
9825
9826
9827 /* If the bitfield is volatile, we want to access it in the
9828 field's mode, not the computed mode.
9829 If a MEM has VOIDmode (external with incomplete type),
9830 use BLKmode for it instead. */
9831 if (MEM_P (op0))
9832 {
9833 if (volatilep && flag_strict_volatile_bitfields > 0)
9834 op0 = adjust_address (op0, mode1, 0);
9835 else if (GET_MODE (op0) == VOIDmode)
9836 op0 = adjust_address (op0, BLKmode, 0);
9837 }
9838
9839 mode2
9840 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9841
9842 /* If we have either an offset, a BLKmode result, or a reference
9843 outside the underlying object, we must force it to memory.
9844 Such a case can occur in Ada if we have unchecked conversion
9845 of an expression from a scalar type to an aggregate type or
9846 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9847 passed a partially uninitialized object or a view-conversion
9848 to a larger size. */
9849 must_force_mem = (offset
9850 || mode1 == BLKmode
9851 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9852
9853 /* Handle CONCAT first. */
9854 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9855 {
9856 if (bitpos == 0
9857 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9858 return op0;
9859 if (bitpos == 0
9860 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9861 && bitsize)
9862 {
9863 op0 = XEXP (op0, 0);
9864 mode2 = GET_MODE (op0);
9865 }
9866 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9867 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9868 && bitpos
9869 && bitsize)
9870 {
9871 op0 = XEXP (op0, 1);
9872 bitpos = 0;
9873 mode2 = GET_MODE (op0);
9874 }
9875 else
9876 /* Otherwise force into memory. */
9877 must_force_mem = 1;
9878 }
9879
9880 /* If this is a constant, put it in a register if it is a legitimate
9881 constant and we don't need a memory reference. */
9882 if (CONSTANT_P (op0)
9883 && mode2 != BLKmode
9884 && targetm.legitimate_constant_p (mode2, op0)
9885 && !must_force_mem)
9886 op0 = force_reg (mode2, op0);
9887
9888 /* Otherwise, if this is a constant, try to force it to the constant
9889 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9890 is a legitimate constant. */
9891 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9892 op0 = validize_mem (memloc);
9893
9894 /* Otherwise, if this is a constant or the object is not in memory
9895 	     and needs to be, put it there.  */
9896 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9897 {
9898 tree nt = build_qualified_type (TREE_TYPE (tem),
9899 (TYPE_QUALS (TREE_TYPE (tem))
9900 | TYPE_QUAL_CONST));
9901 memloc = assign_temp (nt, 1, 1);
9902 emit_move_insn (memloc, op0);
9903 op0 = memloc;
9904 mem_attrs_from_type = true;
9905 }
9906
9907 if (offset)
9908 {
9909 enum machine_mode address_mode;
9910 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9911 EXPAND_SUM);
9912
9913 gcc_assert (MEM_P (op0));
9914
9915 address_mode = get_address_mode (op0);
9916 if (GET_MODE (offset_rtx) != address_mode)
9917 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9918
9919 if (GET_MODE (op0) == BLKmode
9920 /* A constant address in OP0 can have VOIDmode, we must
9921 not try to call force_reg in that case. */
9922 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9923 && bitsize != 0
9924 && (bitpos % bitsize) == 0
9925 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9926 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9927 {
9928 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9929 bitpos = 0;
9930 }
9931
9932 op0 = offset_address (op0, offset_rtx,
9933 highest_pow2_factor (offset));
9934 }
9935
9936 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9937 record its alignment as BIGGEST_ALIGNMENT. */
9938 if (MEM_P (op0) && bitpos == 0 && offset != 0
9939 && is_aligning_offset (offset, tem))
9940 set_mem_align (op0, BIGGEST_ALIGNMENT);
9941
9942 /* Don't forget about volatility even if this is a bitfield. */
9943 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9944 {
9945 if (op0 == orig_op0)
9946 op0 = copy_rtx (op0);
9947
9948 MEM_VOLATILE_P (op0) = 1;
9949 }
9950
9951 /* In cases where an aligned union has an unaligned object
9952 as a field, we might be extracting a BLKmode value from
9953 an integer-mode (e.g., SImode) object. Handle this case
9954 by doing the extract into an object as wide as the field
9955 (which we know to be the width of a basic mode), then
9956 storing into memory, and changing the mode to BLKmode. */
9957 if (mode1 == VOIDmode
9958 || REG_P (op0) || GET_CODE (op0) == SUBREG
9959 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9960 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9961 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9962 && modifier != EXPAND_CONST_ADDRESS
9963 && modifier != EXPAND_INITIALIZER
9964 && modifier != EXPAND_MEMORY)
9965 /* If the field is volatile, we always want an aligned
9966 	         access.  Do this in the following two situations:
9967 1. the access is not already naturally
9968 aligned, otherwise "normal" (non-bitfield) volatile fields
9969 become non-addressable.
9970 2. the bitsize is narrower than the access size. Need
9971 to extract bitfields from the access. */
9972 || (volatilep && flag_strict_volatile_bitfields > 0
9973 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0
9974 || (mode1 != BLKmode
9975 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
9976 /* If the field isn't aligned enough to fetch as a memref,
9977 fetch it as a bit field. */
9978 || (mode1 != BLKmode
9979 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9980 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9981 || (MEM_P (op0)
9982 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9983 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9984 && ((modifier == EXPAND_CONST_ADDRESS
9985 || modifier == EXPAND_INITIALIZER)
9986 ? STRICT_ALIGNMENT
9987 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9988 || (bitpos % BITS_PER_UNIT != 0)))
9989 /* If the type and the field are a constant size and the
9990 size of the type isn't the same size as the bitfield,
9991 we must use bitfield operations. */
9992 || (bitsize >= 0
9993 && TYPE_SIZE (TREE_TYPE (exp))
9994 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9995 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9996 bitsize)))
9997 {
9998 enum machine_mode ext_mode = mode;
9999
10000 if (ext_mode == BLKmode
10001 && ! (target != 0 && MEM_P (op0)
10002 && MEM_P (target)
10003 && bitpos % BITS_PER_UNIT == 0))
10004 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10005
10006 if (ext_mode == BLKmode)
10007 {
10008 if (target == 0)
10009 target = assign_temp (type, 1, 1);
10010
10011 if (bitsize == 0)
10012 return target;
10013
10014 /* In this case, BITPOS must start at a byte boundary and
10015 TARGET, if specified, must be a MEM. */
10016 gcc_assert (MEM_P (op0)
10017 && (!target || MEM_P (target))
10018 && !(bitpos % BITS_PER_UNIT));
10019
10020 emit_block_move (target,
10021 adjust_address (op0, VOIDmode,
10022 bitpos / BITS_PER_UNIT),
10023 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10024 / BITS_PER_UNIT),
10025 (modifier == EXPAND_STACK_PARM
10026 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10027
10028 return target;
10029 }
10030
10031 op0 = validize_mem (op0);
10032
10033 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10034 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10035
10036 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
10037 (modifier == EXPAND_STACK_PARM
10038 ? NULL_RTX : target),
10039 ext_mode, ext_mode);
10040
10041 /* If the result is a record type and BITSIZE is narrower than
10042 the mode of OP0, an integral mode, and this is a big endian
10043 machine, we must put the field into the high-order bits. */
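	  /* For example, a 24-bit record extracted into an SImode register on
	     a big-endian target is shifted left by 32 - 24 = 8 bits so its
	     bytes land where a following BLKmode store expects them.  */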
10044 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10045 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10046 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10047 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10048 GET_MODE_BITSIZE (GET_MODE (op0))
10049 - bitsize, op0, 1);
10050
10051 /* If the result type is BLKmode, store the data into a temporary
10052 of the appropriate type, but with the mode corresponding to the
10053 mode for the data we have (op0's mode). It's tempting to make
10054 this a constant type, since we know it's only being stored once,
10055 but that can cause problems if we are taking the address of this
10056 COMPONENT_REF because the MEM of any reference via that address
10057 will have flags corresponding to the type, which will not
10058 necessarily be constant. */
10059 if (mode == BLKmode)
10060 {
10061 rtx new_rtx;
10062
10063 new_rtx = assign_stack_temp_for_type (ext_mode,
10064 GET_MODE_BITSIZE (ext_mode),
10065 type);
10066 emit_move_insn (new_rtx, op0);
10067 op0 = copy_rtx (new_rtx);
10068 PUT_MODE (op0, BLKmode);
10069 }
10070
10071 return op0;
10072 }
10073
10074 /* If the result is BLKmode, use that to access the object
10075 now as well. */
10076 if (mode == BLKmode)
10077 mode1 = BLKmode;
10078
10079 /* Get a reference to just this component. */
10080 if (modifier == EXPAND_CONST_ADDRESS
10081 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10082 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10083 else
10084 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10085
10086 if (op0 == orig_op0)
10087 op0 = copy_rtx (op0);
10088
10089 /* If op0 is a temporary because of forcing to memory, pass only the
10090 type to set_mem_attributes so that the original expression is never
10091 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10092 if (mem_attrs_from_type)
10093 set_mem_attributes (op0, type, 0);
10094 else
10095 set_mem_attributes (op0, exp, 0);
10096
10097 if (REG_P (XEXP (op0, 0)))
10098 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10099
10100 MEM_VOLATILE_P (op0) |= volatilep;
10101 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10102 || modifier == EXPAND_CONST_ADDRESS
10103 || modifier == EXPAND_INITIALIZER)
10104 return op0;
10105 else if (target == 0)
10106 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10107
10108 convert_move (target, op0, unsignedp);
10109 return target;
10110 }
10111
10112 case OBJ_TYPE_REF:
10113 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10114
10115 case CALL_EXPR:
10116 /* All valid uses of __builtin_va_arg_pack () are removed during
10117 inlining. */
10118 if (CALL_EXPR_VA_ARG_PACK (exp))
10119 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10120 {
10121 tree fndecl = get_callee_fndecl (exp), attr;
10122
10123 if (fndecl
10124 && (attr = lookup_attribute ("error",
10125 DECL_ATTRIBUTES (fndecl))) != NULL)
10126 error ("%Kcall to %qs declared with attribute error: %s",
10127 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10128 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10129 if (fndecl
10130 && (attr = lookup_attribute ("warning",
10131 DECL_ATTRIBUTES (fndecl))) != NULL)
10132 warning_at (tree_nonartificial_location (exp),
10133 0, "%Kcall to %qs declared with attribute warning: %s",
10134 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10135 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10136
10137 /* Check for a built-in function. */
10138 if (fndecl && DECL_BUILT_IN (fndecl))
10139 {
10140 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10141 return expand_builtin (exp, target, subtarget, tmode, ignore);
10142 }
10143 }
10144 return expand_call (exp, target, ignore);
10145
10146 case VIEW_CONVERT_EXPR:
10147 op0 = NULL_RTX;
10148
10149 /* If we are converting to BLKmode, try to avoid an intermediate
10150 temporary by fetching an inner memory reference. */
10151 if (mode == BLKmode
10152 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10153 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10154 && handled_component_p (treeop0))
10155 {
10156 enum machine_mode mode1;
10157 HOST_WIDE_INT bitsize, bitpos;
10158 tree offset;
10159 int unsignedp;
10160 int volatilep = 0;
10161 tree tem
10162 = get_inner_reference (treeop0, &bitsize, &bitpos,
10163 &offset, &mode1, &unsignedp, &volatilep,
10164 true);
10165 rtx orig_op0;
10166
10167 /* ??? We should work harder and deal with non-zero offsets. */
10168 if (!offset
10169 && (bitpos % BITS_PER_UNIT) == 0
10170 && bitsize >= 0
10171 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
10172 {
10173 /* See the normal_inner_ref case for the rationale. */
10174 orig_op0
10175 = expand_expr (tem,
10176 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10177 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10178 != INTEGER_CST)
10179 && modifier != EXPAND_STACK_PARM
10180 ? target : NULL_RTX),
10181 VOIDmode,
10182 (modifier == EXPAND_INITIALIZER
10183 || modifier == EXPAND_CONST_ADDRESS
10184 || modifier == EXPAND_STACK_PARM)
10185 ? modifier : EXPAND_NORMAL);
10186
10187 if (MEM_P (orig_op0))
10188 {
10189 op0 = orig_op0;
10190
10191 /* Get a reference to just this component. */
10192 if (modifier == EXPAND_CONST_ADDRESS
10193 || modifier == EXPAND_SUM
10194 || modifier == EXPAND_INITIALIZER)
10195 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10196 else
10197 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10198
10199 if (op0 == orig_op0)
10200 op0 = copy_rtx (op0);
10201
10202 set_mem_attributes (op0, treeop0, 0);
10203 if (REG_P (XEXP (op0, 0)))
10204 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10205
10206 MEM_VOLATILE_P (op0) |= volatilep;
10207 }
10208 }
10209 }
10210
10211 if (!op0)
10212 op0 = expand_expr (treeop0,
10213 NULL_RTX, VOIDmode, modifier);
10214
10215 /* If the input and output modes are both the same, we are done. */
10216 if (mode == GET_MODE (op0))
10217 ;
10218 /* If neither mode is BLKmode, and both modes are the same size
10219 then we can use gen_lowpart. */
10220 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10221 && (GET_MODE_PRECISION (mode)
10222 == GET_MODE_PRECISION (GET_MODE (op0)))
10223 && !COMPLEX_MODE_P (GET_MODE (op0)))
10224 {
10225 if (GET_CODE (op0) == SUBREG)
10226 op0 = force_reg (GET_MODE (op0), op0);
10227 temp = gen_lowpart_common (mode, op0);
10228 if (temp)
10229 op0 = temp;
10230 else
10231 {
10232 if (!REG_P (op0) && !MEM_P (op0))
10233 op0 = force_reg (GET_MODE (op0), op0);
10234 op0 = gen_lowpart (mode, op0);
10235 }
10236 }
10237 /* If both types are integral, convert from one mode to the other. */
10238 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10239 op0 = convert_modes (mode, GET_MODE (op0), op0,
10240 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10241 /* As a last resort, spill op0 to memory, and reload it in a
10242 different mode. */
10243 else if (!MEM_P (op0))
10244 {
10245 /* If the operand is not a MEM, force it into memory. Since we
10246 are going to be changing the mode of the MEM, don't call
10247 force_const_mem for constants because we don't allow pool
10248 constants to change mode. */
10249 tree inner_type = TREE_TYPE (treeop0);
10250
10251 gcc_assert (!TREE_ADDRESSABLE (exp));
10252
10253 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10254 target
10255 = assign_stack_temp_for_type
10256 (TYPE_MODE (inner_type),
10257 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10258
10259 emit_move_insn (target, op0);
10260 op0 = target;
10261 }
10262
10263 /* At this point, OP0 is in the correct mode. If the output type is
10264 such that the operand is known to be aligned, indicate that it is.
10265 Otherwise, we need only be concerned about alignment for non-BLKmode
10266 results. */
10267 if (MEM_P (op0))
10268 {
10269 enum insn_code icode;
10270
10271 if (TYPE_ALIGN_OK (type))
10272 {
10273 /* ??? Copying the MEM without substantially changing it might
10274 run afoul of the code handling volatile memory references in
10275 store_expr, which assumes that TARGET is returned unmodified
10276 if it has been used. */
10277 op0 = copy_rtx (op0);
10278 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10279 }
10280 else if (mode != BLKmode
10281 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
10282 /* If the target does have special handling for unaligned
10283 loads of mode then use them. */
10284 && ((icode = optab_handler (movmisalign_optab, mode))
10285 != CODE_FOR_nothing))
10286 {
10287 rtx reg, insn;
10288
10289 op0 = adjust_address (op0, mode, 0);
10290 /* We've already validated the memory, and we're creating a
10291 new pseudo destination. The predicates really can't
10292 fail. */
10293 reg = gen_reg_rtx (mode);
10294
10295 /* Nor can the insn generator. */
10296 insn = GEN_FCN (icode) (reg, op0);
10297 emit_insn (insn);
10298 return reg;
10299 }
10300 else if (STRICT_ALIGNMENT
10301 && mode != BLKmode
10302 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10303 {
10304 tree inner_type = TREE_TYPE (treeop0);
10305 HOST_WIDE_INT temp_size
10306 = MAX (int_size_in_bytes (inner_type),
10307 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10308 rtx new_rtx
10309 = assign_stack_temp_for_type (mode, temp_size, type);
10310 rtx new_with_op0_mode
10311 = adjust_address (new_rtx, GET_MODE (op0), 0);
10312
10313 gcc_assert (!TREE_ADDRESSABLE (exp));
10314
10315 if (GET_MODE (op0) == BLKmode)
10316 emit_block_move (new_with_op0_mode, op0,
10317 GEN_INT (GET_MODE_SIZE (mode)),
10318 (modifier == EXPAND_STACK_PARM
10319 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10320 else
10321 emit_move_insn (new_with_op0_mode, op0);
10322
10323 op0 = new_rtx;
10324 }
10325
10326 op0 = adjust_address (op0, mode, 0);
10327 }
10328
10329 return op0;
10330
10331 case MODIFY_EXPR:
10332 {
10333 tree lhs = treeop0;
10334 tree rhs = treeop1;
10335 gcc_assert (ignore);
10336
10337 /* Check for |= or &= of a bitfield of size one into another bitfield
10338 of size 1. In this case, (unless we need the result of the
10339 assignment) we can do this more efficiently with a
10340 test followed by an assignment, if necessary.
10341
10342 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10343 things change so we do, this code should be enhanced to
10344 support it. */
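	/* For example, with 1-bit bitfields a and b, "a |= b" is expanded
	   below as "if (b) a = 1;" and "a &= b" as "if (!b) a = 0;",
	   avoiding a read-modify-write of a.  */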
10345 if (TREE_CODE (lhs) == COMPONENT_REF
10346 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10347 || TREE_CODE (rhs) == BIT_AND_EXPR)
10348 && TREE_OPERAND (rhs, 0) == lhs
10349 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10350 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10351 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10352 {
10353 rtx label = gen_label_rtx ();
10354 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10355 do_jump (TREE_OPERAND (rhs, 1),
10356 value ? label : 0,
10357 value ? 0 : label, -1);
10358 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10359 false);
10360 do_pending_stack_adjust ();
10361 emit_label (label);
10362 return const0_rtx;
10363 }
10364
10365 expand_assignment (lhs, rhs, false);
10366 return const0_rtx;
10367 }
10368
10369 case ADDR_EXPR:
10370 return expand_expr_addr_expr (exp, target, tmode, modifier);
10371
10372 case REALPART_EXPR:
10373 op0 = expand_normal (treeop0);
10374 return read_complex_part (op0, false);
10375
10376 case IMAGPART_EXPR:
10377 op0 = expand_normal (treeop0);
10378 return read_complex_part (op0, true);
10379
10380 case RETURN_EXPR:
10381 case LABEL_EXPR:
10382 case GOTO_EXPR:
10383 case SWITCH_EXPR:
10384 case ASM_EXPR:
10385 /* Expanded in cfgexpand.c. */
10386 gcc_unreachable ();
10387
10388 case TRY_CATCH_EXPR:
10389 case CATCH_EXPR:
10390 case EH_FILTER_EXPR:
10391 case TRY_FINALLY_EXPR:
10392 /* Lowered by tree-eh.c. */
10393 gcc_unreachable ();
10394
10395 case WITH_CLEANUP_EXPR:
10396 case CLEANUP_POINT_EXPR:
10397 case TARGET_EXPR:
10398 case CASE_LABEL_EXPR:
10399 case VA_ARG_EXPR:
10400 case BIND_EXPR:
10401 case INIT_EXPR:
10402 case CONJ_EXPR:
10403 case COMPOUND_EXPR:
10404 case PREINCREMENT_EXPR:
10405 case PREDECREMENT_EXPR:
10406 case POSTINCREMENT_EXPR:
10407 case POSTDECREMENT_EXPR:
10408 case LOOP_EXPR:
10409 case EXIT_EXPR:
10410 case COMPOUND_LITERAL_EXPR:
10411 /* Lowered by gimplify.c. */
10412 gcc_unreachable ();
10413
10414 case FDESC_EXPR:
10415 	    /* Function descriptors are not valid except as
10416 initialization constants, and should not be expanded. */
10417 gcc_unreachable ();
10418
10419 case WITH_SIZE_EXPR:
10420 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10421 have pulled out the size to use in whatever context it needed. */
10422 return expand_expr_real (treeop0, original_target, tmode,
10423 modifier, alt_rtl);
10424
10425 default:
10426 return expand_expr_real_2 (&ops, target, tmode, modifier);
10427 }
10428 }
10429 \f
10430 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10431 signedness of TYPE), possibly returning the result in TARGET. */
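/* For example, with a signed 3-bit TYPE and EXP in SImode, the non-constant
   path below emits (exp << 29) >> 29 using an arithmetic right shift, while
   the unsigned path simply masks with 0x7.  */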
10432 static rtx
10433 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10434 {
10435 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10436 if (target && GET_MODE (target) != GET_MODE (exp))
10437 target = 0;
10438 /* For constant values, reduce using build_int_cst_type. */
10439 if (CONST_INT_P (exp))
10440 {
10441 HOST_WIDE_INT value = INTVAL (exp);
10442 tree t = build_int_cst_type (type, value);
10443 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10444 }
10445 else if (TYPE_UNSIGNED (type))
10446 {
10447 rtx mask = immed_double_int_const (double_int::mask (prec),
10448 GET_MODE (exp));
10449 return expand_and (GET_MODE (exp), exp, mask, target);
10450 }
10451 else
10452 {
10453 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10454 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10455 exp, count, target, 0);
10456 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10457 exp, count, target, 0);
10458 }
10459 }
10460 \f
10461 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10462 when applied to the address of EXP produces an address known to be
10463 aligned more than BIGGEST_ALIGNMENT. */
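/* The offsets recognized here have the shape produced by code that aligns a
   buffer by hand, e.g. (-(sizetype) &exp) & (ALIGN - 1), where ALIGN is a
   power of 2 larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT.  */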
10464
10465 static int
10466 is_aligning_offset (const_tree offset, const_tree exp)
10467 {
10468 /* Strip off any conversions. */
10469 while (CONVERT_EXPR_P (offset))
10470 offset = TREE_OPERAND (offset, 0);
10471
10472 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10473 a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT. */
10474 if (TREE_CODE (offset) != BIT_AND_EXPR
10475 || !host_integerp (TREE_OPERAND (offset, 1), 1)
10476 || compare_tree_int (TREE_OPERAND (offset, 1),
10477 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10478 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
10479 return 0;
10480
10481 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10482 It must be NEGATE_EXPR. Then strip any more conversions. */
10483 offset = TREE_OPERAND (offset, 0);
10484 while (CONVERT_EXPR_P (offset))
10485 offset = TREE_OPERAND (offset, 0);
10486
10487 if (TREE_CODE (offset) != NEGATE_EXPR)
10488 return 0;
10489
10490 offset = TREE_OPERAND (offset, 0);
10491 while (CONVERT_EXPR_P (offset))
10492 offset = TREE_OPERAND (offset, 0);
10493
10494 /* This must now be the address of EXP. */
10495 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10496 }
10497 \f
10498 /* Return the tree node if ARG corresponds to a string constant, or zero
10499 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10500 in bytes within the string that ARG is accessing. The type of the
10501 offset will be `sizetype'. */
10502
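/* Editorial illustration (a sketch, not from the original sources): for a
   call such as strlen (&"hello"[2]), ARG is the ADDR_EXPR of an ARRAY_REF
   into the STRING_CST "hello"; this function returns that STRING_CST and
   sets *PTR_OFFSET to (sizetype) 2.  The literal "hello" is hypothetical.  */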
10503 tree
10504 string_constant (tree arg, tree *ptr_offset)
10505 {
10506 tree array, offset, lower_bound;
10507 STRIP_NOPS (arg);
10508
10509 if (TREE_CODE (arg) == ADDR_EXPR)
10510 {
10511 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10512 {
10513 *ptr_offset = size_zero_node;
10514 return TREE_OPERAND (arg, 0);
10515 }
10516 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10517 {
10518 array = TREE_OPERAND (arg, 0);
10519 offset = size_zero_node;
10520 }
10521 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10522 {
10523 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10524 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10525 if (TREE_CODE (array) != STRING_CST
10526 && TREE_CODE (array) != VAR_DECL)
10527 return 0;
10528
10529 /* Check if the array has a nonzero lower bound. */
10530 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10531 if (!integer_zerop (lower_bound))
10532 {
10533 /* If the offset and base aren't both constants, return 0. */
10534 if (TREE_CODE (lower_bound) != INTEGER_CST)
10535 return 0;
10536 if (TREE_CODE (offset) != INTEGER_CST)
10537 return 0;
10538 /* Adjust offset by the lower bound. */
10539 offset = size_diffop (fold_convert (sizetype, offset),
10540 fold_convert (sizetype, lower_bound));
10541 }
10542 }
10543 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10544 {
10545 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10546 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10547 if (TREE_CODE (array) != ADDR_EXPR)
10548 return 0;
10549 array = TREE_OPERAND (array, 0);
10550 if (TREE_CODE (array) != STRING_CST
10551 && TREE_CODE (array) != VAR_DECL)
10552 return 0;
10553 }
10554 else
10555 return 0;
10556 }
10557 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10558 {
10559 tree arg0 = TREE_OPERAND (arg, 0);
10560 tree arg1 = TREE_OPERAND (arg, 1);
10561
10562 STRIP_NOPS (arg0);
10563 STRIP_NOPS (arg1);
10564
10565 if (TREE_CODE (arg0) == ADDR_EXPR
10566 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10567 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10568 {
10569 array = TREE_OPERAND (arg0, 0);
10570 offset = arg1;
10571 }
10572 else if (TREE_CODE (arg1) == ADDR_EXPR
10573 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10574 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10575 {
10576 array = TREE_OPERAND (arg1, 0);
10577 offset = arg0;
10578 }
10579 else
10580 return 0;
10581 }
10582 else
10583 return 0;
10584
10585 if (TREE_CODE (array) == STRING_CST)
10586 {
10587 *ptr_offset = fold_convert (sizetype, offset);
10588 return array;
10589 }
10590 else if (TREE_CODE (array) == VAR_DECL
10591 || TREE_CODE (array) == CONST_DECL)
10592 {
10593 int length;
10594
10595 /* Variables initialized to string literals can be handled too. */
10596 if (!const_value_known_p (array)
10597 || !DECL_INITIAL (array)
10598 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
10599 return 0;
10600
10601 /* Avoid const char foo[4] = "abcde"; */
10602 if (DECL_SIZE_UNIT (array) == NULL_TREE
10603 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10604 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
10605 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10606 return 0;
10607
10608 /* If variable is bigger than the string literal, OFFSET must be constant
10609 and inside of the bounds of the string literal. */
10610 offset = fold_convert (sizetype, offset);
10611 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10612 && (! host_integerp (offset, 1)
10613 || compare_tree_int (offset, length) >= 0))
10614 return 0;
10615
10616 *ptr_offset = offset;
10617 return DECL_INITIAL (array);
10618 }
10619
10620 return 0;
10621 }
10622 \f
10623 /* Generate code to calculate OPS, an exploded expression,
10624 using a store-flag instruction and return an rtx for the result.
10625 OPS reflects a comparison.
10626
10627 If TARGET is nonzero, store the result there if convenient.
10628
10629 Return zero if there is no suitable set-flag instruction
10630 available on this machine.
10631
10632 Once expand_expr has been called on the arguments of the comparison,
10633 we are committed to doing the store flag, since it is not safe to
10634 re-evaluate the expression. We emit the store-flag insn by calling
10635 emit_store_flag, but only expand the arguments if we have a reason
10636 to believe that emit_store_flag will be successful. If we think that
10637 it will, but it isn't, we have to simulate the store-flag with a
10638 set/jump/set sequence. */
10639
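/* Editorial illustration (a sketch, not from the original sources): for a
   comparison such as "x > 0" on a target with a suitable cstore pattern,
   the emit_store_flag_force call at the end of this function can produce
   a single insn of the shape
       (set (reg:SI t) (gt:SI (reg:SI x) (const_int 0)))
   rather than a compare followed by a conditional jump; "x" and "t" are
   hypothetical names used only for illustration.  */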
10640 static rtx
10641 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10642 {
10643 enum rtx_code code;
10644 tree arg0, arg1, type;
10645 tree tem;
10646 enum machine_mode operand_mode;
10647 int unsignedp;
10648 rtx op0, op1;
10649 rtx subtarget = target;
10650 location_t loc = ops->location;
10651
10652 arg0 = ops->op0;
10653 arg1 = ops->op1;
10654
10655 /* Don't crash if the comparison was erroneous. */
10656 if (arg0 == error_mark_node || arg1 == error_mark_node)
10657 return const0_rtx;
10658
10659 type = TREE_TYPE (arg0);
10660 operand_mode = TYPE_MODE (type);
10661 unsignedp = TYPE_UNSIGNED (type);
10662
10663 /* We won't bother with BLKmode store-flag operations because it would mean
10664 passing a lot of information to emit_store_flag. */
10665 if (operand_mode == BLKmode)
10666 return 0;
10667
10668 /* We won't bother with store-flag operations involving function pointers
10669 when function pointers must be canonicalized before comparisons. */
10670 #ifdef HAVE_canonicalize_funcptr_for_compare
10671 if (HAVE_canonicalize_funcptr_for_compare
10672 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10673 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10674 == FUNCTION_TYPE))
10675 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10676 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10677 == FUNCTION_TYPE))))
10678 return 0;
10679 #endif
10680
10681 STRIP_NOPS (arg0);
10682 STRIP_NOPS (arg1);
10683
10684 /* For vector typed comparisons emit code to generate the desired
10685 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10686 expander for this. */
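/* Editorial illustration (a sketch, not from the original sources): a
   V4SI comparison "a < b" is expanded here as if it had been written
       VEC_COND_EXPR <a < b, {-1,-1,-1,-1}, {0,0,0,0}>
   so each lane of the result is all-ones where the comparison holds and
   all-zeros where it does not.  */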
10687 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10688 {
10689 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10690 tree if_true = constant_boolean_node (true, ops->type);
10691 tree if_false = constant_boolean_node (false, ops->type);
10692 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10693 }
10694
10695 /* Get the rtx comparison code to use. We know that EXP is a comparison
10696 operation of some type. Some comparisons against 1 and -1 can be
10697 converted to comparisons with zero. Do so here so that the tests
10698 below will be aware that we have a comparison with zero. These
10699 tests will not catch constants in the first operand, but constants
10700 are rarely passed as the first operand. */
10701
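/* Editorial illustration (not from the original sources): "x < 1" becomes
   "x <= 0", and for signed operands "x > -1" becomes "x >= 0", which is
   what the LT_EXPR/GT_EXPR cases below implement.  */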
10702 switch (ops->code)
10703 {
10704 case EQ_EXPR:
10705 code = EQ;
10706 break;
10707 case NE_EXPR:
10708 code = NE;
10709 break;
10710 case LT_EXPR:
10711 if (integer_onep (arg1))
10712 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10713 else
10714 code = unsignedp ? LTU : LT;
10715 break;
10716 case LE_EXPR:
10717 if (! unsignedp && integer_all_onesp (arg1))
10718 arg1 = integer_zero_node, code = LT;
10719 else
10720 code = unsignedp ? LEU : LE;
10721 break;
10722 case GT_EXPR:
10723 if (! unsignedp && integer_all_onesp (arg1))
10724 arg1 = integer_zero_node, code = GE;
10725 else
10726 code = unsignedp ? GTU : GT;
10727 break;
10728 case GE_EXPR:
10729 if (integer_onep (arg1))
10730 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10731 else
10732 code = unsignedp ? GEU : GE;
10733 break;
10734
10735 case UNORDERED_EXPR:
10736 code = UNORDERED;
10737 break;
10738 case ORDERED_EXPR:
10739 code = ORDERED;
10740 break;
10741 case UNLT_EXPR:
10742 code = UNLT;
10743 break;
10744 case UNLE_EXPR:
10745 code = UNLE;
10746 break;
10747 case UNGT_EXPR:
10748 code = UNGT;
10749 break;
10750 case UNGE_EXPR:
10751 code = UNGE;
10752 break;
10753 case UNEQ_EXPR:
10754 code = UNEQ;
10755 break;
10756 case LTGT_EXPR:
10757 code = LTGT;
10758 break;
10759
10760 default:
10761 gcc_unreachable ();
10762 }
10763
10764 /* Put a constant second. */
10765 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10766 || TREE_CODE (arg0) == FIXED_CST)
10767 {
10768 tem = arg0; arg0 = arg1; arg1 = tem;
10769 code = swap_condition (code);
10770 }
10771
10772 /* If this is an equality or inequality test of a single bit, we can
10773 do this by shifting the bit being tested to the low-order bit and
10774 masking the result with the constant 1. If the condition was EQ,
10775 we xor it with 1. This does not require an scc insn and is faster
10776 than an scc insn even if we have it.
10777
10778 The code to make this transformation was moved into fold_single_bit_test,
10779 so we just call into the folder and expand its result. */
10780
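/* Editorial illustration (a sketch, not from the original sources): with a
   preceding statement "t = x & 8", the test "t == 0" is folded by
   fold_single_bit_test into the equivalent of ((x >> 3) & 1) ^ 1, which
   needs only shift/and/xor instructions and no scc insn or branch.  The
   names "t" and "x" are hypothetical.  */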
10781 if ((code == NE || code == EQ)
10782 && integer_zerop (arg1)
10783 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10784 {
10785 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
10786 if (srcstmt
10787 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
10788 {
10789 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
10790 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10791 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
10792 gimple_assign_rhs1 (srcstmt),
10793 gimple_assign_rhs2 (srcstmt));
10794 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
10795 if (temp)
10796 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
10797 }
10798 }
10799
10800 if (! get_subtarget (target)
10801 || GET_MODE (subtarget) != operand_mode)
10802 subtarget = 0;
10803
10804 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10805
10806 if (target == 0)
10807 target = gen_reg_rtx (mode);
10808
10809 /* Try a cstore if possible. */
10810 return emit_store_flag_force (target, code, op0, op1,
10811 operand_mode, unsignedp,
10812 (TYPE_PRECISION (ops->type) == 1
10813 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10814 }
10815 \f
10816
10817 /* Stubs in case we haven't got a casesi insn. */
10818 #ifndef HAVE_casesi
10819 # define HAVE_casesi 0
10820 # define gen_casesi(a, b, c, d, e) (0)
10821 # define CODE_FOR_casesi CODE_FOR_nothing
10822 #endif
10823
10824 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10825 0 otherwise (i.e. if there is no casesi instruction).
10826
10827 DEFAULT_PROBABILITY is the probability of jumping to the default
10828 label. */
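/* Editorial illustration (a sketch, not from the original sources): for
       switch (i) { case 5: ... case 12: ... default: ... }
   the operands handed to the casesi pattern below would be the SImode
   index "i", the minimum value 5, the range 7 (i.e. 12 - 5), the dispatch
   table label and the default label; the values are hypothetical.  */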
10829 int
10830 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10831 rtx table_label, rtx default_label, rtx fallback_label,
10832 int default_probability)
10833 {
10834 struct expand_operand ops[5];
10835 enum machine_mode index_mode = SImode;
10836 rtx op1, op2, index;
10837
10838 if (! HAVE_casesi)
10839 return 0;
10840
10841 /* Convert the index to SImode. */
10842 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10843 {
10844 enum machine_mode omode = TYPE_MODE (index_type);
10845 rtx rangertx = expand_normal (range);
10846
10847 /* We must handle the endpoints in the original mode. */
10848 index_expr = build2 (MINUS_EXPR, index_type,
10849 index_expr, minval);
10850 minval = integer_zero_node;
10851 index = expand_normal (index_expr);
10852 if (default_label)
10853 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10854 omode, 1, default_label,
10855 default_probability);
10856 /* Now we can safely truncate. */
10857 index = convert_to_mode (index_mode, index, 0);
10858 }
10859 else
10860 {
10861 if (TYPE_MODE (index_type) != index_mode)
10862 {
10863 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
10864 index_expr = fold_convert (index_type, index_expr);
10865 }
10866
10867 index = expand_normal (index_expr);
10868 }
10869
10870 do_pending_stack_adjust ();
10871
10872 op1 = expand_normal (minval);
10873 op2 = expand_normal (range);
10874
10875 create_input_operand (&ops[0], index, index_mode);
10876 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
10877 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
10878 create_fixed_operand (&ops[3], table_label);
10879 create_fixed_operand (&ops[4], (default_label
10880 ? default_label
10881 : fallback_label));
10882 expand_jump_insn (CODE_FOR_casesi, 5, ops);
10883 return 1;
10884 }
10885
10886 /* Attempt to generate a tablejump instruction; same concept. */
10887 #ifndef HAVE_tablejump
10888 #define HAVE_tablejump 0
10889 #define gen_tablejump(x, y) (0)
10890 #endif
10891
10892 /* Subroutine of the next function.
10893
10894 INDEX is the value being switched on, with the lowest value
10895 in the table already subtracted.
10896 MODE is its expected mode (needed if INDEX is constant).
10897 RANGE is the length of the jump table.
10898 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10899
10900 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10901 index value is out of range.
10902 DEFAULT_PROBABILITY is the probability of jumping to
10903 the default label. */
10904
10905 static void
10906 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10907 rtx default_label, int default_probability)
10908 {
10909 rtx temp, vector;
10910
10911 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10912 cfun->cfg->max_jumptable_ents = INTVAL (range);
10913
10914 /* Do an unsigned comparison (in the proper mode) between the index
10915 expression and the value which represents the length of the range.
10916 Since we just finished subtracting the lower bound of the range
10917 from the index expression, this comparison allows us to simultaneously
10918 check that the original index expression value is both greater than
10919 or equal to the minimum value of the range and less than or equal to
10920 the maximum value of the range. */
10921
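/* Editorial worked example (not from the original sources): for a case
   range [5, 12], RANGE is 7 and INDEX has already had 5 subtracted.  An
   original value of 3 yields INDEX = -2, which as an unsigned number is
   huge and therefore compares GTU against 7, so both the "too small" and
   the "too large" cases reach DEFAULT_LABEL with a single comparison.  */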
10922 if (default_label)
10923 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10924 default_label, default_probability);
10925
10926
10927 /* If index is in range, it must fit in Pmode.
10928 Convert to Pmode so we can index with it. */
10929 if (mode != Pmode)
10930 index = convert_to_mode (Pmode, index, 1);
10931
10932 /* Don't let a MEM slip through, because then INDEX that comes
10933 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10934 and break_out_memory_refs will go to work on it and mess it up. */
10935 #ifdef PIC_CASE_VECTOR_ADDRESS
10936 if (flag_pic && !REG_P (index))
10937 index = copy_to_mode_reg (Pmode, index);
10938 #endif
10939
10940 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10941 GET_MODE_SIZE, because this indicates how large insns are. The other
10942 uses should all be Pmode, because they are addresses. This code
10943 could fail if addresses and insns are not the same size. */
10944 index = gen_rtx_PLUS (Pmode,
10945 gen_rtx_MULT (Pmode, index,
10946 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10947 gen_rtx_LABEL_REF (Pmode, table_label));
10948 #ifdef PIC_CASE_VECTOR_ADDRESS
10949 if (flag_pic)
10950 index = PIC_CASE_VECTOR_ADDRESS (index);
10951 else
10952 #endif
10953 index = memory_address (CASE_VECTOR_MODE, index);
10954 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10955 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10956 convert_move (temp, vector, 0);
10957
10958 emit_jump_insn (gen_tablejump (temp, table_label));
10959
10960 /* If we are generating PIC code or if the table is PC-relative, the
10961 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10962 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10963 emit_barrier ();
10964 }
10965
10966 int
10967 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10968 rtx table_label, rtx default_label, int default_probability)
10969 {
10970 rtx index;
10971
10972 if (! HAVE_tablejump)
10973 return 0;
10974
10975 index_expr = fold_build2 (MINUS_EXPR, index_type,
10976 fold_convert (index_type, index_expr),
10977 fold_convert (index_type, minval));
10978 index = expand_normal (index_expr);
10979 do_pending_stack_adjust ();
10980
10981 do_tablejump (index, TYPE_MODE (index_type),
10982 convert_modes (TYPE_MODE (index_type),
10983 TYPE_MODE (TREE_TYPE (range)),
10984 expand_normal (range),
10985 TYPE_UNSIGNED (TREE_TYPE (range))),
10986 table_label, default_label, default_probability);
10987 return 1;
10988 }
10989
10990 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
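/* Editorial illustration (not from the original sources): a V4SImode
   VECTOR_CST {1, 2, 3, 4} becomes (const_vector:V4SI [1 2 3 4]), with each
   integer element converted through immed_double_int_const; REAL_CST and
   FIXED_CST elements take the CONST_DOUBLE/CONST_FIXED paths below.  */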
10991 static rtx
10992 const_vector_from_tree (tree exp)
10993 {
10994 rtvec v;
10995 unsigned i;
10996 int units;
10997 tree elt;
10998 enum machine_mode inner, mode;
10999
11000 mode = TYPE_MODE (TREE_TYPE (exp));
11001
11002 if (initializer_zerop (exp))
11003 return CONST0_RTX (mode);
11004
11005 units = GET_MODE_NUNITS (mode);
11006 inner = GET_MODE_INNER (mode);
11007
11008 v = rtvec_alloc (units);
11009
11010 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11011 {
11012 elt = VECTOR_CST_ELT (exp, i);
11013
11014 if (TREE_CODE (elt) == REAL_CST)
11015 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11016 inner);
11017 else if (TREE_CODE (elt) == FIXED_CST)
11018 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11019 inner);
11020 else
11021 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
11022 inner);
11023 }
11024
11025 return gen_rtx_CONST_VECTOR (mode, v);
11026 }
11027
11028 /* Build a decl for a personality function given a language prefix. */
11029
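/* Editorial illustration (not from the original sources): with LANG "gxx"
   and DWARF2 unwind info, the name assembled below is
   "__gxx_personality_v0", the personality routine the C++ front end
   expects; with SJLJ unwinding it would be "__gxx_personality_sj0".  */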
11030 tree
11031 build_personality_function (const char *lang)
11032 {
11033 const char *unwind_and_version;
11034 tree decl, type;
11035 char *name;
11036
11037 switch (targetm_common.except_unwind_info (&global_options))
11038 {
11039 case UI_NONE:
11040 return NULL;
11041 case UI_SJLJ:
11042 unwind_and_version = "_sj0";
11043 break;
11044 case UI_DWARF2:
11045 case UI_TARGET:
11046 unwind_and_version = "_v0";
11047 break;
11048 case UI_SEH:
11049 unwind_and_version = "_seh0";
11050 break;
11051 default:
11052 gcc_unreachable ();
11053 }
11054
11055 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11056
11057 type = build_function_type_list (integer_type_node, integer_type_node,
11058 long_long_unsigned_type_node,
11059 ptr_type_node, ptr_type_node, NULL_TREE);
11060 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11061 get_identifier (name), type);
11062 DECL_ARTIFICIAL (decl) = 1;
11063 DECL_EXTERNAL (decl) = 1;
11064 TREE_PUBLIC (decl) = 1;
11065
11066 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11067 are the flags assigned by targetm.encode_section_info. */
11068 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11069
11070 return decl;
11071 }
11072
11073 /* Extracts the personality function of DECL and returns the corresponding
11074 libfunc. */
11075
11076 rtx
11077 get_personality_function (tree decl)
11078 {
11079 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11080 enum eh_personality_kind pk;
11081
11082 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11083 if (pk == eh_personality_none)
11084 return NULL;
11085
11086 if (!personality
11087 && pk == eh_personality_any)
11088 personality = lang_hooks.eh_personality ();
11089
11090 if (pk == eh_personality_lang)
11091 gcc_assert (personality != NULL_TREE);
11092
11093 return XEXP (DECL_RTL (personality), 0);
11094 }
11095
11096 #include "gt-expr.h"