expr.c (expand_assignment): Fold the bitpos in the to_rtx if sufficiently aligned...
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "attribs.h"
30 #include "varasm.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "langhooks.h"
47 #include "intl.h"
48 #include "tm_p.h"
49 #include "tree-iterator.h"
50 #include "basic-block.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
53 #include "gimple-expr.h"
54 #include "is-a.h"
55 #include "gimple.h"
56 #include "gimple-ssa.h"
57 #include "cgraph.h"
58 #include "tree-ssanames.h"
59 #include "target.h"
60 #include "common/common-target.h"
61 #include "timevar.h"
62 #include "df.h"
63 #include "diagnostic.h"
64 #include "tree-ssa-live.h"
65 #include "tree-outof-ssa.h"
66 #include "target-globals.h"
67 #include "params.h"
68 #include "tree-ssa-address.h"
69 #include "cfgexpand.h"
70
71 #ifndef STACK_PUSH_CODE
72 #ifdef STACK_GROWS_DOWNWARD
73 #define STACK_PUSH_CODE PRE_DEC
74 #else
75 #define STACK_PUSH_CODE PRE_INC
76 #endif
77 #endif
78
79
80 /* If this is nonzero, we do not bother generating VOLATILE
81 around volatile memory references, and we are willing to
82 output indirect addresses. If cse is to follow, we reject
83 indirect addresses so a useful potential cse is generated;
84 if it is used only once, instruction combination will produce
85 the same indirect address eventually. */
86 int cse_not_expected;
87
88 /* This structure is used by move_by_pieces to describe the move to
89 be performed. */
90 struct move_by_pieces_d
91 {
92 rtx to;
93 rtx to_addr;
94 int autinc_to;
95 int explicit_inc_to;
96 rtx from;
97 rtx from_addr;
98 int autinc_from;
99 int explicit_inc_from;
100 unsigned HOST_WIDE_INT len;
101 HOST_WIDE_INT offset;
102 int reverse;
103 };
104
105 /* This structure is used by store_by_pieces to describe the clear to
106 be performed. */
107
108 struct store_by_pieces_d
109 {
110 rtx to;
111 rtx to_addr;
112 int autinc_to;
113 int explicit_inc_to;
114 unsigned HOST_WIDE_INT len;
115 HOST_WIDE_INT offset;
116 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
117 void *constfundata;
118 int reverse;
119 };
120
121 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
122 struct move_by_pieces_d *);
123 static bool block_move_libcall_safe_for_call_parm (void);
124 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
125 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
126 unsigned HOST_WIDE_INT);
127 static tree emit_block_move_libcall_fn (int);
128 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
129 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
130 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
131 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
132 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
133 struct store_by_pieces_d *);
134 static tree clear_storage_libcall_fn (int);
135 static rtx compress_float_constant (rtx, rtx);
136 static rtx get_subtarget (rtx);
137 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
138 HOST_WIDE_INT, enum machine_mode,
139 tree, int, alias_set_type);
140 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
141 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
142 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
143 enum machine_mode, tree, alias_set_type, bool);
144
145 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
146
147 static int is_aligning_offset (const_tree, const_tree);
148 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
149 enum expand_modifier);
150 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
151 static rtx do_store_flag (sepops, rtx, enum machine_mode);
152 #ifdef PUSH_ROUNDING
153 static void emit_single_push_insn (enum machine_mode, rtx, tree);
154 #endif
155 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
156 static rtx const_vector_from_tree (tree);
157 static void write_complex_part (rtx, rtx, bool);
158
159 /* This macro is used to determine whether move_by_pieces should be called
160 to perform a structure copy. */
161 #ifndef MOVE_BY_PIECES_P
162 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
163 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
164 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
165 #endif
166
167 /* This macro is used to determine whether clear_by_pieces should be
168 called to clear storage. */
169 #ifndef CLEAR_BY_PIECES_P
170 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
171 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
172 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
173 #endif
174
175 /* This macro is used to determine whether store_by_pieces should be
176 called to "memset" storage with byte values other than zero. */
177 #ifndef SET_BY_PIECES_P
178 #define SET_BY_PIECES_P(SIZE, ALIGN) \
179 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
180 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
181 #endif
182
183 /* This macro is used to determine whether store_by_pieces should be
184 called to "memcpy" storage when the source is a constant string. */
185 #ifndef STORE_BY_PIECES_P
186 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
188 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
189 #endif
190 \f
191 /* This is run to set up which modes can be used
192 directly in memory and to initialize the block move optab. It is run
193 at the beginning of compilation and when the target is reinitialized. */
194
195 void
196 init_expr_target (void)
197 {
198 rtx insn, pat;
199 enum machine_mode mode;
200 int num_clobbers;
201 rtx mem, mem1;
202 rtx reg;
203
204 /* Try indexing by frame ptr and try by stack ptr.
205 It is known that on the Convex the stack ptr isn't a valid index.
206 With luck, one or the other is valid on any machine. */
207 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
208 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
209
210 /* A scratch register we can modify in-place below to avoid
211 useless RTL allocations. */
212 reg = gen_rtx_REG (VOIDmode, -1);
213
214 insn = rtx_alloc (INSN);
215 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
216 PATTERN (insn) = pat;
217
218 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
219 mode = (enum machine_mode) ((int) mode + 1))
220 {
221 int regno;
222
223 direct_load[(int) mode] = direct_store[(int) mode] = 0;
224 PUT_MODE (mem, mode);
225 PUT_MODE (mem1, mode);
226 PUT_MODE (reg, mode);
227
228 /* See if there is some register that can be used in this mode and
229 directly loaded or stored from memory. */
230
231 if (mode != VOIDmode && mode != BLKmode)
232 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
233 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
234 regno++)
235 {
236 if (! HARD_REGNO_MODE_OK (regno, mode))
237 continue;
238
239 SET_REGNO (reg, regno);
240
241 SET_SRC (pat) = mem;
242 SET_DEST (pat) = reg;
243 if (recog (pat, insn, &num_clobbers) >= 0)
244 direct_load[(int) mode] = 1;
245
246 SET_SRC (pat) = mem1;
247 SET_DEST (pat) = reg;
248 if (recog (pat, insn, &num_clobbers) >= 0)
249 direct_load[(int) mode] = 1;
250
251 SET_SRC (pat) = reg;
252 SET_DEST (pat) = mem;
253 if (recog (pat, insn, &num_clobbers) >= 0)
254 direct_store[(int) mode] = 1;
255
256 SET_SRC (pat) = reg;
257 SET_DEST (pat) = mem1;
258 if (recog (pat, insn, &num_clobbers) >= 0)
259 direct_store[(int) mode] = 1;
260 }
261 }
262
263 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
264
265 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
266 mode = GET_MODE_WIDER_MODE (mode))
267 {
268 enum machine_mode srcmode;
269 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
270 srcmode = GET_MODE_WIDER_MODE (srcmode))
271 {
272 enum insn_code ic;
273
274 ic = can_extend_p (mode, srcmode, 0);
275 if (ic == CODE_FOR_nothing)
276 continue;
277
278 PUT_MODE (mem, srcmode);
279
280 if (insn_operand_matches (ic, 1, mem))
281 float_extend_from_mem[mode][srcmode] = true;
282 }
283 }
284 }
285
286 /* This is run at the start of compiling a function. */
287
288 void
289 init_expr (void)
290 {
291 memset (&crtl->expr, 0, sizeof (crtl->expr));
292 }
293 \f
294 /* Copy data from FROM to TO, where the machine modes are not the same.
295 Both modes may be integer, or both may be floating, or both may be
296 fixed-point.
297 UNSIGNEDP should be nonzero if FROM is an unsigned type.
298 This causes zero-extension instead of sign-extension. */
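/* Illustrative sketch, assuming QSRC is an existing QImode rtx value:
   widening it into a fresh SImode pseudo with zero-extension looks like

       rtx dest = gen_reg_rtx (SImode);
       convert_move (dest, qsrc, 1);

   Passing 0 for UNSIGNEDP requests sign-extension instead.  */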
299
300 void
301 convert_move (rtx to, rtx from, int unsignedp)
302 {
303 enum machine_mode to_mode = GET_MODE (to);
304 enum machine_mode from_mode = GET_MODE (from);
305 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
306 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
307 enum insn_code code;
308 rtx libcall;
309
310 /* rtx code for making an equivalent value. */
311 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
312 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
313
314
315 gcc_assert (to_real == from_real);
316 gcc_assert (to_mode != BLKmode);
317 gcc_assert (from_mode != BLKmode);
318
319 /* If the source and destination are already the same, then there's
320 nothing to do. */
321 if (to == from)
322 return;
323
324 /* If FROM is a SUBREG that indicates that we have already done at least
325 the required extension, strip it. We don't handle such SUBREGs as
326 TO here. */
327
328 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
329 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
330 >= GET_MODE_PRECISION (to_mode))
331 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
332 from = gen_lowpart (to_mode, from), from_mode = to_mode;
333
334 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
335
336 if (to_mode == from_mode
337 || (from_mode == VOIDmode && CONSTANT_P (from)))
338 {
339 emit_move_insn (to, from);
340 return;
341 }
342
343 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
344 {
345 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
346
347 if (VECTOR_MODE_P (to_mode))
348 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
349 else
350 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
351
352 emit_move_insn (to, from);
353 return;
354 }
355
356 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
357 {
358 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
359 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
360 return;
361 }
362
363 if (to_real)
364 {
365 rtx value, insns;
366 convert_optab tab;
367
368 gcc_assert ((GET_MODE_PRECISION (from_mode)
369 != GET_MODE_PRECISION (to_mode))
370 || (DECIMAL_FLOAT_MODE_P (from_mode)
371 != DECIMAL_FLOAT_MODE_P (to_mode)));
372
373 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
374 /* Conversion between decimal float and binary float, same size. */
375 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
376 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
377 tab = sext_optab;
378 else
379 tab = trunc_optab;
380
381 /* Try converting directly if the insn is supported. */
382
383 code = convert_optab_handler (tab, to_mode, from_mode);
384 if (code != CODE_FOR_nothing)
385 {
386 emit_unop_insn (code, to, from,
387 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
388 return;
389 }
390
391 /* Otherwise use a libcall. */
392 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
393
394 /* Is this conversion implemented yet? */
395 gcc_assert (libcall);
396
397 start_sequence ();
398 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
399 1, from, from_mode);
400 insns = get_insns ();
401 end_sequence ();
402 emit_libcall_block (insns, to, value,
403 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
404 from)
405 : gen_rtx_FLOAT_EXTEND (to_mode, from));
406 return;
407 }
408
409 /* Handle pointer conversion. */ /* SPEE 900220. */
410 /* Targets are expected to provide conversion insns between PxImode and
411 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
412 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
413 {
414 enum machine_mode full_mode
415 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
416
417 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
418 != CODE_FOR_nothing);
419
420 if (full_mode != from_mode)
421 from = convert_to_mode (full_mode, from, unsignedp);
422 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
423 to, from, UNKNOWN);
424 return;
425 }
426 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
427 {
428 rtx new_from;
429 enum machine_mode full_mode
430 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
431 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
432 enum insn_code icode;
433
434 icode = convert_optab_handler (ctab, full_mode, from_mode);
435 gcc_assert (icode != CODE_FOR_nothing);
436
437 if (to_mode == full_mode)
438 {
439 emit_unop_insn (icode, to, from, UNKNOWN);
440 return;
441 }
442
443 new_from = gen_reg_rtx (full_mode);
444 emit_unop_insn (icode, new_from, from, UNKNOWN);
445
446 /* else proceed to integer conversions below. */
447 from_mode = full_mode;
448 from = new_from;
449 }
450
451 /* Make sure both are fixed-point modes or both are not. */
452 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
453 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
454 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
455 {
456 /* If we widen from_mode to to_mode and they are in the same class,
457 we won't saturate the result.
458 Otherwise, always saturate the result to play safe. */
459 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
460 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
461 expand_fixed_convert (to, from, 0, 0);
462 else
463 expand_fixed_convert (to, from, 0, 1);
464 return;
465 }
466
467 /* Now both modes are integers. */
468
469 /* Handle expanding beyond a word. */
470 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
471 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
472 {
473 rtx insns;
474 rtx lowpart;
475 rtx fill_value;
476 rtx lowfrom;
477 int i;
478 enum machine_mode lowpart_mode;
479 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
480
481 /* Try converting directly if the insn is supported. */
482 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
483 != CODE_FOR_nothing)
484 {
485 /* If FROM is a SUBREG, put it into a register. Do this
486 so that we always generate the same set of insns for
487 better cse'ing; if an intermediate assignment occurred,
488 we won't be doing the operation directly on the SUBREG. */
489 if (optimize > 0 && GET_CODE (from) == SUBREG)
490 from = force_reg (from_mode, from);
491 emit_unop_insn (code, to, from, equiv_code);
492 return;
493 }
494 /* Next, try converting via full word. */
495 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
496 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
497 != CODE_FOR_nothing))
498 {
499 rtx word_to = gen_reg_rtx (word_mode);
500 if (REG_P (to))
501 {
502 if (reg_overlap_mentioned_p (to, from))
503 from = force_reg (from_mode, from);
504 emit_clobber (to);
505 }
506 convert_move (word_to, from, unsignedp);
507 emit_unop_insn (code, to, word_to, equiv_code);
508 return;
509 }
510
511 /* No special multiword conversion insn; do it by hand. */
512 start_sequence ();
513
514 /* Since we will turn this into a no conflict block, we must ensure that
515 the source does not overlap the target, so force it into an isolated
516 register when it might. Likewise for any MEM input, since the
517 conversion sequence might require several references to it and we
518 must ensure we're getting the same value every time. */
519
520 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
521 from = force_reg (from_mode, from);
522
523 /* Get a copy of FROM widened to a word, if necessary. */
524 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
525 lowpart_mode = word_mode;
526 else
527 lowpart_mode = from_mode;
528
529 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
530
531 lowpart = gen_lowpart (lowpart_mode, to);
532 emit_move_insn (lowpart, lowfrom);
533
534 /* Compute the value to put in each remaining word. */
535 if (unsignedp)
536 fill_value = const0_rtx;
537 else
538 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
539 LT, lowfrom, const0_rtx,
540 lowpart_mode, 0, -1);
541
542 /* Fill the remaining words. */
543 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
544 {
545 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
546 rtx subword = operand_subword (to, index, 1, to_mode);
547
548 gcc_assert (subword);
549
550 if (fill_value != subword)
551 emit_move_insn (subword, fill_value);
552 }
553
554 insns = get_insns ();
555 end_sequence ();
556
557 emit_insn (insns);
558 return;
559 }
560
561 /* Truncating multi-word to a word or less. */
562 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
563 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
564 {
565 if (!((MEM_P (from)
566 && ! MEM_VOLATILE_P (from)
567 && direct_load[(int) to_mode]
568 && ! mode_dependent_address_p (XEXP (from, 0),
569 MEM_ADDR_SPACE (from)))
570 || REG_P (from)
571 || GET_CODE (from) == SUBREG))
572 from = force_reg (from_mode, from);
573 convert_move (to, gen_lowpart (word_mode, from), 0);
574 return;
575 }
576
577 /* Now follow all the conversions between integers
578 no more than a word long. */
579
580 /* For truncation, usually we can just refer to FROM in a narrower mode. */
581 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
582 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
583 {
584 if (!((MEM_P (from)
585 && ! MEM_VOLATILE_P (from)
586 && direct_load[(int) to_mode]
587 && ! mode_dependent_address_p (XEXP (from, 0),
588 MEM_ADDR_SPACE (from)))
589 || REG_P (from)
590 || GET_CODE (from) == SUBREG))
591 from = force_reg (from_mode, from);
592 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
593 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
594 from = copy_to_reg (from);
595 emit_move_insn (to, gen_lowpart (to_mode, from));
596 return;
597 }
598
599 /* Handle extension. */
600 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
601 {
602 /* Convert directly if that works. */
603 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
604 != CODE_FOR_nothing)
605 {
606 emit_unop_insn (code, to, from, equiv_code);
607 return;
608 }
609 else
610 {
611 enum machine_mode intermediate;
612 rtx tmp;
613 int shift_amount;
614
615 /* Search for a mode to convert via. */
616 for (intermediate = from_mode; intermediate != VOIDmode;
617 intermediate = GET_MODE_WIDER_MODE (intermediate))
618 if (((can_extend_p (to_mode, intermediate, unsignedp)
619 != CODE_FOR_nothing)
620 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
621 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
622 && (can_extend_p (intermediate, from_mode, unsignedp)
623 != CODE_FOR_nothing))
624 {
625 convert_move (to, convert_to_mode (intermediate, from,
626 unsignedp), unsignedp);
627 return;
628 }
629
630 /* No suitable intermediate mode.
631 Generate what we need with shifts. */
632 shift_amount = (GET_MODE_PRECISION (to_mode)
633 - GET_MODE_PRECISION (from_mode));
634 from = gen_lowpart (to_mode, force_reg (from_mode, from));
635 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
636 to, unsignedp);
637 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
638 to, unsignedp);
639 if (tmp != to)
640 emit_move_insn (to, tmp);
641 return;
642 }
643 }
644
645 /* Support special truncate insns for certain modes. */
646 if (convert_optab_handler (trunc_optab, to_mode,
647 from_mode) != CODE_FOR_nothing)
648 {
649 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
650 to, from, UNKNOWN);
651 return;
652 }
653
654 /* Handle truncation of volatile memrefs, and so on;
655 the things that couldn't be truncated directly,
656 and for which there was no special instruction.
657
658 ??? Code above formerly short-circuited this, for most integer
659 mode pairs, with a force_reg in from_mode followed by a recursive
660 call to this routine. Appears always to have been wrong. */
661 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
662 {
663 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
664 emit_move_insn (to, temp);
665 return;
666 }
667
668 /* Mode combination is not recognized. */
669 gcc_unreachable ();
670 }
671
672 /* Return an rtx for a value that would result
673 from converting X to mode MODE.
674 Both X and MODE may be floating, or both integer.
675 UNSIGNEDP is nonzero if X is an unsigned value.
676 This can be done by referring to a part of X in place
677 or by copying to a new temporary with conversion. */
678
679 rtx
680 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
681 {
682 return convert_modes (mode, VOIDmode, x, unsignedp);
683 }
684
685 /* Return an rtx for a value that would result
686 from converting X from mode OLDMODE to mode MODE.
687 Both modes may be floating, or both integer.
688 UNSIGNEDP is nonzero if X is an unsigned value.
689
690 This can be done by referring to a part of X in place
691 or by copying to a new temporary with conversion.
692
693 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
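/* Illustrative sketch, assuming VAL is an rtx already known to hold an
   unsigned HImode quantity: requesting an SImode view of it might be

       rtx wide = convert_modes (SImode, HImode, val, 1);

   which either reuses VAL in place (for constants and easy lowparts) or
   returns a fresh pseudo initialized with convert_move.  */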
694
695 rtx
696 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
697 {
698 rtx temp;
699
700 /* If FROM is a SUBREG that indicates that we have already done at least
701 the required extension, strip it. */
702
703 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
704 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
705 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
706 x = gen_lowpart (mode, SUBREG_REG (x));
707
708 if (GET_MODE (x) != VOIDmode)
709 oldmode = GET_MODE (x);
710
711 if (mode == oldmode)
712 return x;
713
714 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
715 {
716 /* If the caller did not tell us the old mode, then there is not
717 much to do with respect to canonicalization. We have to
718 assume that all the bits are significant. */
719 if (GET_MODE_CLASS (oldmode) != MODE_INT)
720 oldmode = MAX_MODE_INT;
721 wide_int w = wide_int::from (std::make_pair (x, oldmode),
722 GET_MODE_PRECISION (mode),
723 unsignedp ? UNSIGNED : SIGNED);
724 return immed_wide_int_const (w, mode);
725 }
726
727 /* We can do this with a gen_lowpart if both desired and current modes
728 are integer, and this is either a constant integer, a register, or a
729 non-volatile MEM. */
730 if (GET_MODE_CLASS (mode) == MODE_INT
731 && GET_MODE_CLASS (oldmode) == MODE_INT
732 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
733 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
734 || (REG_P (x)
735 && (!HARD_REGISTER_P (x)
736 || HARD_REGNO_MODE_OK (REGNO (x), mode))
737 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
738
739 return gen_lowpart (mode, x);
740
741 /* Converting an integer constant (which has VOIDmode) into a vector mode
742 is always equivalent to a subreg operation. */
743 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
744 {
745 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
746 return simplify_gen_subreg (mode, x, oldmode, 0);
747 }
748
749 temp = gen_reg_rtx (mode);
750 convert_move (temp, x, unsignedp);
751 return temp;
752 }
753 \f
754 /* Return the largest alignment we can use for doing a move (or store)
755 of MAX_PIECES. ALIGN is the largest alignment we could use. */
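/* Illustrative sketch: a caller copying at most MOVE_MAX_PIECES bytes that
   only knows its MEMs are 2-byte aligned might do

       unsigned int align = alignment_for_piecewise_move (MOVE_MAX_PIECES,
                                                          2 * BITS_PER_UNIT);

   and get back a larger value whenever the target tolerates unaligned
   accesses in the wider integer modes.  */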
756
757 static unsigned int
758 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
759 {
760 enum machine_mode tmode;
761
762 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
763 if (align >= GET_MODE_ALIGNMENT (tmode))
764 align = GET_MODE_ALIGNMENT (tmode);
765 else
766 {
767 enum machine_mode tmode, xmode;
768
769 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
770 tmode != VOIDmode;
771 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
772 if (GET_MODE_SIZE (tmode) > max_pieces
773 || SLOW_UNALIGNED_ACCESS (tmode, align))
774 break;
775
776 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
777 }
778
779 return align;
780 }
781
782 /* Return the widest integer mode that is narrower than SIZE bytes. If no
783 such mode can be found, return VOIDmode. */
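/* Illustrative sketch: on a target providing QI/HI/SI/DImode, probing for
   pieces strictly narrower than 5 bytes

       enum machine_mode m = widest_int_mode_for_size (5);

   yields SImode, while widest_int_mode_for_size (1) yields VOIDmode, since
   no integer mode is narrower than one byte.  */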
784
785 static enum machine_mode
786 widest_int_mode_for_size (unsigned int size)
787 {
788 enum machine_mode tmode, mode = VOIDmode;
789
790 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
791 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
792 if (GET_MODE_SIZE (tmode) < size)
793 mode = tmode;
794
795 return mode;
796 }
797
798 /* STORE_MAX_PIECES is the number of bytes at a time that we can
799 store efficiently. Due to internal GCC limitations, this is
800 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
801 for an immediate constant. */
802
803 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
804
805 /* Determine whether the LEN bytes can be moved by using several move
806 instructions. Return nonzero if a call to move_by_pieces should
807 succeed. */
808
809 int
810 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
811 unsigned int align ATTRIBUTE_UNUSED)
812 {
813 return MOVE_BY_PIECES_P (len, align);
814 }
815
816 /* Generate several move instructions to copy LEN bytes from block FROM to
817 block TO. (These are MEM rtx's with BLKmode).
818
819 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
820 used to push FROM to the stack.
821
822 ALIGN is the maximum stack alignment we can assume.
823
824 If ENDP is 0 return TO; if ENDP is 1 return memory at the end, a la
825 mempcpy; and if ENDP is 2 return memory at the end minus one byte, a la
826 stpcpy. */
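/* Illustrative sketch, assuming DST and SRC are BLKmode MEMs known to be
   word-aligned: copying 32 bytes and ignoring the return value would be

       move_by_pieces (dst, src, 32, BITS_PER_WORD, 0);

   With ENDP of 1 the call instead returns a QImode MEM just past the last
   byte written, as mempcpy-style callers expect.  */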
827
828 rtx
829 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
830 unsigned int align, int endp)
831 {
832 struct move_by_pieces_d data;
833 enum machine_mode to_addr_mode;
834 enum machine_mode from_addr_mode = get_address_mode (from);
835 rtx to_addr, from_addr = XEXP (from, 0);
836 unsigned int max_size = MOVE_MAX_PIECES + 1;
837 enum insn_code icode;
838
839 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
840
841 data.offset = 0;
842 data.from_addr = from_addr;
843 if (to)
844 {
845 to_addr_mode = get_address_mode (to);
846 to_addr = XEXP (to, 0);
847 data.to = to;
848 data.autinc_to
849 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
850 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
851 data.reverse
852 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
853 }
854 else
855 {
856 to_addr_mode = VOIDmode;
857 to_addr = NULL_RTX;
858 data.to = NULL_RTX;
859 data.autinc_to = 1;
860 #ifdef STACK_GROWS_DOWNWARD
861 data.reverse = 1;
862 #else
863 data.reverse = 0;
864 #endif
865 }
866 data.to_addr = to_addr;
867 data.from = from;
868 data.autinc_from
869 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
870 || GET_CODE (from_addr) == POST_INC
871 || GET_CODE (from_addr) == POST_DEC);
872
873 data.explicit_inc_from = 0;
874 data.explicit_inc_to = 0;
875 if (data.reverse) data.offset = len;
876 data.len = len;
877
878 /* If copying requires more than two move insns,
879 copy addresses to registers (to make displacements shorter)
880 and use post-increment if available. */
881 if (!(data.autinc_from && data.autinc_to)
882 && move_by_pieces_ninsns (len, align, max_size) > 2)
883 {
884 /* Find the mode of the largest move...
885 MODE might not be used depending on the definitions of the
886 USE_* macros below. */
887 enum machine_mode mode ATTRIBUTE_UNUSED
888 = widest_int_mode_for_size (max_size);
889
890 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
891 {
892 data.from_addr = copy_to_mode_reg (from_addr_mode,
893 plus_constant (from_addr_mode,
894 from_addr, len));
895 data.autinc_from = 1;
896 data.explicit_inc_from = -1;
897 }
898 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
899 {
900 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
901 data.autinc_from = 1;
902 data.explicit_inc_from = 1;
903 }
904 if (!data.autinc_from && CONSTANT_P (from_addr))
905 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
906 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
907 {
908 data.to_addr = copy_to_mode_reg (to_addr_mode,
909 plus_constant (to_addr_mode,
910 to_addr, len));
911 data.autinc_to = 1;
912 data.explicit_inc_to = -1;
913 }
914 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
915 {
916 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
917 data.autinc_to = 1;
918 data.explicit_inc_to = 1;
919 }
920 if (!data.autinc_to && CONSTANT_P (to_addr))
921 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
922 }
923
924 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
925
926 /* First move what we can in the largest integer mode, then go to
927 successively smaller modes. */
928
929 while (max_size > 1 && data.len > 0)
930 {
931 enum machine_mode mode = widest_int_mode_for_size (max_size);
932
933 if (mode == VOIDmode)
934 break;
935
936 icode = optab_handler (mov_optab, mode);
937 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
938 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
939
940 max_size = GET_MODE_SIZE (mode);
941 }
942
943 /* The code above should have handled everything. */
944 gcc_assert (!data.len);
945
946 if (endp)
947 {
948 rtx to1;
949
950 gcc_assert (!data.reverse);
951 if (data.autinc_to)
952 {
953 if (endp == 2)
954 {
955 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
956 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
957 else
958 data.to_addr = copy_to_mode_reg (to_addr_mode,
959 plus_constant (to_addr_mode,
960 data.to_addr,
961 -1));
962 }
963 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
964 data.offset);
965 }
966 else
967 {
968 if (endp == 2)
969 --data.offset;
970 to1 = adjust_address (data.to, QImode, data.offset);
971 }
972 return to1;
973 }
974 else
975 return data.to;
976 }
977
978 /* Return number of insns required to move L bytes by pieces.
979 ALIGN (in bits) is maximum alignment we can assume. */
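/* Illustrative example, assuming MOVE_MAX_PIECES is 8: copying 11 fully
   aligned bytes takes one DImode, one HImode and one QImode move, so

       move_by_pieces_ninsns (11, 64, MOVE_MAX_PIECES + 1)

   returns 3 on such a target.  */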
980
981 unsigned HOST_WIDE_INT
982 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
983 unsigned int max_size)
984 {
985 unsigned HOST_WIDE_INT n_insns = 0;
986
987 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
988
989 while (max_size > 1 && l > 0)
990 {
991 enum machine_mode mode;
992 enum insn_code icode;
993
994 mode = widest_int_mode_for_size (max_size);
995
996 if (mode == VOIDmode)
997 break;
998
999 icode = optab_handler (mov_optab, mode);
1000 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1001 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1002
1003 max_size = GET_MODE_SIZE (mode);
1004 }
1005
1006 gcc_assert (!l);
1007 return n_insns;
1008 }
1009
1010 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1011 with move instructions for mode MODE. GENFUN is the gen_... function
1012 to make a move insn for that mode. DATA has all the other info. */
1013
1014 static void
1015 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1016 struct move_by_pieces_d *data)
1017 {
1018 unsigned int size = GET_MODE_SIZE (mode);
1019 rtx to1 = NULL_RTX, from1;
1020
1021 while (data->len >= size)
1022 {
1023 if (data->reverse)
1024 data->offset -= size;
1025
1026 if (data->to)
1027 {
1028 if (data->autinc_to)
1029 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1030 data->offset);
1031 else
1032 to1 = adjust_address (data->to, mode, data->offset);
1033 }
1034
1035 if (data->autinc_from)
1036 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1037 data->offset);
1038 else
1039 from1 = adjust_address (data->from, mode, data->offset);
1040
1041 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1042 emit_insn (gen_add2_insn (data->to_addr,
1043 gen_int_mode (-(HOST_WIDE_INT) size,
1044 GET_MODE (data->to_addr))));
1045 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1046 emit_insn (gen_add2_insn (data->from_addr,
1047 gen_int_mode (-(HOST_WIDE_INT) size,
1048 GET_MODE (data->from_addr))));
1049
1050 if (data->to)
1051 emit_insn ((*genfun) (to1, from1));
1052 else
1053 {
1054 #ifdef PUSH_ROUNDING
1055 emit_single_push_insn (mode, from1, NULL);
1056 #else
1057 gcc_unreachable ();
1058 #endif
1059 }
1060
1061 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1062 emit_insn (gen_add2_insn (data->to_addr,
1063 gen_int_mode (size,
1064 GET_MODE (data->to_addr))));
1065 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1066 emit_insn (gen_add2_insn (data->from_addr,
1067 gen_int_mode (size,
1068 GET_MODE (data->from_addr))));
1069
1070 if (! data->reverse)
1071 data->offset += size;
1072
1073 data->len -= size;
1074 }
1075 }
1076 \f
1077 /* Emit code to move a block Y to a block X. This may be done with
1078 string-move instructions, with multiple scalar move instructions,
1079 or with a library call.
1080
1081 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1082 SIZE is an rtx that says how long they are.
1083 ALIGN is the maximum alignment we can assume they have.
1084 METHOD describes what kind of copy this is, and what mechanisms may be used.
1085 MIN_SIZE is the minimal size of the block to move.
1086 MAX_SIZE is the maximal size of the block to move; if it cannot be represented
1087 in an unsigned HOST_WIDE_INT, it is a mask of all ones.
1088
1089 Return the address of the new block, if memcpy is called and returns it,
1090 0 otherwise. */
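/* Illustrative sketch, assuming X and Y are BLKmode MEMs for 64-byte
   objects: the common entry point is the emit_block_move wrapper below,

       emit_block_move (x, y, GEN_INT (64), BLOCK_OP_NORMAL);

   which derives MIN_SIZE and MAX_SIZE from the size rtx and then calls
   this function with no extra alignment or size hints.  */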
1091
1092 rtx
1093 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1094 unsigned int expected_align, HOST_WIDE_INT expected_size,
1095 unsigned HOST_WIDE_INT min_size,
1096 unsigned HOST_WIDE_INT max_size,
1097 unsigned HOST_WIDE_INT probable_max_size)
1098 {
1099 bool may_use_call;
1100 rtx retval = 0;
1101 unsigned int align;
1102
1103 gcc_assert (size);
1104 if (CONST_INT_P (size)
1105 && INTVAL (size) == 0)
1106 return 0;
1107
1108 switch (method)
1109 {
1110 case BLOCK_OP_NORMAL:
1111 case BLOCK_OP_TAILCALL:
1112 may_use_call = true;
1113 break;
1114
1115 case BLOCK_OP_CALL_PARM:
1116 may_use_call = block_move_libcall_safe_for_call_parm ();
1117
1118 /* Make inhibit_defer_pop nonzero around the library call
1119 to force it to pop the arguments right away. */
1120 NO_DEFER_POP;
1121 break;
1122
1123 case BLOCK_OP_NO_LIBCALL:
1124 may_use_call = false;
1125 break;
1126
1127 default:
1128 gcc_unreachable ();
1129 }
1130
1131 gcc_assert (MEM_P (x) && MEM_P (y));
1132 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1133 gcc_assert (align >= BITS_PER_UNIT);
1134
1135 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1136 block copy is more efficient for other large modes, e.g. DCmode. */
1137 x = adjust_address (x, BLKmode, 0);
1138 y = adjust_address (y, BLKmode, 0);
1139
1140 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1141 can be incorrect is coming from __builtin_memcpy. */
1142 if (CONST_INT_P (size))
1143 {
1144 x = shallow_copy_rtx (x);
1145 y = shallow_copy_rtx (y);
1146 set_mem_size (x, INTVAL (size));
1147 set_mem_size (y, INTVAL (size));
1148 }
1149
1150 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1151 move_by_pieces (x, y, INTVAL (size), align, 0);
1152 else if (emit_block_move_via_movmem (x, y, size, align,
1153 expected_align, expected_size,
1154 min_size, max_size, probable_max_size))
1155 ;
1156 else if (may_use_call
1157 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1158 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1159 {
1160 /* Since x and y are passed to a libcall, mark the corresponding
1161 tree EXPR as addressable. */
1162 tree y_expr = MEM_EXPR (y);
1163 tree x_expr = MEM_EXPR (x);
1164 if (y_expr)
1165 mark_addressable (y_expr);
1166 if (x_expr)
1167 mark_addressable (x_expr);
1168 retval = emit_block_move_via_libcall (x, y, size,
1169 method == BLOCK_OP_TAILCALL);
1170 }
1171
1172 else
1173 emit_block_move_via_loop (x, y, size, align);
1174
1175 if (method == BLOCK_OP_CALL_PARM)
1176 OK_DEFER_POP;
1177
1178 return retval;
1179 }
1180
1181 rtx
1182 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1183 {
1184 unsigned HOST_WIDE_INT max, min = 0;
1185 if (GET_CODE (size) == CONST_INT)
1186 min = max = UINTVAL (size);
1187 else
1188 max = GET_MODE_MASK (GET_MODE (size));
1189 return emit_block_move_hints (x, y, size, method, 0, -1,
1190 min, max, max);
1191 }
1192
1193 /* A subroutine of emit_block_move. Returns true if calling the
1194 block move libcall will not clobber any parameters which may have
1195 already been placed on the stack. */
1196
1197 static bool
1198 block_move_libcall_safe_for_call_parm (void)
1199 {
1200 #if defined (REG_PARM_STACK_SPACE)
1201 tree fn;
1202 #endif
1203
1204 /* If arguments are pushed on the stack, then they're safe. */
1205 if (PUSH_ARGS)
1206 return true;
1207
1208 /* If registers go on the stack anyway, any argument is sure to clobber
1209 an outgoing argument. */
1210 #if defined (REG_PARM_STACK_SPACE)
1211 fn = emit_block_move_libcall_fn (false);
1212 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1213 depend on its argument. */
1214 (void) fn;
1215 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1216 && REG_PARM_STACK_SPACE (fn) != 0)
1217 return false;
1218 #endif
1219
1220 /* If any argument goes in memory, then it might clobber an outgoing
1221 argument. */
1222 {
1223 CUMULATIVE_ARGS args_so_far_v;
1224 cumulative_args_t args_so_far;
1225 tree fn, arg;
1226
1227 fn = emit_block_move_libcall_fn (false);
1228 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1229 args_so_far = pack_cumulative_args (&args_so_far_v);
1230
1231 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1232 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1233 {
1234 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1235 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1236 NULL_TREE, true);
1237 if (!tmp || !REG_P (tmp))
1238 return false;
1239 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1240 return false;
1241 targetm.calls.function_arg_advance (args_so_far, mode,
1242 NULL_TREE, true);
1243 }
1244 }
1245 return true;
1246 }
1247
1248 /* A subroutine of emit_block_move. Expand a movmem pattern;
1249 return true if successful. */
1250
1251 static bool
1252 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1253 unsigned int expected_align, HOST_WIDE_INT expected_size,
1254 unsigned HOST_WIDE_INT min_size,
1255 unsigned HOST_WIDE_INT max_size,
1256 unsigned HOST_WIDE_INT probable_max_size)
1257 {
1258 int save_volatile_ok = volatile_ok;
1259 enum machine_mode mode;
1260
1261 if (expected_align < align)
1262 expected_align = align;
1263 if (expected_size != -1)
1264 {
1265 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1266 expected_size = probable_max_size;
1267 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1268 expected_size = min_size;
1269 }
1270
1271 /* Since this is a move insn, we don't care about volatility. */
1272 volatile_ok = 1;
1273
1274 /* Try the most limited insn first, because there's no point
1275 including more than one in the machine description unless
1276 the more limited one has some advantage. */
1277
1278 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1279 mode = GET_MODE_WIDER_MODE (mode))
1280 {
1281 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1282
1283 if (code != CODE_FOR_nothing
1284 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1285 here because if SIZE is less than the mode mask, as it is
1286 returned by the macro, it will definitely be less than the
1287 actual mode mask. Since SIZE is within the Pmode address
1288 space, we limit MODE to Pmode. */
1289 && ((CONST_INT_P (size)
1290 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1291 <= (GET_MODE_MASK (mode) >> 1)))
1292 || max_size <= (GET_MODE_MASK (mode) >> 1)
1293 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1294 {
1295 struct expand_operand ops[9];
1296 unsigned int nops;
1297
1298 /* ??? When called via emit_block_move_for_call, it'd be
1299 nice if there were some way to inform the backend, so
1300 that it doesn't fail the expansion because it thinks
1301 emitting the libcall would be more efficient. */
1302 nops = insn_data[(int) code].n_generator_args;
1303 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1304
1305 create_fixed_operand (&ops[0], x);
1306 create_fixed_operand (&ops[1], y);
1307 /* The check above guarantees that this size conversion is valid. */
1308 create_convert_operand_to (&ops[2], size, mode, true);
1309 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1310 if (nops >= 6)
1311 {
1312 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1313 create_integer_operand (&ops[5], expected_size);
1314 }
1315 if (nops >= 8)
1316 {
1317 create_integer_operand (&ops[6], min_size);
1318 /* If we cannot represent the maximal size,
1319 make the parameter NULL. */
1320 if ((HOST_WIDE_INT) max_size != -1)
1321 create_integer_operand (&ops[7], max_size);
1322 else
1323 create_fixed_operand (&ops[7], NULL);
1324 }
1325 if (nops == 9)
1326 {
1327 /* If we cannot represent the maximal size,
1328 make the parameter NULL. */
1329 if ((HOST_WIDE_INT) probable_max_size != -1)
1330 create_integer_operand (&ops[8], probable_max_size);
1331 else
1332 create_fixed_operand (&ops[8], NULL);
1333 }
1334 if (maybe_expand_insn (code, nops, ops))
1335 {
1336 volatile_ok = save_volatile_ok;
1337 return true;
1338 }
1339 }
1340 }
1341
1342 volatile_ok = save_volatile_ok;
1343 return false;
1344 }
1345
1346 /* A subroutine of emit_block_move. Expand a call to memcpy.
1347 Return the return value from memcpy, 0 otherwise. */
1348
1349 rtx
1350 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1351 {
1352 rtx dst_addr, src_addr;
1353 tree call_expr, fn, src_tree, dst_tree, size_tree;
1354 enum machine_mode size_mode;
1355 rtx retval;
1356
1357 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1358 pseudos. We can then place those new pseudos into a VAR_DECL and
1359 use them later. */
1360
1361 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1362 src_addr = copy_addr_to_reg (XEXP (src, 0));
1363
1364 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1365 src_addr = convert_memory_address (ptr_mode, src_addr);
1366
1367 dst_tree = make_tree (ptr_type_node, dst_addr);
1368 src_tree = make_tree (ptr_type_node, src_addr);
1369
1370 size_mode = TYPE_MODE (sizetype);
1371
1372 size = convert_to_mode (size_mode, size, 1);
1373 size = copy_to_mode_reg (size_mode, size);
1374
1375 /* It is incorrect to use the libcall calling conventions to call
1376 memcpy in this context. This could be a user call to memcpy and
1377 the user may wish to examine the return value from memcpy. For
1378 targets where libcalls and normal calls have different conventions
1379 for returning pointers, we could end up generating incorrect code. */
1380
1381 size_tree = make_tree (sizetype, size);
1382
1383 fn = emit_block_move_libcall_fn (true);
1384 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1385 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1386
1387 retval = expand_normal (call_expr);
1388
1389 return retval;
1390 }
1391
1392 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1393 for the function we use for block copies. */
1394
1395 static GTY(()) tree block_move_fn;
1396
1397 void
1398 init_block_move_fn (const char *asmspec)
1399 {
1400 if (!block_move_fn)
1401 {
1402 tree args, fn, attrs, attr_args;
1403
1404 fn = get_identifier ("memcpy");
1405 args = build_function_type_list (ptr_type_node, ptr_type_node,
1406 const_ptr_type_node, sizetype,
1407 NULL_TREE);
1408
1409 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1410 DECL_EXTERNAL (fn) = 1;
1411 TREE_PUBLIC (fn) = 1;
1412 DECL_ARTIFICIAL (fn) = 1;
1413 TREE_NOTHROW (fn) = 1;
1414 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1415 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1416
1417 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1418 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1419
1420 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1421
1422 block_move_fn = fn;
1423 }
1424
1425 if (asmspec)
1426 set_user_assembler_name (block_move_fn, asmspec);
1427 }
1428
1429 static tree
1430 emit_block_move_libcall_fn (int for_call)
1431 {
1432 static bool emitted_extern;
1433
1434 if (!block_move_fn)
1435 init_block_move_fn (NULL);
1436
1437 if (for_call && !emitted_extern)
1438 {
1439 emitted_extern = true;
1440 make_decl_rtl (block_move_fn);
1441 }
1442
1443 return block_move_fn;
1444 }
1445
1446 /* A subroutine of emit_block_move. Copy the data via an explicit
1447 loop. This is used only when libcalls are forbidden. */
1448 /* ??? It'd be nice to copy in hunks larger than QImode. */
1449
1450 static void
1451 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1452 unsigned int align ATTRIBUTE_UNUSED)
1453 {
1454 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1455 enum machine_mode x_addr_mode = get_address_mode (x);
1456 enum machine_mode y_addr_mode = get_address_mode (y);
1457 enum machine_mode iter_mode;
1458
1459 iter_mode = GET_MODE (size);
1460 if (iter_mode == VOIDmode)
1461 iter_mode = word_mode;
1462
1463 top_label = gen_label_rtx ();
1464 cmp_label = gen_label_rtx ();
1465 iter = gen_reg_rtx (iter_mode);
1466
1467 emit_move_insn (iter, const0_rtx);
1468
1469 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1470 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1471 do_pending_stack_adjust ();
1472
1473 emit_jump (cmp_label);
1474 emit_label (top_label);
1475
1476 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1477 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1478
1479 if (x_addr_mode != y_addr_mode)
1480 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1481 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1482
1483 x = change_address (x, QImode, x_addr);
1484 y = change_address (y, QImode, y_addr);
1485
1486 emit_move_insn (x, y);
1487
1488 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1489 true, OPTAB_LIB_WIDEN);
1490 if (tmp != iter)
1491 emit_move_insn (iter, tmp);
1492
1493 emit_label (cmp_label);
1494
1495 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1496 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1497 }
1498 \f
1499 /* Copy all or part of a value X into registers starting at REGNO.
1500 The number of registers to be filled is NREGS. */
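/* Illustrative sketch, assuming VAL is a DImode value on a target with
   32-bit words: spreading it over two consecutive hard registers starting
   at hard register 4 would be

       move_block_to_reg (4, val, 2, DImode);

   using a load_multiple insn when the target provides one.  */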
1501
1502 void
1503 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1504 {
1505 int i;
1506 #ifdef HAVE_load_multiple
1507 rtx pat;
1508 rtx last;
1509 #endif
1510
1511 if (nregs == 0)
1512 return;
1513
1514 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1515 x = validize_mem (force_const_mem (mode, x));
1516
1517 /* See if the machine can do this with a load multiple insn. */
1518 #ifdef HAVE_load_multiple
1519 if (HAVE_load_multiple)
1520 {
1521 last = get_last_insn ();
1522 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1523 GEN_INT (nregs));
1524 if (pat)
1525 {
1526 emit_insn (pat);
1527 return;
1528 }
1529 else
1530 delete_insns_since (last);
1531 }
1532 #endif
1533
1534 for (i = 0; i < nregs; i++)
1535 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1536 operand_subword_force (x, i, mode));
1537 }
1538
1539 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1540 The number of registers to be filled is NREGS. */
1541
1542 void
1543 move_block_from_reg (int regno, rtx x, int nregs)
1544 {
1545 int i;
1546
1547 if (nregs == 0)
1548 return;
1549
1550 /* See if the machine can do this with a store multiple insn. */
1551 #ifdef HAVE_store_multiple
1552 if (HAVE_store_multiple)
1553 {
1554 rtx last = get_last_insn ();
1555 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1556 GEN_INT (nregs));
1557 if (pat)
1558 {
1559 emit_insn (pat);
1560 return;
1561 }
1562 else
1563 delete_insns_since (last);
1564 }
1565 #endif
1566
1567 for (i = 0; i < nregs; i++)
1568 {
1569 rtx tem = operand_subword (x, i, 1, BLKmode);
1570
1571 gcc_assert (tem);
1572
1573 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1574 }
1575 }
1576
1577 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1578 ORIG, where ORIG is a non-consecutive group of registers represented by
1579 a PARALLEL. The clone is identical to the original except in that the
1580 original set of registers is replaced by a new set of pseudo registers.
1581 The new set has the same modes as the original set. */
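/* Illustrative sketch: a two-register group such as

       (parallel [(expr_list (reg:DI 3) (const_int 0))
                  (expr_list (reg:DI 4) (const_int 8))])

   is cloned into the same shape with fresh pseudos in place of registers
   3 and 4, keeping each element's mode and byte offset.  */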
1582
1583 rtx
1584 gen_group_rtx (rtx orig)
1585 {
1586 int i, length;
1587 rtx *tmps;
1588
1589 gcc_assert (GET_CODE (orig) == PARALLEL);
1590
1591 length = XVECLEN (orig, 0);
1592 tmps = XALLOCAVEC (rtx, length);
1593
1594 /* Skip a NULL entry in first slot. */
1595 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1596
1597 if (i)
1598 tmps[0] = 0;
1599
1600 for (; i < length; i++)
1601 {
1602 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1603 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1604
1605 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1606 }
1607
1608 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1609 }
1610
1611 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1612 except that values are placed in TMPS[i], and must later be moved
1613 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1614
1615 static void
1616 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1617 {
1618 rtx src;
1619 int start, i;
1620 enum machine_mode m = GET_MODE (orig_src);
1621
1622 gcc_assert (GET_CODE (dst) == PARALLEL);
1623
1624 if (m != VOIDmode
1625 && !SCALAR_INT_MODE_P (m)
1626 && !MEM_P (orig_src)
1627 && GET_CODE (orig_src) != CONCAT)
1628 {
1629 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1630 if (imode == BLKmode)
1631 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1632 else
1633 src = gen_reg_rtx (imode);
1634 if (imode != BLKmode)
1635 src = gen_lowpart (GET_MODE (orig_src), src);
1636 emit_move_insn (src, orig_src);
1637 /* ...and back again. */
1638 if (imode != BLKmode)
1639 src = gen_lowpart (imode, src);
1640 emit_group_load_1 (tmps, dst, src, type, ssize);
1641 return;
1642 }
1643
1644 /* Check for a NULL entry, used to indicate that the parameter goes
1645 both on the stack and in registers. */
1646 if (XEXP (XVECEXP (dst, 0, 0), 0))
1647 start = 0;
1648 else
1649 start = 1;
1650
1651 /* Process the pieces. */
1652 for (i = start; i < XVECLEN (dst, 0); i++)
1653 {
1654 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1655 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1656 unsigned int bytelen = GET_MODE_SIZE (mode);
1657 int shift = 0;
1658
1659 /* Handle trailing fragments that run over the size of the struct. */
1660 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1661 {
1662 /* Arrange to shift the fragment to where it belongs.
1663 extract_bit_field loads to the lsb of the reg. */
1664 if (
1665 #ifdef BLOCK_REG_PADDING
1666 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1667 == (BYTES_BIG_ENDIAN ? upward : downward)
1668 #else
1669 BYTES_BIG_ENDIAN
1670 #endif
1671 )
1672 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1673 bytelen = ssize - bytepos;
1674 gcc_assert (bytelen > 0);
1675 }
1676
1677 /* If we won't be loading directly from memory, protect the real source
1678 from strange tricks we might play; but make sure that the source can
1679 be loaded directly into the destination. */
1680 src = orig_src;
1681 if (!MEM_P (orig_src)
1682 && (!CONSTANT_P (orig_src)
1683 || (GET_MODE (orig_src) != mode
1684 && GET_MODE (orig_src) != VOIDmode)))
1685 {
1686 if (GET_MODE (orig_src) == VOIDmode)
1687 src = gen_reg_rtx (mode);
1688 else
1689 src = gen_reg_rtx (GET_MODE (orig_src));
1690
1691 emit_move_insn (src, orig_src);
1692 }
1693
1694 /* Optimize the access just a bit. */
1695 if (MEM_P (src)
1696 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1697 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1698 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1699 && bytelen == GET_MODE_SIZE (mode))
1700 {
1701 tmps[i] = gen_reg_rtx (mode);
1702 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1703 }
1704 else if (COMPLEX_MODE_P (mode)
1705 && GET_MODE (src) == mode
1706 && bytelen == GET_MODE_SIZE (mode))
1707 /* Let emit_move_complex do the bulk of the work. */
1708 tmps[i] = src;
1709 else if (GET_CODE (src) == CONCAT)
1710 {
1711 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1712 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1713
1714 if ((bytepos == 0 && bytelen == slen0)
1715 || (bytepos != 0 && bytepos + bytelen <= slen))
1716 {
1717 /* The following assumes that the concatenated objects all
1718 have the same size. In this case, a simple calculation
1719 can be used to determine the object and the bit field
1720 to be extracted. */
1721 tmps[i] = XEXP (src, bytepos / slen0);
1722 if (! CONSTANT_P (tmps[i])
1723 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1724 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1725 (bytepos % slen0) * BITS_PER_UNIT,
1726 1, NULL_RTX, mode, mode);
1727 }
1728 else
1729 {
1730 rtx mem;
1731
1732 gcc_assert (!bytepos);
1733 mem = assign_stack_temp (GET_MODE (src), slen);
1734 emit_move_insn (mem, src);
1735 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1736 0, 1, NULL_RTX, mode, mode);
1737 }
1738 }
1739 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1740 SIMD register, which is currently broken. While we get GCC
1741 to emit proper RTL for these cases, let's dump to memory. */
1742 else if (VECTOR_MODE_P (GET_MODE (dst))
1743 && REG_P (src))
1744 {
1745 int slen = GET_MODE_SIZE (GET_MODE (src));
1746 rtx mem;
1747
1748 mem = assign_stack_temp (GET_MODE (src), slen);
1749 emit_move_insn (mem, src);
1750 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1751 }
1752 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1753 && XVECLEN (dst, 0) > 1)
1754 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1755 else if (CONSTANT_P (src))
1756 {
1757 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1758
1759 if (len == ssize)
1760 tmps[i] = src;
1761 else
1762 {
1763 rtx first, second;
1764
1765 /* TODO: const_wide_int can have sizes other than this... */
1766 gcc_assert (2 * len == ssize);
1767 split_double (src, &first, &second);
1768 if (i)
1769 tmps[i] = second;
1770 else
1771 tmps[i] = first;
1772 }
1773 }
1774 else if (REG_P (src) && GET_MODE (src) == mode)
1775 tmps[i] = src;
1776 else
1777 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1778 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1779 mode, mode);
1780
1781 if (shift)
1782 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1783 shift, tmps[i], 0);
1784 }
1785 }
1786
1787 /* Emit code to move a block SRC of type TYPE to a block DST,
1788 where DST is non-consecutive registers represented by a PARALLEL.
1789 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1790 if not known. */
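/* For illustration only, with register numbers and offsets invented for
   the example: on a 64-bit target that returns a 16-byte structure in
   two DImode registers, DST might look schematically like

       (parallel [(expr_list (reg:DI 3) (const_int 0))
                  (expr_list (reg:DI 4) (const_int 8))])

   and a caller could then load the structure from a BLKmode MEM with

       emit_group_load (dst, src_mem, type, 16);

   Real PARALLELs of this shape come from the target's calls hooks.  */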
1791
1792 void
1793 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1794 {
1795 rtx *tmps;
1796 int i;
1797
1798 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1799 emit_group_load_1 (tmps, dst, src, type, ssize);
1800
1801 /* Copy the extracted pieces into the proper (probable) hard regs. */
1802 for (i = 0; i < XVECLEN (dst, 0); i++)
1803 {
1804 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1805 if (d == NULL)
1806 continue;
1807 emit_move_insn (d, tmps[i]);
1808 }
1809 }
1810
1811 /* Similar, but load SRC into new pseudos in a format that looks like
1812 PARALLEL. This can later be fed to emit_group_move to get things
1813 in the right place. */
1814
1815 rtx
1816 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1817 {
1818 rtvec vec;
1819 int i;
1820
1821 vec = rtvec_alloc (XVECLEN (parallel, 0));
1822 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1823
1824 /* Convert the vector to look just like the original PARALLEL, except
1825 with the computed values. */
1826 for (i = 0; i < XVECLEN (parallel, 0); i++)
1827 {
1828 rtx e = XVECEXP (parallel, 0, i);
1829 rtx d = XEXP (e, 0);
1830
1831 if (d)
1832 {
1833 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1834 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1835 }
1836 RTVEC_ELT (vec, i) = e;
1837 }
1838
1839 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1840 }
1841
1842 /* Emit code to move a block SRC to block DST, where SRC and DST are
1843 non-consecutive groups of registers, each represented by a PARALLEL. */
1844
1845 void
1846 emit_group_move (rtx dst, rtx src)
1847 {
1848 int i;
1849
1850 gcc_assert (GET_CODE (src) == PARALLEL
1851 && GET_CODE (dst) == PARALLEL
1852 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1853
1854 /* Skip first entry if NULL. */
1855 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1856 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1857 XEXP (XVECEXP (src, 0, i), 0));
1858 }
1859
1860 /* Move a group of registers represented by a PARALLEL into pseudos. */
1861
1862 rtx
1863 emit_group_move_into_temps (rtx src)
1864 {
1865 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1866 int i;
1867
1868 for (i = 0; i < XVECLEN (src, 0); i++)
1869 {
1870 rtx e = XVECEXP (src, 0, i);
1871 rtx d = XEXP (e, 0);
1872
1873 if (d)
1874 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1875 RTVEC_ELT (vec, i) = e;
1876 }
1877
1878 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1879 }
1880
1881 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1882 where SRC is non-consecutive registers represented by a PARALLEL.
1883 SSIZE represents the total size of block ORIG_DST, or -1 if not
1884 known. */
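/* Illustrative sketch, using the same kind of invented PARALLEL as in the
   emit_group_load example above: if SRC describes a value living in
   (reg:DI 3) at byte 0 and (reg:DI 4) at byte 8, then

       emit_group_store (dst_mem, src, type, 16);

   stores the two registers at offsets 0 and 8 of the 16-byte DST_MEM.  */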
1885
1886 void
1887 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1888 {
1889 rtx *tmps, dst;
1890 int start, finish, i;
1891 enum machine_mode m = GET_MODE (orig_dst);
1892
1893 gcc_assert (GET_CODE (src) == PARALLEL);
1894
1895 if (!SCALAR_INT_MODE_P (m)
1896 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1897 {
1898 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1899 if (imode == BLKmode)
1900 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1901 else
1902 dst = gen_reg_rtx (imode);
1903 emit_group_store (dst, src, type, ssize);
1904 if (imode != BLKmode)
1905 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1906 emit_move_insn (orig_dst, dst);
1907 return;
1908 }
1909
1910 /* Check for a NULL entry, used to indicate that the parameter goes
1911 both on the stack and in registers. */
1912 if (XEXP (XVECEXP (src, 0, 0), 0))
1913 start = 0;
1914 else
1915 start = 1;
1916 finish = XVECLEN (src, 0);
1917
1918 tmps = XALLOCAVEC (rtx, finish);
1919
1920 /* Copy the (probable) hard regs into pseudos. */
1921 for (i = start; i < finish; i++)
1922 {
1923 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1924 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1925 {
1926 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1927 emit_move_insn (tmps[i], reg);
1928 }
1929 else
1930 tmps[i] = reg;
1931 }
1932
1933 /* If we won't be storing directly into memory, protect the real destination
1934 from strange tricks we might play. */
1935 dst = orig_dst;
1936 if (GET_CODE (dst) == PARALLEL)
1937 {
1938 rtx temp;
1939
1940 /* We can get a PARALLEL dst if there is a conditional expression in
1941 a return statement. In that case, the dst and src are the same,
1942 so no action is necessary. */
1943 if (rtx_equal_p (dst, src))
1944 return;
1945
1946 /* It is unclear if we can ever reach here, but we may as well handle
1947 it. Allocate a temporary, and split this into a store/load to/from
1948 the temporary. */
1949 temp = assign_stack_temp (GET_MODE (dst), ssize);
1950 emit_group_store (temp, src, type, ssize);
1951 emit_group_load (dst, temp, type, ssize);
1952 return;
1953 }
1954 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1955 {
1956 enum machine_mode outer = GET_MODE (dst);
1957 enum machine_mode inner;
1958 HOST_WIDE_INT bytepos;
1959 bool done = false;
1960 rtx temp;
1961
1962 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1963 dst = gen_reg_rtx (outer);
1964
1965 /* Make life a bit easier for combine. */
1966 /* If the first element of the vector is the low part
1967 of the destination mode, use a paradoxical subreg to
1968 initialize the destination. */
1969 if (start < finish)
1970 {
1971 inner = GET_MODE (tmps[start]);
1972 bytepos = subreg_lowpart_offset (inner, outer);
1973 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1974 {
1975 temp = simplify_gen_subreg (outer, tmps[start],
1976 inner, 0);
1977 if (temp)
1978 {
1979 emit_move_insn (dst, temp);
1980 done = true;
1981 start++;
1982 }
1983 }
1984 }
1985
1986 /* If the first element wasn't the low part, try the last. */
1987 if (!done
1988 && start < finish - 1)
1989 {
1990 inner = GET_MODE (tmps[finish - 1]);
1991 bytepos = subreg_lowpart_offset (inner, outer);
1992 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1993 {
1994 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1995 inner, 0);
1996 if (temp)
1997 {
1998 emit_move_insn (dst, temp);
1999 done = true;
2000 finish--;
2001 }
2002 }
2003 }
2004
2005 /* Otherwise, simply initialize the result to zero. */
2006 if (!done)
2007 emit_move_insn (dst, CONST0_RTX (outer));
2008 }
2009
2010 /* Process the pieces. */
2011 for (i = start; i < finish; i++)
2012 {
2013 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2014 enum machine_mode mode = GET_MODE (tmps[i]);
2015 unsigned int bytelen = GET_MODE_SIZE (mode);
2016 unsigned int adj_bytelen;
2017 rtx dest = dst;
2018
2019 /* Handle trailing fragments that run over the size of the struct. */
2020 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2021 adj_bytelen = ssize - bytepos;
2022 else
2023 adj_bytelen = bytelen;
2024
2025 if (GET_CODE (dst) == CONCAT)
2026 {
2027 if (bytepos + adj_bytelen
2028 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2029 dest = XEXP (dst, 0);
2030 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2031 {
2032 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2033 dest = XEXP (dst, 1);
2034 }
2035 else
2036 {
2037 enum machine_mode dest_mode = GET_MODE (dest);
2038 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2039
2040 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2041
2042 if (GET_MODE_ALIGNMENT (dest_mode)
2043 >= GET_MODE_ALIGNMENT (tmp_mode))
2044 {
2045 dest = assign_stack_temp (dest_mode,
2046 GET_MODE_SIZE (dest_mode));
2047 emit_move_insn (adjust_address (dest,
2048 tmp_mode,
2049 bytepos),
2050 tmps[i]);
2051 dst = dest;
2052 }
2053 else
2054 {
2055 dest = assign_stack_temp (tmp_mode,
2056 GET_MODE_SIZE (tmp_mode));
2057 emit_move_insn (dest, tmps[i]);
2058 dst = adjust_address (dest, dest_mode, bytepos);
2059 }
2060 break;
2061 }
2062 }
2063
2064 /* Handle trailing fragments that run over the size of the struct. */
2065 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2066 {
2067 /* store_bit_field always takes its value from the lsb.
2068 Move the fragment to the lsb if it's not already there. */
2069 if (
2070 #ifdef BLOCK_REG_PADDING
2071 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2072 == (BYTES_BIG_ENDIAN ? upward : downward)
2073 #else
2074 BYTES_BIG_ENDIAN
2075 #endif
2076 )
2077 {
2078 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2079 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2080 shift, tmps[i], 0);
2081 }
2082
2083 /* Make sure not to write past the end of the struct. */
2084 store_bit_field (dest,
2085 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2086 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2087 VOIDmode, tmps[i]);
2088 }
2089
2090 /* Optimize the access just a bit. */
2091 else if (MEM_P (dest)
2092 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2093 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2094 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2095 && bytelen == GET_MODE_SIZE (mode))
2096 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2097
2098 else
2099 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2100 0, 0, mode, tmps[i]);
2101 }
2102
2103 /* Copy from the pseudo into the (probable) hard reg. */
2104 if (orig_dst != dst)
2105 emit_move_insn (orig_dst, dst);
2106 }
2107
2108 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2109 of the value stored in X. */
2110
2111 rtx
2112 maybe_emit_group_store (rtx x, tree type)
2113 {
2114 enum machine_mode mode = TYPE_MODE (type);
2115 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2116 if (GET_CODE (x) == PARALLEL)
2117 {
2118 rtx result = gen_reg_rtx (mode);
2119 emit_group_store (result, x, type, int_size_in_bytes (type));
2120 return result;
2121 }
2122 return x;
2123 }
2124
2125 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2126
2127 This is used on targets that return BLKmode values in registers. */
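/* Worked example with assumed numbers: for a 6-byte structure on a
   64-bit big-endian target whose ABI does not return values in the most
   significant end, bytes % UNITS_PER_WORD is 6, so PADDING_CORRECTION
   below becomes 64 - 6 * 8 = 16, i.e. the first 16 bits of SRCREG are
   padding and are skipped during the copy.  */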
2128
2129 void
2130 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2131 {
2132 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2133 rtx src = NULL, dst = NULL;
2134 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2135 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2136 enum machine_mode mode = GET_MODE (srcreg);
2137 enum machine_mode tmode = GET_MODE (target);
2138 enum machine_mode copy_mode;
2139
2140 /* BLKmode registers created in the back-end shouldn't have survived. */
2141 gcc_assert (mode != BLKmode);
2142
2143 /* If the structure doesn't take up a whole number of words, see whether
2144 SRCREG is padded on the left or on the right. If it's on the left,
2145 set PADDING_CORRECTION to the number of bits to skip.
2146
2147      In most ABIs, the structure will be returned at the least significant end of
2148 the register, which translates to right padding on little-endian
2149 targets and left padding on big-endian targets. The opposite
2150 holds if the structure is returned at the most significant
2151 end of the register. */
2152 if (bytes % UNITS_PER_WORD != 0
2153 && (targetm.calls.return_in_msb (type)
2154 ? !BYTES_BIG_ENDIAN
2155 : BYTES_BIG_ENDIAN))
2156 padding_correction
2157 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2158
2159 /* We can use a single move if we have an exact mode for the size. */
2160 else if (MEM_P (target)
2161 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2162 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2163 && bytes == GET_MODE_SIZE (mode))
2164 {
2165 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2166 return;
2167 }
2168
2169 /* And if we additionally have the same mode for a register. */
2170 else if (REG_P (target)
2171 && GET_MODE (target) == mode
2172 && bytes == GET_MODE_SIZE (mode))
2173 {
2174 emit_move_insn (target, srcreg);
2175 return;
2176 }
2177
2178 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2179 into a new pseudo which is a full word. */
2180 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2181 {
2182 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2183 mode = word_mode;
2184 }
2185
2186 /* Copy the structure BITSIZE bits at a time. If the target lives in
2187 memory, take care of not reading/writing past its end by selecting
2188 a copy mode suited to BITSIZE. This should always be possible given
2189 how it is computed.
2190
2191 If the target lives in register, make sure not to select a copy mode
2192 larger than the mode of the register.
2193
2194 We could probably emit more efficient code for machines which do not use
2195 strict alignment, but it doesn't seem worth the effort at the current
2196 time. */
2197
2198 copy_mode = word_mode;
2199 if (MEM_P (target))
2200 {
2201 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2202 if (mem_mode != BLKmode)
2203 copy_mode = mem_mode;
2204 }
2205 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2206 copy_mode = tmode;
2207
2208 for (bitpos = 0, xbitpos = padding_correction;
2209 bitpos < bytes * BITS_PER_UNIT;
2210 bitpos += bitsize, xbitpos += bitsize)
2211 {
2212 /* We need a new source operand each time xbitpos is on a
2213 word boundary and when xbitpos == padding_correction
2214 (the first time through). */
2215 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2216 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2217
2218 /* We need a new destination operand each time bitpos is on
2219 a word boundary. */
2220 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2221 dst = target;
2222 else if (bitpos % BITS_PER_WORD == 0)
2223 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2224
2225 /* Use xbitpos for the source extraction (right justified) and
2226 bitpos for the destination store (left justified). */
2227 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2228 extract_bit_field (src, bitsize,
2229 xbitpos % BITS_PER_WORD, 1,
2230 NULL_RTX, copy_mode, copy_mode));
2231 }
2232 }
2233
2234 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2235 register if it contains any data, otherwise return null.
2236
2237 This is used on targets that return BLKmode values in registers. */
2238
2239 rtx
2240 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2241 {
2242 int i, n_regs;
2243 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2244 unsigned int bitsize;
2245 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2246 enum machine_mode dst_mode;
2247
2248 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2249
2250 x = expand_normal (src);
2251
2252 bytes = int_size_in_bytes (TREE_TYPE (src));
2253 if (bytes == 0)
2254 return NULL_RTX;
2255
2256 /* If the structure doesn't take up a whole number of words, see
2257 whether the register value should be padded on the left or on
2258 the right. Set PADDING_CORRECTION to the number of padding
2259 bits needed on the left side.
2260
2261      In most ABIs, the structure will be returned at the least significant end of
2262 the register, which translates to right padding on little-endian
2263 targets and left padding on big-endian targets. The opposite
2264 holds if the structure is returned at the most significant
2265 end of the register. */
2266 if (bytes % UNITS_PER_WORD != 0
2267 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2268 ? !BYTES_BIG_ENDIAN
2269 : BYTES_BIG_ENDIAN))
2270 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2271 * BITS_PER_UNIT));
2272
2273 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2274 dst_words = XALLOCAVEC (rtx, n_regs);
2275 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2276
2277 /* Copy the structure BITSIZE bits at a time. */
2278 for (bitpos = 0, xbitpos = padding_correction;
2279 bitpos < bytes * BITS_PER_UNIT;
2280 bitpos += bitsize, xbitpos += bitsize)
2281 {
2282 /* We need a new destination pseudo each time xbitpos is
2283 on a word boundary and when xbitpos == padding_correction
2284 (the first time through). */
2285 if (xbitpos % BITS_PER_WORD == 0
2286 || xbitpos == padding_correction)
2287 {
2288 /* Generate an appropriate register. */
2289 dst_word = gen_reg_rtx (word_mode);
2290 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2291
2292 /* Clear the destination before we move anything into it. */
2293 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2294 }
2295
2296 /* We need a new source operand each time bitpos is on a word
2297 boundary. */
2298 if (bitpos % BITS_PER_WORD == 0)
2299 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2300
2301 /* Use bitpos for the source extraction (left justified) and
2302 xbitpos for the destination store (right justified). */
2303 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2304 0, 0, word_mode,
2305 extract_bit_field (src_word, bitsize,
2306 bitpos % BITS_PER_WORD, 1,
2307 NULL_RTX, word_mode, word_mode));
2308 }
2309
2310 if (mode == BLKmode)
2311 {
2312 /* Find the smallest integer mode large enough to hold the
2313 entire structure. */
2314 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2315 mode != VOIDmode;
2316 mode = GET_MODE_WIDER_MODE (mode))
2317 /* Have we found a large enough mode? */
2318 if (GET_MODE_SIZE (mode) >= bytes)
2319 break;
2320
2321 /* A suitable mode should have been found. */
2322 gcc_assert (mode != VOIDmode);
2323 }
2324
2325 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2326 dst_mode = word_mode;
2327 else
2328 dst_mode = mode;
2329 dst = gen_reg_rtx (dst_mode);
2330
2331 for (i = 0; i < n_regs; i++)
2332 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2333
2334 if (mode != dst_mode)
2335 dst = gen_lowpart (mode, dst);
2336
2337 return dst;
2338 }
2339
2340 /* Add a USE expression for REG to the (possibly empty) list pointed
2341 to by CALL_FUSAGE. REG must denote a hard register. */
2342
2343 void
2344 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2345 {
2346 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2347
2348 *call_fusage
2349 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2350 }
2351
2352 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2353 to by CALL_FUSAGE. REG must denote a hard register. */
2354
2355 void
2356 clobber_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2357 {
2358 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2359
2360 *call_fusage
2361 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2362 }
2363
2364 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2365 starting at REGNO. All of these registers must be hard registers. */
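/* Minimal usage sketch, with hypothetical register numbers: to record
   that an upcoming call reads hard registers 0, 1 and 2 one could write

       rtx call_fusage = NULL_RTX;
       use_regs (&call_fusage, 0, 3);

   and later attach CALL_FUSAGE to the call insn via
   CALL_INSN_FUNCTION_USAGE.  */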
2366
2367 void
2368 use_regs (rtx *call_fusage, int regno, int nregs)
2369 {
2370 int i;
2371
2372 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2373
2374 for (i = 0; i < nregs; i++)
2375 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2376 }
2377
2378 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2379 PARALLEL REGS. This is for calls that pass values in multiple
2380 non-contiguous locations. The Irix 6 ABI has examples of this. */
2381
2382 void
2383 use_group_regs (rtx *call_fusage, rtx regs)
2384 {
2385 int i;
2386
2387 for (i = 0; i < XVECLEN (regs, 0); i++)
2388 {
2389 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2390
2391 /* A NULL entry means the parameter goes both on the stack and in
2392 registers. This can also be a MEM for targets that pass values
2393 partially on the stack and partially in registers. */
2394 if (reg != 0 && REG_P (reg))
2395 use_reg (call_fusage, reg);
2396 }
2397 }
2398
2399 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2400    assignment and the code of the expression on the RHS is CODE.  Return
2401 NULL otherwise. */
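/* For example, with hypothetical SSA names: if x_3 is defined by the
   gimple assignment "x_3 = a_1 * b_2", then
   get_def_for_expr (x_3, MULT_EXPR) returns that statement, while
   get_def_for_expr (x_3, PLUS_EXPR) returns NULL because the RHS code
   does not match.  */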
2402
2403 static gimple
2404 get_def_for_expr (tree name, enum tree_code code)
2405 {
2406 gimple def_stmt;
2407
2408 if (TREE_CODE (name) != SSA_NAME)
2409 return NULL;
2410
2411 def_stmt = get_gimple_for_ssa_name (name);
2412 if (!def_stmt
2413 || gimple_assign_rhs_code (def_stmt) != code)
2414 return NULL;
2415
2416 return def_stmt;
2417 }
2418
2419 #ifdef HAVE_conditional_move
2420 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2421    assignment and the class of the expression on the RHS is CLASS.  Return
2422 NULL otherwise. */
2423
2424 static gimple
2425 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2426 {
2427 gimple def_stmt;
2428
2429 if (TREE_CODE (name) != SSA_NAME)
2430 return NULL;
2431
2432 def_stmt = get_gimple_for_ssa_name (name);
2433 if (!def_stmt
2434 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2435 return NULL;
2436
2437 return def_stmt;
2438 }
2439 #endif
2440 \f
2441
2442 /* Determine whether the LEN bytes generated by CONSTFUN can be
2443 stored to memory using several move instructions. CONSTFUNDATA is
2444 a pointer which will be passed as argument in every CONSTFUN call.
2445 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2446 a memset operation and false if it's a copy of a constant string.
2447 Return nonzero if a call to store_by_pieces should succeed. */
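/* Illustrative sketch of a CONSTFUN, loosely modelled on the callbacks
   used by the string builtin expanders in builtins.c; the function name
   is invented and the use of c_readstr here is an assumption of the
   example:

       static rtx
       example_read_str (void *data, HOST_WIDE_INT offset,
                         enum machine_mode mode)
       {
         /* DATA points to a constant string; return its next MODE-sized
            chunk as an rtx.  */
         return c_readstr ((const char *) data + offset, mode);
       }

   A caller would typically check
   can_store_by_pieces (len, example_read_str, (void *) str, align, false)
   before committing to store_by_pieces with the same arguments.  */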
2448
2449 int
2450 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2451 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2452 void *constfundata, unsigned int align, bool memsetp)
2453 {
2454 unsigned HOST_WIDE_INT l;
2455 unsigned int max_size;
2456 HOST_WIDE_INT offset = 0;
2457 enum machine_mode mode;
2458 enum insn_code icode;
2459 int reverse;
2460 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2461 rtx cst ATTRIBUTE_UNUSED;
2462
2463 if (len == 0)
2464 return 1;
2465
2466 if (! (memsetp
2467 ? SET_BY_PIECES_P (len, align)
2468 : STORE_BY_PIECES_P (len, align)))
2469 return 0;
2470
2471 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2472
2473 /* We would first store what we can in the largest integer mode, then go to
2474 successively smaller modes. */
2475
2476 for (reverse = 0;
2477 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2478 reverse++)
2479 {
2480 l = len;
2481 max_size = STORE_MAX_PIECES + 1;
2482 while (max_size > 1 && l > 0)
2483 {
2484 mode = widest_int_mode_for_size (max_size);
2485
2486 if (mode == VOIDmode)
2487 break;
2488
2489 icode = optab_handler (mov_optab, mode);
2490 if (icode != CODE_FOR_nothing
2491 && align >= GET_MODE_ALIGNMENT (mode))
2492 {
2493 unsigned int size = GET_MODE_SIZE (mode);
2494
2495 while (l >= size)
2496 {
2497 if (reverse)
2498 offset -= size;
2499
2500 cst = (*constfun) (constfundata, offset, mode);
2501 if (!targetm.legitimate_constant_p (mode, cst))
2502 return 0;
2503
2504 if (!reverse)
2505 offset += size;
2506
2507 l -= size;
2508 }
2509 }
2510
2511 max_size = GET_MODE_SIZE (mode);
2512 }
2513
2514 /* The code above should have handled everything. */
2515 gcc_assert (!l);
2516 }
2517
2518 return 1;
2519 }
2520
2521 /* Generate several move instructions to store LEN bytes generated by
2522 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2523 pointer which will be passed as argument in every CONSTFUN call.
2524 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2525 a memset operation and false if it's a copy of a constant string.
2526    If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2527    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2528    stpcpy.  */
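/* For instance, with example values: when copying the 6 bytes of
   "hello" (including its terminating NUL) into TO, ENDP == 0 returns TO
   itself, ENDP == 1 returns memory at TO + 6 (what mempcpy would
   return), and ENDP == 2 returns memory at TO + 5, the byte holding the
   NUL, matching stpcpy.  */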
2529
2530 rtx
2531 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2532 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2533 void *constfundata, unsigned int align, bool memsetp, int endp)
2534 {
2535 enum machine_mode to_addr_mode = get_address_mode (to);
2536 struct store_by_pieces_d data;
2537
2538 if (len == 0)
2539 {
2540 gcc_assert (endp != 2);
2541 return to;
2542 }
2543
2544 gcc_assert (memsetp
2545 ? SET_BY_PIECES_P (len, align)
2546 : STORE_BY_PIECES_P (len, align));
2547 data.constfun = constfun;
2548 data.constfundata = constfundata;
2549 data.len = len;
2550 data.to = to;
2551 store_by_pieces_1 (&data, align);
2552 if (endp)
2553 {
2554 rtx to1;
2555
2556 gcc_assert (!data.reverse);
2557 if (data.autinc_to)
2558 {
2559 if (endp == 2)
2560 {
2561 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2562 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2563 else
2564 data.to_addr = copy_to_mode_reg (to_addr_mode,
2565 plus_constant (to_addr_mode,
2566 data.to_addr,
2567 -1));
2568 }
2569 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2570 data.offset);
2571 }
2572 else
2573 {
2574 if (endp == 2)
2575 --data.offset;
2576 to1 = adjust_address (data.to, QImode, data.offset);
2577 }
2578 return to1;
2579 }
2580 else
2581 return data.to;
2582 }
2583
2584 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2585 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2586
2587 static void
2588 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2589 {
2590 struct store_by_pieces_d data;
2591
2592 if (len == 0)
2593 return;
2594
2595 data.constfun = clear_by_pieces_1;
2596 data.constfundata = NULL;
2597 data.len = len;
2598 data.to = to;
2599 store_by_pieces_1 (&data, align);
2600 }
2601
2602 /* Callback routine for clear_by_pieces.
2603 Return const0_rtx unconditionally. */
2604
2605 static rtx
2606 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2607 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2608 enum machine_mode mode ATTRIBUTE_UNUSED)
2609 {
2610 return const0_rtx;
2611 }
2612
2613 /* Subroutine of clear_by_pieces and store_by_pieces.
2614 Generate several move instructions to store LEN bytes of block TO. (A MEM
2615 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2616
2617 static void
2618 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2619 unsigned int align ATTRIBUTE_UNUSED)
2620 {
2621 enum machine_mode to_addr_mode = get_address_mode (data->to);
2622 rtx to_addr = XEXP (data->to, 0);
2623 unsigned int max_size = STORE_MAX_PIECES + 1;
2624 enum insn_code icode;
2625
2626 data->offset = 0;
2627 data->to_addr = to_addr;
2628 data->autinc_to
2629 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2630 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2631
2632 data->explicit_inc_to = 0;
2633 data->reverse
2634 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2635 if (data->reverse)
2636 data->offset = data->len;
2637
2638 /* If storing requires more than two move insns,
2639 copy addresses to registers (to make displacements shorter)
2640 and use post-increment if available. */
2641 if (!data->autinc_to
2642 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2643 {
2644 /* Determine the main mode we'll be using.
2645 MODE might not be used depending on the definitions of the
2646 USE_* macros below. */
2647 enum machine_mode mode ATTRIBUTE_UNUSED
2648 = widest_int_mode_for_size (max_size);
2649
2650 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2651 {
2652 data->to_addr = copy_to_mode_reg (to_addr_mode,
2653 plus_constant (to_addr_mode,
2654 to_addr,
2655 data->len));
2656 data->autinc_to = 1;
2657 data->explicit_inc_to = -1;
2658 }
2659
2660 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2661 && ! data->autinc_to)
2662 {
2663 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2664 data->autinc_to = 1;
2665 data->explicit_inc_to = 1;
2666 }
2667
2668 if ( !data->autinc_to && CONSTANT_P (to_addr))
2669 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2670 }
2671
2672 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2673
2674 /* First store what we can in the largest integer mode, then go to
2675 successively smaller modes. */
2676
2677 while (max_size > 1 && data->len > 0)
2678 {
2679 enum machine_mode mode = widest_int_mode_for_size (max_size);
2680
2681 if (mode == VOIDmode)
2682 break;
2683
2684 icode = optab_handler (mov_optab, mode);
2685 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2686 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2687
2688 max_size = GET_MODE_SIZE (mode);
2689 }
2690
2691 /* The code above should have handled everything. */
2692 gcc_assert (!data->len);
2693 }
2694
2695 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2696 with move instructions for mode MODE. GENFUN is the gen_... function
2697 to make a move insn for that mode. DATA has all the other info. */
2698
2699 static void
2700 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2701 struct store_by_pieces_d *data)
2702 {
2703 unsigned int size = GET_MODE_SIZE (mode);
2704 rtx to1, cst;
2705
2706 while (data->len >= size)
2707 {
2708 if (data->reverse)
2709 data->offset -= size;
2710
2711 if (data->autinc_to)
2712 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2713 data->offset);
2714 else
2715 to1 = adjust_address (data->to, mode, data->offset);
2716
2717 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2718 emit_insn (gen_add2_insn (data->to_addr,
2719 gen_int_mode (-(HOST_WIDE_INT) size,
2720 GET_MODE (data->to_addr))));
2721
2722 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2723 emit_insn ((*genfun) (to1, cst));
2724
2725 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2726 emit_insn (gen_add2_insn (data->to_addr,
2727 gen_int_mode (size,
2728 GET_MODE (data->to_addr))));
2729
2730 if (! data->reverse)
2731 data->offset += size;
2732
2733 data->len -= size;
2734 }
2735 }
2736 \f
2737 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2738 its length in bytes. */
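/* Minimal usage sketch with an assumed size: to zero a 32-byte BLKmode
   stack temporary one could write

       rtx mem = assign_stack_temp (BLKmode, 32);
       clear_storage (mem, GEN_INT (32), BLOCK_OP_NORMAL);

   where clear_storage, defined below, forwards here with default
   hints.  */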
2739
2740 rtx
2741 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2742 unsigned int expected_align, HOST_WIDE_INT expected_size,
2743 unsigned HOST_WIDE_INT min_size,
2744 unsigned HOST_WIDE_INT max_size,
2745 unsigned HOST_WIDE_INT probable_max_size)
2746 {
2747 enum machine_mode mode = GET_MODE (object);
2748 unsigned int align;
2749
2750 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2751
2752 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2753 just move a zero. Otherwise, do this a piece at a time. */
2754 if (mode != BLKmode
2755 && CONST_INT_P (size)
2756 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2757 {
2758 rtx zero = CONST0_RTX (mode);
2759 if (zero != NULL)
2760 {
2761 emit_move_insn (object, zero);
2762 return NULL;
2763 }
2764
2765 if (COMPLEX_MODE_P (mode))
2766 {
2767 zero = CONST0_RTX (GET_MODE_INNER (mode));
2768 if (zero != NULL)
2769 {
2770 write_complex_part (object, zero, 0);
2771 write_complex_part (object, zero, 1);
2772 return NULL;
2773 }
2774 }
2775 }
2776
2777 if (size == const0_rtx)
2778 return NULL;
2779
2780 align = MEM_ALIGN (object);
2781
2782 if (CONST_INT_P (size)
2783 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2784 clear_by_pieces (object, INTVAL (size), align);
2785 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2786 expected_align, expected_size,
2787 min_size, max_size, probable_max_size))
2788 ;
2789 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2790 return set_storage_via_libcall (object, size, const0_rtx,
2791 method == BLOCK_OP_TAILCALL);
2792 else
2793 gcc_unreachable ();
2794
2795 return NULL;
2796 }
2797
2798 rtx
2799 clear_storage (rtx object, rtx size, enum block_op_methods method)
2800 {
2801 unsigned HOST_WIDE_INT max, min = 0;
2802 if (GET_CODE (size) == CONST_INT)
2803 min = max = UINTVAL (size);
2804 else
2805 max = GET_MODE_MASK (GET_MODE (size));
2806 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2807 }
2808
2809
2810 /* A subroutine of clear_storage. Expand a call to memset.
2811 Return the return value of memset, 0 otherwise. */
2812
2813 rtx
2814 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2815 {
2816 tree call_expr, fn, object_tree, size_tree, val_tree;
2817 enum machine_mode size_mode;
2818 rtx retval;
2819
2820 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2821      place those new pseudos into a VAR_DECL and use them later.  */
2822
2823 object = copy_addr_to_reg (XEXP (object, 0));
2824
2825 size_mode = TYPE_MODE (sizetype);
2826 size = convert_to_mode (size_mode, size, 1);
2827 size = copy_to_mode_reg (size_mode, size);
2828
2829 /* It is incorrect to use the libcall calling conventions to call
2830 memset in this context. This could be a user call to memset and
2831 the user may wish to examine the return value from memset. For
2832 targets where libcalls and normal calls have different conventions
2833 for returning pointers, we could end up generating incorrect code. */
2834
2835 object_tree = make_tree (ptr_type_node, object);
2836 if (!CONST_INT_P (val))
2837 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2838 size_tree = make_tree (sizetype, size);
2839 val_tree = make_tree (integer_type_node, val);
2840
2841 fn = clear_storage_libcall_fn (true);
2842 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2843 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2844
2845 retval = expand_normal (call_expr);
2846
2847 return retval;
2848 }
2849
2850 /* A subroutine of set_storage_via_libcall. Create the tree node
2851 for the function we use for block clears. */
2852
2853 tree block_clear_fn;
2854
2855 void
2856 init_block_clear_fn (const char *asmspec)
2857 {
2858 if (!block_clear_fn)
2859 {
2860 tree fn, args;
2861
2862 fn = get_identifier ("memset");
2863 args = build_function_type_list (ptr_type_node, ptr_type_node,
2864 integer_type_node, sizetype,
2865 NULL_TREE);
2866
2867 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2868 DECL_EXTERNAL (fn) = 1;
2869 TREE_PUBLIC (fn) = 1;
2870 DECL_ARTIFICIAL (fn) = 1;
2871 TREE_NOTHROW (fn) = 1;
2872 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2873 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2874
2875 block_clear_fn = fn;
2876 }
2877
2878 if (asmspec)
2879 set_user_assembler_name (block_clear_fn, asmspec);
2880 }
2881
2882 static tree
2883 clear_storage_libcall_fn (int for_call)
2884 {
2885 static bool emitted_extern;
2886
2887 if (!block_clear_fn)
2888 init_block_clear_fn (NULL);
2889
2890 if (for_call && !emitted_extern)
2891 {
2892 emitted_extern = true;
2893 make_decl_rtl (block_clear_fn);
2894 }
2895
2896 return block_clear_fn;
2897 }
2898 \f
2899 /* Expand a setmem pattern; return true if successful. */
2900
2901 bool
2902 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2903 unsigned int expected_align, HOST_WIDE_INT expected_size,
2904 unsigned HOST_WIDE_INT min_size,
2905 unsigned HOST_WIDE_INT max_size,
2906 unsigned HOST_WIDE_INT probable_max_size)
2907 {
2908 /* Try the most limited insn first, because there's no point
2909 including more than one in the machine description unless
2910 the more limited one has some advantage. */
2911
2912 enum machine_mode mode;
2913
2914 if (expected_align < align)
2915 expected_align = align;
2916 if (expected_size != -1)
2917 {
2918 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2919 expected_size = max_size;
2920 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2921 expected_size = min_size;
2922 }
2923
2924 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2925 mode = GET_MODE_WIDER_MODE (mode))
2926 {
2927 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2928
2929 if (code != CODE_FOR_nothing
2930 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2931 here because if SIZE is less than the mode mask, as it is
2932 returned by the macro, it will definitely be less than the
2933 actual mode mask. Since SIZE is within the Pmode address
2934 space, we limit MODE to Pmode. */
2935 && ((CONST_INT_P (size)
2936 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2937 <= (GET_MODE_MASK (mode) >> 1)))
2938 || max_size <= (GET_MODE_MASK (mode) >> 1)
2939 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2940 {
2941 struct expand_operand ops[9];
2942 unsigned int nops;
2943
2944 nops = insn_data[(int) code].n_generator_args;
2945 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2946
2947 create_fixed_operand (&ops[0], object);
2948 /* The check above guarantees that this size conversion is valid. */
2949 create_convert_operand_to (&ops[1], size, mode, true);
2950 create_convert_operand_from (&ops[2], val, byte_mode, true);
2951 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2952 if (nops >= 6)
2953 {
2954 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2955 create_integer_operand (&ops[5], expected_size);
2956 }
2957 if (nops >= 8)
2958 {
2959 create_integer_operand (&ops[6], min_size);
2960              /* If we cannot represent the maximal size,
2961                 make the parameter NULL.  */
2962 if ((HOST_WIDE_INT) max_size != -1)
2963 create_integer_operand (&ops[7], max_size);
2964 else
2965 create_fixed_operand (&ops[7], NULL);
2966 }
2967 if (nops == 9)
2968 {
2969              /* If we cannot represent the maximal size,
2970                 make the parameter NULL.  */
2971 if ((HOST_WIDE_INT) probable_max_size != -1)
2972 create_integer_operand (&ops[8], probable_max_size);
2973 else
2974 create_fixed_operand (&ops[8], NULL);
2975 }
2976 if (maybe_expand_insn (code, nops, ops))
2977 return true;
2978 }
2979 }
2980
2981 return false;
2982 }
2983
2984 \f
2985 /* Write to one of the components of the complex value CPLX. Write VAL to
2986    the real part if IMAG_P is false, and the imaginary part if it's true.  */
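/* For example, if CPLX is the register pair (concat:SC (reg:SF 90)
   (reg:SF 91)), with register numbers invented for the example, then
   writing the imaginary part is simply a move into (reg:SF 91); that is
   the CONCAT case handled first below.  Other representations fall back
   to adjust_address_nv, a subreg, or store_bit_field.  */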
2987
2988 static void
2989 write_complex_part (rtx cplx, rtx val, bool imag_p)
2990 {
2991 enum machine_mode cmode;
2992 enum machine_mode imode;
2993 unsigned ibitsize;
2994
2995 if (GET_CODE (cplx) == CONCAT)
2996 {
2997 emit_move_insn (XEXP (cplx, imag_p), val);
2998 return;
2999 }
3000
3001 cmode = GET_MODE (cplx);
3002 imode = GET_MODE_INNER (cmode);
3003 ibitsize = GET_MODE_BITSIZE (imode);
3004
3005 /* For MEMs simplify_gen_subreg may generate an invalid new address
3006 because, e.g., the original address is considered mode-dependent
3007 by the target, which restricts simplify_subreg from invoking
3008 adjust_address_nv. Instead of preparing fallback support for an
3009 invalid address, we call adjust_address_nv directly. */
3010 if (MEM_P (cplx))
3011 {
3012 emit_move_insn (adjust_address_nv (cplx, imode,
3013 imag_p ? GET_MODE_SIZE (imode) : 0),
3014 val);
3015 return;
3016 }
3017
3018 /* If the sub-object is at least word sized, then we know that subregging
3019 will work. This special case is important, since store_bit_field
3020 wants to operate on integer modes, and there's rarely an OImode to
3021 correspond to TCmode. */
3022 if (ibitsize >= BITS_PER_WORD
3023 /* For hard regs we have exact predicates. Assume we can split
3024 the original object if it spans an even number of hard regs.
3025 This special case is important for SCmode on 64-bit platforms
3026 where the natural size of floating-point regs is 32-bit. */
3027 || (REG_P (cplx)
3028 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3029 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3030 {
3031 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3032 imag_p ? GET_MODE_SIZE (imode) : 0);
3033 if (part)
3034 {
3035 emit_move_insn (part, val);
3036 return;
3037 }
3038 else
3039 /* simplify_gen_subreg may fail for sub-word MEMs. */
3040 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3041 }
3042
3043 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3044 }
3045
3046 /* Extract one of the components of the complex value CPLX. Extract the
3047 real part if IMAG_P is false, and the imaginary part if it's true. */
3048
3049 static rtx
3050 read_complex_part (rtx cplx, bool imag_p)
3051 {
3052 enum machine_mode cmode, imode;
3053 unsigned ibitsize;
3054
3055 if (GET_CODE (cplx) == CONCAT)
3056 return XEXP (cplx, imag_p);
3057
3058 cmode = GET_MODE (cplx);
3059 imode = GET_MODE_INNER (cmode);
3060 ibitsize = GET_MODE_BITSIZE (imode);
3061
3062 /* Special case reads from complex constants that got spilled to memory. */
3063 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3064 {
3065 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3066 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3067 {
3068 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3069 if (CONSTANT_CLASS_P (part))
3070 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3071 }
3072 }
3073
3074 /* For MEMs simplify_gen_subreg may generate an invalid new address
3075 because, e.g., the original address is considered mode-dependent
3076 by the target, which restricts simplify_subreg from invoking
3077 adjust_address_nv. Instead of preparing fallback support for an
3078 invalid address, we call adjust_address_nv directly. */
3079 if (MEM_P (cplx))
3080 return adjust_address_nv (cplx, imode,
3081 imag_p ? GET_MODE_SIZE (imode) : 0);
3082
3083 /* If the sub-object is at least word sized, then we know that subregging
3084 will work. This special case is important, since extract_bit_field
3085 wants to operate on integer modes, and there's rarely an OImode to
3086 correspond to TCmode. */
3087 if (ibitsize >= BITS_PER_WORD
3088 /* For hard regs we have exact predicates. Assume we can split
3089 the original object if it spans an even number of hard regs.
3090 This special case is important for SCmode on 64-bit platforms
3091 where the natural size of floating-point regs is 32-bit. */
3092 || (REG_P (cplx)
3093 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3094 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3095 {
3096 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3097 imag_p ? GET_MODE_SIZE (imode) : 0);
3098 if (ret)
3099 return ret;
3100 else
3101 /* simplify_gen_subreg may fail for sub-word MEMs. */
3102 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3103 }
3104
3105 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3106 true, NULL_RTX, imode, imode);
3107 }
3108 \f
3109 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3110 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3111 represented in NEW_MODE. If FORCE is true, this will never happen, as
3112 we'll force-create a SUBREG if needed. */
3113
3114 static rtx
3115 emit_move_change_mode (enum machine_mode new_mode,
3116 enum machine_mode old_mode, rtx x, bool force)
3117 {
3118 rtx ret;
3119
3120 if (push_operand (x, GET_MODE (x)))
3121 {
3122 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3123 MEM_COPY_ATTRIBUTES (ret, x);
3124 }
3125 else if (MEM_P (x))
3126 {
3127 /* We don't have to worry about changing the address since the
3128 size in bytes is supposed to be the same. */
3129 if (reload_in_progress)
3130 {
3131 /* Copy the MEM to change the mode and move any
3132 substitutions from the old MEM to the new one. */
3133 ret = adjust_address_nv (x, new_mode, 0);
3134 copy_replacements (x, ret);
3135 }
3136 else
3137 ret = adjust_address (x, new_mode, 0);
3138 }
3139 else
3140 {
3141 /* Note that we do want simplify_subreg's behavior of validating
3142 that the new mode is ok for a hard register. If we were to use
3143 simplify_gen_subreg, we would create the subreg, but would
3144 probably run into the target not being able to implement it. */
3145      /* Except, of course, when FORCE is true, when this is exactly what
3146         we want; that is needed for CCmodes on some targets.  */
3147 if (force)
3148 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3149 else
3150 ret = simplify_subreg (new_mode, x, old_mode, 0);
3151 }
3152
3153 return ret;
3154 }
3155
3156 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3157 an integer mode of the same size as MODE. Returns the instruction
3158 emitted, or NULL if such a move could not be generated. */
3159
3160 static rtx
3161 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3162 {
3163 enum machine_mode imode;
3164 enum insn_code code;
3165
3166 /* There must exist a mode of the exact size we require. */
3167 imode = int_mode_for_mode (mode);
3168 if (imode == BLKmode)
3169 return NULL_RTX;
3170
3171 /* The target must support moves in this mode. */
3172 code = optab_handler (mov_optab, imode);
3173 if (code == CODE_FOR_nothing)
3174 return NULL_RTX;
3175
3176 x = emit_move_change_mode (imode, mode, x, force);
3177 if (x == NULL_RTX)
3178 return NULL_RTX;
3179 y = emit_move_change_mode (imode, mode, y, force);
3180 if (y == NULL_RTX)
3181 return NULL_RTX;
3182 return emit_insn (GEN_FCN (code) (x, y));
3183 }
3184
3185 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3186 Return an equivalent MEM that does not use an auto-increment. */
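/* Worked example, assuming a downward-growing stack and 8-byte DImode:
   for X == (mem:DI (pre_dec (reg sp))) this emits an explicit
   "sp = sp - 8" and returns (mem:DI (reg sp)); for a post-decrement the
   returned address is sp + 8 instead, so the MEM still names the bytes
   the push would have written.  */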
3187
3188 rtx
3189 emit_move_resolve_push (enum machine_mode mode, rtx x)
3190 {
3191 enum rtx_code code = GET_CODE (XEXP (x, 0));
3192 HOST_WIDE_INT adjust;
3193 rtx temp;
3194
3195 adjust = GET_MODE_SIZE (mode);
3196 #ifdef PUSH_ROUNDING
3197 adjust = PUSH_ROUNDING (adjust);
3198 #endif
3199 if (code == PRE_DEC || code == POST_DEC)
3200 adjust = -adjust;
3201 else if (code == PRE_MODIFY || code == POST_MODIFY)
3202 {
3203 rtx expr = XEXP (XEXP (x, 0), 1);
3204 HOST_WIDE_INT val;
3205
3206 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3207 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3208 val = INTVAL (XEXP (expr, 1));
3209 if (GET_CODE (expr) == MINUS)
3210 val = -val;
3211 gcc_assert (adjust == val || adjust == -val);
3212 adjust = val;
3213 }
3214
3215 /* Do not use anti_adjust_stack, since we don't want to update
3216 stack_pointer_delta. */
3217 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3218 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3219 0, OPTAB_LIB_WIDEN);
3220 if (temp != stack_pointer_rtx)
3221 emit_move_insn (stack_pointer_rtx, temp);
3222
3223 switch (code)
3224 {
3225 case PRE_INC:
3226 case PRE_DEC:
3227 case PRE_MODIFY:
3228 temp = stack_pointer_rtx;
3229 break;
3230 case POST_INC:
3231 case POST_DEC:
3232 case POST_MODIFY:
3233 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3234 break;
3235 default:
3236 gcc_unreachable ();
3237 }
3238
3239 return replace_equiv_address (x, temp);
3240 }
3241
3242 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3243 X is known to satisfy push_operand, and MODE is known to be complex.
3244 Returns the last instruction emitted. */
3245
3246 rtx
3247 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3248 {
3249 enum machine_mode submode = GET_MODE_INNER (mode);
3250 bool imag_first;
3251
3252 #ifdef PUSH_ROUNDING
3253 unsigned int submodesize = GET_MODE_SIZE (submode);
3254
3255   /* If we push to the stack but the size is not one the machine can
3256      push exactly (PUSH_ROUNDING alters it), we need to use move insns.  */
3257 if (PUSH_ROUNDING (submodesize) != submodesize)
3258 {
3259 x = emit_move_resolve_push (mode, x);
3260 return emit_move_insn (x, y);
3261 }
3262 #endif
3263
3264 /* Note that the real part always precedes the imag part in memory
3265 regardless of machine's endianness. */
3266 switch (GET_CODE (XEXP (x, 0)))
3267 {
3268 case PRE_DEC:
3269 case POST_DEC:
3270 imag_first = true;
3271 break;
3272 case PRE_INC:
3273 case POST_INC:
3274 imag_first = false;
3275 break;
3276 default:
3277 gcc_unreachable ();
3278 }
3279
3280 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3281 read_complex_part (y, imag_first));
3282 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3283 read_complex_part (y, !imag_first));
3284 }
3285
3286 /* A subroutine of emit_move_complex. Perform the move from Y to X
3287 via two moves of the parts. Returns the last instruction emitted. */
3288
3289 rtx
3290 emit_move_complex_parts (rtx x, rtx y)
3291 {
3292 /* Show the output dies here. This is necessary for SUBREGs
3293 of pseudos since we cannot track their lifetimes correctly;
3294 hard regs shouldn't appear here except as return values. */
3295 if (!reload_completed && !reload_in_progress
3296 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3297 emit_clobber (x);
3298
3299 write_complex_part (x, read_complex_part (y, false), false);
3300 write_complex_part (x, read_complex_part (y, true), true);
3301
3302 return get_last_insn ();
3303 }
3304
3305 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3306 MODE is known to be complex. Returns the last instruction emitted. */
3307
3308 static rtx
3309 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3310 {
3311 bool try_int;
3312
3313 /* Need to take special care for pushes, to maintain proper ordering
3314 of the data, and possibly extra padding. */
3315 if (push_operand (x, mode))
3316 return emit_move_complex_push (mode, x, y);
3317
3318 /* See if we can coerce the target into moving both values at once, except
3319 for floating point where we favor moving as parts if this is easy. */
3320 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3321 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3322 && !(REG_P (x)
3323 && HARD_REGISTER_P (x)
3324 && hard_regno_nregs[REGNO (x)][mode] == 1)
3325 && !(REG_P (y)
3326 && HARD_REGISTER_P (y)
3327 && hard_regno_nregs[REGNO (y)][mode] == 1))
3328 try_int = false;
3329 /* Not possible if the values are inherently not adjacent. */
3330 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3331 try_int = false;
3332 /* Is possible if both are registers (or subregs of registers). */
3333 else if (register_operand (x, mode) && register_operand (y, mode))
3334 try_int = true;
3335 /* If one of the operands is a memory, and alignment constraints
3336 are friendly enough, we may be able to do combined memory operations.
3337 We do not attempt this if Y is a constant because that combination is
3338 usually better with the by-parts thing below. */
3339 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3340 && (!STRICT_ALIGNMENT
3341 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3342 try_int = true;
3343 else
3344 try_int = false;
3345
3346 if (try_int)
3347 {
3348 rtx ret;
3349
3350 /* For memory to memory moves, optimal behavior can be had with the
3351 existing block move logic. */
3352 if (MEM_P (x) && MEM_P (y))
3353 {
3354 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3355 BLOCK_OP_NO_LIBCALL);
3356 return get_last_insn ();
3357 }
3358
3359 ret = emit_move_via_integer (mode, x, y, true);
3360 if (ret)
3361 return ret;
3362 }
3363
3364 return emit_move_complex_parts (x, y);
3365 }
3366
3367 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3368 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3369
3370 static rtx
3371 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3372 {
3373 rtx ret;
3374
3375 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3376 if (mode != CCmode)
3377 {
3378 enum insn_code code = optab_handler (mov_optab, CCmode);
3379 if (code != CODE_FOR_nothing)
3380 {
3381 x = emit_move_change_mode (CCmode, mode, x, true);
3382 y = emit_move_change_mode (CCmode, mode, y, true);
3383 return emit_insn (GEN_FCN (code) (x, y));
3384 }
3385 }
3386
3387 /* Otherwise, find the MODE_INT mode of the same width. */
3388 ret = emit_move_via_integer (mode, x, y, false);
3389 gcc_assert (ret != NULL);
3390 return ret;
3391 }
3392
3393 /* Return true if word I of OP lies entirely in the
3394 undefined bits of a paradoxical subreg. */
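/* For instance, on a little-endian 32-bit target the paradoxical subreg
   (subreg:DI (reg:SI x) 0) only defines its low word: this function
   returns false for word 0 but true for word 1, whose bytes lie beyond
   the SImode source value.  */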
3395
3396 static bool
3397 undefined_operand_subword_p (const_rtx op, int i)
3398 {
3399 enum machine_mode innermode, innermostmode;
3400 int offset;
3401 if (GET_CODE (op) != SUBREG)
3402 return false;
3403 innermode = GET_MODE (op);
3404 innermostmode = GET_MODE (SUBREG_REG (op));
3405 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3406   /* The SUBREG_BYTE represents the offset, as if the value were stored in
3407 memory, except for a paradoxical subreg where we define
3408 SUBREG_BYTE to be 0; undo this exception as in
3409 simplify_subreg. */
3410 if (SUBREG_BYTE (op) == 0
3411 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3412 {
3413 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3414 if (WORDS_BIG_ENDIAN)
3415 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3416 if (BYTES_BIG_ENDIAN)
3417 offset += difference % UNITS_PER_WORD;
3418 }
3419 if (offset >= GET_MODE_SIZE (innermostmode)
3420 || offset <= -GET_MODE_SIZE (word_mode))
3421 return true;
3422 return false;
3423 }
3424
3425 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3426 MODE is any multi-word or full-word mode that lacks a move_insn
3427 pattern. Note that you will get better code if you define such
3428 patterns, even if they must turn into multiple assembler instructions. */
3429
3430 static rtx
3431 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3432 {
3433 rtx last_insn = 0;
3434 rtx seq, inner;
3435 bool need_clobber;
3436 int i;
3437
3438 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3439
3440 /* If X is a push on the stack, do the push now and replace
3441 X with a reference to the stack pointer. */
3442 if (push_operand (x, mode))
3443 x = emit_move_resolve_push (mode, x);
3444
3445 /* If we are in reload, see if either operand is a MEM whose address
3446 is scheduled for replacement. */
3447 if (reload_in_progress && MEM_P (x)
3448 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3449 x = replace_equiv_address_nv (x, inner);
3450 if (reload_in_progress && MEM_P (y)
3451 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3452 y = replace_equiv_address_nv (y, inner);
3453
3454 start_sequence ();
3455
3456 need_clobber = false;
3457 for (i = 0;
3458 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3459 i++)
3460 {
3461 rtx xpart = operand_subword (x, i, 1, mode);
3462 rtx ypart;
3463
3464 /* Do not generate code for a move if it would come entirely
3465 from the undefined bits of a paradoxical subreg. */
3466 if (undefined_operand_subword_p (y, i))
3467 continue;
3468
3469 ypart = operand_subword (y, i, 1, mode);
3470
3471 /* If we can't get a part of Y, put Y into memory if it is a
3472 constant. Otherwise, force it into a register. Then we must
3473 be able to get a part of Y. */
3474 if (ypart == 0 && CONSTANT_P (y))
3475 {
3476 y = use_anchored_address (force_const_mem (mode, y));
3477 ypart = operand_subword (y, i, 1, mode);
3478 }
3479 else if (ypart == 0)
3480 ypart = operand_subword_force (y, i, mode);
3481
3482 gcc_assert (xpart && ypart);
3483
3484 need_clobber |= (GET_CODE (xpart) == SUBREG);
3485
3486 last_insn = emit_move_insn (xpart, ypart);
3487 }
3488
3489 seq = get_insns ();
3490 end_sequence ();
3491
3492 /* Show the output dies here. This is necessary for SUBREGs
3493 of pseudos since we cannot track their lifetimes correctly;
3494 hard regs shouldn't appear here except as return values.
3495 We never want to emit such a clobber after reload. */
3496 if (x != y
3497 && ! (reload_in_progress || reload_completed)
3498 && need_clobber != 0)
3499 emit_clobber (x);
3500
3501 emit_insn (seq);
3502
3503 return last_insn;
3504 }
3505
3506 /* Low level part of emit_move_insn.
3507 Called just like emit_move_insn, but assumes X and Y
3508 are basically valid. */
3509
3510 rtx
3511 emit_move_insn_1 (rtx x, rtx y)
3512 {
3513 enum machine_mode mode = GET_MODE (x);
3514 enum insn_code code;
3515
3516 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3517
3518 code = optab_handler (mov_optab, mode);
3519 if (code != CODE_FOR_nothing)
3520 return emit_insn (GEN_FCN (code) (x, y));
3521
3522 /* Expand complex moves by moving real part and imag part. */
3523 if (COMPLEX_MODE_P (mode))
3524 return emit_move_complex (mode, x, y);
3525
3526 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3527 || ALL_FIXED_POINT_MODE_P (mode))
3528 {
3529 rtx result = emit_move_via_integer (mode, x, y, true);
3530
3531 /* If we can't find an integer mode, use multi words. */
3532 if (result)
3533 return result;
3534 else
3535 return emit_move_multi_word (mode, x, y);
3536 }
3537
3538 if (GET_MODE_CLASS (mode) == MODE_CC)
3539 return emit_move_ccmode (mode, x, y);
3540
3541 /* Try using a move pattern for the corresponding integer mode. This is
3542 only safe when simplify_subreg can convert MODE constants into integer
3543 constants. At present, it can only do this reliably if the value
3544 fits within a HOST_WIDE_INT. */
3545 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3546 {
3547 rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3548
3549 if (ret)
3550 {
3551 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3552 return ret;
3553 }
3554 }
3555
3556 return emit_move_multi_word (mode, x, y);
3557 }
3558
3559 /* Generate code to copy Y into X.
3560 Both Y and X must have the same mode, except that
3561 Y can be a constant with VOIDmode.
3562 This mode cannot be BLKmode; use emit_block_move for that.
3563
3564 Return the last instruction emitted. */
3565
3566 rtx
3567 emit_move_insn (rtx x, rtx y)
3568 {
3569 enum machine_mode mode = GET_MODE (x);
3570 rtx y_cst = NULL_RTX;
3571 rtx last_insn, set;
3572
3573 gcc_assert (mode != BLKmode
3574 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3575
3576 if (CONSTANT_P (y))
3577 {
3578 if (optimize
3579 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3580 && (last_insn = compress_float_constant (x, y)))
3581 return last_insn;
3582
3583 y_cst = y;
3584
3585 if (!targetm.legitimate_constant_p (mode, y))
3586 {
3587 y = force_const_mem (mode, y);
3588
3589 /* If the target's cannot_force_const_mem prevented the spill,
3590 assume that the target's move expanders will also take care
3591 of the non-legitimate constant. */
3592 if (!y)
3593 y = y_cst;
3594 else
3595 y = use_anchored_address (y);
3596 }
3597 }
3598
3599 /* If X or Y are memory references, verify that their addresses are valid
3600 for the machine. */
3601 if (MEM_P (x)
3602 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3603 MEM_ADDR_SPACE (x))
3604 && ! push_operand (x, GET_MODE (x))))
3605 x = validize_mem (x);
3606
3607 if (MEM_P (y)
3608 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3609 MEM_ADDR_SPACE (y)))
3610 y = validize_mem (y);
3611
3612 gcc_assert (mode != BLKmode);
3613
3614 last_insn = emit_move_insn_1 (x, y);
3615
3616 if (y_cst && REG_P (x)
3617 && (set = single_set (last_insn)) != NULL_RTX
3618 && SET_DEST (set) == x
3619 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3620 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3621
3622 return last_insn;
3623 }
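/* Editorial sketch, not part of the original file: the typical way code
   in this file uses emit_move_insn, loading a constant into a fresh
   pseudo.  The SImode/42 choice is an arbitrary example.  */
#if 0
static rtx
example_emit_move_constant (void)
{
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, gen_int_mode (42, SImode));
  return reg;
}
#endif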
3624
3625 /* If Y is representable exactly in a narrower mode, and the target can
3626 perform the extension directly from constant or memory, then emit the
3627 move as an extension. */
3628
3629 static rtx
3630 compress_float_constant (rtx x, rtx y)
3631 {
3632 enum machine_mode dstmode = GET_MODE (x);
3633 enum machine_mode orig_srcmode = GET_MODE (y);
3634 enum machine_mode srcmode;
3635 REAL_VALUE_TYPE r;
3636 int oldcost, newcost;
3637 bool speed = optimize_insn_for_speed_p ();
3638
3639 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3640
3641 if (targetm.legitimate_constant_p (dstmode, y))
3642 oldcost = set_src_cost (y, speed);
3643 else
3644 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3645
3646 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3647 srcmode != orig_srcmode;
3648 srcmode = GET_MODE_WIDER_MODE (srcmode))
3649 {
3650 enum insn_code ic;
3651 rtx trunc_y, last_insn;
3652
3653 /* Skip if the target can't extend this way. */
3654 ic = can_extend_p (dstmode, srcmode, 0);
3655 if (ic == CODE_FOR_nothing)
3656 continue;
3657
3658 /* Skip if the narrowed value isn't exact. */
3659 if (! exact_real_truncate (srcmode, &r))
3660 continue;
3661
3662 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3663
3664 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3665 {
3666 /* Skip if the target needs extra instructions to perform
3667 the extension. */
3668 if (!insn_operand_matches (ic, 1, trunc_y))
3669 continue;
3670 /* This is valid, but may not be cheaper than the original. */
3671 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3672 speed);
3673 if (oldcost < newcost)
3674 continue;
3675 }
3676 else if (float_extend_from_mem[dstmode][srcmode])
3677 {
3678 trunc_y = force_const_mem (srcmode, trunc_y);
3679 /* This is valid, but may not be cheaper than the original. */
3680 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3681 speed);
3682 if (oldcost < newcost)
3683 continue;
3684 trunc_y = validize_mem (trunc_y);
3685 }
3686 else
3687 continue;
3688
3689 /* For CSE's benefit, force the compressed constant pool entry
3690 into a new pseudo. This constant may be used in different modes,
3691 and if not, combine will put things back together for us. */
3692 trunc_y = force_reg (srcmode, trunc_y);
3693
3694 /* If x is a hard register, perform the extension into a pseudo,
3695 so that e.g. stack realignment code is aware of it. */
3696 rtx target = x;
3697 if (REG_P (x) && HARD_REGISTER_P (x))
3698 target = gen_reg_rtx (dstmode);
3699
3700 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3701 last_insn = get_last_insn ();
3702
3703 if (REG_P (target))
3704 set_unique_reg_note (last_insn, REG_EQUAL, y);
3705
3706 if (target != x)
3707 return emit_move_insn (x, target);
3708 return last_insn;
3709 }
3710
3711 return NULL_RTX;
3712 }
3713 \f
3714 /* Pushing data onto the stack. */
3715
3716 /* Push a block of length SIZE (perhaps variable)
3717 and return an rtx to address the beginning of the block.
3718 The value may be virtual_outgoing_args_rtx.
3719
3720 EXTRA is the number of bytes of padding to push in addition to SIZE.
3721 BELOW nonzero means this padding comes at low addresses;
3722 otherwise, the padding comes at high addresses. */
3723
3724 rtx
3725 push_block (rtx size, int extra, int below)
3726 {
3727 rtx temp;
3728
3729 size = convert_modes (Pmode, ptr_mode, size, 1);
3730 if (CONSTANT_P (size))
3731 anti_adjust_stack (plus_constant (Pmode, size, extra));
3732 else if (REG_P (size) && extra == 0)
3733 anti_adjust_stack (size);
3734 else
3735 {
3736 temp = copy_to_mode_reg (Pmode, size);
3737 if (extra != 0)
3738 temp = expand_binop (Pmode, add_optab, temp,
3739 gen_int_mode (extra, Pmode),
3740 temp, 0, OPTAB_LIB_WIDEN);
3741 anti_adjust_stack (temp);
3742 }
3743
3744 #ifndef STACK_GROWS_DOWNWARD
3745 if (0)
3746 #else
3747 if (1)
3748 #endif
3749 {
3750 temp = virtual_outgoing_args_rtx;
3751 if (extra != 0 && below)
3752 temp = plus_constant (Pmode, temp, extra);
3753 }
3754 else
3755 {
3756 if (CONST_INT_P (size))
3757 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3758 -INTVAL (size) - (below ? 0 : extra));
3759 else if (extra != 0 && !below)
3760 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3761 negate_rtx (Pmode, plus_constant (Pmode, size,
3762 extra)));
3763 else
3764 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3765 negate_rtx (Pmode, size));
3766 }
3767
3768 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3769 }
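/* Editorial sketch, not part of the original file: pushing a 16-byte
   block with no extra padding and obtaining the address of its start.
   The size is an arbitrary example.  */
#if 0
static rtx
example_push_16_byte_block (void)
{
  return push_block (GEN_INT (16), 0, 0);
}
#endif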
3770
3771 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3772
3773 static rtx
3774 mem_autoinc_base (rtx mem)
3775 {
3776 if (MEM_P (mem))
3777 {
3778 rtx addr = XEXP (mem, 0);
3779 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3780 return XEXP (addr, 0);
3781 }
3782 return NULL;
3783 }
3784
3785 /* A utility routine used here, in reload, and in try_split. The insns
3786 after PREV up to and including LAST are known to adjust the stack,
3787 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3788 placing notes as appropriate. PREV may be NULL, indicating the
3789 entire insn sequence prior to LAST should be scanned.
3790
3791 The set of allowed stack pointer modifications is small:
3792 (1) One or more auto-inc style memory references (aka pushes),
3793 (2) One or more addition/subtraction with the SP as destination,
3794 (3) A single move insn with the SP as destination,
3795 (4) A call_pop insn,
3796 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3797
3798 Insns in the sequence that do not modify the SP are ignored,
3799 except for noreturn calls.
3800
3801 The return value is the amount of adjustment that can be trivially
3802 verified, via immediate operand or auto-inc. If the adjustment
3803 cannot be trivially extracted, the return value is INT_MIN. */
3804
3805 HOST_WIDE_INT
3806 find_args_size_adjust (rtx insn)
3807 {
3808 rtx dest, set, pat;
3809 int i;
3810
3811 pat = PATTERN (insn);
3812 set = NULL;
3813
3814 /* Look for a call_pop pattern. */
3815 if (CALL_P (insn))
3816 {
3817 /* We have to allow non-call_pop patterns for the case
3818 of emit_single_push_insn of a TLS address. */
3819 if (GET_CODE (pat) != PARALLEL)
3820 return 0;
3821
3822 /* All call_pop have a stack pointer adjust in the parallel.
3823 The call itself is always first, and the stack adjust is
3824 usually last, so search from the end. */
3825 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3826 {
3827 set = XVECEXP (pat, 0, i);
3828 if (GET_CODE (set) != SET)
3829 continue;
3830 dest = SET_DEST (set);
3831 if (dest == stack_pointer_rtx)
3832 break;
3833 }
3834 /* We'd better have found the stack pointer adjust. */
3835 if (i == 0)
3836 return 0;
3837 /* Fall through to process the extracted SET and DEST
3838 	 as if it were a standalone insn.  */
3839 }
3840 else if (GET_CODE (pat) == SET)
3841 set = pat;
3842 else if ((set = single_set (insn)) != NULL)
3843 ;
3844 else if (GET_CODE (pat) == PARALLEL)
3845 {
3846 /* ??? Some older ports use a parallel with a stack adjust
3847 and a store for a PUSH_ROUNDING pattern, rather than a
3848 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3849 /* ??? See h8300 and m68k, pushqi1. */
3850 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3851 {
3852 set = XVECEXP (pat, 0, i);
3853 if (GET_CODE (set) != SET)
3854 continue;
3855 dest = SET_DEST (set);
3856 if (dest == stack_pointer_rtx)
3857 break;
3858
3859 /* We do not expect an auto-inc of the sp in the parallel. */
3860 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3861 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3862 != stack_pointer_rtx);
3863 }
3864 if (i < 0)
3865 return 0;
3866 }
3867 else
3868 return 0;
3869
3870 dest = SET_DEST (set);
3871
3872 /* Look for direct modifications of the stack pointer. */
3873 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3874 {
3875 /* Look for a trivial adjustment, otherwise assume nothing. */
3876 /* Note that the SPU restore_stack_block pattern refers to
3877 the stack pointer in V4SImode. Consider that non-trivial. */
3878 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3879 && GET_CODE (SET_SRC (set)) == PLUS
3880 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3881 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3882 return INTVAL (XEXP (SET_SRC (set), 1));
3883 /* ??? Reload can generate no-op moves, which will be cleaned
3884 up later. Recognize it and continue searching. */
3885 else if (rtx_equal_p (dest, SET_SRC (set)))
3886 return 0;
3887 else
3888 return HOST_WIDE_INT_MIN;
3889 }
3890 else
3891 {
3892 rtx mem, addr;
3893
3894 /* Otherwise only think about autoinc patterns. */
3895 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3896 {
3897 mem = dest;
3898 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3899 != stack_pointer_rtx);
3900 }
3901 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3902 mem = SET_SRC (set);
3903 else
3904 return 0;
3905
3906 addr = XEXP (mem, 0);
3907 switch (GET_CODE (addr))
3908 {
3909 case PRE_INC:
3910 case POST_INC:
3911 return GET_MODE_SIZE (GET_MODE (mem));
3912 case PRE_DEC:
3913 case POST_DEC:
3914 return -GET_MODE_SIZE (GET_MODE (mem));
3915 case PRE_MODIFY:
3916 case POST_MODIFY:
3917 addr = XEXP (addr, 1);
3918 gcc_assert (GET_CODE (addr) == PLUS);
3919 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3920 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3921 return INTVAL (XEXP (addr, 1));
3922 default:
3923 gcc_unreachable ();
3924 }
3925 }
3926 }
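/* Editorial sketch, not part of the original file: querying the stack
   adjustment of the most recently emitted insn.  For a plain SImode
   push through a PRE_DEC address on a 32-bit STACK_GROWS_DOWNWARD
   target this would return -4 (a worked example, not a universal
   value).  */
#if 0
static HOST_WIDE_INT
example_sp_adjust_of_last_insn (void)
{
  return find_args_size_adjust (get_last_insn ());
}
#endif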
3927
3928 int
3929 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3930 {
3931 int args_size = end_args_size;
3932 bool saw_unknown = false;
3933 rtx insn;
3934
3935 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3936 {
3937 HOST_WIDE_INT this_delta;
3938
3939 if (!NONDEBUG_INSN_P (insn))
3940 continue;
3941
3942 this_delta = find_args_size_adjust (insn);
3943 if (this_delta == 0)
3944 {
3945 if (!CALL_P (insn)
3946 || ACCUMULATE_OUTGOING_ARGS
3947 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3948 continue;
3949 }
3950
3951 gcc_assert (!saw_unknown);
3952 if (this_delta == HOST_WIDE_INT_MIN)
3953 saw_unknown = true;
3954
3955 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3956 #ifdef STACK_GROWS_DOWNWARD
3957 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3958 #endif
3959 args_size -= this_delta;
3960 }
3961
3962 return saw_unknown ? INT_MIN : args_size;
3963 }
3964
3965 #ifdef PUSH_ROUNDING
3966 /* Emit single push insn. */
3967
3968 static void
3969 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3970 {
3971 rtx dest_addr;
3972 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3973 rtx dest;
3974 enum insn_code icode;
3975
3976 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3977   /* If there is a push pattern, use it.  Otherwise fall back to handing a
3978      MEM that represents the push operation to the move expander.  */
3979 icode = optab_handler (push_optab, mode);
3980 if (icode != CODE_FOR_nothing)
3981 {
3982 struct expand_operand ops[1];
3983
3984 create_input_operand (&ops[0], x, mode);
3985 if (maybe_expand_insn (icode, 1, ops))
3986 return;
3987 }
3988 if (GET_MODE_SIZE (mode) == rounded_size)
3989 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3990 /* If we are to pad downward, adjust the stack pointer first and
3991 then store X into the stack location using an offset. This is
3992 because emit_move_insn does not know how to pad; it does not have
3993 access to type. */
3994 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3995 {
3996 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3997 HOST_WIDE_INT offset;
3998
3999 emit_move_insn (stack_pointer_rtx,
4000 expand_binop (Pmode,
4001 #ifdef STACK_GROWS_DOWNWARD
4002 sub_optab,
4003 #else
4004 add_optab,
4005 #endif
4006 stack_pointer_rtx,
4007 gen_int_mode (rounded_size, Pmode),
4008 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4009
4010 offset = (HOST_WIDE_INT) padding_size;
4011 #ifdef STACK_GROWS_DOWNWARD
4012 if (STACK_PUSH_CODE == POST_DEC)
4013 /* We have already decremented the stack pointer, so get the
4014 previous value. */
4015 offset += (HOST_WIDE_INT) rounded_size;
4016 #else
4017 if (STACK_PUSH_CODE == POST_INC)
4018 /* We have already incremented the stack pointer, so get the
4019 previous value. */
4020 offset -= (HOST_WIDE_INT) rounded_size;
4021 #endif
4022 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4023 gen_int_mode (offset, Pmode));
4024 }
4025 else
4026 {
4027 #ifdef STACK_GROWS_DOWNWARD
4028 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4029 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4030 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4031 Pmode));
4032 #else
4033 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4034 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4035 gen_int_mode (rounded_size, Pmode));
4036 #endif
4037 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4038 }
4039
4040 dest = gen_rtx_MEM (mode, dest_addr);
4041
4042 if (type != 0)
4043 {
4044 set_mem_attributes (dest, type, 1);
4045
4046 if (cfun->tail_call_marked)
4047 /* Function incoming arguments may overlap with sibling call
4048 outgoing arguments and we cannot allow reordering of reads
4049 from function arguments with stores to outgoing arguments
4050 of sibling calls. */
4051 set_mem_alias_set (dest, 0);
4052 }
4053 emit_move_insn (dest, x);
4054 }
4055
4056 /* Emit and annotate a single push insn. */
4057
4058 static void
4059 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
4060 {
4061 int delta, old_delta = stack_pointer_delta;
4062 rtx prev = get_last_insn ();
4063 rtx last;
4064
4065 emit_single_push_insn_1 (mode, x, type);
4066
4067 last = get_last_insn ();
4068
4069 /* Notice the common case where we emitted exactly one insn. */
4070 if (PREV_INSN (last) == prev)
4071 {
4072 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4073 return;
4074 }
4075
4076 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4077 gcc_assert (delta == INT_MIN || delta == old_delta);
4078 }
4079 #endif
4080
4081 /* Generate code to push X onto the stack, assuming it has mode MODE and
4082 type TYPE.
4083 MODE is redundant except when X is a CONST_INT (since they don't
4084 carry mode info).
4085 SIZE is an rtx for the size of data to be copied (in bytes),
4086 needed only if X is BLKmode.
4087
4088 ALIGN (in bits) is maximum alignment we can assume.
4089
4090 If PARTIAL and REG are both nonzero, then copy that many of the first
4091 bytes of X into registers starting with REG, and push the rest of X.
4092 The amount of space pushed is decreased by PARTIAL bytes.
4093 REG must be a hard register in this case.
4094    If REG is zero but PARTIAL is not, take all other actions for an
4095    argument partially in registers, but do not actually load any
4096    registers.
4097
4098 EXTRA is the amount in bytes of extra space to leave next to this arg.
4099 This is ignored if an argument block has already been allocated.
4100
4101 On a machine that lacks real push insns, ARGS_ADDR is the address of
4102 the bottom of the argument block for this call. We use indexing off there
4103    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
4104 argument block has not been preallocated.
4105
4106 ARGS_SO_FAR is the size of args previously pushed for this call.
4107
4108 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4109 for arguments passed in registers. If nonzero, it will be the number
4110 of bytes required. */
4111
4112 void
4113 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
4114 unsigned int align, int partial, rtx reg, int extra,
4115 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4116 rtx alignment_pad)
4117 {
4118 rtx xinner;
4119 enum direction stack_direction
4120 #ifdef STACK_GROWS_DOWNWARD
4121 = downward;
4122 #else
4123 = upward;
4124 #endif
4125
4126 /* Decide where to pad the argument: `downward' for below,
4127 `upward' for above, or `none' for don't pad it.
4128 Default is below for small data on big-endian machines; else above. */
4129 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4130
4131 /* Invert direction if stack is post-decrement.
4132 FIXME: why? */
4133 if (STACK_PUSH_CODE == POST_DEC)
4134 if (where_pad != none)
4135 where_pad = (where_pad == downward ? upward : downward);
4136
4137 xinner = x;
4138
4139 if (mode == BLKmode
4140 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4141 {
4142 /* Copy a block into the stack, entirely or partially. */
4143
4144 rtx temp;
4145 int used;
4146 int offset;
4147 int skip;
4148
4149 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4150 used = partial - offset;
4151
4152 if (mode != BLKmode)
4153 {
4154 /* A value is to be stored in an insufficiently aligned
4155 stack slot; copy via a suitably aligned slot if
4156 necessary. */
4157 size = GEN_INT (GET_MODE_SIZE (mode));
4158 if (!MEM_P (xinner))
4159 {
4160 temp = assign_temp (type, 1, 1);
4161 emit_move_insn (temp, xinner);
4162 xinner = temp;
4163 }
4164 }
4165
4166 gcc_assert (size);
4167
4168 /* USED is now the # of bytes we need not copy to the stack
4169 because registers will take care of them. */
4170
4171 if (partial != 0)
4172 xinner = adjust_address (xinner, BLKmode, used);
4173
4174 /* If the partial register-part of the arg counts in its stack size,
4175 skip the part of stack space corresponding to the registers.
4176 Otherwise, start copying to the beginning of the stack space,
4177 by setting SKIP to 0. */
4178 skip = (reg_parm_stack_space == 0) ? 0 : used;
4179
4180 #ifdef PUSH_ROUNDING
4181 /* Do it with several push insns if that doesn't take lots of insns
4182 and if there is no difficulty with push insns that skip bytes
4183 on the stack for alignment purposes. */
4184 if (args_addr == 0
4185 && PUSH_ARGS
4186 && CONST_INT_P (size)
4187 && skip == 0
4188 && MEM_ALIGN (xinner) >= align
4189 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4190 /* Here we avoid the case of a structure whose weak alignment
4191 forces many pushes of a small amount of data,
4192 and such small pushes do rounding that causes trouble. */
4193 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4194 || align >= BIGGEST_ALIGNMENT
4195 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4196 == (align / BITS_PER_UNIT)))
4197 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4198 {
4199 /* Push padding now if padding above and stack grows down,
4200 or if padding below and stack grows up.
4201 But if space already allocated, this has already been done. */
4202 if (extra && args_addr == 0
4203 && where_pad != none && where_pad != stack_direction)
4204 anti_adjust_stack (GEN_INT (extra));
4205
4206 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4207 }
4208 else
4209 #endif /* PUSH_ROUNDING */
4210 {
4211 rtx target;
4212
4213 /* Otherwise make space on the stack and copy the data
4214 to the address of that space. */
4215
4216 /* Deduct words put into registers from the size we must copy. */
4217 if (partial != 0)
4218 {
4219 if (CONST_INT_P (size))
4220 size = GEN_INT (INTVAL (size) - used);
4221 else
4222 size = expand_binop (GET_MODE (size), sub_optab, size,
4223 gen_int_mode (used, GET_MODE (size)),
4224 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4225 }
4226
4227 /* Get the address of the stack space.
4228 In this case, we do not deal with EXTRA separately.
4229 A single stack adjust will do. */
4230 if (! args_addr)
4231 {
4232 temp = push_block (size, extra, where_pad == downward);
4233 extra = 0;
4234 }
4235 else if (CONST_INT_P (args_so_far))
4236 temp = memory_address (BLKmode,
4237 plus_constant (Pmode, args_addr,
4238 skip + INTVAL (args_so_far)));
4239 else
4240 temp = memory_address (BLKmode,
4241 plus_constant (Pmode,
4242 gen_rtx_PLUS (Pmode,
4243 args_addr,
4244 args_so_far),
4245 skip));
4246
4247 if (!ACCUMULATE_OUTGOING_ARGS)
4248 {
4249 /* If the source is referenced relative to the stack pointer,
4250 copy it to another register to stabilize it. We do not need
4251 to do this if we know that we won't be changing sp. */
4252
4253 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4254 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4255 temp = copy_to_reg (temp);
4256 }
4257
4258 target = gen_rtx_MEM (BLKmode, temp);
4259
4260 /* We do *not* set_mem_attributes here, because incoming arguments
4261 may overlap with sibling call outgoing arguments and we cannot
4262 allow reordering of reads from function arguments with stores
4263 to outgoing arguments of sibling calls. We do, however, want
4264 to record the alignment of the stack slot. */
4265 /* ALIGN may well be better aligned than TYPE, e.g. due to
4266 PARM_BOUNDARY. Assume the caller isn't lying. */
4267 set_mem_align (target, align);
4268
4269 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4270 }
4271 }
4272 else if (partial > 0)
4273 {
4274 /* Scalar partly in registers. */
4275
4276 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4277 int i;
4278 int not_stack;
4279 /* # bytes of start of argument
4280 that we must make space for but need not store. */
4281 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4282 int args_offset = INTVAL (args_so_far);
4283 int skip;
4284
4285 /* Push padding now if padding above and stack grows down,
4286 or if padding below and stack grows up.
4287 But if space already allocated, this has already been done. */
4288 if (extra && args_addr == 0
4289 && where_pad != none && where_pad != stack_direction)
4290 anti_adjust_stack (GEN_INT (extra));
4291
4292 /* If we make space by pushing it, we might as well push
4293 the real data. Otherwise, we can leave OFFSET nonzero
4294 and leave the space uninitialized. */
4295 if (args_addr == 0)
4296 offset = 0;
4297
4298 /* Now NOT_STACK gets the number of words that we don't need to
4299 allocate on the stack. Convert OFFSET to words too. */
4300 not_stack = (partial - offset) / UNITS_PER_WORD;
4301 offset /= UNITS_PER_WORD;
4302
4303 /* If the partial register-part of the arg counts in its stack size,
4304 skip the part of stack space corresponding to the registers.
4305 Otherwise, start copying to the beginning of the stack space,
4306 by setting SKIP to 0. */
4307 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4308
4309 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4310 x = validize_mem (force_const_mem (mode, x));
4311
4312 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4313 SUBREGs of such registers are not allowed. */
4314 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4315 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4316 x = copy_to_reg (x);
4317
4318 /* Loop over all the words allocated on the stack for this arg. */
4319 /* We can do it by words, because any scalar bigger than a word
4320 	 has a size that is a multiple of a word.  */
4321 for (i = size - 1; i >= not_stack; i--)
4322 if (i >= not_stack + offset)
4323 emit_push_insn (operand_subword_force (x, i, mode),
4324 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4325 0, args_addr,
4326 GEN_INT (args_offset + ((i - not_stack + skip)
4327 * UNITS_PER_WORD)),
4328 reg_parm_stack_space, alignment_pad);
4329 }
4330 else
4331 {
4332 rtx addr;
4333 rtx dest;
4334
4335 /* Push padding now if padding above and stack grows down,
4336 or if padding below and stack grows up.
4337 But if space already allocated, this has already been done. */
4338 if (extra && args_addr == 0
4339 && where_pad != none && where_pad != stack_direction)
4340 anti_adjust_stack (GEN_INT (extra));
4341
4342 #ifdef PUSH_ROUNDING
4343 if (args_addr == 0 && PUSH_ARGS)
4344 emit_single_push_insn (mode, x, type);
4345 else
4346 #endif
4347 {
4348 if (CONST_INT_P (args_so_far))
4349 addr
4350 = memory_address (mode,
4351 plus_constant (Pmode, args_addr,
4352 INTVAL (args_so_far)));
4353 else
4354 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4355 args_so_far));
4356 dest = gen_rtx_MEM (mode, addr);
4357
4358 /* We do *not* set_mem_attributes here, because incoming arguments
4359 may overlap with sibling call outgoing arguments and we cannot
4360 allow reordering of reads from function arguments with stores
4361 to outgoing arguments of sibling calls. We do, however, want
4362 to record the alignment of the stack slot. */
4363 /* ALIGN may well be better aligned than TYPE, e.g. due to
4364 PARM_BOUNDARY. Assume the caller isn't lying. */
4365 set_mem_align (dest, align);
4366
4367 emit_move_insn (dest, x);
4368 }
4369 }
4370
4371 /* If part should go in registers, copy that part
4372 into the appropriate registers. Do this now, at the end,
4373 since mem-to-mem copies above may do function calls. */
4374 if (partial > 0 && reg != 0)
4375 {
4376 /* Handle calls that pass values in multiple non-contiguous locations.
4377 The Irix 6 ABI has examples of this. */
4378 if (GET_CODE (reg) == PARALLEL)
4379 emit_group_load (reg, x, type, -1);
4380 else
4381 {
4382 gcc_assert (partial % UNITS_PER_WORD == 0);
4383 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4384 }
4385 }
4386
4387 if (extra && args_addr == 0 && where_pad == stack_direction)
4388 anti_adjust_stack (GEN_INT (extra));
4389
4390 if (alignment_pad && args_addr == 0)
4391 anti_adjust_stack (alignment_pad);
4392 }
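/* Editorial sketch, not part of the original file: a minimal call to
   emit_push_insn for a single word-mode argument on a target with push
   insns (ARGS_ADDR == 0), no partial-register passing and no extra
   padding.  The alignment and the remaining arguments are simplifying
   assumptions.  */
#if 0
static void
example_push_word_argument (rtx val)
{
  emit_push_insn (val, word_mode, NULL_TREE, NULL_RTX,
		  PARM_BOUNDARY, 0, NULL_RTX, 0,
		  NULL_RTX, const0_rtx, 0, NULL_RTX);
}
#endif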
4393 \f
4394 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4395 operations. */
4396
4397 static rtx
4398 get_subtarget (rtx x)
4399 {
4400 return (optimize
4401 || x == 0
4402 /* Only registers can be subtargets. */
4403 || !REG_P (x)
4404 /* Don't use hard regs to avoid extending their life. */
4405 || REGNO (x) < FIRST_PSEUDO_REGISTER
4406 ? 0 : x);
4407 }
4408
4409 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4410 FIELD is a bitfield. Returns true if the optimization was successful,
4411 and there's nothing else to do. */
4412
4413 static bool
4414 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4415 unsigned HOST_WIDE_INT bitpos,
4416 unsigned HOST_WIDE_INT bitregion_start,
4417 unsigned HOST_WIDE_INT bitregion_end,
4418 enum machine_mode mode1, rtx str_rtx,
4419 tree to, tree src)
4420 {
4421 enum machine_mode str_mode = GET_MODE (str_rtx);
4422 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4423 tree op0, op1;
4424 rtx value, result;
4425 optab binop;
4426 gimple srcstmt;
4427 enum tree_code code;
4428
4429 if (mode1 != VOIDmode
4430 || bitsize >= BITS_PER_WORD
4431 || str_bitsize > BITS_PER_WORD
4432 || TREE_SIDE_EFFECTS (to)
4433 || TREE_THIS_VOLATILE (to))
4434 return false;
4435
4436 STRIP_NOPS (src);
4437 if (TREE_CODE (src) != SSA_NAME)
4438 return false;
4439 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4440 return false;
4441
4442 srcstmt = get_gimple_for_ssa_name (src);
4443 if (!srcstmt
4444 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4445 return false;
4446
4447 code = gimple_assign_rhs_code (srcstmt);
4448
4449 op0 = gimple_assign_rhs1 (srcstmt);
4450
4451 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4452 to find its initialization. Hopefully the initialization will
4453 be from a bitfield load. */
4454 if (TREE_CODE (op0) == SSA_NAME)
4455 {
4456 gimple op0stmt = get_gimple_for_ssa_name (op0);
4457
4458 /* We want to eventually have OP0 be the same as TO, which
4459 should be a bitfield. */
4460 if (!op0stmt
4461 || !is_gimple_assign (op0stmt)
4462 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4463 return false;
4464 op0 = gimple_assign_rhs1 (op0stmt);
4465 }
4466
4467 op1 = gimple_assign_rhs2 (srcstmt);
4468
4469 if (!operand_equal_p (to, op0, 0))
4470 return false;
4471
4472 if (MEM_P (str_rtx))
4473 {
4474 unsigned HOST_WIDE_INT offset1;
4475
4476 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4477 str_mode = word_mode;
4478 str_mode = get_best_mode (bitsize, bitpos,
4479 bitregion_start, bitregion_end,
4480 MEM_ALIGN (str_rtx), str_mode, 0);
4481 if (str_mode == VOIDmode)
4482 return false;
4483 str_bitsize = GET_MODE_BITSIZE (str_mode);
4484
4485 offset1 = bitpos;
4486 bitpos %= str_bitsize;
4487 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4488 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4489 }
4490 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4491 return false;
4492
4493 /* If the bit field covers the whole REG/MEM, store_field
4494 will likely generate better code. */
4495 if (bitsize >= str_bitsize)
4496 return false;
4497
4498 /* We can't handle fields split across multiple entities. */
4499 if (bitpos + bitsize > str_bitsize)
4500 return false;
4501
4502 if (BYTES_BIG_ENDIAN)
4503 bitpos = str_bitsize - bitpos - bitsize;
4504
4505 switch (code)
4506 {
4507 case PLUS_EXPR:
4508 case MINUS_EXPR:
4509 /* For now, just optimize the case of the topmost bitfield
4510 where we don't need to do any masking and also
4511 1 bit bitfields where xor can be used.
4512 We might win by one instruction for the other bitfields
4513 too if insv/extv instructions aren't used, so that
4514 can be added later. */
4515 if (bitpos + bitsize != str_bitsize
4516 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4517 break;
4518
4519 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4520 value = convert_modes (str_mode,
4521 TYPE_MODE (TREE_TYPE (op1)), value,
4522 TYPE_UNSIGNED (TREE_TYPE (op1)));
4523
4524 /* We may be accessing data outside the field, which means
4525 we can alias adjacent data. */
4526 if (MEM_P (str_rtx))
4527 {
4528 str_rtx = shallow_copy_rtx (str_rtx);
4529 set_mem_alias_set (str_rtx, 0);
4530 set_mem_expr (str_rtx, 0);
4531 }
4532
4533 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4534 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4535 {
4536 value = expand_and (str_mode, value, const1_rtx, NULL);
4537 binop = xor_optab;
4538 }
4539 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4540 result = expand_binop (str_mode, binop, str_rtx,
4541 value, str_rtx, 1, OPTAB_WIDEN);
4542 if (result != str_rtx)
4543 emit_move_insn (str_rtx, result);
4544 return true;
4545
4546 case BIT_IOR_EXPR:
4547 case BIT_XOR_EXPR:
4548 if (TREE_CODE (op1) != INTEGER_CST)
4549 break;
4550 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4551 value = convert_modes (str_mode,
4552 TYPE_MODE (TREE_TYPE (op1)), value,
4553 TYPE_UNSIGNED (TREE_TYPE (op1)));
4554
4555 /* We may be accessing data outside the field, which means
4556 we can alias adjacent data. */
4557 if (MEM_P (str_rtx))
4558 {
4559 str_rtx = shallow_copy_rtx (str_rtx);
4560 set_mem_alias_set (str_rtx, 0);
4561 set_mem_expr (str_rtx, 0);
4562 }
4563
4564 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4565 if (bitpos + bitsize != str_bitsize)
4566 {
4567 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4568 str_mode);
4569 value = expand_and (str_mode, value, mask, NULL_RTX);
4570 }
4571 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4572 result = expand_binop (str_mode, binop, str_rtx,
4573 value, str_rtx, 1, OPTAB_WIDEN);
4574 if (result != str_rtx)
4575 emit_move_insn (str_rtx, result);
4576 return true;
4577
4578 default:
4579 break;
4580 }
4581
4582 return false;
4583 }
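/* Editorial sketch, not part of the original file: the kind of source
   construct the routine above tries to expand without a full
   read-modify-write of the bitfield, here the BIT_IOR_EXPR case.  */
#if 0
struct example_flags { unsigned int f : 3; unsigned int g : 5; };

static void
example_bitfield_or (struct example_flags *p)
{
  p->f |= 4;	/* Candidate for the single or-immediate path above.  */
}
#endif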
4584
4585 /* In the C++ memory model, consecutive bit fields in a structure are
4586 considered one memory location.
4587
4588 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4589 returns the bit range of consecutive bits in which this COMPONENT_REF
4590 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4591 and *OFFSET may be adjusted in the process.
4592
4593 If the access does not need to be restricted, 0 is returned in both
4594 *BITSTART and *BITEND. */
4595
4596 static void
4597 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4598 unsigned HOST_WIDE_INT *bitend,
4599 tree exp,
4600 HOST_WIDE_INT *bitpos,
4601 tree *offset)
4602 {
4603 HOST_WIDE_INT bitoffset;
4604 tree field, repr;
4605
4606 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4607
4608 field = TREE_OPERAND (exp, 1);
4609 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4610 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4611 need to limit the range we can access. */
4612 if (!repr)
4613 {
4614 *bitstart = *bitend = 0;
4615 return;
4616 }
4617
4618 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4619 part of a larger bit field, then the representative does not serve any
4620 useful purpose. This can occur in Ada. */
4621 if (handled_component_p (TREE_OPERAND (exp, 0)))
4622 {
4623 enum machine_mode rmode;
4624 HOST_WIDE_INT rbitsize, rbitpos;
4625 tree roffset;
4626 int unsignedp;
4627 int volatilep = 0;
4628 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4629 &roffset, &rmode, &unsignedp, &volatilep, false);
4630 if ((rbitpos % BITS_PER_UNIT) != 0)
4631 {
4632 *bitstart = *bitend = 0;
4633 return;
4634 }
4635 }
4636
4637 /* Compute the adjustment to bitpos from the offset of the field
4638 relative to the representative. DECL_FIELD_OFFSET of field and
4639 repr are the same by construction if they are not constants,
4640 see finish_bitfield_layout. */
4641 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4642 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4643 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4644 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4645 else
4646 bitoffset = 0;
4647 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4648 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4649
4650 /* If the adjustment is larger than bitpos, we would have a negative bit
4651 position for the lower bound and this may wreak havoc later. Adjust
4652 offset and bitpos to make the lower bound non-negative in that case. */
4653 if (bitoffset > *bitpos)
4654 {
4655 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4656 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4657
4658 *bitpos += adjust;
4659 if (*offset == NULL_TREE)
4660 *offset = size_int (-adjust / BITS_PER_UNIT);
4661 else
4662 *offset
4663 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4664 *bitstart = 0;
4665 }
4666 else
4667 *bitstart = *bitpos - bitoffset;
4668
4669 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4670 }
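/* Editorial sketch, not part of the original file: for a struct such as
   the one below, get_bit_range reports that F1 and F2 share one bit
   region (their DECL_BIT_FIELD_REPRESENTATIVE), while C lies outside
   it, so a store to F1 must not read or write C under the C++ memory
   model.  */
#if 0
struct example_bit_region
{
  unsigned int f1 : 3;
  unsigned int f2 : 5;
  char c;
};
#endif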
4671
4672 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4673 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4674 DECL_RTL was not set yet, return NORTL. */
4675
4676 static inline bool
4677 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4678 {
4679 if (TREE_CODE (addr) != ADDR_EXPR)
4680 return false;
4681
4682 tree base = TREE_OPERAND (addr, 0);
4683
4684 if (!DECL_P (base)
4685 || TREE_ADDRESSABLE (base)
4686 || DECL_MODE (base) == BLKmode)
4687 return false;
4688
4689 if (!DECL_RTL_SET_P (base))
4690 return nortl;
4691
4692 return (!MEM_P (DECL_RTL (base)));
4693 }
4694
4695 /* Returns true if the MEM_REF REF refers to an object that does not
4696 reside in memory and has non-BLKmode. */
4697
4698 static inline bool
4699 mem_ref_refers_to_non_mem_p (tree ref)
4700 {
4701 tree base = TREE_OPERAND (ref, 0);
4702 return addr_expr_of_non_mem_decl_p_1 (base, false);
4703 }
4704
4705 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4706 is true, try generating a nontemporal store. */
4707
4708 void
4709 expand_assignment (tree to, tree from, bool nontemporal)
4710 {
4711 rtx to_rtx = 0;
4712 rtx result;
4713 enum machine_mode mode;
4714 unsigned int align;
4715 enum insn_code icode;
4716
4717 /* Don't crash if the lhs of the assignment was erroneous. */
4718 if (TREE_CODE (to) == ERROR_MARK)
4719 {
4720 expand_normal (from);
4721 return;
4722 }
4723
4724 /* Optimize away no-op moves without side-effects. */
4725 if (operand_equal_p (to, from, 0))
4726 return;
4727
4728 /* Handle misaligned stores. */
4729 mode = TYPE_MODE (TREE_TYPE (to));
4730 if ((TREE_CODE (to) == MEM_REF
4731 || TREE_CODE (to) == TARGET_MEM_REF)
4732 && mode != BLKmode
4733 && !mem_ref_refers_to_non_mem_p (to)
4734 && ((align = get_object_alignment (to))
4735 < GET_MODE_ALIGNMENT (mode))
4736 && (((icode = optab_handler (movmisalign_optab, mode))
4737 != CODE_FOR_nothing)
4738 || SLOW_UNALIGNED_ACCESS (mode, align)))
4739 {
4740 rtx reg, mem;
4741
4742 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4743 reg = force_not_mem (reg);
4744 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4745
4746 if (icode != CODE_FOR_nothing)
4747 {
4748 struct expand_operand ops[2];
4749
4750 create_fixed_operand (&ops[0], mem);
4751 create_input_operand (&ops[1], reg, mode);
4752 /* The movmisalign<mode> pattern cannot fail, else the assignment
4753 would silently be omitted. */
4754 expand_insn (icode, 2, ops);
4755 }
4756 else
4757 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4758 return;
4759 }
4760
4761 /* Assignment of a structure component needs special treatment
4762 if the structure component's rtx is not simply a MEM.
4763 Assignment of an array element at a constant index, and assignment of
4764 an array element in an unaligned packed structure field, has the same
4765 problem. Same for (partially) storing into a non-memory object. */
4766 if (handled_component_p (to)
4767 || (TREE_CODE (to) == MEM_REF
4768 && mem_ref_refers_to_non_mem_p (to))
4769 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4770 {
4771 enum machine_mode mode1;
4772 HOST_WIDE_INT bitsize, bitpos;
4773 unsigned HOST_WIDE_INT bitregion_start = 0;
4774 unsigned HOST_WIDE_INT bitregion_end = 0;
4775 tree offset;
4776 int unsignedp;
4777 int volatilep = 0;
4778 tree tem;
4779
4780 push_temp_slots ();
4781 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4782 &unsignedp, &volatilep, true);
4783
4784 /* Make sure bitpos is not negative, it can wreak havoc later. */
4785 if (bitpos < 0)
4786 {
4787 gcc_assert (offset == NULL_TREE);
4788 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4789 ? 3 : exact_log2 (BITS_PER_UNIT)));
4790 bitpos &= BITS_PER_UNIT - 1;
4791 }
4792
4793 if (TREE_CODE (to) == COMPONENT_REF
4794 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4795 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4796 /* The C++ memory model naturally applies to byte-aligned fields.
4797 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4798 BITSIZE are not byte-aligned, there is no need to limit the range
4799 we can access. This can occur with packed structures in Ada. */
4800 else if (bitsize > 0
4801 && bitsize % BITS_PER_UNIT == 0
4802 && bitpos % BITS_PER_UNIT == 0)
4803 {
4804 bitregion_start = bitpos;
4805 bitregion_end = bitpos + bitsize - 1;
4806 }
4807
4808 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4809
4810 /* If the field has a mode, we want to access it in the
4811 field's mode, not the computed mode.
4812 If a MEM has VOIDmode (external with incomplete type),
4813 use BLKmode for it instead. */
4814 if (MEM_P (to_rtx))
4815 {
4816 if (mode1 != VOIDmode)
4817 to_rtx = adjust_address (to_rtx, mode1, 0);
4818 else if (GET_MODE (to_rtx) == VOIDmode)
4819 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4820 }
4821
4822 if (offset != 0)
4823 {
4824 enum machine_mode address_mode;
4825 rtx offset_rtx;
4826
4827 if (!MEM_P (to_rtx))
4828 {
4829 /* We can get constant negative offsets into arrays with broken
4830 user code. Translate this to a trap instead of ICEing. */
4831 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4832 expand_builtin_trap ();
4833 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4834 }
4835
4836 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4837 address_mode = get_address_mode (to_rtx);
4838 if (GET_MODE (offset_rtx) != address_mode)
4839 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4840
4841 /* If we have an expression in OFFSET_RTX and a non-zero
4842 byte offset in BITPOS, adding the byte offset before the
4843 OFFSET_RTX results in better intermediate code, which makes
4844 later rtl optimization passes perform better.
4845
4846 We prefer intermediate code like this:
4847
4848 r124:DI=r123:DI+0x18
4849 [r124:DI]=r121:DI
4850
4851 ... instead of ...
4852
4853 r124:DI=r123:DI+0x10
4854 [r124:DI+0x8]=r121:DI
4855
4856 This is only done for aligned data values, as these can
4857 be expected to result in single move instructions. */
4858 if (mode1 != VOIDmode
4859 && bitpos != 0
4860 && bitsize > 0
4861 && (bitpos % bitsize) == 0
4862 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4863 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4864 {
4865 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4866 bitregion_start = 0;
4867 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4868 bitregion_end -= bitpos;
4869 bitpos = 0;
4870 }
4871
4872 to_rtx = offset_address (to_rtx, offset_rtx,
4873 highest_pow2_factor_for_target (to,
4874 offset));
4875 }
4876
4877 /* No action is needed if the target is not a memory and the field
4878 lies completely outside that target. This can occur if the source
4879 code contains an out-of-bounds access to a small array. */
4880 if (!MEM_P (to_rtx)
4881 && GET_MODE (to_rtx) != BLKmode
4882 && (unsigned HOST_WIDE_INT) bitpos
4883 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4884 {
4885 expand_normal (from);
4886 result = NULL;
4887 }
4888 /* Handle expand_expr of a complex value returning a CONCAT. */
4889 else if (GET_CODE (to_rtx) == CONCAT)
4890 {
4891 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4892 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4893 && bitpos == 0
4894 && bitsize == mode_bitsize)
4895 result = store_expr (from, to_rtx, false, nontemporal);
4896 else if (bitsize == mode_bitsize / 2
4897 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4898 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4899 nontemporal);
4900 else if (bitpos + bitsize <= mode_bitsize / 2)
4901 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4902 bitregion_start, bitregion_end,
4903 mode1, from,
4904 get_alias_set (to), nontemporal);
4905 else if (bitpos >= mode_bitsize / 2)
4906 result = store_field (XEXP (to_rtx, 1), bitsize,
4907 bitpos - mode_bitsize / 2,
4908 bitregion_start, bitregion_end,
4909 mode1, from,
4910 get_alias_set (to), nontemporal);
4911 else if (bitpos == 0 && bitsize == mode_bitsize)
4912 {
4913 rtx from_rtx;
4914 result = expand_normal (from);
4915 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4916 TYPE_MODE (TREE_TYPE (from)), 0);
4917 emit_move_insn (XEXP (to_rtx, 0),
4918 read_complex_part (from_rtx, false));
4919 emit_move_insn (XEXP (to_rtx, 1),
4920 read_complex_part (from_rtx, true));
4921 }
4922 else
4923 {
4924 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4925 GET_MODE_SIZE (GET_MODE (to_rtx)));
4926 write_complex_part (temp, XEXP (to_rtx, 0), false);
4927 write_complex_part (temp, XEXP (to_rtx, 1), true);
4928 result = store_field (temp, bitsize, bitpos,
4929 bitregion_start, bitregion_end,
4930 mode1, from,
4931 get_alias_set (to), nontemporal);
4932 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4933 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4934 }
4935 }
4936 else
4937 {
4938 if (MEM_P (to_rtx))
4939 {
4940 /* If the field is at offset zero, we could have been given the
4941 DECL_RTX of the parent struct. Don't munge it. */
4942 to_rtx = shallow_copy_rtx (to_rtx);
4943 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4944 if (volatilep)
4945 MEM_VOLATILE_P (to_rtx) = 1;
4946 }
4947
4948 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4949 bitregion_start, bitregion_end,
4950 mode1,
4951 to_rtx, to, from))
4952 result = NULL;
4953 else
4954 result = store_field (to_rtx, bitsize, bitpos,
4955 bitregion_start, bitregion_end,
4956 mode1, from,
4957 get_alias_set (to), nontemporal);
4958 }
4959
4960 if (result)
4961 preserve_temp_slots (result);
4962 pop_temp_slots ();
4963 return;
4964 }
4965
4966 /* If the rhs is a function call and its value is not an aggregate,
4967 call the function before we start to compute the lhs.
4968 This is needed for correct code for cases such as
4969 val = setjmp (buf) on machines where reference to val
4970 requires loading up part of an address in a separate insn.
4971
4972 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4973 since it might be a promoted variable where the zero- or sign- extension
4974 needs to be done. Handling this in the normal way is safe because no
4975 computation is done before the call. The same is true for SSA names. */
4976 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4977 && COMPLETE_TYPE_P (TREE_TYPE (from))
4978 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4979 && ! (((TREE_CODE (to) == VAR_DECL
4980 || TREE_CODE (to) == PARM_DECL
4981 || TREE_CODE (to) == RESULT_DECL)
4982 && REG_P (DECL_RTL (to)))
4983 || TREE_CODE (to) == SSA_NAME))
4984 {
4985 rtx value;
4986
4987 push_temp_slots ();
4988 value = expand_normal (from);
4989 if (to_rtx == 0)
4990 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4991
4992 /* Handle calls that return values in multiple non-contiguous locations.
4993 The Irix 6 ABI has examples of this. */
4994 if (GET_CODE (to_rtx) == PARALLEL)
4995 {
4996 if (GET_CODE (value) == PARALLEL)
4997 emit_group_move (to_rtx, value);
4998 else
4999 emit_group_load (to_rtx, value, TREE_TYPE (from),
5000 int_size_in_bytes (TREE_TYPE (from)));
5001 }
5002 else if (GET_CODE (value) == PARALLEL)
5003 emit_group_store (to_rtx, value, TREE_TYPE (from),
5004 int_size_in_bytes (TREE_TYPE (from)));
5005 else if (GET_MODE (to_rtx) == BLKmode)
5006 {
5007 /* Handle calls that return BLKmode values in registers. */
5008 if (REG_P (value))
5009 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5010 else
5011 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5012 }
5013 else
5014 {
5015 if (POINTER_TYPE_P (TREE_TYPE (to)))
5016 value = convert_memory_address_addr_space
5017 (GET_MODE (to_rtx), value,
5018 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5019
5020 emit_move_insn (to_rtx, value);
5021 }
5022 preserve_temp_slots (to_rtx);
5023 pop_temp_slots ();
5024 return;
5025 }
5026
5027 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5028 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5029
5030 /* Don't move directly into a return register. */
5031 if (TREE_CODE (to) == RESULT_DECL
5032 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5033 {
5034 rtx temp;
5035
5036 push_temp_slots ();
5037
5038 /* If the source is itself a return value, it still is in a pseudo at
5039 this point so we can move it back to the return register directly. */
5040 if (REG_P (to_rtx)
5041 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5042 && TREE_CODE (from) != CALL_EXPR)
5043 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5044 else
5045 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5046
5047 /* Handle calls that return values in multiple non-contiguous locations.
5048 The Irix 6 ABI has examples of this. */
5049 if (GET_CODE (to_rtx) == PARALLEL)
5050 {
5051 if (GET_CODE (temp) == PARALLEL)
5052 emit_group_move (to_rtx, temp);
5053 else
5054 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5055 int_size_in_bytes (TREE_TYPE (from)));
5056 }
5057 else if (temp)
5058 emit_move_insn (to_rtx, temp);
5059
5060 preserve_temp_slots (to_rtx);
5061 pop_temp_slots ();
5062 return;
5063 }
5064
5065 /* In case we are returning the contents of an object which overlaps
5066 the place the value is being stored, use a safe function when copying
5067 a value through a pointer into a structure value return block. */
5068 if (TREE_CODE (to) == RESULT_DECL
5069 && TREE_CODE (from) == INDIRECT_REF
5070 && ADDR_SPACE_GENERIC_P
5071 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5072 && refs_may_alias_p (to, from)
5073 && cfun->returns_struct
5074 && !cfun->returns_pcc_struct)
5075 {
5076 rtx from_rtx, size;
5077
5078 push_temp_slots ();
5079 size = expr_size (from);
5080 from_rtx = expand_normal (from);
5081
5082 emit_library_call (memmove_libfunc, LCT_NORMAL,
5083 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5084 XEXP (from_rtx, 0), Pmode,
5085 convert_to_mode (TYPE_MODE (sizetype),
5086 size, TYPE_UNSIGNED (sizetype)),
5087 TYPE_MODE (sizetype));
5088
5089 preserve_temp_slots (to_rtx);
5090 pop_temp_slots ();
5091 return;
5092 }
5093
5094 /* Compute FROM and store the value in the rtx we got. */
5095
5096 push_temp_slots ();
5097 result = store_expr (from, to_rtx, 0, nontemporal);
5098 preserve_temp_slots (result);
5099 pop_temp_slots ();
5100 return;
5101 }
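/* Editorial sketch, not part of the original file: how a plain
   "lhs = rhs" gimple assignment is handed to expand_assignment during
   statement expansion (compare expand_gimple_stmt in cfgexpand.c).  */
#if 0
static void
example_expand_plain_assignment (gimple stmt)
{
  expand_assignment (gimple_assign_lhs (stmt),
		     gimple_assign_rhs1 (stmt),
		     false /* nontemporal */);
}
#endif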
5102
5103 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5104 succeeded, false otherwise. */
5105
5106 bool
5107 emit_storent_insn (rtx to, rtx from)
5108 {
5109 struct expand_operand ops[2];
5110 enum machine_mode mode = GET_MODE (to);
5111 enum insn_code code = optab_handler (storent_optab, mode);
5112
5113 if (code == CODE_FOR_nothing)
5114 return false;
5115
5116 create_fixed_operand (&ops[0], to);
5117 create_input_operand (&ops[1], from, mode);
5118 return maybe_expand_insn (code, 2, ops);
5119 }
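/* Editorial sketch, not part of the original file: the usual caller
   pattern for emit_storent_insn -- attempt a nontemporal store and fall
   back to an ordinary move if the target has no storent pattern.  */
#if 0
static void
example_store_maybe_nontemporal (rtx to, rtx from)
{
  if (!emit_storent_insn (to, from))
    emit_move_insn (to, from);
}
#endif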
5120
5121 /* Generate code for computing expression EXP,
5122 and storing the value into TARGET.
5123
5124 If the mode is BLKmode then we may return TARGET itself.
5125    It turns out that in BLKmode it doesn't cause a problem,
5126 because C has no operators that could combine two different
5127 assignments into the same BLKmode object with different values
5128 with no sequence point. Will other languages need this to
5129 be more thorough?
5130
5131 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5132 stack, and block moves may need to be treated specially.
5133
5134 If NONTEMPORAL is true, try using a nontemporal store instruction. */
5135
5136 rtx
5137 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5138 {
5139 rtx temp;
5140 rtx alt_rtl = NULL_RTX;
5141 location_t loc = curr_insn_location ();
5142
5143 if (VOID_TYPE_P (TREE_TYPE (exp)))
5144 {
5145 /* C++ can generate ?: expressions with a throw expression in one
5146 branch and an rvalue in the other. Here, we resolve attempts to
5147 store the throw expression's nonexistent result. */
5148 gcc_assert (!call_param_p);
5149 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5150 return NULL_RTX;
5151 }
5152 if (TREE_CODE (exp) == COMPOUND_EXPR)
5153 {
5154 /* Perform first part of compound expression, then assign from second
5155 part. */
5156 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5157 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5158 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5159 nontemporal);
5160 }
5161 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5162 {
5163 /* For conditional expression, get safe form of the target. Then
5164 test the condition, doing the appropriate assignment on either
5165 side. This avoids the creation of unnecessary temporaries.
5166 For non-BLKmode, it is more efficient not to do this. */
5167
5168 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5169
5170 do_pending_stack_adjust ();
5171 NO_DEFER_POP;
5172 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5173 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5174 nontemporal);
5175 emit_jump_insn (gen_jump (lab2));
5176 emit_barrier ();
5177 emit_label (lab1);
5178 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5179 nontemporal);
5180 emit_label (lab2);
5181 OK_DEFER_POP;
5182
5183 return NULL_RTX;
5184 }
5185 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5186 /* If this is a scalar in a register that is stored in a wider mode
5187 than the declared mode, compute the result into its declared mode
5188 and then convert to the wider mode. Our value is the computed
5189 expression. */
5190 {
5191 rtx inner_target = 0;
5192
5193 /* We can do the conversion inside EXP, which will often result
5194 in some optimizations. Do the conversion in two steps: first
5195 change the signedness, if needed, then the extend. But don't
5196 do this if the type of EXP is a subtype of something else
5197 since then the conversion might involve more than just
5198 converting modes. */
5199 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5200 && TREE_TYPE (TREE_TYPE (exp)) == 0
5201 && GET_MODE_PRECISION (GET_MODE (target))
5202 == TYPE_PRECISION (TREE_TYPE (exp)))
5203 {
5204 if (TYPE_UNSIGNED (TREE_TYPE (exp))
5205 != SUBREG_PROMOTED_UNSIGNED_P (target))
5206 {
5207 /* Some types, e.g. Fortran's logical*4, won't have a signed
5208 version, so use the mode instead. */
5209 tree ntype
5210 = (signed_or_unsigned_type_for
5211 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5212 if (ntype == NULL)
5213 ntype = lang_hooks.types.type_for_mode
5214 (TYPE_MODE (TREE_TYPE (exp)),
5215 SUBREG_PROMOTED_UNSIGNED_P (target));
5216
5217 exp = fold_convert_loc (loc, ntype, exp);
5218 }
5219
5220 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5221 (GET_MODE (SUBREG_REG (target)),
5222 SUBREG_PROMOTED_UNSIGNED_P (target)),
5223 exp);
5224
5225 inner_target = SUBREG_REG (target);
5226 }
5227
5228 temp = expand_expr (exp, inner_target, VOIDmode,
5229 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5230
5231 /* If TEMP is a VOIDmode constant, use convert_modes to make
5232 sure that we properly convert it. */
5233 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5234 {
5235 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5236 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5237 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5238 GET_MODE (target), temp,
5239 SUBREG_PROMOTED_UNSIGNED_P (target));
5240 }
5241
5242 convert_move (SUBREG_REG (target), temp,
5243 SUBREG_PROMOTED_UNSIGNED_P (target));
5244
5245 return NULL_RTX;
5246 }
5247 else if ((TREE_CODE (exp) == STRING_CST
5248 || (TREE_CODE (exp) == MEM_REF
5249 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5250 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5251 == STRING_CST
5252 && integer_zerop (TREE_OPERAND (exp, 1))))
5253 && !nontemporal && !call_param_p
5254 && MEM_P (target))
5255 {
5256 /* Optimize initialization of an array with a STRING_CST. */
5257 HOST_WIDE_INT exp_len, str_copy_len;
5258 rtx dest_mem;
5259 tree str = TREE_CODE (exp) == STRING_CST
5260 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5261
5262 exp_len = int_expr_size (exp);
5263 if (exp_len <= 0)
5264 goto normal_expr;
5265
5266 if (TREE_STRING_LENGTH (str) <= 0)
5267 goto normal_expr;
5268
5269 str_copy_len = strlen (TREE_STRING_POINTER (str));
5270 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5271 goto normal_expr;
5272
5273 str_copy_len = TREE_STRING_LENGTH (str);
5274 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5275 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5276 {
5277 str_copy_len += STORE_MAX_PIECES - 1;
5278 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5279 }
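 /* Worked example of the rounding above (STORE_MAX_PIECES value assumed):
    with STORE_MAX_PIECES == 8 and TREE_STRING_LENGTH (str) == 13, the
    NUL-terminated string length becomes (13 + 7) & ~7 == 16, a multiple
    of the store-by-pieces chunk size; the MIN just below then caps it at
    the size of the object being initialized.  */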
5280 str_copy_len = MIN (str_copy_len, exp_len);
5281 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5282 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5283 MEM_ALIGN (target), false))
5284 goto normal_expr;
5285
5286 dest_mem = target;
5287
5288 dest_mem = store_by_pieces (dest_mem,
5289 str_copy_len, builtin_strncpy_read_str,
5290 CONST_CAST (char *,
5291 TREE_STRING_POINTER (str)),
5292 MEM_ALIGN (target), false,
5293 exp_len > str_copy_len ? 1 : 0);
5294 if (exp_len > str_copy_len)
5295 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5296 GEN_INT (exp_len - str_copy_len),
5297 BLOCK_OP_NORMAL);
5298 return NULL_RTX;
5299 }
5300 else
5301 {
5302 rtx tmp_target;
5303
5304 normal_expr:
5305 /* If we want to use a nontemporal store, force the value to
5306 register first. */
5307 tmp_target = nontemporal ? NULL_RTX : target;
5308 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5309 (call_param_p
5310 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5311 &alt_rtl, false);
5312 }
5313
5314 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5315 the same as that of TARGET, adjust the constant. This is needed, for
5316 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5317 only a word-sized value. */
5318 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5319 && TREE_CODE (exp) != ERROR_MARK
5320 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5321 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5322 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5323
5324 /* If value was not generated in the target, store it there.
5325 Convert the value to TARGET's type first if necessary and emit the
5326 pending incrementations that have been queued when expanding EXP.
5327 Note that we cannot emit the whole queue blindly because this will
5328 effectively disable the POST_INC optimization later.
5329
5330 If TEMP and TARGET compare equal according to rtx_equal_p, but
5331 one or both of them are volatile memory refs, we have to distinguish
5332 two cases:
5333 - expand_expr has used TARGET. In this case, we must not generate
5334 another copy. This can be detected by TARGET being equal according
5335 to == .
5336 - expand_expr has not used TARGET - that means that the source just
5337 happens to have the same RTX form. Since temp will have been created
5338 by expand_expr, it will compare unequal according to == .
5339 We must generate a copy in this case, to reach the correct number
5340 of volatile memory references. */
5341
5342 if ((! rtx_equal_p (temp, target)
5343 || (temp != target && (side_effects_p (temp)
5344 || side_effects_p (target))))
5345 && TREE_CODE (exp) != ERROR_MARK
5346 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5347 but TARGET is not a valid memory reference, TEMP will differ
5348 from TARGET although it is really the same location. */
5349 && !(alt_rtl
5350 && rtx_equal_p (alt_rtl, target)
5351 && !side_effects_p (alt_rtl)
5352 && !side_effects_p (target))
5353 /* If there's nothing to copy, don't bother. Don't call
5354 expr_size unless necessary, because some front ends' (e.g. C++)
5355 expr_size hook must not be given objects that are not
5356 supposed to be bit-copied or bit-initialized. */
5357 && expr_size (exp) != const0_rtx)
5358 {
5359 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5360 {
5361 if (GET_MODE (target) == BLKmode)
5362 {
5363 /* Handle calls that return BLKmode values in registers. */
5364 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5365 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5366 else
5367 store_bit_field (target,
5368 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5369 0, 0, 0, GET_MODE (temp), temp);
5370 }
5371 else
5372 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5373 }
5374
5375 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5376 {
5377 /* Handle copying a string constant into an array. The string
5378 constant may be shorter than the array. So copy just the string's
5379 actual length, and clear the rest. First get the size of the data
5380 type of the string, which is actually the size of the target. */
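 /* For instance (sizes assumed): for  char buf[16] = "abc";  expr_size
    is 16 while TREE_STRING_LENGTH is 4, so the code below copies the
    4 string bytes and then clears the remaining 12 bytes of the
    array.  */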
5381 rtx size = expr_size (exp);
5382
5383 if (CONST_INT_P (size)
5384 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5385 emit_block_move (target, temp, size,
5386 (call_param_p
5387 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5388 else
5389 {
5390 enum machine_mode pointer_mode
5391 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5392 enum machine_mode address_mode = get_address_mode (target);
5393
5394 /* Compute the size of the data to copy from the string. */
5395 tree copy_size
5396 = size_binop_loc (loc, MIN_EXPR,
5397 make_tree (sizetype, size),
5398 size_int (TREE_STRING_LENGTH (exp)));
5399 rtx copy_size_rtx
5400 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5401 (call_param_p
5402 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5403 rtx label = 0;
5404
5405 /* Copy that much. */
5406 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5407 TYPE_UNSIGNED (sizetype));
5408 emit_block_move (target, temp, copy_size_rtx,
5409 (call_param_p
5410 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5411
5412 /* Figure out how much is left in TARGET that we have to clear.
5413 Do all calculations in pointer_mode. */
5414 if (CONST_INT_P (copy_size_rtx))
5415 {
5416 size = plus_constant (address_mode, size,
5417 -INTVAL (copy_size_rtx));
5418 target = adjust_address (target, BLKmode,
5419 INTVAL (copy_size_rtx));
5420 }
5421 else
5422 {
5423 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5424 copy_size_rtx, NULL_RTX, 0,
5425 OPTAB_LIB_WIDEN);
5426
5427 if (GET_MODE (copy_size_rtx) != address_mode)
5428 copy_size_rtx = convert_to_mode (address_mode,
5429 copy_size_rtx,
5430 TYPE_UNSIGNED (sizetype));
5431
5432 target = offset_address (target, copy_size_rtx,
5433 highest_pow2_factor (copy_size));
5434 label = gen_label_rtx ();
5435 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5436 GET_MODE (size), 0, label);
5437 }
5438
5439 if (size != const0_rtx)
5440 clear_storage (target, size, BLOCK_OP_NORMAL);
5441
5442 if (label)
5443 emit_label (label);
5444 }
5445 }
5446 /* Handle calls that return values in multiple non-contiguous locations.
5447 The Irix 6 ABI has examples of this. */
5448 else if (GET_CODE (target) == PARALLEL)
5449 {
5450 if (GET_CODE (temp) == PARALLEL)
5451 emit_group_move (target, temp);
5452 else
5453 emit_group_load (target, temp, TREE_TYPE (exp),
5454 int_size_in_bytes (TREE_TYPE (exp)));
5455 }
5456 else if (GET_CODE (temp) == PARALLEL)
5457 emit_group_store (target, temp, TREE_TYPE (exp),
5458 int_size_in_bytes (TREE_TYPE (exp)));
5459 else if (GET_MODE (temp) == BLKmode)
5460 emit_block_move (target, temp, expr_size (exp),
5461 (call_param_p
5462 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5463 /* If we emit a nontemporal store, there is nothing else to do. */
5464 else if (nontemporal && emit_storent_insn (target, temp))
5465 ;
5466 else
5467 {
5468 temp = force_operand (temp, target);
5469 if (temp != target)
5470 emit_move_insn (target, temp);
5471 }
5472 }
5473
5474 return NULL_RTX;
5475 }
5476 \f
5477 /* Return true if field F of structure TYPE is a flexible array. */
5478
5479 static bool
5480 flexible_array_member_p (const_tree f, const_tree type)
5481 {
5482 const_tree tf;
5483
5484 tf = TREE_TYPE (f);
5485 return (DECL_CHAIN (f) == NULL
5486 && TREE_CODE (tf) == ARRAY_TYPE
5487 && TYPE_DOMAIN (tf)
5488 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5489 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5490 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5491 && int_size_in_bytes (type) >= 0);
5492 }
5493
5494 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5495 must have in order for it to completely initialize a value of type TYPE.
5496 Return -1 if the number isn't known.
5497
5498 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
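 /* A few illustrative results (types assumed):
      count_type_elements (int[4], true)                      == 4
      count_type_elements (struct { int a; double b; }, true) == 2
      count_type_elements (_Complex double, false)            == 2
    and with FOR_CTOR_P a trailing flexible array member of a
    RECORD_TYPE is not counted at all.  */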
5499
5500 static HOST_WIDE_INT
5501 count_type_elements (const_tree type, bool for_ctor_p)
5502 {
5503 switch (TREE_CODE (type))
5504 {
5505 case ARRAY_TYPE:
5506 {
5507 tree nelts;
5508
5509 nelts = array_type_nelts (type);
5510 if (nelts && tree_fits_uhwi_p (nelts))
5511 {
5512 unsigned HOST_WIDE_INT n;
5513
5514 n = tree_to_uhwi (nelts) + 1;
5515 if (n == 0 || for_ctor_p)
5516 return n;
5517 else
5518 return n * count_type_elements (TREE_TYPE (type), false);
5519 }
5520 return for_ctor_p ? -1 : 1;
5521 }
5522
5523 case RECORD_TYPE:
5524 {
5525 unsigned HOST_WIDE_INT n;
5526 tree f;
5527
5528 n = 0;
5529 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5530 if (TREE_CODE (f) == FIELD_DECL)
5531 {
5532 if (!for_ctor_p)
5533 n += count_type_elements (TREE_TYPE (f), false);
5534 else if (!flexible_array_member_p (f, type))
5535 /* Don't count flexible arrays, which are not supposed
5536 to be initialized. */
5537 n += 1;
5538 }
5539
5540 return n;
5541 }
5542
5543 case UNION_TYPE:
5544 case QUAL_UNION_TYPE:
5545 {
5546 tree f;
5547 HOST_WIDE_INT n, m;
5548
5549 gcc_assert (!for_ctor_p);
5550 /* Estimate the number of scalars in each field and pick the
5551 maximum. Other estimates would do instead; the idea is simply
5552 to make sure that the estimate is not sensitive to the ordering
5553 of the fields. */
5554 n = 1;
5555 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5556 if (TREE_CODE (f) == FIELD_DECL)
5557 {
5558 m = count_type_elements (TREE_TYPE (f), false);
5559 /* If the field doesn't span the whole union, add an extra
5560 scalar for the rest. */
5561 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5562 TYPE_SIZE (type)) != 1)
5563 m++;
5564 if (n < m)
5565 n = m;
5566 }
5567 return n;
5568 }
5569
5570 case COMPLEX_TYPE:
5571 return 2;
5572
5573 case VECTOR_TYPE:
5574 return TYPE_VECTOR_SUBPARTS (type);
5575
5576 case INTEGER_TYPE:
5577 case REAL_TYPE:
5578 case FIXED_POINT_TYPE:
5579 case ENUMERAL_TYPE:
5580 case BOOLEAN_TYPE:
5581 case POINTER_TYPE:
5582 case OFFSET_TYPE:
5583 case REFERENCE_TYPE:
5584 case NULLPTR_TYPE:
5585 return 1;
5586
5587 case ERROR_MARK:
5588 return 0;
5589
5590 case VOID_TYPE:
5591 case METHOD_TYPE:
5592 case FUNCTION_TYPE:
5593 case LANG_TYPE:
5594 default:
5595 gcc_unreachable ();
5596 }
5597 }
5598
5599 /* Helper for categorize_ctor_elements. Identical interface. */
5600
5601 static bool
5602 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5603 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5604 {
5605 unsigned HOST_WIDE_INT idx;
5606 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5607 tree value, purpose, elt_type;
5608
5609 /* Whether CTOR is a valid constant initializer, in accordance with what
5610 initializer_constant_valid_p does. If inferred from the constructor
5611 elements, true until proven otherwise. */
5612 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5613 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5614
5615 nz_elts = 0;
5616 init_elts = 0;
5617 num_fields = 0;
5618 elt_type = NULL_TREE;
5619
5620 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5621 {
5622 HOST_WIDE_INT mult = 1;
5623
5624 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5625 {
5626 tree lo_index = TREE_OPERAND (purpose, 0);
5627 tree hi_index = TREE_OPERAND (purpose, 1);
5628
5629 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5630 mult = (tree_to_uhwi (hi_index)
5631 - tree_to_uhwi (lo_index) + 1);
5632 }
5633 num_fields += mult;
5634 elt_type = TREE_TYPE (value);
5635
5636 switch (TREE_CODE (value))
5637 {
5638 case CONSTRUCTOR:
5639 {
5640 HOST_WIDE_INT nz = 0, ic = 0;
5641
5642 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5643 p_complete);
5644
5645 nz_elts += mult * nz;
5646 init_elts += mult * ic;
5647
5648 if (const_from_elts_p && const_p)
5649 const_p = const_elt_p;
5650 }
5651 break;
5652
5653 case INTEGER_CST:
5654 case REAL_CST:
5655 case FIXED_CST:
5656 if (!initializer_zerop (value))
5657 nz_elts += mult;
5658 init_elts += mult;
5659 break;
5660
5661 case STRING_CST:
5662 nz_elts += mult * TREE_STRING_LENGTH (value);
5663 init_elts += mult * TREE_STRING_LENGTH (value);
5664 break;
5665
5666 case COMPLEX_CST:
5667 if (!initializer_zerop (TREE_REALPART (value)))
5668 nz_elts += mult;
5669 if (!initializer_zerop (TREE_IMAGPART (value)))
5670 nz_elts += mult;
5671 init_elts += mult;
5672 break;
5673
5674 case VECTOR_CST:
5675 {
5676 unsigned i;
5677 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5678 {
5679 tree v = VECTOR_CST_ELT (value, i);
5680 if (!initializer_zerop (v))
5681 nz_elts += mult;
5682 init_elts += mult;
5683 }
5684 }
5685 break;
5686
5687 default:
5688 {
5689 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5690 nz_elts += mult * tc;
5691 init_elts += mult * tc;
5692
5693 if (const_from_elts_p && const_p)
5694 const_p = initializer_constant_valid_p (value, elt_type)
5695 != NULL_TREE;
5696 }
5697 break;
5698 }
5699 }
5700
5701 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5702 num_fields, elt_type))
5703 *p_complete = false;
5704
5705 *p_nz_elts += nz_elts;
5706 *p_init_elts += init_elts;
5707
5708 return const_p;
5709 }
5710
5711 /* Examine CTOR to discover:
5712 * how many scalar fields are set to nonzero values,
5713 and place it in *P_NZ_ELTS;
5714 * how many scalar fields in total are in CTOR,
5715 and place it in *P_ELT_COUNT.
5716 * whether the constructor is complete -- in the sense that every
5717 meaningful byte is explicitly given a value --
5718 and place it in *P_COMPLETE.
5719
5720 Return whether or not CTOR is a valid static constant initializer, the same
5721 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5722
5723 bool
5724 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5725 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5726 {
5727 *p_nz_elts = 0;
5728 *p_init_elts = 0;
5729 *p_complete = true;
5730
5731 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5732 }
5733
5734 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5735 of which had type LAST_TYPE. Each element was itself a complete
5736 initializer, in the sense that every meaningful byte was explicitly
5737 given a value. Return true if the same is true for the constructor
5738 as a whole. */
5739
5740 bool
5741 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5742 const_tree last_type)
5743 {
5744 if (TREE_CODE (type) == UNION_TYPE
5745 || TREE_CODE (type) == QUAL_UNION_TYPE)
5746 {
5747 if (num_elts == 0)
5748 return false;
5749
5750 gcc_assert (num_elts == 1 && last_type);
5751
5752 /* ??? We could look at each element of the union, and find the
5753 largest element. Which would avoid comparing the size of the
5754 initialized element against any tail padding in the union.
5755 Doesn't seem worth the effort... */
5756 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5757 }
5758
5759 return count_type_elements (type, true) == num_elts;
5760 }
5761
5762 /* Return 1 if EXP contains mostly (3/4) zeros. */
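 /* For example, a complete CONSTRUCTOR with init_elts == 8 of which one
    element is nonzero gives nz_elts == 1 < 8 / 4, so it counts as
    mostly zeros; with three nonzero elements it no longer does.  */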
5763
5764 static int
5765 mostly_zeros_p (const_tree exp)
5766 {
5767 if (TREE_CODE (exp) == CONSTRUCTOR)
5768 {
5769 HOST_WIDE_INT nz_elts, init_elts;
5770 bool complete_p;
5771
5772 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5773 return !complete_p || nz_elts < init_elts / 4;
5774 }
5775
5776 return initializer_zerop (exp);
5777 }
5778
5779 /* Return 1 if EXP contains all zeros. */
5780
5781 static int
5782 all_zeros_p (const_tree exp)
5783 {
5784 if (TREE_CODE (exp) == CONSTRUCTOR)
5785 {
5786 HOST_WIDE_INT nz_elts, init_elts;
5787 bool complete_p;
5788
5789 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5790 return nz_elts == 0;
5791 }
5792
5793 return initializer_zerop (exp);
5794 }
5795 \f
5796 /* Helper function for store_constructor.
5797 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5798 CLEARED is as for store_constructor.
5799 ALIAS_SET is the alias set to use for any stores.
5800
5801 This provides a recursive shortcut back to store_constructor when it isn't
5802 necessary to go through store_field. This is so that we can pass through
5803 the cleared field to let store_constructor know that we may not have to
5804 clear a substructure if the outer structure has already been cleared. */
5805
5806 static void
5807 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5808 HOST_WIDE_INT bitpos, enum machine_mode mode,
5809 tree exp, int cleared, alias_set_type alias_set)
5810 {
5811 if (TREE_CODE (exp) == CONSTRUCTOR
5812 /* We can only call store_constructor recursively if the size and
5813 bit position are on a byte boundary. */
5814 && bitpos % BITS_PER_UNIT == 0
5815 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5816 /* If we have a nonzero bitpos for a register target, then we just
5817 let store_field do the bitfield handling. This is unlikely to
5818 generate unnecessary clear instructions anyway. */
5819 && (bitpos == 0 || MEM_P (target)))
5820 {
5821 if (MEM_P (target))
5822 target
5823 = adjust_address (target,
5824 GET_MODE (target) == BLKmode
5825 || 0 != (bitpos
5826 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5827 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5828
5829
5830 /* Update the alias set, if required. */
5831 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5832 && MEM_ALIAS_SET (target) != 0)
5833 {
5834 target = copy_rtx (target);
5835 set_mem_alias_set (target, alias_set);
5836 }
5837
5838 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5839 }
5840 else
5841 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5842 }
5843
5844
5845 /* Returns the number of FIELD_DECLs in TYPE. */
5846
5847 static int
5848 fields_length (const_tree type)
5849 {
5850 tree t = TYPE_FIELDS (type);
5851 int count = 0;
5852
5853 for (; t; t = DECL_CHAIN (t))
5854 if (TREE_CODE (t) == FIELD_DECL)
5855 ++count;
5856
5857 return count;
5858 }
5859
5860
5861 /* Store the value of constructor EXP into the rtx TARGET.
5862 TARGET is either a REG or a MEM; we know it cannot conflict, since
5863 safe_from_p has been called.
5864 CLEARED is true if TARGET is known to have been zero'd.
5865 SIZE is the number of bytes of TARGET we are allowed to modify: this
5866 may not be the same as the size of EXP if we are assigning to a field
5867 which has been packed to exclude padding bits. */
5868
5869 static void
5870 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5871 {
5872 tree type = TREE_TYPE (exp);
5873 #ifdef WORD_REGISTER_OPERATIONS
5874 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5875 #endif
5876
5877 switch (TREE_CODE (type))
5878 {
5879 case RECORD_TYPE:
5880 case UNION_TYPE:
5881 case QUAL_UNION_TYPE:
5882 {
5883 unsigned HOST_WIDE_INT idx;
5884 tree field, value;
5885
5886 /* If size is zero or the target is already cleared, do nothing. */
5887 if (size == 0 || cleared)
5888 cleared = 1;
5889 /* We either clear the aggregate or indicate the value is dead. */
5890 else if ((TREE_CODE (type) == UNION_TYPE
5891 || TREE_CODE (type) == QUAL_UNION_TYPE)
5892 && ! CONSTRUCTOR_ELTS (exp))
5893 /* If the constructor is empty, clear the union. */
5894 {
5895 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5896 cleared = 1;
5897 }
5898
5899 /* If we are building a static constructor into a register,
5900 set the initial value as zero so we can fold the value into
5901 a constant. But if more than one register is involved,
5902 this probably loses. */
5903 else if (REG_P (target) && TREE_STATIC (exp)
5904 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5905 {
5906 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5907 cleared = 1;
5908 }
5909
5910 /* If the constructor has fewer fields than the structure or
5911 if we are initializing the structure to mostly zeros, clear
5912 the whole structure first. Don't do this if TARGET is a
5913 register whose mode size isn't equal to SIZE since
5914 clear_storage can't handle this case. */
5915 else if (size > 0
5916 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5917 != fields_length (type))
5918 || mostly_zeros_p (exp))
5919 && (!REG_P (target)
5920 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5921 == size)))
5922 {
5923 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5924 cleared = 1;
5925 }
5926
5927 if (REG_P (target) && !cleared)
5928 emit_clobber (target);
5929
5930 /* Store each element of the constructor into the
5931 corresponding field of TARGET. */
5932 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5933 {
5934 enum machine_mode mode;
5935 HOST_WIDE_INT bitsize;
5936 HOST_WIDE_INT bitpos = 0;
5937 tree offset;
5938 rtx to_rtx = target;
5939
5940 /* Just ignore missing fields. We cleared the whole
5941 structure, above, if any fields are missing. */
5942 if (field == 0)
5943 continue;
5944
5945 if (cleared && initializer_zerop (value))
5946 continue;
5947
5948 if (tree_fits_uhwi_p (DECL_SIZE (field)))
5949 bitsize = tree_to_uhwi (DECL_SIZE (field));
5950 else
5951 bitsize = -1;
5952
5953 mode = DECL_MODE (field);
5954 if (DECL_BIT_FIELD (field))
5955 mode = VOIDmode;
5956
5957 offset = DECL_FIELD_OFFSET (field);
5958 if (tree_fits_shwi_p (offset)
5959 && tree_fits_shwi_p (bit_position (field)))
5960 {
5961 bitpos = int_bit_position (field);
5962 offset = 0;
5963 }
5964 else
5965 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
5966
5967 if (offset)
5968 {
5969 enum machine_mode address_mode;
5970 rtx offset_rtx;
5971
5972 offset
5973 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5974 make_tree (TREE_TYPE (exp),
5975 target));
5976
5977 offset_rtx = expand_normal (offset);
5978 gcc_assert (MEM_P (to_rtx));
5979
5980 address_mode = get_address_mode (to_rtx);
5981 if (GET_MODE (offset_rtx) != address_mode)
5982 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5983
5984 to_rtx = offset_address (to_rtx, offset_rtx,
5985 highest_pow2_factor (offset));
5986 }
5987
5988 #ifdef WORD_REGISTER_OPERATIONS
5989 /* If this initializes a field that is smaller than a
5990 word, at the start of a word, try to widen it to a full
5991 word. This special case allows us to output C++ member
5992 function initializations in a form that the optimizers
5993 can understand. */
5994 if (REG_P (target)
5995 && bitsize < BITS_PER_WORD
5996 && bitpos % BITS_PER_WORD == 0
5997 && GET_MODE_CLASS (mode) == MODE_INT
5998 && TREE_CODE (value) == INTEGER_CST
5999 && exp_size >= 0
6000 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6001 {
6002 tree type = TREE_TYPE (value);
6003
6004 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6005 {
6006 type = lang_hooks.types.type_for_mode
6007 (word_mode, TYPE_UNSIGNED (type));
6008 value = fold_convert (type, value);
6009 }
6010
6011 if (BYTES_BIG_ENDIAN)
6012 value
6013 = fold_build2 (LSHIFT_EXPR, type, value,
6014 build_int_cst (type,
6015 BITS_PER_WORD - bitsize));
6016 bitsize = BITS_PER_WORD;
6017 mode = word_mode;
6018 }
6019 #endif
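 /* Worked example of the widening above (8-bit field in a 32-bit word
    assumed): storing the INTEGER_CST 0x2a into an 8-bit field at bit
    position 0 of a register becomes, on a big-endian target, a store of
    the full-word constant 0x2a << 24 == 0x2a000000, so the byte lands
    in the bits it would occupy within the word in memory.  */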
6020
6021 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6022 && DECL_NONADDRESSABLE_P (field))
6023 {
6024 to_rtx = copy_rtx (to_rtx);
6025 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6026 }
6027
6028 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6029 value, cleared,
6030 get_alias_set (TREE_TYPE (field)));
6031 }
6032 break;
6033 }
6034 case ARRAY_TYPE:
6035 {
6036 tree value, index;
6037 unsigned HOST_WIDE_INT i;
6038 int need_to_clear;
6039 tree domain;
6040 tree elttype = TREE_TYPE (type);
6041 int const_bounds_p;
6042 HOST_WIDE_INT minelt = 0;
6043 HOST_WIDE_INT maxelt = 0;
6044
6045 domain = TYPE_DOMAIN (type);
6046 const_bounds_p = (TYPE_MIN_VALUE (domain)
6047 && TYPE_MAX_VALUE (domain)
6048 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6049 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6050
6051 /* If we have constant bounds for the range of the type, get them. */
6052 if (const_bounds_p)
6053 {
6054 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6055 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6056 }
6057
6058 /* If the constructor has fewer elements than the array, clear
6059 the whole array first. Similarly if this is a static
6060 constructor of a non-BLKmode object. */
6061 if (cleared)
6062 need_to_clear = 0;
6063 else if (REG_P (target) && TREE_STATIC (exp))
6064 need_to_clear = 1;
6065 else
6066 {
6067 unsigned HOST_WIDE_INT idx;
6068 tree index, value;
6069 HOST_WIDE_INT count = 0, zero_count = 0;
6070 need_to_clear = ! const_bounds_p;
6071
6072 /* This loop is a more accurate version of the loop in
6073 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6074 is also needed to check for missing elements. */
6075 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6076 {
6077 HOST_WIDE_INT this_node_count;
6078
6079 if (need_to_clear)
6080 break;
6081
6082 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6083 {
6084 tree lo_index = TREE_OPERAND (index, 0);
6085 tree hi_index = TREE_OPERAND (index, 1);
6086
6087 if (! tree_fits_uhwi_p (lo_index)
6088 || ! tree_fits_uhwi_p (hi_index))
6089 {
6090 need_to_clear = 1;
6091 break;
6092 }
6093
6094 this_node_count = (tree_to_uhwi (hi_index)
6095 - tree_to_uhwi (lo_index) + 1);
6096 }
6097 else
6098 this_node_count = 1;
6099
6100 count += this_node_count;
6101 if (mostly_zeros_p (value))
6102 zero_count += this_node_count;
6103 }
6104
6105 /* Clear the entire array first if there are any missing
6106 elements, or if the incidence of zero elements is >=
6107 75%. */
6108 if (! need_to_clear
6109 && (count < maxelt - minelt + 1
6110 || 4 * zero_count >= 3 * count))
6111 need_to_clear = 1;
6112 }
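 /* Worked example of the heuristic above (counts assumed): for an array
    with count == 100 element positions of which zero_count == 80 are
    mostly zero, 4 * 80 == 320 >= 3 * 100 == 300, so the whole array is
    cleared up front and the all-zero entries can simply be skipped in
    the store loop below.  */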
6113
6114 if (need_to_clear && size > 0)
6115 {
6116 if (REG_P (target))
6117 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6118 else
6119 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6120 cleared = 1;
6121 }
6122
6123 if (!cleared && REG_P (target))
6124 /* Inform later passes that the old value is dead. */
6125 emit_clobber (target);
6126
6127 /* Store each element of the constructor into the
6128 corresponding element of TARGET, determined by counting the
6129 elements. */
6130 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6131 {
6132 enum machine_mode mode;
6133 HOST_WIDE_INT bitsize;
6134 HOST_WIDE_INT bitpos;
6135 rtx xtarget = target;
6136
6137 if (cleared && initializer_zerop (value))
6138 continue;
6139
6140 mode = TYPE_MODE (elttype);
6141 if (mode == BLKmode)
6142 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6143 ? tree_to_uhwi (TYPE_SIZE (elttype))
6144 : -1);
6145 else
6146 bitsize = GET_MODE_BITSIZE (mode);
6147
6148 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6149 {
6150 tree lo_index = TREE_OPERAND (index, 0);
6151 tree hi_index = TREE_OPERAND (index, 1);
6152 rtx index_r, pos_rtx;
6153 HOST_WIDE_INT lo, hi, count;
6154 tree position;
6155
6156 /* If the range is constant and "small", unroll the loop. */
6157 if (const_bounds_p
6158 && tree_fits_shwi_p (lo_index)
6159 && tree_fits_shwi_p (hi_index)
6160 && (lo = tree_to_shwi (lo_index),
6161 hi = tree_to_shwi (hi_index),
6162 count = hi - lo + 1,
6163 (!MEM_P (target)
6164 || count <= 2
6165 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6166 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6167 <= 40 * 8)))))
6168 {
6169 lo -= minelt; hi -= minelt;
6170 for (; lo <= hi; lo++)
6171 {
6172 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6173
6174 if (MEM_P (target)
6175 && !MEM_KEEP_ALIAS_SET_P (target)
6176 && TREE_CODE (type) == ARRAY_TYPE
6177 && TYPE_NONALIASED_COMPONENT (type))
6178 {
6179 target = copy_rtx (target);
6180 MEM_KEEP_ALIAS_SET_P (target) = 1;
6181 }
6182
6183 store_constructor_field
6184 (target, bitsize, bitpos, mode, value, cleared,
6185 get_alias_set (elttype));
6186 }
6187 }
6188 else
6189 {
6190 rtx loop_start = gen_label_rtx ();
6191 rtx loop_end = gen_label_rtx ();
6192 tree exit_cond;
6193
6194 expand_normal (hi_index);
6195
6196 index = build_decl (EXPR_LOCATION (exp),
6197 VAR_DECL, NULL_TREE, domain);
6198 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6199 SET_DECL_RTL (index, index_r);
6200 store_expr (lo_index, index_r, 0, false);
6201
6202 /* Build the head of the loop. */
6203 do_pending_stack_adjust ();
6204 emit_label (loop_start);
6205
6206 /* Assign value to element index. */
6207 position =
6208 fold_convert (ssizetype,
6209 fold_build2 (MINUS_EXPR,
6210 TREE_TYPE (index),
6211 index,
6212 TYPE_MIN_VALUE (domain)));
6213
6214 position =
6215 size_binop (MULT_EXPR, position,
6216 fold_convert (ssizetype,
6217 TYPE_SIZE_UNIT (elttype)));
6218
6219 pos_rtx = expand_normal (position);
6220 xtarget = offset_address (target, pos_rtx,
6221 highest_pow2_factor (position));
6222 xtarget = adjust_address (xtarget, mode, 0);
6223 if (TREE_CODE (value) == CONSTRUCTOR)
6224 store_constructor (value, xtarget, cleared,
6225 bitsize / BITS_PER_UNIT);
6226 else
6227 store_expr (value, xtarget, 0, false);
6228
6229 /* Generate a conditional jump to exit the loop. */
6230 exit_cond = build2 (LT_EXPR, integer_type_node,
6231 index, hi_index);
6232 jumpif (exit_cond, loop_end, -1);
6233
6234 /* Update the loop counter, and jump to the head of
6235 the loop. */
6236 expand_assignment (index,
6237 build2 (PLUS_EXPR, TREE_TYPE (index),
6238 index, integer_one_node),
6239 false);
6240
6241 emit_jump (loop_start);
6242
6243 /* Build the end of the loop. */
6244 emit_label (loop_end);
6245 }
6246 }
6247 else if ((index != 0 && ! tree_fits_shwi_p (index))
6248 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6249 {
6250 tree position;
6251
6252 if (index == 0)
6253 index = ssize_int (1);
6254
6255 if (minelt)
6256 index = fold_convert (ssizetype,
6257 fold_build2 (MINUS_EXPR,
6258 TREE_TYPE (index),
6259 index,
6260 TYPE_MIN_VALUE (domain)));
6261
6262 position =
6263 size_binop (MULT_EXPR, index,
6264 fold_convert (ssizetype,
6265 TYPE_SIZE_UNIT (elttype)));
6266 xtarget = offset_address (target,
6267 expand_normal (position),
6268 highest_pow2_factor (position));
6269 xtarget = adjust_address (xtarget, mode, 0);
6270 store_expr (value, xtarget, 0, false);
6271 }
6272 else
6273 {
6274 if (index != 0)
6275 bitpos = ((tree_to_shwi (index) - minelt)
6276 * tree_to_uhwi (TYPE_SIZE (elttype)));
6277 else
6278 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6279
6280 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6281 && TREE_CODE (type) == ARRAY_TYPE
6282 && TYPE_NONALIASED_COMPONENT (type))
6283 {
6284 target = copy_rtx (target);
6285 MEM_KEEP_ALIAS_SET_P (target) = 1;
6286 }
6287 store_constructor_field (target, bitsize, bitpos, mode, value,
6288 cleared, get_alias_set (elttype));
6289 }
6290 }
6291 break;
6292 }
6293
6294 case VECTOR_TYPE:
6295 {
6296 unsigned HOST_WIDE_INT idx;
6297 constructor_elt *ce;
6298 int i;
6299 int need_to_clear;
6300 int icode = CODE_FOR_nothing;
6301 tree elttype = TREE_TYPE (type);
6302 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6303 enum machine_mode eltmode = TYPE_MODE (elttype);
6304 HOST_WIDE_INT bitsize;
6305 HOST_WIDE_INT bitpos;
6306 rtvec vector = NULL;
6307 unsigned n_elts;
6308 alias_set_type alias;
6309
6310 gcc_assert (eltmode != BLKmode);
6311
6312 n_elts = TYPE_VECTOR_SUBPARTS (type);
6313 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6314 {
6315 enum machine_mode mode = GET_MODE (target);
6316
6317 icode = (int) optab_handler (vec_init_optab, mode);
6318 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6319 if (icode != CODE_FOR_nothing)
6320 {
6321 tree value;
6322
6323 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6324 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6325 {
6326 icode = CODE_FOR_nothing;
6327 break;
6328 }
6329 }
6330 if (icode != CODE_FOR_nothing)
6331 {
6332 unsigned int i;
6333
6334 vector = rtvec_alloc (n_elts);
6335 for (i = 0; i < n_elts; i++)
6336 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6337 }
6338 }
6339
6340 /* If the constructor has fewer elements than the vector,
6341 clear the whole vector first. Similarly if this is a static
6342 constructor of a non-BLKmode object. */
6343 if (cleared)
6344 need_to_clear = 0;
6345 else if (REG_P (target) && TREE_STATIC (exp))
6346 need_to_clear = 1;
6347 else
6348 {
6349 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6350 tree value;
6351
6352 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6353 {
6354 int n_elts_here = tree_to_uhwi
6355 (int_const_binop (TRUNC_DIV_EXPR,
6356 TYPE_SIZE (TREE_TYPE (value)),
6357 TYPE_SIZE (elttype)));
6358
6359 count += n_elts_here;
6360 if (mostly_zeros_p (value))
6361 zero_count += n_elts_here;
6362 }
6363
6364 /* Clear the entire vector first if there are any missing elements,
6365 or if the incidence of zero elements is >= 75%. */
6366 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6367 }
6368
6369 if (need_to_clear && size > 0 && !vector)
6370 {
6371 if (REG_P (target))
6372 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6373 else
6374 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6375 cleared = 1;
6376 }
6377
6378 /* Inform later passes that the old value is dead. */
6379 if (!cleared && !vector && REG_P (target))
6380 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6381
6382 if (MEM_P (target))
6383 alias = MEM_ALIAS_SET (target);
6384 else
6385 alias = get_alias_set (elttype);
6386
6387 /* Store each element of the constructor into the corresponding
6388 element of TARGET, determined by counting the elements. */
6389 for (idx = 0, i = 0;
6390 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6391 idx++, i += bitsize / elt_size)
6392 {
6393 HOST_WIDE_INT eltpos;
6394 tree value = ce->value;
6395
6396 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6397 if (cleared && initializer_zerop (value))
6398 continue;
6399
6400 if (ce->index)
6401 eltpos = tree_to_uhwi (ce->index);
6402 else
6403 eltpos = i;
6404
6405 if (vector)
6406 {
6407 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6408 elements. */
6409 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6410 RTVEC_ELT (vector, eltpos)
6411 = expand_normal (value);
6412 }
6413 else
6414 {
6415 enum machine_mode value_mode =
6416 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6417 ? TYPE_MODE (TREE_TYPE (value))
6418 : eltmode;
6419 bitpos = eltpos * elt_size;
6420 store_constructor_field (target, bitsize, bitpos, value_mode,
6421 value, cleared, alias);
6422 }
6423 }
6424
6425 if (vector)
6426 emit_insn (GEN_FCN (icode)
6427 (target,
6428 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6429 break;
6430 }
6431
6432 default:
6433 gcc_unreachable ();
6434 }
6435 }
6436
6437 /* Store the value of EXP (an expression tree)
6438 into a subfield of TARGET which has mode MODE and occupies
6439 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6440 If MODE is VOIDmode, it means that we are storing into a bit-field.
6441
6442 BITREGION_START is the bitpos of the first bitfield in this region.
6443 BITREGION_END is the bitpos of the ending bitfield in this region.
6444 These two fields are 0, if the C++ memory model does not apply,
6445 or we are not interested in keeping track of bitfield regions.
6446
6447 Always return const0_rtx unless we have something particular to
6448 return.
6449
6450 ALIAS_SET is the alias set for the destination. This value will
6451 (in general) be different from that for TARGET, since TARGET is a
6452 reference to the containing structure.
6453
6454 If NONTEMPORAL is true, try generating a nontemporal store. */
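 /* A hedged illustration of the bit-region arguments (layout assumed):
    for
        struct { char a; int b : 7; int c : 9; char d; } x;
    the adjacent bitfields b and c form one memory location under the
    C++11 memory model, so a store to x.b is given a BITREGION_START /
    BITREGION_END covering just the b/c group and must not touch bits
    of a or d outside that region.  */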
6455
6456 static rtx
6457 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6458 unsigned HOST_WIDE_INT bitregion_start,
6459 unsigned HOST_WIDE_INT bitregion_end,
6460 enum machine_mode mode, tree exp,
6461 alias_set_type alias_set, bool nontemporal)
6462 {
6463 if (TREE_CODE (exp) == ERROR_MARK)
6464 return const0_rtx;
6465
6466 /* If we have nothing to store, do nothing unless the expression has
6467 side-effects. */
6468 if (bitsize == 0)
6469 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6470
6471 if (GET_CODE (target) == CONCAT)
6472 {
6473 /* We're storing into a struct containing a single __complex. */
6474
6475 gcc_assert (!bitpos);
6476 return store_expr (exp, target, 0, nontemporal);
6477 }
6478
6479 /* If the structure is in a register or if the component
6480 is a bit field, we cannot use addressing to access it.
6481 Use bit-field techniques or SUBREG to store in it. */
6482
6483 if (mode == VOIDmode
6484 || (mode != BLKmode && ! direct_store[(int) mode]
6485 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6486 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6487 || REG_P (target)
6488 || GET_CODE (target) == SUBREG
6489 /* If the field isn't aligned enough to store as an ordinary memref,
6490 store it as a bit field. */
6491 || (mode != BLKmode
6492 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6493 || bitpos % GET_MODE_ALIGNMENT (mode))
6494 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6495 || (bitpos % BITS_PER_UNIT != 0)))
6496 || (bitsize >= 0 && mode != BLKmode
6497 && GET_MODE_BITSIZE (mode) > bitsize)
6498 /* If the RHS and field are a constant size and the size of the
6499 RHS isn't the same size as the bitfield, we must use bitfield
6500 operations. */
6501 || (bitsize >= 0
6502 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6503 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6504 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6505 decl we must use bitfield operations. */
6506 || (bitsize >= 0
6507 && TREE_CODE (exp) == MEM_REF
6508 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6509 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6510 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6511 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6512 {
6513 rtx temp;
6514 gimple nop_def;
6515
6516 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6517 implies a mask operation. If the precision is the same size as
6518 the field we're storing into, that mask is redundant. This is
6519 particularly common with bit field assignments generated by the
6520 C front end. */
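 /* For instance (types assumed), given
        struct { unsigned int f : 7; } s;
        s.f = x;     (x an unsigned int)
    the front end converts x to a 7-bit unsigned type, a NOP_EXPR that
    would imply masking with 0x7f; since exactly bitsize == 7 bits are
    stored anyway, the code below drops the conversion and expands x
    directly.  */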
6521 nop_def = get_def_for_expr (exp, NOP_EXPR);
6522 if (nop_def)
6523 {
6524 tree type = TREE_TYPE (exp);
6525 if (INTEGRAL_TYPE_P (type)
6526 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6527 && bitsize == TYPE_PRECISION (type))
6528 {
6529 tree op = gimple_assign_rhs1 (nop_def);
6530 type = TREE_TYPE (op);
6531 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6532 exp = op;
6533 }
6534 }
6535
6536 temp = expand_normal (exp);
6537
6538 /* If BITSIZE is narrower than the size of the type of EXP
6539 we will be narrowing TEMP. Normally, what's wanted are the
6540 low-order bits. However, if EXP's type is a record and this is a
6541 big-endian machine, we want the upper BITSIZE bits. */
6542 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6543 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6544 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6545 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6546 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6547 NULL_RTX, 1);
6548
6549 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6550 if (mode != VOIDmode && mode != BLKmode
6551 && mode != TYPE_MODE (TREE_TYPE (exp)))
6552 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6553
6554 /* If the modes of TEMP and TARGET are both BLKmode, both
6555 must be in memory and BITPOS must be aligned on a byte
6556 boundary. If so, we simply do a block copy. Likewise
6557 for a BLKmode-like TARGET. */
6558 if (GET_MODE (temp) == BLKmode
6559 && (GET_MODE (target) == BLKmode
6560 || (MEM_P (target)
6561 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6562 && (bitpos % BITS_PER_UNIT) == 0
6563 && (bitsize % BITS_PER_UNIT) == 0)))
6564 {
6565 gcc_assert (MEM_P (target) && MEM_P (temp)
6566 && (bitpos % BITS_PER_UNIT) == 0);
6567
6568 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6569 emit_block_move (target, temp,
6570 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6571 / BITS_PER_UNIT),
6572 BLOCK_OP_NORMAL);
6573
6574 return const0_rtx;
6575 }
6576
6577 /* Handle calls that return values in multiple non-contiguous locations.
6578 The Irix 6 ABI has examples of this. */
6579 if (GET_CODE (temp) == PARALLEL)
6580 {
6581 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6582 rtx temp_target;
6583 if (mode == BLKmode)
6584 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6585 temp_target = gen_reg_rtx (mode);
6586 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6587 temp = temp_target;
6588 }
6589 else if (mode == BLKmode)
6590 {
6591 /* Handle calls that return BLKmode values in registers. */
6592 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6593 {
6594 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6595 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6596 temp = temp_target;
6597 }
6598 else
6599 {
6600 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6601 rtx temp_target;
6602 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6603 temp_target = gen_reg_rtx (mode);
6604 temp_target
6605 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6606 temp_target, mode, mode);
6607 temp = temp_target;
6608 }
6609 }
6610
6611 /* Store the value in the bitfield. */
6612 store_bit_field (target, bitsize, bitpos,
6613 bitregion_start, bitregion_end,
6614 mode, temp);
6615
6616 return const0_rtx;
6617 }
6618 else
6619 {
6620 /* Now build a reference to just the desired component. */
6621 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6622
6623 if (to_rtx == target)
6624 to_rtx = copy_rtx (to_rtx);
6625
6626 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6627 set_mem_alias_set (to_rtx, alias_set);
6628
6629 return store_expr (exp, to_rtx, 0, nontemporal);
6630 }
6631 }
6632 \f
6633 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6634 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6635 codes and find the ultimate containing object, which we return.
6636
6637 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6638 bit position, and *PUNSIGNEDP to the signedness of the field.
6639 If the position of the field is variable, we store a tree
6640 giving the variable offset (in units) in *POFFSET.
6641 This offset is in addition to the bit position.
6642 If the position is not variable, we store 0 in *POFFSET.
6643
6644 If any of the extraction expressions is volatile,
6645 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6646
6647 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6648 Otherwise, it is a mode that can be used to access the field.
6649
6650 If the field describes a variable-sized object, *PMODE is set to
6651 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6652 this case, but the address of the object can be found.
6653
6654 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6655 look through nodes that serve as markers of a greater alignment than
6656 the one that can be deduced from the expression. These nodes make it
6657 possible for front-ends to prevent temporaries from being created by
6658 the middle-end on alignment considerations. For that purpose, the
6659 normal operating mode at high-level is to always pass FALSE so that
6660 the ultimate containing object is really returned; moreover, the
6661 associated predicate handled_component_p will always return TRUE
6662 on these nodes, thus indicating that they are essentially handled
6663 by get_inner_reference. TRUE should only be passed when the caller
6664 is scanning the expression in order to build another representation
6665 and specifically knows how to handle these nodes; as such, this is
6666 the normal operating mode in the RTL expanders. */
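 /* A hedged example of the decomposition (layout assumed): for a
    reference such as p->a[i].b the loop below peels the COMPONENT_REF
    for b, the ARRAY_REF for a[i] and the COMPONENT_REF for a; because
    the array index is not constant, the byte offsets (including the
    term i * sizeof (a[0])) end up in *POFFSET, the leftover bit-level
    offset in *PBITPOS, and the MEM_REF *p is returned as the ultimate
    containing object.  */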
6667
6668 tree
6669 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6670 HOST_WIDE_INT *pbitpos, tree *poffset,
6671 enum machine_mode *pmode, int *punsignedp,
6672 int *pvolatilep, bool keep_aligning)
6673 {
6674 tree size_tree = 0;
6675 enum machine_mode mode = VOIDmode;
6676 bool blkmode_bitfield = false;
6677 tree offset = size_zero_node;
6678 offset_int bit_offset = 0;
6679
6680 /* First get the mode, signedness, and size. We do this from just the
6681 outermost expression. */
6682 *pbitsize = -1;
6683 if (TREE_CODE (exp) == COMPONENT_REF)
6684 {
6685 tree field = TREE_OPERAND (exp, 1);
6686 size_tree = DECL_SIZE (field);
6687 if (flag_strict_volatile_bitfields > 0
6688 && TREE_THIS_VOLATILE (exp)
6689 && DECL_BIT_FIELD_TYPE (field)
6690 && DECL_MODE (field) != BLKmode)
6691 /* Volatile bitfields should be accessed in the mode of the
6692 field's type, not the mode computed based on the bit
6693 size. */
6694 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6695 else if (!DECL_BIT_FIELD (field))
6696 mode = DECL_MODE (field);
6697 else if (DECL_MODE (field) == BLKmode)
6698 blkmode_bitfield = true;
6699
6700 *punsignedp = DECL_UNSIGNED (field);
6701 }
6702 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6703 {
6704 size_tree = TREE_OPERAND (exp, 1);
6705 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6706 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6707
6708 /* For vector types, with the correct size of access, use the mode of
6709 the inner type. */
6710 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6711 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6712 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6713 mode = TYPE_MODE (TREE_TYPE (exp));
6714 }
6715 else
6716 {
6717 mode = TYPE_MODE (TREE_TYPE (exp));
6718 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6719
6720 if (mode == BLKmode)
6721 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6722 else
6723 *pbitsize = GET_MODE_BITSIZE (mode);
6724 }
6725
6726 if (size_tree != 0)
6727 {
6728 if (! tree_fits_uhwi_p (size_tree))
6729 mode = BLKmode, *pbitsize = -1;
6730 else
6731 *pbitsize = tree_to_uhwi (size_tree);
6732 }
6733
6734 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6735 and find the ultimate containing object. */
6736 while (1)
6737 {
6738 switch (TREE_CODE (exp))
6739 {
6740 case BIT_FIELD_REF:
6741 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6742 break;
6743
6744 case COMPONENT_REF:
6745 {
6746 tree field = TREE_OPERAND (exp, 1);
6747 tree this_offset = component_ref_field_offset (exp);
6748
6749 /* If this field hasn't been filled in yet, don't go past it.
6750 This should only happen when folding expressions made during
6751 type construction. */
6752 if (this_offset == 0)
6753 break;
6754
6755 offset = size_binop (PLUS_EXPR, offset, this_offset);
6756 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6757
6758 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6759 }
6760 break;
6761
6762 case ARRAY_REF:
6763 case ARRAY_RANGE_REF:
6764 {
6765 tree index = TREE_OPERAND (exp, 1);
6766 tree low_bound = array_ref_low_bound (exp);
6767 tree unit_size = array_ref_element_size (exp);
6768
6769 /* We assume all arrays have sizes that are a multiple of a byte.
6770 First subtract the lower bound, if any, in the type of the
6771 index, then convert to sizetype and multiply by the size of
6772 the array element. */
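 /* Worked example (bounds assumed): for an array whose domain starts
    at 1, an ARRAY_REF with index 5 and an 8-byte element size
    contributes (5 - 1) * 8 == 32 bytes to OFFSET.  */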
6773 if (! integer_zerop (low_bound))
6774 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6775 index, low_bound);
6776
6777 offset = size_binop (PLUS_EXPR, offset,
6778 size_binop (MULT_EXPR,
6779 fold_convert (sizetype, index),
6780 unit_size));
6781 }
6782 break;
6783
6784 case REALPART_EXPR:
6785 break;
6786
6787 case IMAGPART_EXPR:
6788 bit_offset += *pbitsize;
6789 break;
6790
6791 case VIEW_CONVERT_EXPR:
6792 if (keep_aligning && STRICT_ALIGNMENT
6793 && (TYPE_ALIGN (TREE_TYPE (exp))
6794 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6795 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6796 < BIGGEST_ALIGNMENT)
6797 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6798 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6799 goto done;
6800 break;
6801
6802 case MEM_REF:
6803 /* Hand back the decl for MEM[&decl, off]. */
6804 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6805 {
6806 tree off = TREE_OPERAND (exp, 1);
6807 if (!integer_zerop (off))
6808 {
6809 offset_int boff, coff = mem_ref_offset (exp);
6810 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6811 bit_offset += boff;
6812 }
6813 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6814 }
6815 goto done;
6816
6817 default:
6818 goto done;
6819 }
6820
6821 /* If any reference in the chain is volatile, the effect is volatile. */
6822 if (TREE_THIS_VOLATILE (exp))
6823 *pvolatilep = 1;
6824
6825 exp = TREE_OPERAND (exp, 0);
6826 }
6827 done:
6828
6829 /* If OFFSET is constant, see if we can return the whole thing as a
6830 constant bit position. Make sure to handle overflow during
6831 this conversion. */
6832 if (TREE_CODE (offset) == INTEGER_CST)
6833 {
6834 offset_int tem = wi::sext (wi::to_offset (offset),
6835 TYPE_PRECISION (sizetype));
6836 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6837 tem += bit_offset;
6838 if (wi::fits_shwi_p (tem))
6839 {
6840 *pbitpos = tem.to_shwi ();
6841 *poffset = offset = NULL_TREE;
6842 }
6843 }
6844
6845 /* Otherwise, split it up. */
6846 if (offset)
6847 {
6848 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6849 if (wi::neg_p (bit_offset))
6850 {
6851 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6852 offset_int tem = bit_offset.and_not (mask);
6853 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6854 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
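 /* Worked example (values assumed): with BIT_OFFSET == -10 and
    BITS_PER_UNIT == 8, TEM is -16 (the next multiple of 8 towards
    -Inf), BIT_OFFSET becomes -10 - -16 == 6, and OFFSET gains
    -16 / 8 == -2 bytes, leaving a non-negative bit position.  */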
6855 bit_offset -= tem;
6856 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6857 offset = size_binop (PLUS_EXPR, offset,
6858 wide_int_to_tree (sizetype, tem));
6859 }
6860
6861 *pbitpos = bit_offset.to_shwi ();
6862 *poffset = offset;
6863 }
6864
6865 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6866 if (mode == VOIDmode
6867 && blkmode_bitfield
6868 && (*pbitpos % BITS_PER_UNIT) == 0
6869 && (*pbitsize % BITS_PER_UNIT) == 0)
6870 *pmode = BLKmode;
6871 else
6872 *pmode = mode;
6873
6874 return exp;
6875 }
6876
6877 /* Return a tree of sizetype representing the size, in bytes, of the element
6878 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6879
6880 tree
6881 array_ref_element_size (tree exp)
6882 {
6883 tree aligned_size = TREE_OPERAND (exp, 3);
6884 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6885 location_t loc = EXPR_LOCATION (exp);
6886
6887 /* If a size was specified in the ARRAY_REF, it's the size measured
6888 in alignment units of the element type. So multiply by that value. */
6889 if (aligned_size)
6890 {
6891 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6892 sizetype from another type of the same width and signedness. */
6893 if (TREE_TYPE (aligned_size) != sizetype)
6894 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6895 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6896 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6897 }
6898
6899 /* Otherwise, take the size from that of the element type. Substitute
6900 any PLACEHOLDER_EXPR that we have. */
6901 else
6902 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6903 }
6904
6905 /* Return a tree representing the lower bound of the array mentioned in
6906 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6907
6908 tree
6909 array_ref_low_bound (tree exp)
6910 {
6911 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6912
6913 /* If a lower bound is specified in EXP, use it. */
6914 if (TREE_OPERAND (exp, 2))
6915 return TREE_OPERAND (exp, 2);
6916
6917 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6918 substituting for a PLACEHOLDER_EXPR as needed. */
6919 if (domain_type && TYPE_MIN_VALUE (domain_type))
6920 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6921
6922 /* Otherwise, return a zero of the appropriate type. */
6923 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6924 }
6925
6926 /* Returns true if REF is an array reference to an array at the end of
6927 a structure. If this is the case, the array may be allocated larger
6928 than its upper bound implies. */
6929
6930 bool
6931 array_at_struct_end_p (tree ref)
6932 {
6933 if (TREE_CODE (ref) != ARRAY_REF
6934 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6935 return false;
6936
6937 while (handled_component_p (ref))
6938 {
6939 /* If the reference chain contains a component reference to a
6940 non-union type and there follows another field the reference
6941 is not at the end of a structure. */
6942 if (TREE_CODE (ref) == COMPONENT_REF
6943 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6944 {
6945 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6946 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6947 nextf = DECL_CHAIN (nextf);
6948 if (nextf)
6949 return false;
6950 }
6951
6952 ref = TREE_OPERAND (ref, 0);
6953 }
6954
6955 /* If the reference is based on a declared entity, the size of the array
6956 is constrained by its given domain. */
6957 if (DECL_P (ref))
6958 return false;
6959
6960 return true;
6961 }
6962
6963 /* Return a tree representing the upper bound of the array mentioned in
6964 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6965
6966 tree
6967 array_ref_up_bound (tree exp)
6968 {
6969 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6970
6971 /* If there is a domain type and it has an upper bound, use it, substituting
6972 for a PLACEHOLDER_EXPR as needed. */
6973 if (domain_type && TYPE_MAX_VALUE (domain_type))
6974 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6975
6976 /* Otherwise fail. */
6977 return NULL_TREE;
6978 }
6979
6980 /* Return a tree representing the offset, in bytes, of the field referenced
6981 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6982
6983 tree
6984 component_ref_field_offset (tree exp)
6985 {
6986 tree aligned_offset = TREE_OPERAND (exp, 2);
6987 tree field = TREE_OPERAND (exp, 1);
6988 location_t loc = EXPR_LOCATION (exp);
6989
6990 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6991 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6992 value. */
6993 if (aligned_offset)
6994 {
6995 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6996 sizetype from another type of the same width and signedness. */
6997 if (TREE_TYPE (aligned_offset) != sizetype)
6998 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6999 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7000 size_int (DECL_OFFSET_ALIGN (field)
7001 / BITS_PER_UNIT));
7002 }
7003
7004 /* Otherwise, take the offset from that of the field. Substitute
7005 any PLACEHOLDER_EXPR that we have. */
7006 else
7007 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7008 }
7009
7010 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7011
7012 static unsigned HOST_WIDE_INT
7013 target_align (const_tree target)
7014 {
7015 /* We might have a chain of nested references with intermediate misaligning
7016 bitfield components, so we need to recurse to find out. */
7017
7018 unsigned HOST_WIDE_INT this_align, outer_align;
7019
7020 switch (TREE_CODE (target))
7021 {
7022 case BIT_FIELD_REF:
7023 return 1;
7024
7025 case COMPONENT_REF:
7026 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7027 outer_align = target_align (TREE_OPERAND (target, 0));
7028 return MIN (this_align, outer_align);
7029
7030 case ARRAY_REF:
7031 case ARRAY_RANGE_REF:
7032 this_align = TYPE_ALIGN (TREE_TYPE (target));
7033 outer_align = target_align (TREE_OPERAND (target, 0));
7034 return MIN (this_align, outer_align);
7035
7036 CASE_CONVERT:
7037 case NON_LVALUE_EXPR:
7038 case VIEW_CONVERT_EXPR:
7039 this_align = TYPE_ALIGN (TREE_TYPE (target));
7040 outer_align = target_align (TREE_OPERAND (target, 0));
7041 return MAX (this_align, outer_align);
7042
7043 default:
7044 return TYPE_ALIGN (TREE_TYPE (target));
7045 }
7046 }
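
/* For instance (illustrative only): for an assignment to s.f, the
   result is the smaller of DECL_ALIGN of the field F and the alignment
   of the enclosing object, so a packed field with DECL_ALIGN of 8 bits
   caps the result at 8 even if 's' itself is 32-bit aligned.  A
   BIT_FIELD_REF anywhere in the chain forces the result down to 1.  */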
7047
7048 \f
7049 /* Given an rtx VALUE that may contain additions and multiplications, return
7050 an equivalent value that just refers to a register, memory, or constant.
7051 This is done by generating instructions to perform the arithmetic and
7052 returning a pseudo-register containing the value.
7053
7054 The returned value may be a REG, SUBREG, MEM or constant. */
7055
7056 rtx
7057 force_operand (rtx value, rtx target)
7058 {
7059 rtx op1, op2;
7060 /* Use subtarget as the target for operand 0 of a binary operation. */
7061 rtx subtarget = get_subtarget (target);
7062 enum rtx_code code = GET_CODE (value);
7063
7064 /* Check for subreg applied to an expression produced by loop optimizer. */
7065 if (code == SUBREG
7066 && !REG_P (SUBREG_REG (value))
7067 && !MEM_P (SUBREG_REG (value)))
7068 {
7069 value
7070 = simplify_gen_subreg (GET_MODE (value),
7071 force_reg (GET_MODE (SUBREG_REG (value)),
7072 force_operand (SUBREG_REG (value),
7073 NULL_RTX)),
7074 GET_MODE (SUBREG_REG (value)),
7075 SUBREG_BYTE (value));
7076 code = GET_CODE (value);
7077 }
7078
7079 /* Check for a PIC address load. */
7080 if ((code == PLUS || code == MINUS)
7081 && XEXP (value, 0) == pic_offset_table_rtx
7082 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7083 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7084 || GET_CODE (XEXP (value, 1)) == CONST))
7085 {
7086 if (!subtarget)
7087 subtarget = gen_reg_rtx (GET_MODE (value));
7088 emit_move_insn (subtarget, value);
7089 return subtarget;
7090 }
7091
7092 if (ARITHMETIC_P (value))
7093 {
7094 op2 = XEXP (value, 1);
7095 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7096 subtarget = 0;
7097 if (code == MINUS && CONST_INT_P (op2))
7098 {
7099 code = PLUS;
7100 op2 = negate_rtx (GET_MODE (value), op2);
7101 }
7102
7103 /* Check for an addition with OP2 a constant integer and our first
7104 operand a PLUS of a virtual register and something else. In that
7105 case, we want to emit the sum of the virtual register and the
7106 constant first and then add the other value. This allows virtual
7107 register instantiation to simply modify the constant rather than
7108 creating another one around this addition. */
7109 if (code == PLUS && CONST_INT_P (op2)
7110 && GET_CODE (XEXP (value, 0)) == PLUS
7111 && REG_P (XEXP (XEXP (value, 0), 0))
7112 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7113 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7114 {
7115 rtx temp = expand_simple_binop (GET_MODE (value), code,
7116 XEXP (XEXP (value, 0), 0), op2,
7117 subtarget, 0, OPTAB_LIB_WIDEN);
7118 return expand_simple_binop (GET_MODE (value), code, temp,
7119 force_operand (XEXP (XEXP (value,
7120 0), 1), 0),
7121 target, 0, OPTAB_LIB_WIDEN);
7122 }
7123
7124 op1 = force_operand (XEXP (value, 0), subtarget);
7125 op2 = force_operand (op2, NULL_RTX);
7126 switch (code)
7127 {
7128 case MULT:
7129 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7130 case DIV:
7131 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7132 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7133 target, 1, OPTAB_LIB_WIDEN);
7134 else
7135 return expand_divmod (0,
7136 FLOAT_MODE_P (GET_MODE (value))
7137 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7138 GET_MODE (value), op1, op2, target, 0);
7139 case MOD:
7140 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7141 target, 0);
7142 case UDIV:
7143 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7144 target, 1);
7145 case UMOD:
7146 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7147 target, 1);
7148 case ASHIFTRT:
7149 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7150 target, 0, OPTAB_LIB_WIDEN);
7151 default:
7152 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7153 target, 1, OPTAB_LIB_WIDEN);
7154 }
7155 }
7156 if (UNARY_P (value))
7157 {
7158 if (!target)
7159 target = gen_reg_rtx (GET_MODE (value));
7160 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7161 switch (code)
7162 {
7163 case ZERO_EXTEND:
7164 case SIGN_EXTEND:
7165 case TRUNCATE:
7166 case FLOAT_EXTEND:
7167 case FLOAT_TRUNCATE:
7168 convert_move (target, op1, code == ZERO_EXTEND);
7169 return target;
7170
7171 case FIX:
7172 case UNSIGNED_FIX:
7173 expand_fix (target, op1, code == UNSIGNED_FIX);
7174 return target;
7175
7176 case FLOAT:
7177 case UNSIGNED_FLOAT:
7178 expand_float (target, op1, code == UNSIGNED_FLOAT);
7179 return target;
7180
7181 default:
7182 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7183 }
7184 }
7185
7186 #ifdef INSN_SCHEDULING
7187 /* On machines that have insn scheduling, we want all memory references to be
7188 explicit, so we need to deal with such paradoxical SUBREGs. */
7189 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7190 value
7191 = simplify_gen_subreg (GET_MODE (value),
7192 force_reg (GET_MODE (SUBREG_REG (value)),
7193 force_operand (SUBREG_REG (value),
7194 NULL_RTX)),
7195 GET_MODE (SUBREG_REG (value)),
7196 SUBREG_BYTE (value));
7197 #endif
7198
7199 return value;
7200 }
7201 \f
7202 /* Subroutine of expand_expr: return nonzero iff there is no way that
7203 EXP can reference X, which is being modified. TOP_P is nonzero if this
7204 call is going to be used to determine whether we need a temporary
7205 for EXP, as opposed to a recursive call to this function.
7206
7207 It is always safe for this routine to return zero since it merely
7208 searches for optimization opportunities. */
7209
7210 int
7211 safe_from_p (const_rtx x, tree exp, int top_p)
7212 {
7213 rtx exp_rtl = 0;
7214 int i, nops;
7215
7216 if (x == 0
7217 /* If EXP has varying size, we MUST use a target since we currently
7218 have no way of allocating temporaries of variable size
7219 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7220 So we assume here that something at a higher level has prevented a
7221 clash. This is somewhat bogus, but the best we can do. Only
7222 do this when X is BLKmode and when we are at the top level. */
7223 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7224 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7225 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7226 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7227 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7228 != INTEGER_CST)
7229 && GET_MODE (x) == BLKmode)
7230 /* If X is in the outgoing argument area, it is always safe. */
7231 || (MEM_P (x)
7232 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7233 || (GET_CODE (XEXP (x, 0)) == PLUS
7234 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7235 return 1;
7236
7237 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7238 find the underlying pseudo. */
7239 if (GET_CODE (x) == SUBREG)
7240 {
7241 x = SUBREG_REG (x);
7242 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7243 return 0;
7244 }
7245
7246 /* Now look at our tree code and possibly recurse. */
7247 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7248 {
7249 case tcc_declaration:
7250 exp_rtl = DECL_RTL_IF_SET (exp);
7251 break;
7252
7253 case tcc_constant:
7254 return 1;
7255
7256 case tcc_exceptional:
7257 if (TREE_CODE (exp) == TREE_LIST)
7258 {
7259 while (1)
7260 {
7261 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7262 return 0;
7263 exp = TREE_CHAIN (exp);
7264 if (!exp)
7265 return 1;
7266 if (TREE_CODE (exp) != TREE_LIST)
7267 return safe_from_p (x, exp, 0);
7268 }
7269 }
7270 else if (TREE_CODE (exp) == CONSTRUCTOR)
7271 {
7272 constructor_elt *ce;
7273 unsigned HOST_WIDE_INT idx;
7274
7275 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7276 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7277 || !safe_from_p (x, ce->value, 0))
7278 return 0;
7279 return 1;
7280 }
7281 else if (TREE_CODE (exp) == ERROR_MARK)
7282 return 1; /* An already-visited SAVE_EXPR? */
7283 else
7284 return 0;
7285
7286 case tcc_statement:
7287 /* The only case we look at here is the DECL_INITIAL inside a
7288 DECL_EXPR. */
7289 return (TREE_CODE (exp) != DECL_EXPR
7290 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7291 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7292 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7293
7294 case tcc_binary:
7295 case tcc_comparison:
7296 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7297 return 0;
7298 /* Fall through. */
7299
7300 case tcc_unary:
7301 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7302
7303 case tcc_expression:
7304 case tcc_reference:
7305 case tcc_vl_exp:
7306 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7307 the expression. If it is set, we conflict iff we are that rtx or
7308 both are in memory. Otherwise, we check all operands of the
7309 expression recursively. */
7310
7311 switch (TREE_CODE (exp))
7312 {
7313 case ADDR_EXPR:
7314 /* If the operand is static or we are static, we can't conflict.
7315 Likewise if we don't conflict with the operand at all. */
7316 if (staticp (TREE_OPERAND (exp, 0))
7317 || TREE_STATIC (exp)
7318 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7319 return 1;
7320
7321 /* Otherwise, the only way this can conflict is if we are taking
7322 the address of a DECL whose address is part of X, which is
7323 very rare. */
7324 exp = TREE_OPERAND (exp, 0);
7325 if (DECL_P (exp))
7326 {
7327 if (!DECL_RTL_SET_P (exp)
7328 || !MEM_P (DECL_RTL (exp)))
7329 return 0;
7330 else
7331 exp_rtl = XEXP (DECL_RTL (exp), 0);
7332 }
7333 break;
7334
7335 case MEM_REF:
7336 if (MEM_P (x)
7337 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7338 get_alias_set (exp)))
7339 return 0;
7340 break;
7341
7342 case CALL_EXPR:
7343 /* Assume that the call will clobber all hard registers and
7344 all of memory. */
7345 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7346 || MEM_P (x))
7347 return 0;
7348 break;
7349
7350 case WITH_CLEANUP_EXPR:
7351 case CLEANUP_POINT_EXPR:
7352 /* Lowered by gimplify.c. */
7353 gcc_unreachable ();
7354
7355 case SAVE_EXPR:
7356 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7357
7358 default:
7359 break;
7360 }
7361
7362 /* If we have an rtx, we do not need to scan our operands. */
7363 if (exp_rtl)
7364 break;
7365
7366 nops = TREE_OPERAND_LENGTH (exp);
7367 for (i = 0; i < nops; i++)
7368 if (TREE_OPERAND (exp, i) != 0
7369 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7370 return 0;
7371
7372 break;
7373
7374 case tcc_type:
7375 /* Should never get a type here. */
7376 gcc_unreachable ();
7377 }
7378
7379 /* If we have an rtl, find any enclosed object. Then see if we conflict
7380 with it. */
7381 if (exp_rtl)
7382 {
7383 if (GET_CODE (exp_rtl) == SUBREG)
7384 {
7385 exp_rtl = SUBREG_REG (exp_rtl);
7386 if (REG_P (exp_rtl)
7387 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7388 return 0;
7389 }
7390
7391 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7392 are memory and they conflict. */
7393 return ! (rtx_equal_p (x, exp_rtl)
7394 || (MEM_P (x) && MEM_P (exp_rtl)
7395 && true_dependence (exp_rtl, VOIDmode, x)));
7396 }
7397
7398 /* If we reach here, it is safe. */
7399 return 1;
7400 }
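
/* As an illustration, not from the original sources: if X is a MEM and
   EXP contains a CALL_EXPR, the call is assumed to clobber all of
   memory, so 0 is returned; callers such as expand_operands then stop
   trying to reuse X as a shared target.  Returning 0 is always the
   conservative, safe answer.  */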
7401
7402 \f
7403 /* Return the highest power of two that EXP is known to be a multiple of.
7404 This is used in updating alignment of MEMs in array references. */
7405
7406 unsigned HOST_WIDE_INT
7407 highest_pow2_factor (const_tree exp)
7408 {
7409 unsigned HOST_WIDE_INT ret;
7410 int trailing_zeros = tree_ctz (exp);
7411 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7412 return BIGGEST_ALIGNMENT;
7413 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7414 if (ret > BIGGEST_ALIGNMENT)
7415 return BIGGEST_ALIGNMENT;
7416 return ret;
7417 }
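
/* Worked example (illustrative only): for EXP equal to I * 12, tree_ctz
   reports 2 known trailing zero bits (12 == 4 * 3), so the result is
   1 << 2 == 4; the result is always capped at BIGGEST_ALIGNMENT.  */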
7418
7419 /* Similar, except that the alignment requirements of TARGET are
7420 taken into account. Assume it is at least as aligned as its
7421 type, unless it is a COMPONENT_REF in which case the layout of
7422 the structure gives the alignment. */
7423
7424 static unsigned HOST_WIDE_INT
7425 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7426 {
7427 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7428 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7429
7430 return MAX (factor, talign);
7431 }
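
/* E.g. (illustrative only): if the factor derived from EXP is 4 but
   TARGET is known to be 8-byte aligned, the larger value 8 is used.  */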
7432 \f
7433 #ifdef HAVE_conditional_move
7434 /* Convert the tree comparison code TCODE to the rtl one where the
7435 signedness is UNSIGNEDP. */
7436
7437 static enum rtx_code
7438 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7439 {
7440 enum rtx_code code;
7441 switch (tcode)
7442 {
7443 case EQ_EXPR:
7444 code = EQ;
7445 break;
7446 case NE_EXPR:
7447 code = NE;
7448 break;
7449 case LT_EXPR:
7450 code = unsignedp ? LTU : LT;
7451 break;
7452 case LE_EXPR:
7453 code = unsignedp ? LEU : LE;
7454 break;
7455 case GT_EXPR:
7456 code = unsignedp ? GTU : GT;
7457 break;
7458 case GE_EXPR:
7459 code = unsignedp ? GEU : GE;
7460 break;
7461 case UNORDERED_EXPR:
7462 code = UNORDERED;
7463 break;
7464 case ORDERED_EXPR:
7465 code = ORDERED;
7466 break;
7467 case UNLT_EXPR:
7468 code = UNLT;
7469 break;
7470 case UNLE_EXPR:
7471 code = UNLE;
7472 break;
7473 case UNGT_EXPR:
7474 code = UNGT;
7475 break;
7476 case UNGE_EXPR:
7477 code = UNGE;
7478 break;
7479 case UNEQ_EXPR:
7480 code = UNEQ;
7481 break;
7482 case LTGT_EXPR:
7483 code = LTGT;
7484 break;
7485
7486 default:
7487 gcc_unreachable ();
7488 }
7489 return code;
7490 }
7491 #endif
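
/* E.g. (illustrative only): LT_EXPR maps to LTU when UNSIGNEDP is
   nonzero and to LT otherwise, while the UN* codes are used for
   possibly-unordered floating-point comparisons and are independent of
   UNSIGNEDP.  */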
7492
7493 /* Subroutine of expand_expr. Expand the two operands of a binary
7494 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7495 The value may be stored in TARGET if TARGET is nonzero. The
7496 MODIFIER argument is as documented by expand_expr. */
7497
7498 static void
7499 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7500 enum expand_modifier modifier)
7501 {
7502 if (! safe_from_p (target, exp1, 1))
7503 target = 0;
7504 if (operand_equal_p (exp0, exp1, 0))
7505 {
7506 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7507 *op1 = copy_rtx (*op0);
7508 }
7509 else
7510 {
7511 /* If we need to preserve evaluation order, copy exp0 into its own
7512 temporary variable so that it can't be clobbered by exp1. */
7513 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7514 exp0 = save_expr (exp0);
7515 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7516 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7517 }
7518 }
7519
7520 \f
7521 /* Return a MEM that contains constant EXP. DEFER is as for
7522 output_constant_def and MODIFIER is as for expand_expr. */
7523
7524 static rtx
7525 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7526 {
7527 rtx mem;
7528
7529 mem = output_constant_def (exp, defer);
7530 if (modifier != EXPAND_INITIALIZER)
7531 mem = use_anchored_address (mem);
7532 return mem;
7533 }
7534
7535 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7536 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7537
7538 static rtx
7539 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7540 enum expand_modifier modifier, addr_space_t as)
7541 {
7542 rtx result, subtarget;
7543 tree inner, offset;
7544 HOST_WIDE_INT bitsize, bitpos;
7545 int volatilep, unsignedp;
7546 enum machine_mode mode1;
7547
7548 /* If we are taking the address of a constant and are at the top level,
7549 we have to use output_constant_def since we can't call force_const_mem
7550 at top level. */
7551 /* ??? This should be considered a front-end bug. We should not be
7552 generating ADDR_EXPR of something that isn't an LVALUE. The only
7553 exception here is STRING_CST. */
7554 if (CONSTANT_CLASS_P (exp))
7555 {
7556 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7557 if (modifier < EXPAND_SUM)
7558 result = force_operand (result, target);
7559 return result;
7560 }
7561
7562 /* Everything must be something allowed by is_gimple_addressable. */
7563 switch (TREE_CODE (exp))
7564 {
7565 case INDIRECT_REF:
7566 /* This case will happen via recursion for &a->b. */
7567 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7568
7569 case MEM_REF:
7570 {
7571 tree tem = TREE_OPERAND (exp, 0);
7572 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7573 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7574 return expand_expr (tem, target, tmode, modifier);
7575 }
7576
7577 case CONST_DECL:
7578 /* Expand the initializer like constants above. */
7579 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7580 0, modifier), 0);
7581 if (modifier < EXPAND_SUM)
7582 result = force_operand (result, target);
7583 return result;
7584
7585 case REALPART_EXPR:
7586 /* The real part of the complex number is always first, therefore
7587 the address is the same as the address of the parent object. */
7588 offset = 0;
7589 bitpos = 0;
7590 inner = TREE_OPERAND (exp, 0);
7591 break;
7592
7593 case IMAGPART_EXPR:
7594 /* The imaginary part of the complex number is always second.
7595 The expression is therefore always offset by the size of the
7596 scalar type. */
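/* E.g. for a _Complex double object on a target where double occupies
   64 bits, BITPOS below will be 64 (illustrative only).  */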
7597 offset = 0;
7598 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7599 inner = TREE_OPERAND (exp, 0);
7600 break;
7601
7602 case COMPOUND_LITERAL_EXPR:
7603 /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
7604 rtl_for_decl_init is called on DECL_INITIAL with
7605 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified. */
7606 if (modifier == EXPAND_INITIALIZER
7607 && COMPOUND_LITERAL_EXPR_DECL (exp))
7608 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7609 target, tmode, modifier, as);
7610 /* FALLTHRU */
7611 default:
7612 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7613 expand_expr, as that can have various side effects; LABEL_DECLs, for
7614 example, may not have their DECL_RTL set yet. Expand the rtl of
7615 CONSTRUCTORs too, which should yield a memory reference for the
7616 constructor's contents. Assume language specific tree nodes can
7617 be expanded in some interesting way. */
7618 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7619 if (DECL_P (exp)
7620 || TREE_CODE (exp) == CONSTRUCTOR
7621 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7622 {
7623 result = expand_expr (exp, target, tmode,
7624 modifier == EXPAND_INITIALIZER
7625 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7626
7627 /* If the DECL isn't in memory, then the DECL wasn't properly
7628 marked TREE_ADDRESSABLE, which will be either a front-end
7629 or a tree optimizer bug. */
7630
7631 if (TREE_ADDRESSABLE (exp)
7632 && ! MEM_P (result)
7633 && ! targetm.calls.allocate_stack_slots_for_args ())
7634 {
7635 error ("local frame unavailable (naked function?)");
7636 return result;
7637 }
7638 else
7639 gcc_assert (MEM_P (result));
7640 result = XEXP (result, 0);
7641
7642 /* ??? Is this needed anymore? */
7643 if (DECL_P (exp))
7644 TREE_USED (exp) = 1;
7645
7646 if (modifier != EXPAND_INITIALIZER
7647 && modifier != EXPAND_CONST_ADDRESS
7648 && modifier != EXPAND_SUM)
7649 result = force_operand (result, target);
7650 return result;
7651 }
7652
7653 /* Pass FALSE as the last argument to get_inner_reference although
7654 we are expanding to RTL. The rationale is that we know how to
7655 handle "aligning nodes" here: we can just bypass them because
7656 they won't change the final object whose address will be returned
7657 (they actually exist only for that purpose). */
7658 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7659 &mode1, &unsignedp, &volatilep, false);
7660 break;
7661 }
7662
7663 /* We must have made progress. */
7664 gcc_assert (inner != exp);
7665
7666 subtarget = offset || bitpos ? NULL_RTX : target;
7667 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7668 inner alignment, force the inner to be sufficiently aligned. */
7669 if (CONSTANT_CLASS_P (inner)
7670 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7671 {
7672 inner = copy_node (inner);
7673 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7674 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7675 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7676 }
7677 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7678
7679 if (offset)
7680 {
7681 rtx tmp;
7682
7683 if (modifier != EXPAND_NORMAL)
7684 result = force_operand (result, NULL);
7685 tmp = expand_expr (offset, NULL_RTX, tmode,
7686 modifier == EXPAND_INITIALIZER
7687 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7688
7689 /* expand_expr is allowed to return an object in a mode other
7690 than TMODE. If it did, we need to convert. */
7691 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7692 tmp = convert_modes (tmode, GET_MODE (tmp),
7693 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7694 result = convert_memory_address_addr_space (tmode, result, as);
7695 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7696
7697 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7698 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7699 else
7700 {
7701 subtarget = bitpos ? NULL_RTX : target;
7702 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7703 1, OPTAB_LIB_WIDEN);
7704 }
7705 }
7706
7707 if (bitpos)
7708 {
7709 /* Someone beforehand should have rejected taking the address
7710 of such an object. */
7711 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7712
7713 result = convert_memory_address_addr_space (tmode, result, as);
7714 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7715 if (modifier < EXPAND_SUM)
7716 result = force_operand (result, target);
7717 }
7718
7719 return result;
7720 }
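
/* Sketch of the common case handled above (illustrative only): for
   &s.f where F lives at byte offset 4, get_inner_reference returns 's'
   with BITPOS == 32, the address of 's' is expanded recursively, and
   plus_constant then adds 32 / BITS_PER_UNIT == 4 (on a byte-addressed
   target) to form the final address.  */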
7721
7722 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7723 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7724
7725 static rtx
7726 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7727 enum expand_modifier modifier)
7728 {
7729 addr_space_t as = ADDR_SPACE_GENERIC;
7730 enum machine_mode address_mode = Pmode;
7731 enum machine_mode pointer_mode = ptr_mode;
7732 enum machine_mode rmode;
7733 rtx result;
7734
7735 /* Target mode of VOIDmode says "whatever's natural". */
7736 if (tmode == VOIDmode)
7737 tmode = TYPE_MODE (TREE_TYPE (exp));
7738
7739 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7740 {
7741 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7742 address_mode = targetm.addr_space.address_mode (as);
7743 pointer_mode = targetm.addr_space.pointer_mode (as);
7744 }
7745
7746 /* We can get called with some Weird Things if the user does silliness
7747 like "(short) &a". In that case, convert_memory_address won't do
7748 the right thing, so ignore the given target mode. */
7749 if (tmode != address_mode && tmode != pointer_mode)
7750 tmode = address_mode;
7751
7752 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7753 tmode, modifier, as);
7754
7755 /* Despite expand_expr's claims about ignoring TMODE when not
7756 strictly convenient, stuff breaks if we don't honor it. Note
7757 that combined with the above, we only do this for pointer modes. */
7758 rmode = GET_MODE (result);
7759 if (rmode == VOIDmode)
7760 rmode = tmode;
7761 if (rmode != tmode)
7762 result = convert_memory_address_addr_space (tmode, result, as);
7763
7764 return result;
7765 }
7766
7767 /* Generate code for computing CONSTRUCTOR EXP.
7768 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7769 is TRUE, then instead of creating a temporary variable in memory,
7770 NULL is returned and the caller needs to handle it differently. */
7771
7772 static rtx
7773 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7774 bool avoid_temp_mem)
7775 {
7776 tree type = TREE_TYPE (exp);
7777 enum machine_mode mode = TYPE_MODE (type);
7778
7779 /* Try to avoid creating a temporary at all. This is possible
7780 if all of the initializer is zero.
7781 FIXME: try to handle all [0..255] initializers we can handle
7782 with memset. */
7783 if (TREE_STATIC (exp)
7784 && !TREE_ADDRESSABLE (exp)
7785 && target != 0 && mode == BLKmode
7786 && all_zeros_p (exp))
7787 {
7788 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7789 return target;
7790 }
7791
7792 /* All elts simple constants => refer to a constant in memory. But
7793 if this is a non-BLKmode mode, let it store a field at a time
7794 since that should make a CONST_INT, CONST_WIDE_INT or
7795 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7796 use, it is best to store directly into the target unless the type
7797 is large enough that memcpy will be used. If we are making an
7798 initializer and all operands are constant, put it in memory as
7799 well.
7800
7801 FIXME: Avoid trying to fill vector constructors piece-meal.
7802 Output them with output_constant_def below unless we're sure
7803 they're zeros. This should go away when vector initializers
7804 are treated like VECTOR_CST instead of arrays. */
7805 if ((TREE_STATIC (exp)
7806 && ((mode == BLKmode
7807 && ! (target != 0 && safe_from_p (target, exp, 1)))
7808 || TREE_ADDRESSABLE (exp)
7809 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7810 && (! MOVE_BY_PIECES_P
7811 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7812 TYPE_ALIGN (type)))
7813 && ! mostly_zeros_p (exp))))
7814 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7815 && TREE_CONSTANT (exp)))
7816 {
7817 rtx constructor;
7818
7819 if (avoid_temp_mem)
7820 return NULL_RTX;
7821
7822 constructor = expand_expr_constant (exp, 1, modifier);
7823
7824 if (modifier != EXPAND_CONST_ADDRESS
7825 && modifier != EXPAND_INITIALIZER
7826 && modifier != EXPAND_SUM)
7827 constructor = validize_mem (constructor);
7828
7829 return constructor;
7830 }
7831
7832 /* Handle calls that pass values in multiple non-contiguous
7833 locations. The Irix 6 ABI has examples of this. */
7834 if (target == 0 || ! safe_from_p (target, exp, 1)
7835 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7836 {
7837 if (avoid_temp_mem)
7838 return NULL_RTX;
7839
7840 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7841 }
7842
7843 store_constructor (exp, target, 0, int_expr_size (exp));
7844 return target;
7845 }
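
/* For example (illustrative only): an aggregate store whose
   CONSTRUCTOR is a constant, all-zero initializer, such as one arising
   from 'struct S s = { 0 };', and whose target is a BLKmode MEM can
   take the clear_storage path above, memset-style, instead of building
   the value in a temporary and copying it.  */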
7846
7847
7848 /* expand_expr: generate code for computing expression EXP.
7849 An rtx for the computed value is returned. The value is never null.
7850 In the case of a void EXP, const0_rtx is returned.
7851
7852 The value may be stored in TARGET if TARGET is nonzero.
7853 TARGET is just a suggestion; callers must assume that
7854 the rtx returned may not be the same as TARGET.
7855
7856 If TARGET is CONST0_RTX, it means that the value will be ignored.
7857
7858 If TMODE is not VOIDmode, it suggests generating the
7859 result in mode TMODE. But this is done only when convenient.
7860 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7861 TMODE is just a suggestion; callers must assume that
7862 the rtx returned may not have mode TMODE.
7863
7864 Note that TARGET may have neither TMODE nor MODE. In that case, it
7865 probably will not be used.
7866
7867 If MODIFIER is EXPAND_SUM then when EXP is an addition
7868 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7869 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7870 products as above, or REG or MEM, or constant.
7871 Ordinarily in such cases we would output mul or add instructions
7872 and then return a pseudo reg containing the sum.
7873
7874 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7875 it also marks a label as absolutely required (it can't be dead).
7876 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7877 This is used for outputting expressions used in initializers.
7878
7879 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7880 with a constant address even if that address is not normally legitimate.
7881 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7882
7883 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7884 a call parameter. Such targets require special care as we haven't yet
7885 marked TARGET so that it's safe from being trashed by libcalls. We
7886 don't want to use TARGET for anything but the final result;
7887 intermediate values must go elsewhere. Additionally, calls to
7888 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7889
7890 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7891 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7892 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7893 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7894 recursively.
7895
7896 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7897 In this case, we don't adjust a returned MEM rtx that wouldn't be
7898 sufficiently aligned for its mode; instead, it's up to the caller
7899 to deal with it afterwards. This is used to make sure that unaligned
7900 base objects for which out-of-bounds accesses are supported, for
7901 example record types with trailing arrays, aren't realigned behind
7902 the back of the caller.
7903 The normal operating mode is to pass FALSE for this parameter. */
7904
7905 rtx
7906 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7907 enum expand_modifier modifier, rtx *alt_rtl,
7908 bool inner_reference_p)
7909 {
7910 rtx ret;
7911
7912 /* Handle ERROR_MARK before anybody tries to access its type. */
7913 if (TREE_CODE (exp) == ERROR_MARK
7914 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7915 {
7916 ret = CONST0_RTX (tmode);
7917 return ret ? ret : const0_rtx;
7918 }
7919
7920 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7921 inner_reference_p);
7922 return ret;
7923 }
7924
7925 /* Try to expand the conditional expression which is represented by
7926 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7927 return the RTL register that represents the result. Otherwise return
7928 NULL_RTX. */
7929
7930 static rtx
7931 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7932 tree treeop1 ATTRIBUTE_UNUSED,
7933 tree treeop2 ATTRIBUTE_UNUSED)
7934 {
7935 #ifdef HAVE_conditional_move
7936 rtx insn;
7937 rtx op00, op01, op1, op2;
7938 enum rtx_code comparison_code;
7939 enum machine_mode comparison_mode;
7940 gimple srcstmt;
7941 rtx temp;
7942 tree type = TREE_TYPE (treeop1);
7943 int unsignedp = TYPE_UNSIGNED (type);
7944 enum machine_mode mode = TYPE_MODE (type);
7945 enum machine_mode orig_mode = mode;
7946
7947 /* If we cannot do a conditional move on the mode, try doing it
7948 with the promoted mode. */
7949 if (!can_conditionally_move_p (mode))
7950 {
7951 mode = promote_mode (type, mode, &unsignedp);
7952 if (!can_conditionally_move_p (mode))
7953 return NULL_RTX;
7954 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7955 }
7956 else
7957 temp = assign_temp (type, 0, 1);
7958
7959 start_sequence ();
7960 expand_operands (treeop1, treeop2,
7961 temp, &op1, &op2, EXPAND_NORMAL);
7962
7963 if (TREE_CODE (treeop0) == SSA_NAME
7964 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7965 {
7966 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7967 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7968 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7969 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7970 comparison_mode = TYPE_MODE (type);
7971 unsignedp = TYPE_UNSIGNED (type);
7972 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7973 }
7974 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7975 {
7976 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7977 enum tree_code cmpcode = TREE_CODE (treeop0);
7978 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7979 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7980 unsignedp = TYPE_UNSIGNED (type);
7981 comparison_mode = TYPE_MODE (type);
7982 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7983 }
7984 else
7985 {
7986 op00 = expand_normal (treeop0);
7987 op01 = const0_rtx;
7988 comparison_code = NE;
7989 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7990 }
7991
7992 if (GET_MODE (op1) != mode)
7993 op1 = gen_lowpart (mode, op1);
7994
7995 if (GET_MODE (op2) != mode)
7996 op2 = gen_lowpart (mode, op2);
7997
7998 /* Try to emit the conditional move. */
7999 insn = emit_conditional_move (temp, comparison_code,
8000 op00, op01, comparison_mode,
8001 op1, op2, mode,
8002 unsignedp);
8003
8004 /* If we could do the conditional move, emit the sequence,
8005 and return. */
8006 if (insn)
8007 {
8008 rtx seq = get_insns ();
8009 end_sequence ();
8010 emit_insn (seq);
8011 return convert_modes (orig_mode, mode, temp, 0);
8012 }
8013
8014 /* Otherwise discard the sequence and fall back to code with
8015 branches. */
8016 end_sequence ();
8017 #endif
8018 return NULL_RTX;
8019 }
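
/* Usage sketch (illustrative only): for a COND_EXPR such as
   x = a < b ? c : d, when can_conditionally_move_p holds for the mode
   of X the comparison operands are expanded, C and D are loaded, and a
   single conditional-move insn selects between them, avoiding a
   branch.  */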
8020
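/* Expand the unary, binary or ternary operation described by OPS and
   return an rtx for the result, storing it in TARGET if convenient.
   TMODE and MODIFIER are as for expand_expr.  */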
8021 rtx
8022 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
8023 enum expand_modifier modifier)
8024 {
8025 rtx op0, op1, op2, temp;
8026 tree type;
8027 int unsignedp;
8028 enum machine_mode mode;
8029 enum tree_code code = ops->code;
8030 optab this_optab;
8031 rtx subtarget, original_target;
8032 int ignore;
8033 bool reduce_bit_field;
8034 location_t loc = ops->location;
8035 tree treeop0, treeop1, treeop2;
8036 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8037 ? reduce_to_bit_field_precision ((expr), \
8038 target, \
8039 type) \
8040 : (expr))
8041
8042 type = ops->type;
8043 mode = TYPE_MODE (type);
8044 unsignedp = TYPE_UNSIGNED (type);
8045
8046 treeop0 = ops->op0;
8047 treeop1 = ops->op1;
8048 treeop2 = ops->op2;
8049
8050 /* We should be called only on simple (binary or unary) expressions,
8051 exactly those that are valid in gimple expressions that aren't
8052 GIMPLE_SINGLE_RHS (or invalid). */
8053 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8054 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8055 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8056
8057 ignore = (target == const0_rtx
8058 || ((CONVERT_EXPR_CODE_P (code)
8059 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8060 && TREE_CODE (type) == VOID_TYPE));
8061
8062 /* We should be called only if we need the result. */
8063 gcc_assert (!ignore);
8064
8065 /* An operation in what may be a bit-field type needs the
8066 result to be reduced to the precision of the bit-field type,
8067 which is narrower than that of the type's mode. */
8068 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8069 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8070
8071 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8072 target = 0;
8073
8074 /* Use subtarget as the target for operand 0 of a binary operation. */
8075 subtarget = get_subtarget (target);
8076 original_target = target;
8077
8078 switch (code)
8079 {
8080 case NON_LVALUE_EXPR:
8081 case PAREN_EXPR:
8082 CASE_CONVERT:
8083 if (treeop0 == error_mark_node)
8084 return const0_rtx;
8085
8086 if (TREE_CODE (type) == UNION_TYPE)
8087 {
8088 tree valtype = TREE_TYPE (treeop0);
8089
8090 /* If both input and output are BLKmode, this conversion isn't doing
8091 anything except possibly changing the memory attributes. */
8092 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8093 {
8094 rtx result = expand_expr (treeop0, target, tmode,
8095 modifier);
8096
8097 result = copy_rtx (result);
8098 set_mem_attributes (result, type, 0);
8099 return result;
8100 }
8101
8102 if (target == 0)
8103 {
8104 if (TYPE_MODE (type) != BLKmode)
8105 target = gen_reg_rtx (TYPE_MODE (type));
8106 else
8107 target = assign_temp (type, 1, 1);
8108 }
8109
8110 if (MEM_P (target))
8111 /* Store data into beginning of memory target. */
8112 store_expr (treeop0,
8113 adjust_address (target, TYPE_MODE (valtype), 0),
8114 modifier == EXPAND_STACK_PARM,
8115 false);
8116
8117 else
8118 {
8119 gcc_assert (REG_P (target));
8120
8121 /* Store this field into a union of the proper type. */
8122 store_field (target,
8123 MIN ((int_size_in_bytes (TREE_TYPE
8124 (treeop0))
8125 * BITS_PER_UNIT),
8126 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8127 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8128 }
8129
8130 /* Return the entire union. */
8131 return target;
8132 }
8133
8134 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8135 {
8136 op0 = expand_expr (treeop0, target, VOIDmode,
8137 modifier);
8138
8139 /* If the signedness of the conversion differs and OP0 is
8140 a promoted SUBREG, clear that indication since we now
8141 have to do the proper extension. */
8142 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8143 && GET_CODE (op0) == SUBREG)
8144 SUBREG_PROMOTED_VAR_P (op0) = 0;
8145
8146 return REDUCE_BIT_FIELD (op0);
8147 }
8148
8149 op0 = expand_expr (treeop0, NULL_RTX, mode,
8150 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8151 if (GET_MODE (op0) == mode)
8152 ;
8153
8154 /* If OP0 is a constant, just convert it into the proper mode. */
8155 else if (CONSTANT_P (op0))
8156 {
8157 tree inner_type = TREE_TYPE (treeop0);
8158 enum machine_mode inner_mode = GET_MODE (op0);
8159
8160 if (inner_mode == VOIDmode)
8161 inner_mode = TYPE_MODE (inner_type);
8162
8163 if (modifier == EXPAND_INITIALIZER)
8164 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8165 subreg_lowpart_offset (mode,
8166 inner_mode));
8167 else
8168 op0 = convert_modes (mode, inner_mode, op0,
8169 TYPE_UNSIGNED (inner_type));
8170 }
8171
8172 else if (modifier == EXPAND_INITIALIZER)
8173 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8174
8175 else if (target == 0)
8176 op0 = convert_to_mode (mode, op0,
8177 TYPE_UNSIGNED (TREE_TYPE
8178 (treeop0)));
8179 else
8180 {
8181 convert_move (target, op0,
8182 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8183 op0 = target;
8184 }
8185
8186 return REDUCE_BIT_FIELD (op0);
8187
8188 case ADDR_SPACE_CONVERT_EXPR:
8189 {
8190 tree treeop0_type = TREE_TYPE (treeop0);
8191 addr_space_t as_to;
8192 addr_space_t as_from;
8193
8194 gcc_assert (POINTER_TYPE_P (type));
8195 gcc_assert (POINTER_TYPE_P (treeop0_type));
8196
8197 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8198 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8199
8200 /* Conversions between pointers to the same address space should
8201 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8202 gcc_assert (as_to != as_from);
8203
8204 /* Ask target code to handle conversion between pointers
8205 to overlapping address spaces. */
8206 if (targetm.addr_space.subset_p (as_to, as_from)
8207 || targetm.addr_space.subset_p (as_from, as_to))
8208 {
8209 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8210 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8211 gcc_assert (op0);
8212 return op0;
8213 }
8214
8215 /* For disjoint address spaces, converting anything but
8216 a null pointer invokes undefined behaviour. We simply
8217 always return a null pointer here. */
8218 return CONST0_RTX (mode);
8219 }
8220
8221 case POINTER_PLUS_EXPR:
8222 /* Even though the sizetype mode and the pointer's mode can be different,
8223 expand is able to handle this correctly and get the correct result out
8224 of the PLUS_EXPR code. */
8225 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8226 if sizetype precision is smaller than pointer precision. */
8227 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8228 treeop1 = fold_convert_loc (loc, type,
8229 fold_convert_loc (loc, ssizetype,
8230 treeop1));
8231 /* If sizetype precision is larger than pointer precision, truncate the
8232 offset to have matching modes. */
8233 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8234 treeop1 = fold_convert_loc (loc, type, treeop1);
8235
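/* Fall through to the PLUS_EXPR handling below. */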
8236 case PLUS_EXPR:
8237 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8238 something else, make sure we add the register to the constant and
8239 then to the other thing. This case can occur during strength
8240 reduction and doing it this way will produce better code if the
8241 frame pointer or argument pointer is eliminated.
8242
8243 fold-const.c will ensure that the constant is always in the inner
8244 PLUS_EXPR, so the only case we need to do anything about is if
8245 sp, ap, or fp is our second argument, in which case we must swap
8246 the innermost first argument and our second argument. */
8247
8248 if (TREE_CODE (treeop0) == PLUS_EXPR
8249 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8250 && TREE_CODE (treeop1) == VAR_DECL
8251 && (DECL_RTL (treeop1) == frame_pointer_rtx
8252 || DECL_RTL (treeop1) == stack_pointer_rtx
8253 || DECL_RTL (treeop1) == arg_pointer_rtx))
8254 {
8255 gcc_unreachable ();
8256 }
8257
8258 /* If the result is to be ptr_mode and we are adding an integer to
8259 something, we might be forming a constant. So try to use
8260 plus_constant. If it produces a sum and we can't accept it,
8261 use force_operand. This allows P = &ARR[const] to generate
8262 efficient code on machines where a SYMBOL_REF is not a valid
8263 address.
8264
8265 If this is an EXPAND_SUM call, always return the sum. */
8266 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8267 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8268 {
8269 if (modifier == EXPAND_STACK_PARM)
8270 target = 0;
8271 if (TREE_CODE (treeop0) == INTEGER_CST
8272 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8273 && TREE_CONSTANT (treeop1))
8274 {
8275 rtx constant_part;
8276 HOST_WIDE_INT wc;
8277 enum machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8278
8279 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8280 EXPAND_SUM);
8281 /* Use wi::shwi to ensure that the constant is
8282 truncated according to the mode of OP1, then sign extended
8283 to a HOST_WIDE_INT. Using the constant directly can result
8284 in non-canonical RTL in a 64x32 cross compile. */
8285 wc = TREE_INT_CST_LOW (treeop0);
8286 constant_part =
8287 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8288 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8289 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8290 op1 = force_operand (op1, target);
8291 return REDUCE_BIT_FIELD (op1);
8292 }
8293
8294 else if (TREE_CODE (treeop1) == INTEGER_CST
8295 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8296 && TREE_CONSTANT (treeop0))
8297 {
8298 rtx constant_part;
8299 HOST_WIDE_INT wc;
8300 enum machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8301
8302 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8303 (modifier == EXPAND_INITIALIZER
8304 ? EXPAND_INITIALIZER : EXPAND_SUM));
8305 if (! CONSTANT_P (op0))
8306 {
8307 op1 = expand_expr (treeop1, NULL_RTX,
8308 VOIDmode, modifier);
8309 /* Return a PLUS if modifier says it's OK. */
8310 if (modifier == EXPAND_SUM
8311 || modifier == EXPAND_INITIALIZER)
8312 return simplify_gen_binary (PLUS, mode, op0, op1);
8313 goto binop2;
8314 }
8315 /* Use wi::shwi to ensure that the constant is
8316 truncated according to the mode of OP1, then sign extended
8317 to a HOST_WIDE_INT. Using the constant directly can result
8318 in non-canonical RTL in a 64x32 cross compile. */
8319 wc = TREE_INT_CST_LOW (treeop1);
8320 constant_part
8321 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8322 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8323 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8324 op0 = force_operand (op0, target);
8325 return REDUCE_BIT_FIELD (op0);
8326 }
8327 }
8328
8329 /* Use TER to expand pointer addition of a negated value
8330 as pointer subtraction. */
8331 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8332 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8333 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8334 && TREE_CODE (treeop1) == SSA_NAME
8335 && TYPE_MODE (TREE_TYPE (treeop0))
8336 == TYPE_MODE (TREE_TYPE (treeop1)))
8337 {
8338 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8339 if (def)
8340 {
8341 treeop1 = gimple_assign_rhs1 (def);
8342 code = MINUS_EXPR;
8343 goto do_minus;
8344 }
8345 }
8346
8347 /* No sense saving up arithmetic to be done
8348 if it's all in the wrong mode to form part of an address.
8349 And force_operand won't know whether to sign-extend or
8350 zero-extend. */
8351 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8352 || mode != ptr_mode)
8353 {
8354 expand_operands (treeop0, treeop1,
8355 subtarget, &op0, &op1, EXPAND_NORMAL);
8356 if (op0 == const0_rtx)
8357 return op1;
8358 if (op1 == const0_rtx)
8359 return op0;
8360 goto binop2;
8361 }
8362
8363 expand_operands (treeop0, treeop1,
8364 subtarget, &op0, &op1, modifier);
8365 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8366
8367 case MINUS_EXPR:
8368 do_minus:
8369 /* For initializers, we are allowed to return a MINUS of two
8370 symbolic constants. Here we handle all cases when both operands
8371 are constant. */
8372 /* Handle difference of two symbolic constants,
8373 for the sake of an initializer. */
8374 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8375 && really_constant_p (treeop0)
8376 && really_constant_p (treeop1))
8377 {
8378 expand_operands (treeop0, treeop1,
8379 NULL_RTX, &op0, &op1, modifier);
8380
8381 /* If the last operand is a CONST_INT, use plus_constant of
8382 the negated constant. Else make the MINUS. */
8383 if (CONST_INT_P (op1))
8384 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8385 -INTVAL (op1)));
8386 else
8387 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8388 }
8389
8390 /* No sense saving up arithmetic to be done
8391 if it's all in the wrong mode to form part of an address.
8392 And force_operand won't know whether to sign-extend or
8393 zero-extend. */
8394 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8395 || mode != ptr_mode)
8396 goto binop;
8397
8398 expand_operands (treeop0, treeop1,
8399 subtarget, &op0, &op1, modifier);
8400
8401 /* Convert A - const to A + (-const). */
8402 if (CONST_INT_P (op1))
8403 {
8404 op1 = negate_rtx (mode, op1);
8405 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8406 }
8407
8408 goto binop2;
8409
8410 case WIDEN_MULT_PLUS_EXPR:
8411 case WIDEN_MULT_MINUS_EXPR:
8412 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8413 op2 = expand_normal (treeop2);
8414 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8415 target, unsignedp);
8416 return target;
8417
8418 case WIDEN_MULT_EXPR:
8419 /* If the first operand is constant, swap them.
8420 Thus the following special case checks need only
8421 check the second operand. */
8422 if (TREE_CODE (treeop0) == INTEGER_CST)
8423 {
8424 tree t1 = treeop0;
8425 treeop0 = treeop1;
8426 treeop1 = t1;
8427 }
8428
8429 /* First, check if we have a multiplication of one signed and one
8430 unsigned operand. */
8431 if (TREE_CODE (treeop1) != INTEGER_CST
8432 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8433 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8434 {
8435 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8436 this_optab = usmul_widen_optab;
8437 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8438 != CODE_FOR_nothing)
8439 {
8440 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8441 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8442 EXPAND_NORMAL);
8443 else
8444 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8445 EXPAND_NORMAL);
8446 /* op0 and op1 might still be constant, despite the above
8447 != INTEGER_CST check. Handle it. */
8448 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8449 {
8450 op0 = convert_modes (innermode, mode, op0, true);
8451 op1 = convert_modes (innermode, mode, op1, false);
8452 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8453 target, unsignedp));
8454 }
8455 goto binop3;
8456 }
8457 }
8458 /* Check for a multiplication with matching signedness. */
8459 else if ((TREE_CODE (treeop1) == INTEGER_CST
8460 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8461 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8462 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8463 {
8464 tree op0type = TREE_TYPE (treeop0);
8465 enum machine_mode innermode = TYPE_MODE (op0type);
8466 bool zextend_p = TYPE_UNSIGNED (op0type);
8467 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8468 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8469
8470 if (TREE_CODE (treeop0) != INTEGER_CST)
8471 {
8472 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8473 != CODE_FOR_nothing)
8474 {
8475 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8476 EXPAND_NORMAL);
8477 /* op0 and op1 might still be constant, despite the above
8478 != INTEGER_CST check. Handle it. */
8479 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8480 {
8481 widen_mult_const:
8482 op0 = convert_modes (innermode, mode, op0, zextend_p);
8483 op1
8484 = convert_modes (innermode, mode, op1,
8485 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8486 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8487 target,
8488 unsignedp));
8489 }
8490 temp = expand_widening_mult (mode, op0, op1, target,
8491 unsignedp, this_optab);
8492 return REDUCE_BIT_FIELD (temp);
8493 }
8494 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8495 != CODE_FOR_nothing
8496 && innermode == word_mode)
8497 {
8498 rtx htem, hipart;
8499 op0 = expand_normal (treeop0);
8500 if (TREE_CODE (treeop1) == INTEGER_CST)
8501 op1 = convert_modes (innermode, mode,
8502 expand_normal (treeop1),
8503 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8504 else
8505 op1 = expand_normal (treeop1);
8506 /* op0 and op1 might still be constant, despite the above
8507 != INTEGER_CST check. Handle it. */
8508 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8509 goto widen_mult_const;
8510 temp = expand_binop (mode, other_optab, op0, op1, target,
8511 unsignedp, OPTAB_LIB_WIDEN);
8512 hipart = gen_highpart (innermode, temp);
8513 htem = expand_mult_highpart_adjust (innermode, hipart,
8514 op0, op1, hipart,
8515 zextend_p);
8516 if (htem != hipart)
8517 emit_move_insn (hipart, htem);
8518 return REDUCE_BIT_FIELD (temp);
8519 }
8520 }
8521 }
8522 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8523 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8524 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8525 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8526
8527 case FMA_EXPR:
8528 {
8529 optab opt = fma_optab;
8530 gimple def0, def2;
8531
8532 /* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
8533 call. */
8534 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8535 {
8536 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8537 tree call_expr;
8538
8539 gcc_assert (fn != NULL_TREE);
8540 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8541 return expand_builtin (call_expr, target, subtarget, mode, false);
8542 }
8543
8544 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8545 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8546
8547 op0 = op2 = NULL;
8548
8549 if (def0 && def2
8550 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8551 {
8552 opt = fnms_optab;
8553 op0 = expand_normal (gimple_assign_rhs1 (def0));
8554 op2 = expand_normal (gimple_assign_rhs1 (def2));
8555 }
8556 else if (def0
8557 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8558 {
8559 opt = fnma_optab;
8560 op0 = expand_normal (gimple_assign_rhs1 (def0));
8561 }
8562 else if (def2
8563 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8564 {
8565 opt = fms_optab;
8566 op2 = expand_normal (gimple_assign_rhs1 (def2));
8567 }
8568
8569 if (op0 == NULL)
8570 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8571 if (op2 == NULL)
8572 op2 = expand_normal (treeop2);
8573 op1 = expand_normal (treeop1);
8574
8575 return expand_ternary_op (TYPE_MODE (type), opt,
8576 op0, op1, op2, target, 0);
8577 }
8578
8579 case MULT_EXPR:
8580 /* If this is a fixed-point operation, then we cannot use the code
8581 below because "expand_mult" doesn't support sat/no-sat fixed-point
8582 multiplications. */
8583 if (ALL_FIXED_POINT_MODE_P (mode))
8584 goto binop;
8585
8586 /* If the first operand is constant, swap them.
8587 Thus the following special case checks need only
8588 check the second operand. */
8589 if (TREE_CODE (treeop0) == INTEGER_CST)
8590 {
8591 tree t1 = treeop0;
8592 treeop0 = treeop1;
8593 treeop1 = t1;
8594 }
8595
8596 /* Attempt to return something suitable for generating an
8597 indexed address, for machines that support that. */
8598
8599 if (modifier == EXPAND_SUM && mode == ptr_mode
8600 && tree_fits_shwi_p (treeop1))
8601 {
8602 tree exp1 = treeop1;
8603
8604 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8605 EXPAND_SUM);
8606
8607 if (!REG_P (op0))
8608 op0 = force_operand (op0, NULL_RTX);
8609 if (!REG_P (op0))
8610 op0 = copy_to_mode_reg (mode, op0);
8611
8612 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8613 gen_int_mode (tree_to_shwi (exp1),
8614 TYPE_MODE (TREE_TYPE (exp1)))));
8615 }
8616
8617 if (modifier == EXPAND_STACK_PARM)
8618 target = 0;
8619
8620 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8621 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8622
8623 case TRUNC_DIV_EXPR:
8624 case FLOOR_DIV_EXPR:
8625 case CEIL_DIV_EXPR:
8626 case ROUND_DIV_EXPR:
8627 case EXACT_DIV_EXPR:
8628 /* If this is a fixed-point operation, then we cannot use the code
8629 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8630 divisions. */
8631 if (ALL_FIXED_POINT_MODE_P (mode))
8632 goto binop;
8633
8634 if (modifier == EXPAND_STACK_PARM)
8635 target = 0;
8636 /* Possible optimization: compute the dividend with EXPAND_SUM;
8637 then, if the divisor is constant, we can optimize the case
8638 where some terms of the dividend have coefficients divisible by it. */
8639 expand_operands (treeop0, treeop1,
8640 subtarget, &op0, &op1, EXPAND_NORMAL);
8641 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8642
8643 case RDIV_EXPR:
8644 goto binop;
8645
8646 case MULT_HIGHPART_EXPR:
8647 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8648 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8649 gcc_assert (temp);
8650 return temp;
8651
8652 case TRUNC_MOD_EXPR:
8653 case FLOOR_MOD_EXPR:
8654 case CEIL_MOD_EXPR:
8655 case ROUND_MOD_EXPR:
8656 if (modifier == EXPAND_STACK_PARM)
8657 target = 0;
8658 expand_operands (treeop0, treeop1,
8659 subtarget, &op0, &op1, EXPAND_NORMAL);
8660 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8661
8662 case FIXED_CONVERT_EXPR:
8663 op0 = expand_normal (treeop0);
8664 if (target == 0 || modifier == EXPAND_STACK_PARM)
8665 target = gen_reg_rtx (mode);
8666
8667 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8668 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8669 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8670 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8671 else
8672 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8673 return target;
8674
8675 case FIX_TRUNC_EXPR:
8676 op0 = expand_normal (treeop0);
8677 if (target == 0 || modifier == EXPAND_STACK_PARM)
8678 target = gen_reg_rtx (mode);
8679 expand_fix (target, op0, unsignedp);
8680 return target;
8681
8682 case FLOAT_EXPR:
8683 op0 = expand_normal (treeop0);
8684 if (target == 0 || modifier == EXPAND_STACK_PARM)
8685 target = gen_reg_rtx (mode);
8686 /* expand_float can't figure out what to do if FROM has VOIDmode.
8687 So give it the correct mode. With -O, cse will optimize this. */
8688 if (GET_MODE (op0) == VOIDmode)
8689 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8690 op0);
8691 expand_float (target, op0,
8692 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8693 return target;
8694
8695 case NEGATE_EXPR:
8696 op0 = expand_expr (treeop0, subtarget,
8697 VOIDmode, EXPAND_NORMAL);
8698 if (modifier == EXPAND_STACK_PARM)
8699 target = 0;
8700 temp = expand_unop (mode,
8701 optab_for_tree_code (NEGATE_EXPR, type,
8702 optab_default),
8703 op0, target, 0);
8704 gcc_assert (temp);
8705 return REDUCE_BIT_FIELD (temp);
8706
8707 case ABS_EXPR:
8708 op0 = expand_expr (treeop0, subtarget,
8709 VOIDmode, EXPAND_NORMAL);
8710 if (modifier == EXPAND_STACK_PARM)
8711 target = 0;
8712
8713 /* ABS_EXPR is not valid for complex arguments. */
8714 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8715 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8716
8717 /* Unsigned abs is simply the operand. Testing here means we don't
8718 risk generating incorrect code below. */
8719 if (TYPE_UNSIGNED (type))
8720 return op0;
8721
8722 return expand_abs (mode, op0, target, unsignedp,
8723 safe_from_p (target, treeop0, 1));
8724
8725 case MAX_EXPR:
8726 case MIN_EXPR:
8727 target = original_target;
8728 if (target == 0
8729 || modifier == EXPAND_STACK_PARM
8730 || (MEM_P (target) && MEM_VOLATILE_P (target))
8731 || GET_MODE (target) != mode
8732 || (REG_P (target)
8733 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8734 target = gen_reg_rtx (mode);
8735 expand_operands (treeop0, treeop1,
8736 target, &op0, &op1, EXPAND_NORMAL);
8737
8738 /* First try to do it with a special MIN or MAX instruction.
8739 If that does not win, use a conditional jump to select the proper
8740 value. */
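/* Illustrative sketch (not part of the original source): when neither a
   min/max pattern nor a conditional move is available, the fallback code
   below amounts to, for a signed MIN_EXPR,
       target = op0;
       if (target <= op1) goto done;
       target = op1;
     done:
   with LEU/GEU instead of LE/GE in the unsigned case.  */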
8741 this_optab = optab_for_tree_code (code, type, optab_default);
8742 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8743 OPTAB_WIDEN);
8744 if (temp != 0)
8745 return temp;
8746
8747 /* At this point, a MEM target is no longer useful; we will get better
8748 code without it. */
8749
8750 if (! REG_P (target))
8751 target = gen_reg_rtx (mode);
8752
8753 /* If op1 was placed in target, swap op0 and op1. */
8754 if (target != op0 && target == op1)
8755 {
8756 temp = op0;
8757 op0 = op1;
8758 op1 = temp;
8759 }
8760
8761 /* We generate better code and avoid problems with op1 mentioning
8762 target by forcing op1 into a pseudo if it isn't a constant. */
8763 if (! CONSTANT_P (op1))
8764 op1 = force_reg (mode, op1);
8765
8766 {
8767 enum rtx_code comparison_code;
8768 rtx cmpop1 = op1;
8769
8770 if (code == MAX_EXPR)
8771 comparison_code = unsignedp ? GEU : GE;
8772 else
8773 comparison_code = unsignedp ? LEU : LE;
8774
8775 /* Canonicalize to comparisons against 0. */
8776 if (op1 == const1_rtx)
8777 {
8778 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8779 or (a != 0 ? a : 1) for unsigned.
8780 For MIN we are safe converting (a <= 1 ? a : 1)
8781 into (a <= 0 ? a : 1) */
8782 cmpop1 = const0_rtx;
8783 if (code == MAX_EXPR)
8784 comparison_code = unsignedp ? NE : GT;
8785 }
8786 if (op1 == constm1_rtx && !unsignedp)
8787 {
8788 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8789 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8790 cmpop1 = const0_rtx;
8791 if (code == MIN_EXPR)
8792 comparison_code = LT;
8793 }
8794 #ifdef HAVE_conditional_move
8795 /* Use a conditional move if possible. */
8796 if (can_conditionally_move_p (mode))
8797 {
8798 rtx insn;
8799
8800 start_sequence ();
8801
8802 /* Try to emit the conditional move. */
8803 insn = emit_conditional_move (target, comparison_code,
8804 op0, cmpop1, mode,
8805 op0, op1, mode,
8806 unsignedp);
8807
8808 /* If we could do the conditional move, emit the sequence,
8809 and return. */
8810 if (insn)
8811 {
8812 rtx seq = get_insns ();
8813 end_sequence ();
8814 emit_insn (seq);
8815 return target;
8816 }
8817
8818 /* Otherwise discard the sequence and fall back to code with
8819 branches. */
8820 end_sequence ();
8821 }
8822 #endif
8823 if (target != op0)
8824 emit_move_insn (target, op0);
8825
8826 temp = gen_label_rtx ();
8827 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8828 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8829 -1);
8830 }
8831 emit_move_insn (target, op1);
8832 emit_label (temp);
8833 return target;
8834
8835 case BIT_NOT_EXPR:
8836 op0 = expand_expr (treeop0, subtarget,
8837 VOIDmode, EXPAND_NORMAL);
8838 if (modifier == EXPAND_STACK_PARM)
8839 target = 0;
8840 /* In case we have to reduce the result to bitfield precision
8841 for an unsigned bitfield, expand this as an XOR with a proper
8842 constant instead. */
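/* Illustrative example (not from the original source): for a 3-bit
   unsigned bit-field type carried in SImode, the mask below is 0b111,
   and op0 ^ 0b111 flips exactly the low three bits, which is the
   reduced-precision form of ~op0.  */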
8843 if (reduce_bit_field && TYPE_UNSIGNED (type))
8844 {
8845 wide_int mask = wi::mask (TYPE_PRECISION (type),
8846 false, GET_MODE_PRECISION (mode));
8847
8848 temp = expand_binop (mode, xor_optab, op0,
8849 immed_wide_int_const (mask, mode),
8850 target, 1, OPTAB_LIB_WIDEN);
8851 }
8852 else
8853 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8854 gcc_assert (temp);
8855 return temp;
8856
8857 /* ??? Can optimize bitwise operations with one arg constant.
8858 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8859 and (a bitwise1 b) bitwise2 b (etc)
8860 but that is probably not worthwhile. */
8861
8862 case BIT_AND_EXPR:
8863 case BIT_IOR_EXPR:
8864 case BIT_XOR_EXPR:
8865 goto binop;
8866
8867 case LROTATE_EXPR:
8868 case RROTATE_EXPR:
8869 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8870 || (GET_MODE_PRECISION (TYPE_MODE (type))
8871 == TYPE_PRECISION (type)));
8872 /* fall through */
8873
8874 case LSHIFT_EXPR:
8875 case RSHIFT_EXPR:
8876 /* If this is a fixed-point operation, then we cannot use the code
8877 below because "expand_shift" doesn't support sat/no-sat fixed-point
8878 shifts. */
8879 if (ALL_FIXED_POINT_MODE_P (mode))
8880 goto binop;
8881
8882 if (! safe_from_p (subtarget, treeop1, 1))
8883 subtarget = 0;
8884 if (modifier == EXPAND_STACK_PARM)
8885 target = 0;
8886 op0 = expand_expr (treeop0, subtarget,
8887 VOIDmode, EXPAND_NORMAL);
8888 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8889 unsignedp);
8890 if (code == LSHIFT_EXPR)
8891 temp = REDUCE_BIT_FIELD (temp);
8892 return temp;
8893
8894 /* Could determine the answer when only additive constants differ. Also,
8895 the addition of one can be handled by changing the condition. */
8896 case LT_EXPR:
8897 case LE_EXPR:
8898 case GT_EXPR:
8899 case GE_EXPR:
8900 case EQ_EXPR:
8901 case NE_EXPR:
8902 case UNORDERED_EXPR:
8903 case ORDERED_EXPR:
8904 case UNLT_EXPR:
8905 case UNLE_EXPR:
8906 case UNGT_EXPR:
8907 case UNGE_EXPR:
8908 case UNEQ_EXPR:
8909 case LTGT_EXPR:
8910 temp = do_store_flag (ops,
8911 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8912 tmode != VOIDmode ? tmode : mode);
8913 if (temp)
8914 return temp;
8915
8916 /* Use a compare and a jump for BLKmode comparisons, or for function
8917 type comparisons if HAVE_canonicalize_funcptr_for_compare is defined. */
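/* Illustrative sketch (not part of the original source): the fallback
   below materializes the flag with a branch, roughly
       target = 0;
       if (!(op0 <cmp> op1)) goto over;
       target = 1;        (or -1 for a 1-bit signed type)
     over:
   instead of with a store-flag instruction.  */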
8918
8919 if ((target == 0
8920 || modifier == EXPAND_STACK_PARM
8921 || ! safe_from_p (target, treeop0, 1)
8922 || ! safe_from_p (target, treeop1, 1)
8923 /* Make sure we don't have a hard reg (such as function's return
8924 value) live across basic blocks, if not optimizing. */
8925 || (!optimize && REG_P (target)
8926 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8927 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8928
8929 emit_move_insn (target, const0_rtx);
8930
8931 op1 = gen_label_rtx ();
8932 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8933
8934 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8935 emit_move_insn (target, constm1_rtx);
8936 else
8937 emit_move_insn (target, const1_rtx);
8938
8939 emit_label (op1);
8940 return target;
8941
8942 case COMPLEX_EXPR:
8943 /* Get the rtx code of the operands. */
8944 op0 = expand_normal (treeop0);
8945 op1 = expand_normal (treeop1);
8946
8947 if (!target)
8948 target = gen_reg_rtx (TYPE_MODE (type));
8949 else
8950 /* If target overlaps with op1, then either we need to force
8951 op1 into a pseudo (if target also overlaps with op0),
8952 or write the complex parts in reverse order. */
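/* Illustrative example (not from the original source): if TARGET is the
   CONCAT (c_re, c_im) and op1 (the new imaginary part) still reads c_re,
   writing the real part first would clobber that input, so the parts are
   written imaginary-first; if op0 overlaps the imaginary half as well,
   op1 is copied into a fresh pseudo instead.  */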
8953 switch (GET_CODE (target))
8954 {
8955 case CONCAT:
8956 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8957 {
8958 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8959 {
8960 complex_expr_force_op1:
8961 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8962 emit_move_insn (temp, op1);
8963 op1 = temp;
8964 break;
8965 }
8966 complex_expr_swap_order:
8967 /* Move the imaginary (op1) and real (op0) parts to their
8968 location. */
8969 write_complex_part (target, op1, true);
8970 write_complex_part (target, op0, false);
8971
8972 return target;
8973 }
8974 break;
8975 case MEM:
8976 temp = adjust_address_nv (target,
8977 GET_MODE_INNER (GET_MODE (target)), 0);
8978 if (reg_overlap_mentioned_p (temp, op1))
8979 {
8980 enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
8981 temp = adjust_address_nv (target, imode,
8982 GET_MODE_SIZE (imode));
8983 if (reg_overlap_mentioned_p (temp, op0))
8984 goto complex_expr_force_op1;
8985 goto complex_expr_swap_order;
8986 }
8987 break;
8988 default:
8989 if (reg_overlap_mentioned_p (target, op1))
8990 {
8991 if (reg_overlap_mentioned_p (target, op0))
8992 goto complex_expr_force_op1;
8993 goto complex_expr_swap_order;
8994 }
8995 break;
8996 }
8997
8998 /* Move the real (op0) and imaginary (op1) parts to their location. */
8999 write_complex_part (target, op0, false);
9000 write_complex_part (target, op1, true);
9001
9002 return target;
9003
9004 case WIDEN_SUM_EXPR:
9005 {
9006 tree oprnd0 = treeop0;
9007 tree oprnd1 = treeop1;
9008
9009 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9010 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9011 target, unsignedp);
9012 return target;
9013 }
9014
9015 case REDUC_MAX_EXPR:
9016 case REDUC_MIN_EXPR:
9017 case REDUC_PLUS_EXPR:
9018 {
9019 op0 = expand_normal (treeop0);
9020 this_optab = optab_for_tree_code (code, type, optab_default);
9021 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9022 gcc_assert (temp);
9023 return temp;
9024 }
9025
9026 case VEC_LSHIFT_EXPR:
9027 case VEC_RSHIFT_EXPR:
9028 {
9029 target = expand_vec_shift_expr (ops, target);
9030 return target;
9031 }
9032
9033 case VEC_UNPACK_HI_EXPR:
9034 case VEC_UNPACK_LO_EXPR:
9035 {
9036 op0 = expand_normal (treeop0);
9037 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9038 target, unsignedp);
9039 gcc_assert (temp);
9040 return temp;
9041 }
9042
9043 case VEC_UNPACK_FLOAT_HI_EXPR:
9044 case VEC_UNPACK_FLOAT_LO_EXPR:
9045 {
9046 op0 = expand_normal (treeop0);
9047 /* The signedness is determined from the input operand. */
9048 temp = expand_widen_pattern_expr
9049 (ops, op0, NULL_RTX, NULL_RTX,
9050 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9051
9052 gcc_assert (temp);
9053 return temp;
9054 }
9055
9056 case VEC_WIDEN_MULT_HI_EXPR:
9057 case VEC_WIDEN_MULT_LO_EXPR:
9058 case VEC_WIDEN_MULT_EVEN_EXPR:
9059 case VEC_WIDEN_MULT_ODD_EXPR:
9060 case VEC_WIDEN_LSHIFT_HI_EXPR:
9061 case VEC_WIDEN_LSHIFT_LO_EXPR:
9062 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9063 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9064 target, unsignedp);
9065 gcc_assert (target);
9066 return target;
9067
9068 case VEC_PACK_TRUNC_EXPR:
9069 case VEC_PACK_SAT_EXPR:
9070 case VEC_PACK_FIX_TRUNC_EXPR:
9071 mode = TYPE_MODE (TREE_TYPE (treeop0));
9072 goto binop;
9073
9074 case VEC_PERM_EXPR:
9075 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9076 op2 = expand_normal (treeop2);
9077
9078 /* Careful here: if the target doesn't support integral vector modes,
9079 a constant selection vector could wind up smooshed into a normal
9080 integral constant. */
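/* Hypothetical example (not from the original source): on such a target a
   V4QI selector like { 0, 1, 2, 3 } may reach this point folded into a
   plain integer constant; the simplify_subreg call below recovers the
   CONST_VECTOR form asserted afterwards.  */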
9081 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9082 {
9083 tree sel_type = TREE_TYPE (treeop2);
9084 enum machine_mode vmode
9085 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9086 TYPE_VECTOR_SUBPARTS (sel_type));
9087 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9088 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9089 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9090 }
9091 else
9092 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9093
9094 temp = expand_vec_perm (mode, op0, op1, op2, target);
9095 gcc_assert (temp);
9096 return temp;
9097
9098 case DOT_PROD_EXPR:
9099 {
9100 tree oprnd0 = treeop0;
9101 tree oprnd1 = treeop1;
9102 tree oprnd2 = treeop2;
9103 rtx op2;
9104
9105 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9106 op2 = expand_normal (oprnd2);
9107 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9108 target, unsignedp);
9109 return target;
9110 }
9111
9112 case REALIGN_LOAD_EXPR:
9113 {
9114 tree oprnd0 = treeop0;
9115 tree oprnd1 = treeop1;
9116 tree oprnd2 = treeop2;
9117 rtx op2;
9118
9119 this_optab = optab_for_tree_code (code, type, optab_default);
9120 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9121 op2 = expand_normal (oprnd2);
9122 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9123 target, unsignedp);
9124 gcc_assert (temp);
9125 return temp;
9126 }
9127
9128 case COND_EXPR:
9129 /* A COND_EXPR with its type being VOID_TYPE represents a
9130 conditional jump and is handled in
9131 expand_gimple_cond_expr. */
9132 gcc_assert (!VOID_TYPE_P (type));
9133
9134 /* Note that COND_EXPRs whose type is a structure or union
9135 are required to be constructed to contain assignments of
9136 a temporary variable, so that we can evaluate them here
9137 for side effect only. If type is void, we must do likewise. */
9138
9139 gcc_assert (!TREE_ADDRESSABLE (type)
9140 && !ignore
9141 && TREE_TYPE (treeop1) != void_type_node
9142 && TREE_TYPE (treeop2) != void_type_node);
9143
9144 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9145 if (temp)
9146 return temp;
9147
9148 /* If we are not to produce a result, we have no target. Otherwise,
9149 if a target was specified, use it; it will not be used as an
9150 intermediate target unless it is safe. If there is no target,
9151 use a temporary. */
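/* Illustrative sketch (not part of the original source): the code below
   expands the remaining COND_EXPRs with branches, roughly
       if (!cond) goto else_part;
       temp = <treeop1>; goto done;
     else_part:
       temp = <treeop2>;
     done:
   storing both arms into the same temporary.  */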
9152
9153 if (modifier != EXPAND_STACK_PARM
9154 && original_target
9155 && safe_from_p (original_target, treeop0, 1)
9156 && GET_MODE (original_target) == mode
9157 && !MEM_P (original_target))
9158 temp = original_target;
9159 else
9160 temp = assign_temp (type, 0, 1);
9161
9162 do_pending_stack_adjust ();
9163 NO_DEFER_POP;
9164 op0 = gen_label_rtx ();
9165 op1 = gen_label_rtx ();
9166 jumpifnot (treeop0, op0, -1);
9167 store_expr (treeop1, temp,
9168 modifier == EXPAND_STACK_PARM,
9169 false);
9170
9171 emit_jump_insn (gen_jump (op1));
9172 emit_barrier ();
9173 emit_label (op0);
9174 store_expr (treeop2, temp,
9175 modifier == EXPAND_STACK_PARM,
9176 false);
9177
9178 emit_label (op1);
9179 OK_DEFER_POP;
9180 return temp;
9181
9182 case VEC_COND_EXPR:
9183 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9184 return target;
9185
9186 default:
9187 gcc_unreachable ();
9188 }
9189
9190 /* Here to do an ordinary binary operator. */
9191 binop:
9192 expand_operands (treeop0, treeop1,
9193 subtarget, &op0, &op1, EXPAND_NORMAL);
9194 binop2:
9195 this_optab = optab_for_tree_code (code, type, optab_default);
9196 binop3:
9197 if (modifier == EXPAND_STACK_PARM)
9198 target = 0;
9199 temp = expand_binop (mode, this_optab, op0, op1, target,
9200 unsignedp, OPTAB_LIB_WIDEN);
9201 gcc_assert (temp);
9202 /* Bitwise operations do not need bitfield reduction, as we expect their
9203 operands to be properly truncated. */
9204 if (code == BIT_XOR_EXPR
9205 || code == BIT_AND_EXPR
9206 || code == BIT_IOR_EXPR)
9207 return temp;
9208 return REDUCE_BIT_FIELD (temp);
9209 }
9210 #undef REDUCE_BIT_FIELD
9211
9212
9213 /* Return TRUE if expression STMT is suitable for replacement.
9214 Never consider memory loads as replaceable, because those don't ever lead
9215 into constant expressions. */
9216
9217 static bool
9218 stmt_is_replaceable_p (gimple stmt)
9219 {
9220 if (ssa_is_replaceable_p (stmt))
9221 {
9222 /* Don't move around loads. */
9223 if (!gimple_assign_single_p (stmt)
9224 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9225 return true;
9226 }
9227 return false;
9228 }
9229
9230 rtx
9231 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9232 enum expand_modifier modifier, rtx *alt_rtl,
9233 bool inner_reference_p)
9234 {
9235 rtx op0, op1, temp, decl_rtl;
9236 tree type;
9237 int unsignedp;
9238 enum machine_mode mode;
9239 enum tree_code code = TREE_CODE (exp);
9240 rtx subtarget, original_target;
9241 int ignore;
9242 tree context;
9243 bool reduce_bit_field;
9244 location_t loc = EXPR_LOCATION (exp);
9245 struct separate_ops ops;
9246 tree treeop0, treeop1, treeop2;
9247 tree ssa_name = NULL_TREE;
9248 gimple g;
9249
9250 type = TREE_TYPE (exp);
9251 mode = TYPE_MODE (type);
9252 unsignedp = TYPE_UNSIGNED (type);
9253
9254 treeop0 = treeop1 = treeop2 = NULL_TREE;
9255 if (!VL_EXP_CLASS_P (exp))
9256 switch (TREE_CODE_LENGTH (code))
9257 {
9258 default:
9259 case 3: treeop2 = TREE_OPERAND (exp, 2);
9260 case 2: treeop1 = TREE_OPERAND (exp, 1);
9261 case 1: treeop0 = TREE_OPERAND (exp, 0);
9262 case 0: break;
9263 }
9264 ops.code = code;
9265 ops.type = type;
9266 ops.op0 = treeop0;
9267 ops.op1 = treeop1;
9268 ops.op2 = treeop2;
9269 ops.location = loc;
9270
9271 ignore = (target == const0_rtx
9272 || ((CONVERT_EXPR_CODE_P (code)
9273 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9274 && TREE_CODE (type) == VOID_TYPE));
9275
9276 /* An operation in what may be a bit-field type needs the
9277 result to be reduced to the precision of the bit-field type,
9278 which is narrower than that of the type's mode. */
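/* Illustrative example (not from the original source): adding two values
   of a 3-bit bit-field type carried in SImode can set bits above bit 2;
   REDUCE_BIT_FIELD then masks (or sign-extends) the result back to the
   3-bit precision.  */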
9279 reduce_bit_field = (!ignore
9280 && INTEGRAL_TYPE_P (type)
9281 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9282
9283 /* If we are going to ignore this result, we need only do something
9284 if there is a side-effect somewhere in the expression. If there
9285 is, short-circuit the most common cases here. Note that we must
9286 not call expand_expr with anything but const0_rtx in case this
9287 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9288
9289 if (ignore)
9290 {
9291 if (! TREE_SIDE_EFFECTS (exp))
9292 return const0_rtx;
9293
9294 /* Ensure we reference a volatile object even if value is ignored, but
9295 don't do this if all we are doing is taking its address. */
9296 if (TREE_THIS_VOLATILE (exp)
9297 && TREE_CODE (exp) != FUNCTION_DECL
9298 && mode != VOIDmode && mode != BLKmode
9299 && modifier != EXPAND_CONST_ADDRESS)
9300 {
9301 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9302 if (MEM_P (temp))
9303 copy_to_reg (temp);
9304 return const0_rtx;
9305 }
9306
9307 if (TREE_CODE_CLASS (code) == tcc_unary
9308 || code == BIT_FIELD_REF
9309 || code == COMPONENT_REF
9310 || code == INDIRECT_REF)
9311 return expand_expr (treeop0, const0_rtx, VOIDmode,
9312 modifier);
9313
9314 else if (TREE_CODE_CLASS (code) == tcc_binary
9315 || TREE_CODE_CLASS (code) == tcc_comparison
9316 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9317 {
9318 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9319 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9320 return const0_rtx;
9321 }
9322
9323 target = 0;
9324 }
9325
9326 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9327 target = 0;
9328
9329 /* Use subtarget as the target for operand 0 of a binary operation. */
9330 subtarget = get_subtarget (target);
9331 original_target = target;
9332
9333 switch (code)
9334 {
9335 case LABEL_DECL:
9336 {
9337 tree function = decl_function_context (exp);
9338
9339 temp = label_rtx (exp);
9340 temp = gen_rtx_LABEL_REF (Pmode, temp);
9341
9342 if (function != current_function_decl
9343 && function != 0)
9344 LABEL_REF_NONLOCAL_P (temp) = 1;
9345
9346 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9347 return temp;
9348 }
9349
9350 case SSA_NAME:
9351 /* ??? ivopts calls expander, without any preparation from
9352 out-of-ssa. So fake instructions as if this were an access to the
9353 base variable. This unnecessarily allocates a pseudo; see how we can
9354 reuse it if partition base vars have it set already. */
9355 if (!currently_expanding_to_rtl)
9356 {
9357 tree var = SSA_NAME_VAR (exp);
9358 if (var && DECL_RTL_SET_P (var))
9359 return DECL_RTL (var);
9360 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9361 LAST_VIRTUAL_REGISTER + 1);
9362 }
9363
9364 g = get_gimple_for_ssa_name (exp);
9365 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9366 if (g == NULL
9367 && modifier == EXPAND_INITIALIZER
9368 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9369 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9370 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9371 g = SSA_NAME_DEF_STMT (exp);
9372 if (g)
9373 {
9374 rtx r;
9375 ops.code = gimple_assign_rhs_code (g);
9376 switch (get_gimple_rhs_class (ops.code))
9377 {
9378 case GIMPLE_TERNARY_RHS:
9379 ops.op2 = gimple_assign_rhs3 (g);
9380 /* Fallthru */
9381 case GIMPLE_BINARY_RHS:
9382 ops.op1 = gimple_assign_rhs2 (g);
9383 /* Fallthru */
9384 case GIMPLE_UNARY_RHS:
9385 ops.op0 = gimple_assign_rhs1 (g);
9386 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9387 ops.location = gimple_location (g);
9388 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9389 break;
9390 case GIMPLE_SINGLE_RHS:
9391 {
9392 location_t saved_loc = curr_insn_location ();
9393 set_curr_insn_location (gimple_location (g));
9394 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9395 tmode, modifier, NULL, inner_reference_p);
9396 set_curr_insn_location (saved_loc);
9397 break;
9398 }
9399 default:
9400 gcc_unreachable ();
9401 }
9402 if (REG_P (r) && !REG_EXPR (r))
9403 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9404 return r;
9405 }
9406
9407 ssa_name = exp;
9408 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9409 exp = SSA_NAME_VAR (ssa_name);
9410 goto expand_decl_rtl;
9411
9412 case PARM_DECL:
9413 case VAR_DECL:
9414 /* If a static var's type was incomplete when the decl was written,
9415 but the type is complete now, lay out the decl now. */
9416 if (DECL_SIZE (exp) == 0
9417 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9418 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9419 layout_decl (exp, 0);
9420
9421 /* ... fall through ... */
9422
9423 case FUNCTION_DECL:
9424 case RESULT_DECL:
9425 decl_rtl = DECL_RTL (exp);
9426 expand_decl_rtl:
9427 gcc_assert (decl_rtl);
9428 decl_rtl = copy_rtx (decl_rtl);
9429 /* Record writes to register variables. */
9430 if (modifier == EXPAND_WRITE
9431 && REG_P (decl_rtl)
9432 && HARD_REGISTER_P (decl_rtl))
9433 add_to_hard_reg_set (&crtl->asm_clobbers,
9434 GET_MODE (decl_rtl), REGNO (decl_rtl));
9435
9436 /* Ensure the variable is marked as used even if it doesn't go through
9437 a parser. If it hasn't been used yet, write out an external
9438 definition. */
9439 TREE_USED (exp) = 1;
9440
9441 /* Show we haven't gotten RTL for this yet. */
9442 temp = 0;
9443
9444 /* Variables inherited from containing functions should have
9445 been lowered by this point. */
9446 context = decl_function_context (exp);
9447 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9448 || context == current_function_decl
9449 || TREE_STATIC (exp)
9450 || DECL_EXTERNAL (exp)
9451 /* ??? C++ creates functions that are not TREE_STATIC. */
9452 || TREE_CODE (exp) == FUNCTION_DECL);
9453
9454 /* This is the case of an array whose size is to be determined
9455 from its initializer, while the initializer is still being parsed.
9456 ??? We aren't parsing while expanding anymore. */
9457
9458 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9459 temp = validize_mem (decl_rtl);
9460
9461 /* If DECL_RTL is memory, we are in the normal case and the
9462 address is not valid, get the address into a register. */
9463
9464 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9465 {
9466 if (alt_rtl)
9467 *alt_rtl = decl_rtl;
9468 decl_rtl = use_anchored_address (decl_rtl);
9469 if (modifier != EXPAND_CONST_ADDRESS
9470 && modifier != EXPAND_SUM
9471 && !memory_address_addr_space_p (DECL_MODE (exp),
9472 XEXP (decl_rtl, 0),
9473 MEM_ADDR_SPACE (decl_rtl)))
9474 temp = replace_equiv_address (decl_rtl,
9475 copy_rtx (XEXP (decl_rtl, 0)));
9476 }
9477
9478 /* If we got something, return it. But first, set the alignment
9479 if the address is a register. */
9480 if (temp != 0)
9481 {
9482 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9483 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9484
9485 return temp;
9486 }
9487
9488 /* If the mode of DECL_RTL does not match that of the decl,
9489 there are two cases: we are dealing with a BLKmode value
9490 that is returned in a register, or we are dealing with
9491 a promoted value. In the latter case, return a SUBREG
9492 of the wanted mode, but mark it so that we know that it
9493 was already extended. */
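/* Illustrative example (not from the original source): on a target whose
   PROMOTE_MODE widens QImode variables to SImode, DECL_RTL is an SImode
   pseudo and what is returned here is a lowpart (subreg:QI (reg:SI ...))
   with SUBREG_PROMOTED_VAR_P set, so later code knows the value is
   already extended.  */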
9494 if (REG_P (decl_rtl)
9495 && DECL_MODE (exp) != BLKmode
9496 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9497 {
9498 enum machine_mode pmode;
9499
9500 /* Get the signedness to be used for this variable. Ensure we get
9501 the same mode we got when the variable was declared. */
9502 if (code == SSA_NAME
9503 && (g = SSA_NAME_DEF_STMT (ssa_name))
9504 && gimple_code (g) == GIMPLE_CALL
9505 && !gimple_call_internal_p (g))
9506 pmode = promote_function_mode (type, mode, &unsignedp,
9507 gimple_call_fntype (g),
9508 2);
9509 else
9510 pmode = promote_decl_mode (exp, &unsignedp);
9511 gcc_assert (GET_MODE (decl_rtl) == pmode);
9512
9513 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9514 SUBREG_PROMOTED_VAR_P (temp) = 1;
9515 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9516 return temp;
9517 }
9518
9519 return decl_rtl;
9520
9521 case INTEGER_CST:
9522 /* Given that TYPE_PRECISION (type) is not always equal to
9523 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9524 the former to the latter according to the signedness of the
9525 type. */
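/* Illustrative example (not from the original source): a constant of a
   signed type with 24-bit precision whose mode is SImode is sign-extended
   here from 24 to 32 bits before the rtx is created.  */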
9526 temp = immed_wide_int_const (wide_int::from
9527 (exp,
9528 GET_MODE_PRECISION (TYPE_MODE (type)),
9529 TYPE_SIGN (type)),
9530 TYPE_MODE (type));
9531 return temp;
9532
9533 case VECTOR_CST:
9534 {
9535 tree tmp = NULL_TREE;
9536 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9537 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9538 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9539 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9540 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9541 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9542 return const_vector_from_tree (exp);
9543 if (GET_MODE_CLASS (mode) == MODE_INT)
9544 {
9545 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9546 if (type_for_mode)
9547 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9548 }
9549 if (!tmp)
9550 {
9551 vec<constructor_elt, va_gc> *v;
9552 unsigned i;
9553 vec_alloc (v, VECTOR_CST_NELTS (exp));
9554 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9555 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9556 tmp = build_constructor (type, v);
9557 }
9558 return expand_expr (tmp, ignore ? const0_rtx : target,
9559 tmode, modifier);
9560 }
9561
9562 case CONST_DECL:
9563 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9564
9565 case REAL_CST:
9566 /* If optimized, generate immediate CONST_DOUBLE
9567 which will be turned into memory by reload if necessary.
9568
9569 We used to force a register so that loop.c could see it. But
9570 this does not allow gen_* patterns to perform optimizations with
9571 the constants. It also produces two insns in cases like "x = 1.0;".
9572 On most machines, floating-point constants are not permitted in
9573 many insns, so we'd end up copying it to a register in any case.
9574
9575 Now, we do the copying in expand_binop, if appropriate. */
9576 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9577 TYPE_MODE (TREE_TYPE (exp)));
9578
9579 case FIXED_CST:
9580 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9581 TYPE_MODE (TREE_TYPE (exp)));
9582
9583 case COMPLEX_CST:
9584 /* Handle evaluating a complex constant in a CONCAT target. */
9585 if (original_target && GET_CODE (original_target) == CONCAT)
9586 {
9587 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9588 rtx rtarg, itarg;
9589
9590 rtarg = XEXP (original_target, 0);
9591 itarg = XEXP (original_target, 1);
9592
9593 /* Move the real and imaginary parts separately. */
9594 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9595 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9596
9597 if (op0 != rtarg)
9598 emit_move_insn (rtarg, op0);
9599 if (op1 != itarg)
9600 emit_move_insn (itarg, op1);
9601
9602 return original_target;
9603 }
9604
9605 /* ... fall through ... */
9606
9607 case STRING_CST:
9608 temp = expand_expr_constant (exp, 1, modifier);
9609
9610 /* temp contains a constant address.
9611 On RISC machines where a constant address isn't valid,
9612 make some insns to get that address into a register. */
9613 if (modifier != EXPAND_CONST_ADDRESS
9614 && modifier != EXPAND_INITIALIZER
9615 && modifier != EXPAND_SUM
9616 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9617 MEM_ADDR_SPACE (temp)))
9618 return replace_equiv_address (temp,
9619 copy_rtx (XEXP (temp, 0)));
9620 return temp;
9621
9622 case SAVE_EXPR:
9623 {
9624 tree val = treeop0;
9625 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9626 inner_reference_p);
9627
9628 if (!SAVE_EXPR_RESOLVED_P (exp))
9629 {
9630 /* We can indeed still hit this case, typically via builtin
9631 expanders calling save_expr immediately before expanding
9632 something. Assume this means that we only have to deal
9633 with non-BLKmode values. */
9634 gcc_assert (GET_MODE (ret) != BLKmode);
9635
9636 val = build_decl (curr_insn_location (),
9637 VAR_DECL, NULL, TREE_TYPE (exp));
9638 DECL_ARTIFICIAL (val) = 1;
9639 DECL_IGNORED_P (val) = 1;
9640 treeop0 = val;
9641 TREE_OPERAND (exp, 0) = treeop0;
9642 SAVE_EXPR_RESOLVED_P (exp) = 1;
9643
9644 if (!CONSTANT_P (ret))
9645 ret = copy_to_reg (ret);
9646 SET_DECL_RTL (val, ret);
9647 }
9648
9649 return ret;
9650 }
9651
9652
9653 case CONSTRUCTOR:
9654 /* If we don't need the result, just ensure we evaluate any
9655 subexpressions. */
9656 if (ignore)
9657 {
9658 unsigned HOST_WIDE_INT idx;
9659 tree value;
9660
9661 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9662 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9663
9664 return const0_rtx;
9665 }
9666
9667 return expand_constructor (exp, target, modifier, false);
9668
9669 case TARGET_MEM_REF:
9670 {
9671 addr_space_t as
9672 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9673 enum insn_code icode;
9674 unsigned int align;
9675
9676 op0 = addr_for_mem_ref (exp, as, true);
9677 op0 = memory_address_addr_space (mode, op0, as);
9678 temp = gen_rtx_MEM (mode, op0);
9679 set_mem_attributes (temp, exp, 0);
9680 set_mem_addr_space (temp, as);
9681 align = get_object_alignment (exp);
9682 if (modifier != EXPAND_WRITE
9683 && modifier != EXPAND_MEMORY
9684 && mode != BLKmode
9685 && align < GET_MODE_ALIGNMENT (mode)
9686 /* If the target does not have special handling for unaligned
9687 loads of this mode, it can use regular moves for them. */
9688 && ((icode = optab_handler (movmisalign_optab, mode))
9689 != CODE_FOR_nothing))
9690 {
9691 struct expand_operand ops[2];
9692
9693 /* We've already validated the memory, and we're creating a
9694 new pseudo destination. The predicates really can't fail,
9695 nor can the generator. */
9696 create_output_operand (&ops[0], NULL_RTX, mode);
9697 create_fixed_operand (&ops[1], temp);
9698 expand_insn (icode, 2, ops);
9699 temp = ops[0].value;
9700 }
9701 return temp;
9702 }
9703
9704 case MEM_REF:
9705 {
9706 addr_space_t as
9707 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9708 enum machine_mode address_mode;
9709 tree base = TREE_OPERAND (exp, 0);
9710 gimple def_stmt;
9711 enum insn_code icode;
9712 unsigned align;
9713 /* Handle expansion of non-aliased memory with non-BLKmode. That
9714 might end up in a register. */
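/* Hypothetical example (not from the original source): a load such as
   MEM[(int *) &s, 0] where the structure s lives entirely in a register
   is rewritten below as a VIEW_CONVERT_EXPR or BIT_FIELD_REF of s rather
   than being forced through memory (except in the BLKmode case, which
   still uses a stack temporary).  */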
9715 if (mem_ref_refers_to_non_mem_p (exp))
9716 {
9717 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9718 base = TREE_OPERAND (base, 0);
9719 if (offset == 0
9720 && tree_fits_uhwi_p (TYPE_SIZE (type))
9721 && (GET_MODE_BITSIZE (DECL_MODE (base))
9722 == tree_to_uhwi (TYPE_SIZE (type))))
9723 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9724 target, tmode, modifier);
9725 if (TYPE_MODE (type) == BLKmode)
9726 {
9727 temp = assign_stack_temp (DECL_MODE (base),
9728 GET_MODE_SIZE (DECL_MODE (base)));
9729 store_expr (base, temp, 0, false);
9730 temp = adjust_address (temp, BLKmode, offset);
9731 set_mem_size (temp, int_size_in_bytes (type));
9732 return temp;
9733 }
9734 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9735 bitsize_int (offset * BITS_PER_UNIT));
9736 return expand_expr (exp, target, tmode, modifier);
9737 }
9738 address_mode = targetm.addr_space.address_mode (as);
9739 base = TREE_OPERAND (exp, 0);
9740 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9741 {
9742 tree mask = gimple_assign_rhs2 (def_stmt);
9743 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9744 gimple_assign_rhs1 (def_stmt), mask);
9745 TREE_OPERAND (exp, 0) = base;
9746 }
9747 align = get_object_alignment (exp);
9748 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9749 op0 = memory_address_addr_space (mode, op0, as);
9750 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9751 {
9752 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9753 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9754 op0 = memory_address_addr_space (mode, op0, as);
9755 }
9756 temp = gen_rtx_MEM (mode, op0);
9757 set_mem_attributes (temp, exp, 0);
9758 set_mem_addr_space (temp, as);
9759 if (TREE_THIS_VOLATILE (exp))
9760 MEM_VOLATILE_P (temp) = 1;
9761 if (modifier != EXPAND_WRITE
9762 && modifier != EXPAND_MEMORY
9763 && !inner_reference_p
9764 && mode != BLKmode
9765 && align < GET_MODE_ALIGNMENT (mode))
9766 {
9767 if ((icode = optab_handler (movmisalign_optab, mode))
9768 != CODE_FOR_nothing)
9769 {
9770 struct expand_operand ops[2];
9771
9772 /* We've already validated the memory, and we're creating a
9773 new pseudo destination. The predicates really can't fail,
9774 nor can the generator. */
9775 create_output_operand (&ops[0], NULL_RTX, mode);
9776 create_fixed_operand (&ops[1], temp);
9777 expand_insn (icode, 2, ops);
9778 temp = ops[0].value;
9779 }
9780 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9781 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9782 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9783 (modifier == EXPAND_STACK_PARM
9784 ? NULL_RTX : target),
9785 mode, mode);
9786 }
9787 return temp;
9788 }
9789
9790 case ARRAY_REF:
9791
9792 {
9793 tree array = treeop0;
9794 tree index = treeop1;
9795 tree init;
9796
9797 /* Fold an expression like: "foo"[2].
9798 This is not done in fold so it won't happen inside &.
9799 Don't fold if this is for wide characters since it's too
9800 difficult to do correctly and this is a very rare case. */
9801
9802 if (modifier != EXPAND_CONST_ADDRESS
9803 && modifier != EXPAND_INITIALIZER
9804 && modifier != EXPAND_MEMORY)
9805 {
9806 tree t = fold_read_from_constant_string (exp);
9807
9808 if (t)
9809 return expand_expr (t, target, tmode, modifier);
9810 }
9811
9812 /* If this is a constant index into a constant array,
9813 just get the value from the array. Handle both the cases when
9814 we have an explicit constructor and when our operand is a variable
9815 that was declared const. */
9816
9817 if (modifier != EXPAND_CONST_ADDRESS
9818 && modifier != EXPAND_INITIALIZER
9819 && modifier != EXPAND_MEMORY
9820 && TREE_CODE (array) == CONSTRUCTOR
9821 && ! TREE_SIDE_EFFECTS (array)
9822 && TREE_CODE (index) == INTEGER_CST)
9823 {
9824 unsigned HOST_WIDE_INT ix;
9825 tree field, value;
9826
9827 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9828 field, value)
9829 if (tree_int_cst_equal (field, index))
9830 {
9831 if (!TREE_SIDE_EFFECTS (value))
9832 return expand_expr (fold (value), target, tmode, modifier);
9833 break;
9834 }
9835 }
9836
9837 else if (optimize >= 1
9838 && modifier != EXPAND_CONST_ADDRESS
9839 && modifier != EXPAND_INITIALIZER
9840 && modifier != EXPAND_MEMORY
9841 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9842 && TREE_CODE (index) == INTEGER_CST
9843 && (TREE_CODE (array) == VAR_DECL
9844 || TREE_CODE (array) == CONST_DECL)
9845 && (init = ctor_for_folding (array)) != error_mark_node)
9846 {
9847 if (init == NULL_TREE)
9848 {
9849 tree value = build_zero_cst (type);
9850 if (TREE_CODE (value) == CONSTRUCTOR)
9851 {
9852 /* If VALUE is a CONSTRUCTOR, this optimization is only
9853 useful if this doesn't store the CONSTRUCTOR into
9854 memory. If it does, it is more efficient to just
9855 load the data from the array directly. */
9856 rtx ret = expand_constructor (value, target,
9857 modifier, true);
9858 if (ret == NULL_RTX)
9859 value = NULL_TREE;
9860 }
9861
9862 if (value)
9863 return expand_expr (value, target, tmode, modifier);
9864 }
9865 else if (TREE_CODE (init) == CONSTRUCTOR)
9866 {
9867 unsigned HOST_WIDE_INT ix;
9868 tree field, value;
9869
9870 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9871 field, value)
9872 if (tree_int_cst_equal (field, index))
9873 {
9874 if (TREE_SIDE_EFFECTS (value))
9875 break;
9876
9877 if (TREE_CODE (value) == CONSTRUCTOR)
9878 {
9879 /* If VALUE is a CONSTRUCTOR, this
9880 optimization is only useful if
9881 this doesn't store the CONSTRUCTOR
9882 into memory. If it does, it is more
9883 efficient to just load the data from
9884 the array directly. */
9885 rtx ret = expand_constructor (value, target,
9886 modifier, true);
9887 if (ret == NULL_RTX)
9888 break;
9889 }
9890
9891 return
9892 expand_expr (fold (value), target, tmode, modifier);
9893 }
9894 }
9895 else if (TREE_CODE (init) == STRING_CST)
9896 {
9897 tree low_bound = array_ref_low_bound (exp);
9898 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9899
9900 /* Optimize the special case of a zero lower bound.
9901
9902 We convert the lower bound to sizetype to avoid problems
9903 with constant folding. E.g. suppose the lower bound is
9904 1 and its mode is QI. Without the conversion
9905 (ARRAY + (INDEX - (unsigned char)1))
9906 becomes
9907 (ARRAY + (-(unsigned char)1) + INDEX)
9908 which becomes
9909 (ARRAY + 255 + INDEX). Oops! */
9910 if (!integer_zerop (low_bound))
9911 index1 = size_diffop_loc (loc, index1,
9912 fold_convert_loc (loc, sizetype,
9913 low_bound));
9914
9915 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9916 {
9917 tree type = TREE_TYPE (TREE_TYPE (init));
9918 enum machine_mode mode = TYPE_MODE (type);
9919
9920 if (GET_MODE_CLASS (mode) == MODE_INT
9921 && GET_MODE_SIZE (mode) == 1)
9922 return gen_int_mode (TREE_STRING_POINTER (init)
9923 [TREE_INT_CST_LOW (index1)],
9924 mode);
9925 }
9926 }
9927 }
9928 }
9929 goto normal_inner_ref;
9930
9931 case COMPONENT_REF:
9932 /* If the operand is a CONSTRUCTOR, we can just extract the
9933 appropriate field if it is present. */
9934 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9935 {
9936 unsigned HOST_WIDE_INT idx;
9937 tree field, value;
9938
9939 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9940 idx, field, value)
9941 if (field == treeop1
9942 /* We can normally use the value of the field in the
9943 CONSTRUCTOR. However, if this is a bitfield in
9944 an integral mode that we can fit in a HOST_WIDE_INT,
9945 we must mask only the number of bits in the bitfield,
9946 since this is done implicitly by the constructor. If
9947 the bitfield does not meet either of those conditions,
9948 we can't do this optimization. */
9949 && (! DECL_BIT_FIELD (field)
9950 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9951 && (GET_MODE_PRECISION (DECL_MODE (field))
9952 <= HOST_BITS_PER_WIDE_INT))))
9953 {
9954 if (DECL_BIT_FIELD (field)
9955 && modifier == EXPAND_STACK_PARM)
9956 target = 0;
9957 op0 = expand_expr (value, target, tmode, modifier);
9958 if (DECL_BIT_FIELD (field))
9959 {
9960 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9961 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9962
9963 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9964 {
9965 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
9966 imode);
9967 op0 = expand_and (imode, op0, op1, target);
9968 }
9969 else
9970 {
9971 int count = GET_MODE_PRECISION (imode) - bitsize;
9972
9973 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9974 target, 0);
9975 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9976 target, 0);
9977 }
9978 }
9979
9980 return op0;
9981 }
9982 }
9983 goto normal_inner_ref;
9984
9985 case BIT_FIELD_REF:
9986 case ARRAY_RANGE_REF:
9987 normal_inner_ref:
9988 {
9989 enum machine_mode mode1, mode2;
9990 HOST_WIDE_INT bitsize, bitpos;
9991 tree offset;
9992 int volatilep = 0, must_force_mem;
9993 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9994 &mode1, &unsignedp, &volatilep, true);
9995 rtx orig_op0, memloc;
9996 bool mem_attrs_from_type = false;
9997
9998 /* If we got back the original object, something is wrong. Perhaps
9999 we are evaluating an expression too early. In any event, don't
10000 infinitely recurse. */
10001 gcc_assert (tem != exp);
10002
10003 /* If TEM's type is a union of variable size, pass TARGET to the inner
10004 computation, since it will need a temporary and TARGET is known
10005 to be adequate for it. This occurs in unchecked conversion in Ada. */
10006 orig_op0 = op0
10007 = expand_expr_real (tem,
10008 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10009 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10010 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10011 != INTEGER_CST)
10012 && modifier != EXPAND_STACK_PARM
10013 ? target : NULL_RTX),
10014 VOIDmode,
10015 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10016 NULL, true);
10017
10018 /* If the field has a mode, we want to access it in the
10019 field's mode, not the computed mode.
10020 If a MEM has VOIDmode (external with incomplete type),
10021 use BLKmode for it instead. */
10022 if (MEM_P (op0))
10023 {
10024 if (mode1 != VOIDmode)
10025 op0 = adjust_address (op0, mode1, 0);
10026 else if (GET_MODE (op0) == VOIDmode)
10027 op0 = adjust_address (op0, BLKmode, 0);
10028 }
10029
10030 mode2
10031 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10032
10033 /* If we have either an offset, a BLKmode result, or a reference
10034 outside the underlying object, we must force it to memory.
10035 Such a case can occur in Ada if we have unchecked conversion
10036 of an expression from a scalar type to an aggregate type or
10037 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10038 passed a partially uninitialized object or a view-conversion
10039 to a larger size. */
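/* Illustrative example (not from the original source): an Ada unchecked
   conversion that views a DImode scalar as a record and then reads a
   component at some offset cannot be done on the register form, so the
   scalar is copied into a stack temporary below and the component is
   accessed as memory.  */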
10040 must_force_mem = (offset
10041 || mode1 == BLKmode
10042 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10043
10044 /* Handle CONCAT first. */
10045 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10046 {
10047 if (bitpos == 0
10048 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10049 return op0;
10050 if (bitpos == 0
10051 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10052 && bitsize)
10053 {
10054 op0 = XEXP (op0, 0);
10055 mode2 = GET_MODE (op0);
10056 }
10057 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10058 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10059 && bitpos
10060 && bitsize)
10061 {
10062 op0 = XEXP (op0, 1);
10063 bitpos = 0;
10064 mode2 = GET_MODE (op0);
10065 }
10066 else
10067 /* Otherwise force into memory. */
10068 must_force_mem = 1;
10069 }
10070
10071 /* If this is a constant, put it in a register if it is a legitimate
10072 constant and we don't need a memory reference. */
10073 if (CONSTANT_P (op0)
10074 && mode2 != BLKmode
10075 && targetm.legitimate_constant_p (mode2, op0)
10076 && !must_force_mem)
10077 op0 = force_reg (mode2, op0);
10078
10079 /* Otherwise, if this is a constant, try to force it to the constant
10080 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10081 is a legitimate constant. */
10082 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10083 op0 = validize_mem (memloc);
10084
10085 /* Otherwise, if this is a constant or the object is not in memory
10086 and need be, put it there. */
10087 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10088 {
10089 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10090 emit_move_insn (memloc, op0);
10091 op0 = memloc;
10092 mem_attrs_from_type = true;
10093 }
10094
10095 if (offset)
10096 {
10097 enum machine_mode address_mode;
10098 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10099 EXPAND_SUM);
10100
10101 gcc_assert (MEM_P (op0));
10102
10103 address_mode = get_address_mode (op0);
10104 if (GET_MODE (offset_rtx) != address_mode)
10105 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10106
10107 /* See the comment in expand_assignment for the rationale. */
10108 if (mode1 != VOIDmode
10109 && bitpos != 0
10110 && bitsize > 0
10111 && (bitpos % bitsize) == 0
10112 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10113 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10114 {
10115 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10116 bitpos = 0;
10117 }
10118
10119 op0 = offset_address (op0, offset_rtx,
10120 highest_pow2_factor (offset));
10121 }
10122
10123 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10124 record its alignment as BIGGEST_ALIGNMENT. */
10125 if (MEM_P (op0) && bitpos == 0 && offset != 0
10126 && is_aligning_offset (offset, tem))
10127 set_mem_align (op0, BIGGEST_ALIGNMENT);
10128
10129 /* Don't forget about volatility even if this is a bitfield. */
10130 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10131 {
10132 if (op0 == orig_op0)
10133 op0 = copy_rtx (op0);
10134
10135 MEM_VOLATILE_P (op0) = 1;
10136 }
10137
10138 /* In cases where an aligned union has an unaligned object
10139 as a field, we might be extracting a BLKmode value from
10140 an integer-mode (e.g., SImode) object. Handle this case
10141 by doing the extract into an object as wide as the field
10142 (which we know to be the width of a basic mode), then
10143 storing into memory, and changing the mode to BLKmode. */
10144 if (mode1 == VOIDmode
10145 || REG_P (op0) || GET_CODE (op0) == SUBREG
10146 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10147 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10148 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10149 && modifier != EXPAND_CONST_ADDRESS
10150 && modifier != EXPAND_INITIALIZER
10151 && modifier != EXPAND_MEMORY)
10152 /* If the bitfield is volatile and the bitsize
10153 is narrower than the access size of the bitfield,
10154 we need to extract bitfields from the access. */
10155 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10156 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10157 && mode1 != BLKmode
10158 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10159 /* If the field isn't aligned enough to fetch as a memref,
10160 fetch it as a bit field. */
10161 || (mode1 != BLKmode
10162 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10163 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10164 || (MEM_P (op0)
10165 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10166 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10167 && modifier != EXPAND_MEMORY
10168 && ((modifier == EXPAND_CONST_ADDRESS
10169 || modifier == EXPAND_INITIALIZER)
10170 ? STRICT_ALIGNMENT
10171 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10172 || (bitpos % BITS_PER_UNIT != 0)))
10173 /* If the type and the field are a constant size and the
10174 size of the type isn't the same size as the bitfield,
10175 we must use bitfield operations. */
10176 || (bitsize >= 0
10177 && TYPE_SIZE (TREE_TYPE (exp))
10178 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10179 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10180 bitsize)))
10181 {
10182 enum machine_mode ext_mode = mode;
10183
10184 if (ext_mode == BLKmode
10185 && ! (target != 0 && MEM_P (op0)
10186 && MEM_P (target)
10187 && bitpos % BITS_PER_UNIT == 0))
10188 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10189
10190 if (ext_mode == BLKmode)
10191 {
10192 if (target == 0)
10193 target = assign_temp (type, 1, 1);
10194
10195 /* ??? Unlike the similar test a few lines below, this one is
10196 very likely obsolete. */
10197 if (bitsize == 0)
10198 return target;
10199
10200 /* In this case, BITPOS must start at a byte boundary and
10201 TARGET, if specified, must be a MEM. */
10202 gcc_assert (MEM_P (op0)
10203 && (!target || MEM_P (target))
10204 && !(bitpos % BITS_PER_UNIT));
10205
10206 emit_block_move (target,
10207 adjust_address (op0, VOIDmode,
10208 bitpos / BITS_PER_UNIT),
10209 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10210 / BITS_PER_UNIT),
10211 (modifier == EXPAND_STACK_PARM
10212 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10213
10214 return target;
10215 }
10216
10217 /* If we have nothing to extract, the result will be 0 for targets
10218 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10219 return 0 for the sake of consistency, as reading a zero-sized
10220 bitfield is valid in Ada and the value is fully specified. */
10221 if (bitsize == 0)
10222 return const0_rtx;
10223
10224 op0 = validize_mem (op0);
10225
10226 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10227 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10228
10229 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10230 (modifier == EXPAND_STACK_PARM
10231 ? NULL_RTX : target),
10232 ext_mode, ext_mode);
10233
10234 /* If the result is a record type and BITSIZE is narrower than
10235 the mode of OP0, an integral mode, and this is a big endian
10236 machine, we must put the field into the high-order bits. */
10237 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10238 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10239 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10240 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10241 GET_MODE_BITSIZE (GET_MODE (op0))
10242 - bitsize, op0, 1);
10243
10244 /* If the result type is BLKmode, store the data into a temporary
10245 of the appropriate type, but with the mode corresponding to the
10246 mode for the data we have (op0's mode). */
10247 if (mode == BLKmode)
10248 {
10249 rtx new_rtx
10250 = assign_stack_temp_for_type (ext_mode,
10251 GET_MODE_BITSIZE (ext_mode),
10252 type);
10253 emit_move_insn (new_rtx, op0);
10254 op0 = copy_rtx (new_rtx);
10255 PUT_MODE (op0, BLKmode);
10256 }
10257
10258 return op0;
10259 }
10260
10261 /* If the result is BLKmode, use that to access the object
10262 now as well. */
10263 if (mode == BLKmode)
10264 mode1 = BLKmode;
10265
10266 /* Get a reference to just this component. */
10267 if (modifier == EXPAND_CONST_ADDRESS
10268 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10269 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10270 else
10271 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10272
10273 if (op0 == orig_op0)
10274 op0 = copy_rtx (op0);
10275
10276 /* If op0 is a temporary because of forcing to memory, pass only the
10277 type to set_mem_attributes so that the original expression is never
10278 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10279 if (mem_attrs_from_type)
10280 set_mem_attributes (op0, type, 0);
10281 else
10282 set_mem_attributes (op0, exp, 0);
10283
10284 if (REG_P (XEXP (op0, 0)))
10285 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10286
10287 MEM_VOLATILE_P (op0) |= volatilep;
10288 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10289 || modifier == EXPAND_CONST_ADDRESS
10290 || modifier == EXPAND_INITIALIZER)
10291 return op0;
10292
10293 if (target == 0)
10294 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10295
10296 convert_move (target, op0, unsignedp);
10297 return target;
10298 }
10299
10300 case OBJ_TYPE_REF:
10301 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10302
10303 case CALL_EXPR:
10304 /* All valid uses of __builtin_va_arg_pack () are removed during
10305 inlining. */
10306 if (CALL_EXPR_VA_ARG_PACK (exp))
10307 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10308 {
10309 tree fndecl = get_callee_fndecl (exp), attr;
10310
10311 if (fndecl
10312 && (attr = lookup_attribute ("error",
10313 DECL_ATTRIBUTES (fndecl))) != NULL)
10314 error ("%Kcall to %qs declared with attribute error: %s",
10315 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10316 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10317 if (fndecl
10318 && (attr = lookup_attribute ("warning",
10319 DECL_ATTRIBUTES (fndecl))) != NULL)
10320 warning_at (tree_nonartificial_location (exp),
10321 0, "%Kcall to %qs declared with attribute warning: %s",
10322 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10323 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10324
10325 /* Check for a built-in function. */
10326 if (fndecl && DECL_BUILT_IN (fndecl))
10327 {
10328 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10329 return expand_builtin (exp, target, subtarget, tmode, ignore);
10330 }
10331 }
10332 return expand_call (exp, target, ignore);
10333
10334 case VIEW_CONVERT_EXPR:
10335 op0 = NULL_RTX;
10336
10337 /* If we are converting to BLKmode, try to avoid an intermediate
10338 temporary by fetching an inner memory reference. */
10339 if (mode == BLKmode
10340 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10341 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10342 && handled_component_p (treeop0))
10343 {
10344 enum machine_mode mode1;
10345 HOST_WIDE_INT bitsize, bitpos;
10346 tree offset;
10347 int unsignedp;
10348 int volatilep = 0;
10349 tree tem
10350 = get_inner_reference (treeop0, &bitsize, &bitpos,
10351 &offset, &mode1, &unsignedp, &volatilep,
10352 true);
10353 rtx orig_op0;
10354
10355 /* ??? We should work harder and deal with non-zero offsets. */
10356 if (!offset
10357 && (bitpos % BITS_PER_UNIT) == 0
10358 && bitsize >= 0
10359 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10360 {
10361 /* See the normal_inner_ref case for the rationale. */
10362 orig_op0
10363 = expand_expr_real (tem,
10364 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10365 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10366 != INTEGER_CST)
10367 && modifier != EXPAND_STACK_PARM
10368 ? target : NULL_RTX),
10369 VOIDmode,
10370 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10371 NULL, true);
10372
10373 if (MEM_P (orig_op0))
10374 {
10375 op0 = orig_op0;
10376
10377 /* Get a reference to just this component. */
10378 if (modifier == EXPAND_CONST_ADDRESS
10379 || modifier == EXPAND_SUM
10380 || modifier == EXPAND_INITIALIZER)
10381 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10382 else
10383 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10384
10385 if (op0 == orig_op0)
10386 op0 = copy_rtx (op0);
10387
10388 set_mem_attributes (op0, treeop0, 0);
10389 if (REG_P (XEXP (op0, 0)))
10390 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10391
10392 MEM_VOLATILE_P (op0) |= volatilep;
10393 }
10394 }
10395 }
10396
10397 if (!op0)
10398 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10399 NULL, inner_reference_p);
10400
10401 /* If the input and output modes are both the same, we are done. */
10402 if (mode == GET_MODE (op0))
10403 ;
10404 /* If neither mode is BLKmode, and both modes are the same size
10405 then we can use gen_lowpart. */
10406 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10407 && (GET_MODE_PRECISION (mode)
10408 == GET_MODE_PRECISION (GET_MODE (op0)))
10409 && !COMPLEX_MODE_P (GET_MODE (op0)))
10410 {
10411 if (GET_CODE (op0) == SUBREG)
10412 op0 = force_reg (GET_MODE (op0), op0);
10413 temp = gen_lowpart_common (mode, op0);
10414 if (temp)
10415 op0 = temp;
10416 else
10417 {
10418 if (!REG_P (op0) && !MEM_P (op0))
10419 op0 = force_reg (GET_MODE (op0), op0);
10420 op0 = gen_lowpart (mode, op0);
10421 }
10422 }
10423 /* If both types are integral, convert from one mode to the other. */
10424 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10425 op0 = convert_modes (mode, GET_MODE (op0), op0,
10426 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10427 /* If the output type is a bit-field type, do an extraction. */
10428 else if (reduce_bit_field)
10429 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10430 TYPE_UNSIGNED (type), NULL_RTX,
10431 mode, mode);
10432 /* As a last resort, spill op0 to memory, and reload it in a
10433 different mode. */
10434 else if (!MEM_P (op0))
10435 {
10436 /* If the operand is not a MEM, force it into memory. Since we
10437 are going to be changing the mode of the MEM, don't call
10438 force_const_mem for constants because we don't allow pool
10439 constants to change mode. */
10440 tree inner_type = TREE_TYPE (treeop0);
10441
10442 gcc_assert (!TREE_ADDRESSABLE (exp));
10443
10444 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10445 target
10446 = assign_stack_temp_for_type
10447 (TYPE_MODE (inner_type),
10448 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10449
10450 emit_move_insn (target, op0);
10451 op0 = target;
10452 }
10453
10454 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10455 output type is such that the operand is known to be aligned, indicate
10456 that it is. Otherwise, we need only be concerned about alignment for
10457 non-BLKmode results. */
10458 if (MEM_P (op0))
10459 {
10460 enum insn_code icode;
10461
10462 if (TYPE_ALIGN_OK (type))
10463 {
10464 /* ??? Copying the MEM without substantially changing it might
10465 run afoul of the code handling volatile memory references in
10466 store_expr, which assumes that TARGET is returned unmodified
10467 if it has been used. */
10468 op0 = copy_rtx (op0);
10469 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10470 }
10471 else if (modifier != EXPAND_WRITE
10472 && modifier != EXPAND_MEMORY
10473 && !inner_reference_p
10474 && mode != BLKmode
10475 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10476 {
10477 /* If the target has special handling for unaligned
10478 loads of this mode, use it. */
10479 if ((icode = optab_handler (movmisalign_optab, mode))
10480 != CODE_FOR_nothing)
10481 {
10482 rtx reg, insn;
10483
10484 op0 = adjust_address (op0, mode, 0);
10485 /* We've already validated the memory, and we're creating a
10486 new pseudo destination. The predicates really can't
10487 fail. */
10488 reg = gen_reg_rtx (mode);
10489
10490 /* Nor can the insn generator. */
10491 insn = GEN_FCN (icode) (reg, op0);
10492 emit_insn (insn);
10493 return reg;
10494 }
10495 else if (STRICT_ALIGNMENT)
10496 {
10497 tree inner_type = TREE_TYPE (treeop0);
10498 HOST_WIDE_INT temp_size
10499 = MAX (int_size_in_bytes (inner_type),
10500 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10501 rtx new_rtx
10502 = assign_stack_temp_for_type (mode, temp_size, type);
10503 rtx new_with_op0_mode
10504 = adjust_address (new_rtx, GET_MODE (op0), 0);
10505
10506 gcc_assert (!TREE_ADDRESSABLE (exp));
10507
10508 if (GET_MODE (op0) == BLKmode)
10509 emit_block_move (new_with_op0_mode, op0,
10510 GEN_INT (GET_MODE_SIZE (mode)),
10511 (modifier == EXPAND_STACK_PARM
10512 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10513 else
10514 emit_move_insn (new_with_op0_mode, op0);
10515
10516 op0 = new_rtx;
10517 }
10518 }
10519
10520 op0 = adjust_address (op0, mode, 0);
10521 }
10522
10523 return op0;
10524
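/* [Editorial illustration; not part of the original source.]  A rough
   source-level picture of the view-convert handling above: the operand's
   bits are reinterpreted in a same-sized type, much as

       unsigned int
       float_bits (float x)        assuming 32-bit float and unsigned int
       {
         union { float f; unsigned int u; } pun;
         pun.f = x;
         return pun.u;
       }

   reinterprets a 32-bit float as a 32-bit integer.  When the input and
   output modes have equal precision this is handled by the gen_lowpart
   path; when neither gen_lowpart nor an integral mode conversion applies,
   the code falls back to spilling the operand to memory and reloading it
   in the new mode.  Whether a given front end expresses a particular
   construct as such a reinterpretation is not implied here.  */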
10525 case MODIFY_EXPR:
10526 {
10527 tree lhs = treeop0;
10528 tree rhs = treeop1;
10529 gcc_assert (ignore);
10530
10531 /* Check for |= or &= of a bitfield of size one into another bitfield
10532 of size 1. In this case, (unless we need the result of the
10533 assignment) we can do this more efficiently with a
10534 test followed by an assignment, if necessary.
10535
10536 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10537 things change so we do, this code should be enhanced to
10538 support it. */
10539 if (TREE_CODE (lhs) == COMPONENT_REF
10540 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10541 || TREE_CODE (rhs) == BIT_AND_EXPR)
10542 && TREE_OPERAND (rhs, 0) == lhs
10543 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10544 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10545 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10546 {
10547 rtx label = gen_label_rtx ();
10548 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10549 do_jump (TREE_OPERAND (rhs, 1),
10550 value ? label : 0,
10551 value ? 0 : label, -1);
10552 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10553 false);
10554 do_pending_stack_adjust ();
10555 emit_label (label);
10556 return const0_rtx;
10557 }
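/* [Editorial illustration; not part of the original source.]  Assuming
   two single-bit bitfields, the shortcut above expands

       struct s { unsigned a : 1; unsigned b : 1; };

       void
       or_bits (struct s *p)
       {
         p->a |= p->b;
       }

   roughly as if it had been written

       if (p->b)
         p->a = 1;

   and the &= form similarly stores 0 when the tested bit is clear, so
   the read-modify-write of the destination bitfield is replaced by a
   conditional store.  */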
10558
10559 expand_assignment (lhs, rhs, false);
10560 return const0_rtx;
10561 }
10562
10563 case ADDR_EXPR:
10564 return expand_expr_addr_expr (exp, target, tmode, modifier);
10565
10566 case REALPART_EXPR:
10567 op0 = expand_normal (treeop0);
10568 return read_complex_part (op0, false);
10569
10570 case IMAGPART_EXPR:
10571 op0 = expand_normal (treeop0);
10572 return read_complex_part (op0, true);
10573
10574 case RETURN_EXPR:
10575 case LABEL_EXPR:
10576 case GOTO_EXPR:
10577 case SWITCH_EXPR:
10578 case ASM_EXPR:
10579 /* Expanded in cfgexpand.c. */
10580 gcc_unreachable ();
10581
10582 case TRY_CATCH_EXPR:
10583 case CATCH_EXPR:
10584 case EH_FILTER_EXPR:
10585 case TRY_FINALLY_EXPR:
10586 /* Lowered by tree-eh.c. */
10587 gcc_unreachable ();
10588
10589 case WITH_CLEANUP_EXPR:
10590 case CLEANUP_POINT_EXPR:
10591 case TARGET_EXPR:
10592 case CASE_LABEL_EXPR:
10593 case VA_ARG_EXPR:
10594 case BIND_EXPR:
10595 case INIT_EXPR:
10596 case CONJ_EXPR:
10597 case COMPOUND_EXPR:
10598 case PREINCREMENT_EXPR:
10599 case PREDECREMENT_EXPR:
10600 case POSTINCREMENT_EXPR:
10601 case POSTDECREMENT_EXPR:
10602 case LOOP_EXPR:
10603 case EXIT_EXPR:
10604 case COMPOUND_LITERAL_EXPR:
10605 /* Lowered by gimplify.c. */
10606 gcc_unreachable ();
10607
10608 case FDESC_EXPR:
10609 /* Function descriptors are not valid except for as
10610 initialization constants, and should not be expanded. */
10611 gcc_unreachable ();
10612
10613 case WITH_SIZE_EXPR:
10614 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10615 have pulled out the size to use in whatever context it needed. */
10616 return expand_expr_real (treeop0, original_target, tmode,
10617 modifier, alt_rtl, inner_reference_p);
10618
10619 default:
10620 return expand_expr_real_2 (&ops, target, tmode, modifier);
10621 }
10622 }
10623 \f
10624 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10625 signedness of TYPE), possibly returning the result in TARGET. */
10626 static rtx
10627 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10628 {
10629 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10630 if (target && GET_MODE (target) != GET_MODE (exp))
10631 target = 0;
10632 /* For constant values, reduce using build_int_cst_type. */
10633 if (CONST_INT_P (exp))
10634 {
10635 HOST_WIDE_INT value = INTVAL (exp);
10636 tree t = build_int_cst_type (type, value);
10637 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10638 }
10639 else if (TYPE_UNSIGNED (type))
10640 {
10641 enum machine_mode mode = GET_MODE (exp);
10642 rtx mask = immed_wide_int_const
10643 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10644 return expand_and (mode, exp, mask, target);
10645 }
10646 else
10647 {
10648 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10649 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10650 exp, count, target, 0);
10651 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10652 exp, count, target, 0);
10653 }
10654 }
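/* [Editorial illustration; not part of the original source.]  For
   example, reducing a value carried in a 32-bit register to a 5-bit
   bit-field type:

       unsigned 5-bit type:  result = value & 0x1f
                             (the mask is (1 << 5) - 1),
                             so 123 (0x7b) reduces to 27 (0x1b);

       signed 5-bit type:    result = (value << 27) >> 27
                             with an arithmetic right shift,
                             so 27 (binary 11011) reduces to -5.

   The first case is the TYPE_UNSIGNED branch above, the second the
   final shift-pair branch.  */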
10655 \f
10656 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10657 when applied to the address of EXP produces an address known to be
10658 aligned more than BIGGEST_ALIGNMENT. */
10659
10660 static int
10661 is_aligning_offset (const_tree offset, const_tree exp)
10662 {
10663 /* Strip off any conversions. */
10664 while (CONVERT_EXPR_P (offset))
10665 offset = TREE_OPERAND (offset, 0);
10666
10667 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10668 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10669 if (TREE_CODE (offset) != BIT_AND_EXPR
10670 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10671 || compare_tree_int (TREE_OPERAND (offset, 1),
10672 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10673 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10674 return 0;
10675
10676 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10677 It must be NEGATE_EXPR. Then strip any more conversions. */
10678 offset = TREE_OPERAND (offset, 0);
10679 while (CONVERT_EXPR_P (offset))
10680 offset = TREE_OPERAND (offset, 0);
10681
10682 if (TREE_CODE (offset) != NEGATE_EXPR)
10683 return 0;
10684
10685 offset = TREE_OPERAND (offset, 0);
10686 while (CONVERT_EXPR_P (offset))
10687 offset = TREE_OPERAND (offset, 0);
10688
10689 /* This must now be the address of EXP. */
10690 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10691 }
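/* [Editorial illustration; not part of the original source.]  The shape
   recognized above is the offset produced by the usual "round an address
   up to a large alignment" idiom, i.e. an offset of the form

       (- (sizetype) &EXP) & (ALIGN - 1)

   where ALIGN is a power of two and ALIGN - 1 exceeds
   BIGGEST_ALIGNMENT / BITS_PER_UNIT.  At the source level that is
   roughly

       enum { ALIGN = 64 };     hypothetical, assumed sufficiently large
       static char buf[1024 + ALIGN - 1];

       char *
       aligned_buf (void)
       {
         return &buf[(- (unsigned long) &buf[0]) & (ALIGN - 1)];
       }

   which yields an ALIGN-byte-aligned pointer into BUF; whether a front
   end presents the offset in exactly this tree form is an assumption
   here.  */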
10692 \f
10693 /* Return the tree node if ARG corresponds to a string constant, or zero
10694 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10695 in bytes within the string that ARG is accessing. The type of the
10696 offset will be `sizetype'. */
10697
10698 tree
10699 string_constant (tree arg, tree *ptr_offset)
10700 {
10701 tree array, offset, lower_bound;
10702 STRIP_NOPS (arg);
10703
10704 if (TREE_CODE (arg) == ADDR_EXPR)
10705 {
10706 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10707 {
10708 *ptr_offset = size_zero_node;
10709 return TREE_OPERAND (arg, 0);
10710 }
10711 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10712 {
10713 array = TREE_OPERAND (arg, 0);
10714 offset = size_zero_node;
10715 }
10716 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10717 {
10718 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10719 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10720 if (TREE_CODE (array) != STRING_CST
10721 && TREE_CODE (array) != VAR_DECL)
10722 return 0;
10723
10724 /* Check if the array has a nonzero lower bound. */
10725 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10726 if (!integer_zerop (lower_bound))
10727 {
10728 /* If the offset and base aren't both constants, return 0. */
10729 if (TREE_CODE (lower_bound) != INTEGER_CST)
10730 return 0;
10731 if (TREE_CODE (offset) != INTEGER_CST)
10732 return 0;
10733 /* Adjust offset by the lower bound. */
10734 offset = size_diffop (fold_convert (sizetype, offset),
10735 fold_convert (sizetype, lower_bound));
10736 }
10737 }
10738 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10739 {
10740 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10741 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10742 if (TREE_CODE (array) != ADDR_EXPR)
10743 return 0;
10744 array = TREE_OPERAND (array, 0);
10745 if (TREE_CODE (array) != STRING_CST
10746 && TREE_CODE (array) != VAR_DECL)
10747 return 0;
10748 }
10749 else
10750 return 0;
10751 }
10752 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10753 {
10754 tree arg0 = TREE_OPERAND (arg, 0);
10755 tree arg1 = TREE_OPERAND (arg, 1);
10756
10757 STRIP_NOPS (arg0);
10758 STRIP_NOPS (arg1);
10759
10760 if (TREE_CODE (arg0) == ADDR_EXPR
10761 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10762 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10763 {
10764 array = TREE_OPERAND (arg0, 0);
10765 offset = arg1;
10766 }
10767 else if (TREE_CODE (arg1) == ADDR_EXPR
10768 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10769 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10770 {
10771 array = TREE_OPERAND (arg1, 0);
10772 offset = arg0;
10773 }
10774 else
10775 return 0;
10776 }
10777 else
10778 return 0;
10779
10780 if (TREE_CODE (array) == STRING_CST)
10781 {
10782 *ptr_offset = fold_convert (sizetype, offset);
10783 return array;
10784 }
10785 else if (TREE_CODE (array) == VAR_DECL
10786 || TREE_CODE (array) == CONST_DECL)
10787 {
10788 int length;
10789 tree init = ctor_for_folding (array);
10790
10791 /* Variables initialized to string literals can be handled too. */
10792 if (init == error_mark_node
10793 || !init
10794 || TREE_CODE (init) != STRING_CST)
10795 return 0;
10796
10797 /* Avoid const char foo[4] = "abcde"; */
10798 if (DECL_SIZE_UNIT (array) == NULL_TREE
10799 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10800 || (length = TREE_STRING_LENGTH (init)) <= 0
10801 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10802 return 0;
10803
10804 /* If variable is bigger than the string literal, OFFSET must be constant
10805 and inside of the bounds of the string literal. */
10806 offset = fold_convert (sizetype, offset);
10807 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10808 && (! tree_fits_uhwi_p (offset)
10809 || compare_tree_int (offset, length) >= 0))
10810 return 0;
10811
10812 *ptr_offset = offset;
10813 return init;
10814 }
10815
10816 return 0;
10817 }
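/* [Editorial illustration; not part of the original source.]  Typical
   arguments this routine decomposes, e.g. when builtin folding asks for
   the string behind a pointer:

       __builtin_strlen ("hello" + 2)      ARG is a POINTER_PLUS_EXPR;
                                           the STRING_CST "hello" is
                                           returned with *PTR_OFFSET 2,
                                           so the length folds to 3.

       static const char msg[] = "hello";
       __builtin_strlen (&msg[1])          ARG is (roughly) the address of
                                           an ARRAY_REF of the VAR_DECL,
                                           or the equivalent MEM_REF form;
                                           the initializer "hello" is
                                           returned with *PTR_OFFSET 1
                                           (length 4).

   The precise tree shapes depend on earlier folding, so the cases above
   are indicative rather than exhaustive.  */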
10818 \f
10819 /* Generate code to calculate the comparison described by OPS (an
10820 exploded expression) using a store-flag instruction, and return an
10821 rtx for the result.
10822
10823 If TARGET is nonzero, store the result there if convenient.
10824
10825 Return zero if there is no suitable set-flag instruction
10826 available on this machine.
10827
10828 Once expand_expr has been called on the arguments of the comparison,
10829 we are committed to doing the store flag, since it is not safe to
10830 re-evaluate the expression. We emit the store-flag insn by calling
10831 emit_store_flag, but only expand the arguments if we have a reason
10832 to believe that emit_store_flag will be successful. If we think that
10833 it will, but it isn't, we have to simulate the store-flag with a
10834 set/jump/set sequence. */
10835
10836 static rtx
10837 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10838 {
10839 enum rtx_code code;
10840 tree arg0, arg1, type;
10841 tree tem;
10842 enum machine_mode operand_mode;
10843 int unsignedp;
10844 rtx op0, op1;
10845 rtx subtarget = target;
10846 location_t loc = ops->location;
10847
10848 arg0 = ops->op0;
10849 arg1 = ops->op1;
10850
10851 /* Don't crash if the comparison was erroneous. */
10852 if (arg0 == error_mark_node || arg1 == error_mark_node)
10853 return const0_rtx;
10854
10855 type = TREE_TYPE (arg0);
10856 operand_mode = TYPE_MODE (type);
10857 unsignedp = TYPE_UNSIGNED (type);
10858
10859 /* We won't bother with BLKmode store-flag operations because it would mean
10860 passing a lot of information to emit_store_flag. */
10861 if (operand_mode == BLKmode)
10862 return 0;
10863
10864 /* We won't bother with store-flag operations involving function pointers
10865 when function pointers must be canonicalized before comparisons. */
10866 #ifdef HAVE_canonicalize_funcptr_for_compare
10867 if (HAVE_canonicalize_funcptr_for_compare
10868 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10869 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10870 == FUNCTION_TYPE))
10871 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10872 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10873 == FUNCTION_TYPE))))
10874 return 0;
10875 #endif
10876
10877 STRIP_NOPS (arg0);
10878 STRIP_NOPS (arg1);
10879
10880 /* For vector typed comparisons emit code to generate the desired
10881 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10882 expander for this. */
10883 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10884 {
10885 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10886 tree if_true = constant_boolean_node (true, ops->type);
10887 tree if_false = constant_boolean_node (false, ops->type);
10888 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10889 }
10890
10891 /* Get the rtx comparison code to use. We know that EXP is a comparison
10892 operation of some type. Some comparisons against 1 and -1 can be
10893 converted to comparisons with zero. Do so here so that the tests
10894 below will be aware that we have a comparison with zero. These
10895 tests will not catch constants in the first operand, but constants
10896 are rarely passed as the first operand. */
10897
10898 switch (ops->code)
10899 {
10900 case EQ_EXPR:
10901 code = EQ;
10902 break;
10903 case NE_EXPR:
10904 code = NE;
10905 break;
10906 case LT_EXPR:
10907 if (integer_onep (arg1))
10908 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10909 else
10910 code = unsignedp ? LTU : LT;
10911 break;
10912 case LE_EXPR:
10913 if (! unsignedp && integer_all_onesp (arg1))
10914 arg1 = integer_zero_node, code = LT;
10915 else
10916 code = unsignedp ? LEU : LE;
10917 break;
10918 case GT_EXPR:
10919 if (! unsignedp && integer_all_onesp (arg1))
10920 arg1 = integer_zero_node, code = GE;
10921 else
10922 code = unsignedp ? GTU : GT;
10923 break;
10924 case GE_EXPR:
10925 if (integer_onep (arg1))
10926 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10927 else
10928 code = unsignedp ? GEU : GE;
10929 break;
10930
10931 case UNORDERED_EXPR:
10932 code = UNORDERED;
10933 break;
10934 case ORDERED_EXPR:
10935 code = ORDERED;
10936 break;
10937 case UNLT_EXPR:
10938 code = UNLT;
10939 break;
10940 case UNLE_EXPR:
10941 code = UNLE;
10942 break;
10943 case UNGT_EXPR:
10944 code = UNGT;
10945 break;
10946 case UNGE_EXPR:
10947 code = UNGE;
10948 break;
10949 case UNEQ_EXPR:
10950 code = UNEQ;
10951 break;
10952 case LTGT_EXPR:
10953 code = LTGT;
10954 break;
10955
10956 default:
10957 gcc_unreachable ();
10958 }
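/* [Editorial illustration; not part of the original source.]  The
   constant adjustments above, for a signed operand X and an unsigned
   operand U:

       x <  1     becomes   x <= 0
       x >= 1     becomes   x >  0
       x <= -1    becomes   x <  0
       x >  -1    becomes   x >= 0
       u <  1     becomes   u <= 0   (LEU, i.e. u == 0)

   so that the code below only needs to recognize comparisons against
   zero.  */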
10959
10960 /* Put a constant second. */
10961 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10962 || TREE_CODE (arg0) == FIXED_CST)
10963 {
10964 tem = arg0; arg0 = arg1; arg1 = tem;
10965 code = swap_condition (code);
10966 }
10967
10968 /* If this is an equality or inequality test of a single bit, we can
10969 do this by shifting the bit being tested to the low-order bit and
10970 masking the result with the constant 1. If the condition was EQ,
10971 we xor it with 1. This does not require an scc insn and is faster
10972 than an scc insn even if we have it.
10973
10974 The code to make this transformation was moved into fold_single_bit_test,
10975 so we just call into the folder and expand its result. */
10976
10977 if ((code == NE || code == EQ)
10978 && integer_zerop (arg1)
10979 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10980 {
10981 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
10982 if (srcstmt
10983 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
10984 {
10985 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
10986 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10987 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
10988 gimple_assign_rhs1 (srcstmt),
10989 gimple_assign_rhs2 (srcstmt));
10990 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
10991 if (temp)
10992 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
10993 }
10994 }
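/* [Editorial illustration; not part of the original source.]  For a
   32-bit int X, the rewrite delegated to fold_single_bit_test above
   turns, for instance,

       r = (x & 8) != 0;     into     r = (x >> 3) & 1;
       r = (x & 8) == 0;     into     r = ((x >> 3) & 1) ^ 1;

   using only a shift, a mask and (for the EQ form) an XOR instead of a
   store-flag instruction.  */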
10995
10996 if (! get_subtarget (target)
10997 || GET_MODE (subtarget) != operand_mode)
10998 subtarget = 0;
10999
11000 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11001
11002 if (target == 0)
11003 target = gen_reg_rtx (mode);
11004
11005 /* Try a cstore if possible. */
11006 return emit_store_flag_force (target, code, op0, op1,
11007 operand_mode, unsignedp,
11008 (TYPE_PRECISION (ops->type) == 1
11009 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11010 }
11011 \f
11012
11013 /* Stubs in case we haven't got a casesi insn. */
11014 #ifndef HAVE_casesi
11015 # define HAVE_casesi 0
11016 # define gen_casesi(a, b, c, d, e) (0)
11017 # define CODE_FOR_casesi CODE_FOR_nothing
11018 #endif
11019
11020 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11021 0 otherwise (i.e. if there is no casesi instruction).
11022
11023 DEFAULT_PROBABILITY is the probability of jumping to the default
11024 label. */
11025 int
11026 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11027 rtx table_label, rtx default_label, rtx fallback_label,
11028 int default_probability)
11029 {
11030 struct expand_operand ops[5];
11031 enum machine_mode index_mode = SImode;
11032 rtx op1, op2, index;
11033
11034 if (! HAVE_casesi)
11035 return 0;
11036
11037 /* Convert the index to SImode. */
11038 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11039 {
11040 enum machine_mode omode = TYPE_MODE (index_type);
11041 rtx rangertx = expand_normal (range);
11042
11043 /* We must handle the endpoints in the original mode. */
11044 index_expr = build2 (MINUS_EXPR, index_type,
11045 index_expr, minval);
11046 minval = integer_zero_node;
11047 index = expand_normal (index_expr);
11048 if (default_label)
11049 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11050 omode, 1, default_label,
11051 default_probability);
11052 /* Now we can safely truncate. */
11053 index = convert_to_mode (index_mode, index, 0);
11054 }
11055 else
11056 {
11057 if (TYPE_MODE (index_type) != index_mode)
11058 {
11059 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11060 index_expr = fold_convert (index_type, index_expr);
11061 }
11062
11063 index = expand_normal (index_expr);
11064 }
11065
11066 do_pending_stack_adjust ();
11067
11068 op1 = expand_normal (minval);
11069 op2 = expand_normal (range);
11070
11071 create_input_operand (&ops[0], index, index_mode);
11072 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11073 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11074 create_fixed_operand (&ops[3], table_label);
11075 create_fixed_operand (&ops[4], (default_label
11076 ? default_label
11077 : fallback_label));
11078 expand_jump_insn (CODE_FOR_casesi, 5, ops);
11079 return 1;
11080 }
11081
11082 /* Attempt to generate a tablejump instruction; same concept. */
11083 #ifndef HAVE_tablejump
11084 #define HAVE_tablejump 0
11085 #define gen_tablejump(x, y) (0)
11086 #endif
11087
11088 /* Subroutine of the next function.
11089
11090 INDEX is the value being switched on, with the lowest value
11091 in the table already subtracted.
11092 MODE is its expected mode (needed if INDEX is constant).
11093 RANGE is the length of the jump table.
11094 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11095
11096 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11097 index value is out of range.
11098 DEFAULT_PROBABILITY is the probability of jumping to
11099 the default label. */
11100
11101 static void
11102 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
11103 rtx default_label, int default_probability)
11104 {
11105 rtx temp, vector;
11106
11107 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11108 cfun->cfg->max_jumptable_ents = INTVAL (range);
11109
11110 /* Do an unsigned comparison (in the proper mode) between the index
11111 expression and the value which represents the length of the range.
11112 Since we just finished subtracting the lower bound of the range
11113 from the index expression, this comparison allows us to simultaneously
11114 check that the original index expression value is both greater than
11115 or equal to the minimum value of the range and less than or equal to
11116 the maximum value of the range. */
11117
11118 if (default_label)
11119 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11120 default_label, default_probability);
11121
11122
11123 /* If index is in range, it must fit in Pmode.
11124 Convert to Pmode so we can index with it. */
11125 if (mode != Pmode)
11126 index = convert_to_mode (Pmode, index, 1);
11127
11128 /* Don't let a MEM slip through, because then INDEX that comes
11129 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11130 and break_out_memory_refs will go to work on it and mess it up. */
11131 #ifdef PIC_CASE_VECTOR_ADDRESS
11132 if (flag_pic && !REG_P (index))
11133 index = copy_to_mode_reg (Pmode, index);
11134 #endif
11135
11136 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11137 GET_MODE_SIZE, because this indicates how large insns are. The other
11138 uses should all be Pmode, because they are addresses. This code
11139 could fail if addresses and insns are not the same size. */
11140 index = simplify_gen_binary (MULT, Pmode, index,
11141 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11142 Pmode));
11143 index = simplify_gen_binary (PLUS, Pmode, index,
11144 gen_rtx_LABEL_REF (Pmode, table_label));
11145
11146 #ifdef PIC_CASE_VECTOR_ADDRESS
11147 if (flag_pic)
11148 index = PIC_CASE_VECTOR_ADDRESS (index);
11149 else
11150 #endif
11151 index = memory_address (CASE_VECTOR_MODE, index);
11152 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11153 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11154 convert_move (temp, vector, 0);
11155
11156 emit_jump_insn (gen_tablejump (temp, table_label));
11157
11158 /* If we are generating PIC code or if the table is PC-relative, the
11159 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11160 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11161 emit_barrier ();
11162 }
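/* [Editorial illustration; not part of the original source.]  For a
   switch whose case values run from 3 to 10, the caller passes
   INDEX = x - 3 and RANGE = 7, and the code above behaves roughly like

       if ((unsigned) index > 7)        one unsigned compare catches
         goto default_label;            both x < 3 and x > 10

       addr  = &table_label
               + index * GET_MODE_SIZE (CASE_VECTOR_MODE);
       entry = *addr;                   load the dispatch entry
       jump to entry;                   gen_tablejump

   with the PIC_CASE_VECTOR_ADDRESS hook applied to ADDR when generating
   PIC code.  */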
11163
11164 int
11165 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11166 rtx table_label, rtx default_label, int default_probability)
11167 {
11168 rtx index;
11169
11170 if (! HAVE_tablejump)
11171 return 0;
11172
11173 index_expr = fold_build2 (MINUS_EXPR, index_type,
11174 fold_convert (index_type, index_expr),
11175 fold_convert (index_type, minval));
11176 index = expand_normal (index_expr);
11177 do_pending_stack_adjust ();
11178
11179 do_tablejump (index, TYPE_MODE (index_type),
11180 convert_modes (TYPE_MODE (index_type),
11181 TYPE_MODE (TREE_TYPE (range)),
11182 expand_normal (range),
11183 TYPE_UNSIGNED (TREE_TYPE (range))),
11184 table_label, default_label, default_probability);
11185 return 1;
11186 }
11187
11188 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11189 static rtx
11190 const_vector_from_tree (tree exp)
11191 {
11192 rtvec v;
11193 unsigned i;
11194 int units;
11195 tree elt;
11196 enum machine_mode inner, mode;
11197
11198 mode = TYPE_MODE (TREE_TYPE (exp));
11199
11200 if (initializer_zerop (exp))
11201 return CONST0_RTX (mode);
11202
11203 units = GET_MODE_NUNITS (mode);
11204 inner = GET_MODE_INNER (mode);
11205
11206 v = rtvec_alloc (units);
11207
11208 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11209 {
11210 elt = VECTOR_CST_ELT (exp, i);
11211
11212 if (TREE_CODE (elt) == REAL_CST)
11213 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11214 inner);
11215 else if (TREE_CODE (elt) == FIXED_CST)
11216 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11217 inner);
11218 else
11219 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11220 }
11221
11222 return gen_rtx_CONST_VECTOR (mode, v);
11223 }
11224
11225 /* Build a decl for a personality function given a language prefix. */
11226
11227 tree
11228 build_personality_function (const char *lang)
11229 {
11230 const char *unwind_and_version;
11231 tree decl, type;
11232 char *name;
11233
11234 switch (targetm_common.except_unwind_info (&global_options))
11235 {
11236 case UI_NONE:
11237 return NULL;
11238 case UI_SJLJ:
11239 unwind_and_version = "_sj0";
11240 break;
11241 case UI_DWARF2:
11242 case UI_TARGET:
11243 unwind_and_version = "_v0";
11244 break;
11245 case UI_SEH:
11246 unwind_and_version = "_seh0";
11247 break;
11248 default:
11249 gcc_unreachable ();
11250 }
11251
11252 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11253
11254 type = build_function_type_list (integer_type_node, integer_type_node,
11255 long_long_unsigned_type_node,
11256 ptr_type_node, ptr_type_node, NULL_TREE);
11257 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11258 get_identifier (name), type);
11259 DECL_ARTIFICIAL (decl) = 1;
11260 DECL_EXTERNAL (decl) = 1;
11261 TREE_PUBLIC (decl) = 1;
11262
11263 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11264 are the flags assigned by targetm.encode_section_info. */
11265 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11266
11267 return decl;
11268 }
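/* [Editorial illustration; not part of the original source.]  For
   example, build_personality_function ("gxx") declares
   "__gxx_personality_v0" when DWARF2 or target unwind info is in use,
   "__gxx_personality_sj0" for setjmp/longjmp unwinding and
   "__gxx_personality_seh0" for SEH, while a "gcc" prefix yields the
   corresponding "__gcc_personality_*" names.  */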
11269
11270 /* Extracts the personality function of DECL and returns the corresponding
11271 libfunc. */
11272
11273 rtx
11274 get_personality_function (tree decl)
11275 {
11276 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11277 enum eh_personality_kind pk;
11278
11279 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11280 if (pk == eh_personality_none)
11281 return NULL;
11282
11283 if (!personality
11284 && pk == eh_personality_any)
11285 personality = lang_hooks.eh_personality ();
11286
11287 if (pk == eh_personality_lang)
11288 gcc_assert (personality != NULL_TREE);
11289
11290 return XEXP (DECL_RTL (personality), 0);
11291 }
11292
11293 #include "gt-expr.h"