1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "hash-set.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stringpool.h"
37 #include "stor-layout.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "flags.h"
41 #include "regs.h"
42 #include "hard-reg-set.h"
43 #include "except.h"
44 #include "function.h"
45 #include "insn-config.h"
46 #include "insn-attr.h"
47 #include "hashtab.h"
48 #include "statistics.h"
49 #include "real.h"
50 #include "fixed-value.h"
51 #include "expmed.h"
52 #include "dojump.h"
53 #include "explow.h"
54 #include "calls.h"
55 #include "emit-rtl.h"
56 #include "stmt.h"
57 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
58 #include "expr.h"
59 #include "insn-codes.h"
60 #include "optabs.h"
61 #include "libfuncs.h"
62 #include "recog.h"
63 #include "reload.h"
64 #include "typeclass.h"
65 #include "toplev.h"
66 #include "langhooks.h"
67 #include "intl.h"
68 #include "tm_p.h"
69 #include "tree-iterator.h"
70 #include "predict.h"
71 #include "dominance.h"
72 #include "cfg.h"
73 #include "basic-block.h"
74 #include "tree-ssa-alias.h"
75 #include "internal-fn.h"
76 #include "gimple-expr.h"
77 #include "is-a.h"
78 #include "gimple.h"
79 #include "gimple-ssa.h"
80 #include "hash-map.h"
81 #include "plugin-api.h"
82 #include "ipa-ref.h"
83 #include "cgraph.h"
84 #include "tree-ssanames.h"
85 #include "target.h"
86 #include "common/common-target.h"
87 #include "timevar.h"
88 #include "df.h"
89 #include "diagnostic.h"
90 #include "tree-ssa-live.h"
91 #include "tree-outof-ssa.h"
92 #include "target-globals.h"
93 #include "params.h"
94 #include "tree-ssa-address.h"
95 #include "cfgexpand.h"
96 #include "builtins.h"
97 #include "tree-chkp.h"
98 #include "rtl-chkp.h"
99 #include "ccmp.h"
100
101 #ifndef STACK_PUSH_CODE
102 #ifdef STACK_GROWS_DOWNWARD
103 #define STACK_PUSH_CODE PRE_DEC
104 #else
105 #define STACK_PUSH_CODE PRE_INC
106 #endif
107 #endif
108
109
110 /* If this is nonzero, we do not bother generating VOLATILE
111 around volatile memory references, and we are willing to
112 output indirect addresses. If cse is to follow, we reject
113 indirect addresses so a useful potential cse is generated;
114 if it is used only once, instruction combination will produce
115 the same indirect address eventually. */
116 int cse_not_expected;
117
118 /* This structure is used by move_by_pieces to describe the move to
119 be performed. */
120 struct move_by_pieces_d
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 rtx from;
127 rtx from_addr;
128 int autinc_from;
129 int explicit_inc_from;
130 unsigned HOST_WIDE_INT len;
131 HOST_WIDE_INT offset;
132 int reverse;
133 };
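/* Annotation (not part of the original source): AUTINC_TO / AUTINC_FROM are
   nonzero when the corresponding address is already an auto-increment
   expression (PRE_INC, PRE_DEC, POST_INC or POST_DEC); EXPLICIT_INC_TO /
   EXPLICIT_INC_FROM are set to +1 or -1 when move_by_pieces decides to emit
   explicit add insns around each piece instead; and REVERSE means the pieces
   are emitted from the highest offset downward, as when pushing on a
   downward-growing stack.  */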
134
135 /* This structure is used by store_by_pieces to describe the store (or
136 clear) to be performed. */
137
138 struct store_by_pieces_d
139 {
140 rtx to;
141 rtx to_addr;
142 int autinc_to;
143 int explicit_inc_to;
144 unsigned HOST_WIDE_INT len;
145 HOST_WIDE_INT offset;
146 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
147 void *constfundata;
148 int reverse;
149 };
150
151 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
152 struct move_by_pieces_d *);
153 static bool block_move_libcall_safe_for_call_parm (void);
154 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
155 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
156 unsigned HOST_WIDE_INT);
157 static tree emit_block_move_libcall_fn (int);
158 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
159 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
160 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
161 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
162 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
163 struct store_by_pieces_d *);
164 static tree clear_storage_libcall_fn (int);
165 static rtx_insn *compress_float_constant (rtx, rtx);
166 static rtx get_subtarget (rtx);
167 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
168 HOST_WIDE_INT, machine_mode,
169 tree, int, alias_set_type);
170 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
171 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
172 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
173 machine_mode, tree, alias_set_type, bool);
174
175 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
176
177 static int is_aligning_offset (const_tree, const_tree);
178 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
179 static rtx do_store_flag (sepops, rtx, machine_mode);
180 #ifdef PUSH_ROUNDING
181 static void emit_single_push_insn (machine_mode, rtx, tree);
182 #endif
183 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
184 static rtx const_vector_from_tree (tree);
185 static tree tree_expr_size (const_tree);
186 static HOST_WIDE_INT int_expr_size (tree);
187
188 \f
189 /* This is run to set up which modes can be used
190 directly in memory and to initialize the block move optab. It is run
191 at the beginning of compilation and when the target is reinitialized. */
192
193 void
194 init_expr_target (void)
195 {
196 rtx insn, pat;
197 machine_mode mode;
198 int num_clobbers;
199 rtx mem, mem1;
200 rtx reg;
201
202 /* Try indexing by frame ptr and try by stack ptr.
203 It is known that on the Convex the stack ptr isn't a valid index.
204 With luck, one or the other is valid on any machine. */
205 mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
206 mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
207
208 /* A scratch register we can modify in-place below to avoid
209 useless RTL allocations. */
210 reg = gen_rtx_REG (word_mode, FIRST_PSEUDO_REGISTER);
211
212 insn = rtx_alloc (INSN);
213 pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
214 PATTERN (insn) = pat;
215
216 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
217 mode = (machine_mode) ((int) mode + 1))
218 {
219 int regno;
220
221 direct_load[(int) mode] = direct_store[(int) mode] = 0;
222 PUT_MODE (mem, mode);
223 PUT_MODE (mem1, mode);
224
225 /* See if there is some register that can be used in this mode and
226 directly loaded or stored from memory. */
227
228 if (mode != VOIDmode && mode != BLKmode)
229 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
230 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
231 regno++)
232 {
233 if (! HARD_REGNO_MODE_OK (regno, mode))
234 continue;
235
236 set_mode_and_regno (reg, mode, regno);
237
238 SET_SRC (pat) = mem;
239 SET_DEST (pat) = reg;
240 if (recog (pat, insn, &num_clobbers) >= 0)
241 direct_load[(int) mode] = 1;
242
243 SET_SRC (pat) = mem1;
244 SET_DEST (pat) = reg;
245 if (recog (pat, insn, &num_clobbers) >= 0)
246 direct_load[(int) mode] = 1;
247
248 SET_SRC (pat) = reg;
249 SET_DEST (pat) = mem;
250 if (recog (pat, insn, &num_clobbers) >= 0)
251 direct_store[(int) mode] = 1;
252
253 SET_SRC (pat) = reg;
254 SET_DEST (pat) = mem1;
255 if (recog (pat, insn, &num_clobbers) >= 0)
256 direct_store[(int) mode] = 1;
257 }
258 }
259
260 mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, FIRST_PSEUDO_REGISTER));
261
262 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
263 mode = GET_MODE_WIDER_MODE (mode))
264 {
265 machine_mode srcmode;
266 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
267 srcmode = GET_MODE_WIDER_MODE (srcmode))
268 {
269 enum insn_code ic;
270
271 ic = can_extend_p (mode, srcmode, 0);
272 if (ic == CODE_FOR_nothing)
273 continue;
274
275 PUT_MODE (mem, srcmode);
276
277 if (insn_operand_matches (ic, 1, mem))
278 float_extend_from_mem[mode][srcmode] = true;
279 }
280 }
281 }
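/* Annotation (not part of the original source): the direct_load /
   direct_store arrays filled in above are consulted later, for instance by
   convert_move and convert_modes, to decide whether a MEM can be accessed
   directly in a narrower mode via gen_lowpart instead of first being copied
   into a register; float_extend_from_mem records which FLOAT_EXTEND
   conversions can read their operand straight from memory.  */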
282
283 /* This is run at the start of compiling a function. */
284
285 void
286 init_expr (void)
287 {
288 memset (&crtl->expr, 0, sizeof (crtl->expr));
289 }
290 \f
291 /* Copy data from FROM to TO, where the machine modes are not the same.
292 Both modes may be integer, or both may be floating, or both may be
293 fixed-point.
294 UNSIGNEDP should be nonzero if FROM is an unsigned type.
295 This causes zero-extension instead of sign-extension. */
296
297 void
298 convert_move (rtx to, rtx from, int unsignedp)
299 {
300 machine_mode to_mode = GET_MODE (to);
301 machine_mode from_mode = GET_MODE (from);
302 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
303 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
304 enum insn_code code;
305 rtx libcall;
306
307 /* rtx code for making an equivalent value. */
308 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
309 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
310
311
312 gcc_assert (to_real == from_real);
313 gcc_assert (to_mode != BLKmode);
314 gcc_assert (from_mode != BLKmode);
315
316 /* If the source and destination are already the same, then there's
317 nothing to do. */
318 if (to == from)
319 return;
320
321 /* If FROM is a SUBREG that indicates that we have already done at least
322 the required extension, strip it. We don't handle such SUBREGs as
323 TO here. */
324
325 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
326 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
327 >= GET_MODE_PRECISION (to_mode))
328 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
329 from = gen_lowpart (to_mode, from), from_mode = to_mode;
330
331 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
332
333 if (to_mode == from_mode
334 || (from_mode == VOIDmode && CONSTANT_P (from)))
335 {
336 emit_move_insn (to, from);
337 return;
338 }
339
340 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
341 {
342 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
343
344 if (VECTOR_MODE_P (to_mode))
345 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
346 else
347 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
348
349 emit_move_insn (to, from);
350 return;
351 }
352
353 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
354 {
355 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
356 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
357 return;
358 }
359
360 if (to_real)
361 {
362 rtx value;
363 rtx_insn *insns;
364 convert_optab tab;
365
366 gcc_assert ((GET_MODE_PRECISION (from_mode)
367 != GET_MODE_PRECISION (to_mode))
368 || (DECIMAL_FLOAT_MODE_P (from_mode)
369 != DECIMAL_FLOAT_MODE_P (to_mode)));
370
371 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
372 /* Conversion between decimal float and binary float, same size. */
373 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
374 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
375 tab = sext_optab;
376 else
377 tab = trunc_optab;
378
379 /* Try converting directly if the insn is supported. */
380
381 code = convert_optab_handler (tab, to_mode, from_mode);
382 if (code != CODE_FOR_nothing)
383 {
384 emit_unop_insn (code, to, from,
385 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
386 return;
387 }
388
389 /* Otherwise use a libcall. */
390 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
391
392 /* Is this conversion implemented yet? */
393 gcc_assert (libcall);
394
395 start_sequence ();
396 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
397 1, from, from_mode);
398 insns = get_insns ();
399 end_sequence ();
400 emit_libcall_block (insns, to, value,
401 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
402 from)
403 : gen_rtx_FLOAT_EXTEND (to_mode, from));
404 return;
405 }
406
407 /* Handle pointer conversion. */ /* SPEE 900220. */
408 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
409 {
410 convert_optab ctab;
411
412 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
413 ctab = trunc_optab;
414 else if (unsignedp)
415 ctab = zext_optab;
416 else
417 ctab = sext_optab;
418
419 if (convert_optab_handler (ctab, to_mode, from_mode)
420 != CODE_FOR_nothing)
421 {
422 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
423 to, from, UNKNOWN);
424 return;
425 }
426 }
427
428 /* Targets are expected to provide conversion insns between PxImode and
429 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
430 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
431 {
432 machine_mode full_mode
433 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
434
435 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
436 != CODE_FOR_nothing);
437
438 if (full_mode != from_mode)
439 from = convert_to_mode (full_mode, from, unsignedp);
440 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
441 to, from, UNKNOWN);
442 return;
443 }
444 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
445 {
446 rtx new_from;
447 machine_mode full_mode
448 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
449 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
450 enum insn_code icode;
451
452 icode = convert_optab_handler (ctab, full_mode, from_mode);
453 gcc_assert (icode != CODE_FOR_nothing);
454
455 if (to_mode == full_mode)
456 {
457 emit_unop_insn (icode, to, from, UNKNOWN);
458 return;
459 }
460
461 new_from = gen_reg_rtx (full_mode);
462 emit_unop_insn (icode, new_from, from, UNKNOWN);
463
464 /* else proceed to integer conversions below. */
465 from_mode = full_mode;
466 from = new_from;
467 }
468
469 /* Make sure both are fixed-point modes or both are not. */
470 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
471 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
472 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
473 {
474 /* If we widen from_mode to to_mode and they are in the same class,
475 we won't saturate the result.
476 Otherwise, always saturate the result to play it safe. */
477 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
478 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
479 expand_fixed_convert (to, from, 0, 0);
480 else
481 expand_fixed_convert (to, from, 0, 1);
482 return;
483 }
484
485 /* Now both modes are integers. */
486
487 /* Handle expanding beyond a word. */
488 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
489 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
490 {
491 rtx_insn *insns;
492 rtx lowpart;
493 rtx fill_value;
494 rtx lowfrom;
495 int i;
496 machine_mode lowpart_mode;
497 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
498
499 /* Try converting directly if the insn is supported. */
500 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
501 != CODE_FOR_nothing)
502 {
503 /* If FROM is a SUBREG, put it into a register. Do this
504 so that we always generate the same set of insns for
505 better cse'ing; if an intermediate assignment occurred,
506 we won't be doing the operation directly on the SUBREG. */
507 if (optimize > 0 && GET_CODE (from) == SUBREG)
508 from = force_reg (from_mode, from);
509 emit_unop_insn (code, to, from, equiv_code);
510 return;
511 }
512 /* Next, try converting via full word. */
513 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
514 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
515 != CODE_FOR_nothing))
516 {
517 rtx word_to = gen_reg_rtx (word_mode);
518 if (REG_P (to))
519 {
520 if (reg_overlap_mentioned_p (to, from))
521 from = force_reg (from_mode, from);
522 emit_clobber (to);
523 }
524 convert_move (word_to, from, unsignedp);
525 emit_unop_insn (code, to, word_to, equiv_code);
526 return;
527 }
528
529 /* No special multiword conversion insn; do it by hand. */
530 start_sequence ();
531
532 /* Since we will turn this into a no conflict block, we must ensure that
533 the source does not overlap the target, so force it into an isolated
534 register when it might do so. Likewise for any MEM input, since the
535 conversion sequence might require several references to it and we
536 must ensure we're getting the same value every time. */
537
538 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
539 from = force_reg (from_mode, from);
540
541 /* Get a copy of FROM widened to a word, if necessary. */
542 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
543 lowpart_mode = word_mode;
544 else
545 lowpart_mode = from_mode;
546
547 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
548
549 lowpart = gen_lowpart (lowpart_mode, to);
550 emit_move_insn (lowpart, lowfrom);
551
552 /* Compute the value to put in each remaining word. */
553 if (unsignedp)
554 fill_value = const0_rtx;
555 else
556 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
557 LT, lowfrom, const0_rtx,
558 lowpart_mode, 0, -1);
559
560 /* Fill the remaining words. */
561 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
562 {
563 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
564 rtx subword = operand_subword (to, index, 1, to_mode);
565
566 gcc_assert (subword);
567
568 if (fill_value != subword)
569 emit_move_insn (subword, fill_value);
570 }
571
572 insns = get_insns ();
573 end_sequence ();
574
575 emit_insn (insns);
576 return;
577 }
578
579 /* Truncating multi-word to a word or less. */
580 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
581 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
582 {
583 if (!((MEM_P (from)
584 && ! MEM_VOLATILE_P (from)
585 && direct_load[(int) to_mode]
586 && ! mode_dependent_address_p (XEXP (from, 0),
587 MEM_ADDR_SPACE (from)))
588 || REG_P (from)
589 || GET_CODE (from) == SUBREG))
590 from = force_reg (from_mode, from);
591 convert_move (to, gen_lowpart (word_mode, from), 0);
592 return;
593 }
594
595 /* Now follow all the conversions between integer modes
596 no wider than a word. */
597
598 /* For truncation, usually we can just refer to FROM in a narrower mode. */
599 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
600 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
601 {
602 if (!((MEM_P (from)
603 && ! MEM_VOLATILE_P (from)
604 && direct_load[(int) to_mode]
605 && ! mode_dependent_address_p (XEXP (from, 0),
606 MEM_ADDR_SPACE (from)))
607 || REG_P (from)
608 || GET_CODE (from) == SUBREG))
609 from = force_reg (from_mode, from);
610 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
611 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
612 from = copy_to_reg (from);
613 emit_move_insn (to, gen_lowpart (to_mode, from));
614 return;
615 }
616
617 /* Handle extension. */
618 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
619 {
620 /* Convert directly if that works. */
621 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
622 != CODE_FOR_nothing)
623 {
624 emit_unop_insn (code, to, from, equiv_code);
625 return;
626 }
627 else
628 {
629 machine_mode intermediate;
630 rtx tmp;
631 int shift_amount;
632
633 /* Search for a mode to convert via. */
634 for (intermediate = from_mode; intermediate != VOIDmode;
635 intermediate = GET_MODE_WIDER_MODE (intermediate))
636 if (((can_extend_p (to_mode, intermediate, unsignedp)
637 != CODE_FOR_nothing)
638 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
639 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
640 && (can_extend_p (intermediate, from_mode, unsignedp)
641 != CODE_FOR_nothing))
642 {
643 convert_move (to, convert_to_mode (intermediate, from,
644 unsignedp), unsignedp);
645 return;
646 }
647
648 /* No suitable intermediate mode.
649 Generate what we need with shifts. */
650 shift_amount = (GET_MODE_PRECISION (to_mode)
651 - GET_MODE_PRECISION (from_mode));
652 from = gen_lowpart (to_mode, force_reg (from_mode, from));
653 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
654 to, unsignedp);
655 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
656 to, unsignedp);
657 if (tmp != to)
658 emit_move_insn (to, tmp);
659 return;
660 }
661 }
662
663 /* Support special truncate insns for certain modes. */
664 if (convert_optab_handler (trunc_optab, to_mode,
665 from_mode) != CODE_FOR_nothing)
666 {
667 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
668 to, from, UNKNOWN);
669 return;
670 }
671
672 /* Handle truncation of volatile memrefs, and so on;
673 the things that couldn't be truncated directly,
674 and for which there was no special instruction.
675
676 ??? Code above formerly short-circuited this, for most integer
677 mode pairs, with a force_reg in from_mode followed by a recursive
678 call to this routine. Appears always to have been wrong. */
679 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
680 {
681 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
682 emit_move_insn (to, temp);
683 return;
684 }
685
686 /* Mode combination is not recognized. */
687 gcc_unreachable ();
688 }
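/* Illustrative sketch (annotation, not part of the original source): a
   typical caller widening a SImode value SRC into a fresh DImode pseudo
   would do roughly

       rtx dst = gen_reg_rtx (DImode);
       convert_move (dst, src, /@<!-- -->*unsignedp=*@<!-- -->/0);

   convert_move then emits a single sign_extend insn when can_extend_p
   reports one, and otherwise falls back to the word-by-word path above,
   filling the upper words with copies of the sign bit.  */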
689
690 /* Return an rtx for a value that would result
691 from converting X to mode MODE.
692 Both X and MODE may be floating, or both integer.
693 UNSIGNEDP is nonzero if X is an unsigned value.
694 This can be done by referring to a part of X in place
695 or by copying to a new temporary with conversion. */
696
697 rtx
698 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
699 {
700 return convert_modes (mode, VOIDmode, x, unsignedp);
701 }
702
703 /* Return an rtx for a value that would result
704 from converting X from mode OLDMODE to mode MODE.
705 Both modes may be floating, or both integer.
706 UNSIGNEDP is nonzero if X is an unsigned value.
707
708 This can be done by referring to a part of X in place
709 or by copying to a new temporary with conversion.
710
711 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
712
713 rtx
714 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
715 {
716 rtx temp;
717
718 /* If FROM is a SUBREG that indicates that we have already done at least
719 the required extension, strip it. */
720
721 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
722 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
723 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
724 x = gen_lowpart (mode, SUBREG_REG (x));
725
726 if (GET_MODE (x) != VOIDmode)
727 oldmode = GET_MODE (x);
728
729 if (mode == oldmode)
730 return x;
731
732 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
733 {
734 /* If the caller did not tell us the old mode, then there is not
735 much to do with respect to canonicalization. We have to
736 assume that all the bits are significant. */
737 if (GET_MODE_CLASS (oldmode) != MODE_INT)
738 oldmode = MAX_MODE_INT;
739 wide_int w = wide_int::from (std::make_pair (x, oldmode),
740 GET_MODE_PRECISION (mode),
741 unsignedp ? UNSIGNED : SIGNED);
742 return immed_wide_int_const (w, mode);
743 }
744
745 /* We can do this with a gen_lowpart if both desired and current modes
746 are integer, and this is either a constant integer, a register, or a
747 non-volatile MEM. */
748 if (GET_MODE_CLASS (mode) == MODE_INT
749 && GET_MODE_CLASS (oldmode) == MODE_INT
750 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
751 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
752 || (REG_P (x)
753 && (!HARD_REGISTER_P (x)
754 || HARD_REGNO_MODE_OK (REGNO (x), mode))
755 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
756
757 return gen_lowpart (mode, x);
758
759 /* Converting an integer constant into MODE is always equivalent to a
760 subreg operation. */
761 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
762 {
763 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
764 return simplify_gen_subreg (mode, x, oldmode, 0);
765 }
766
767 temp = gen_reg_rtx (mode);
768 convert_move (temp, x, unsignedp);
769 return temp;
770 }
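/* Annotation (not part of the original source): unlike convert_move,
   convert_modes does not require a pre-allocated destination; it returns an
   rtx that may share storage with X (via gen_lowpart), may be a compile-time
   constant canonicalized through wide_int without emitting any insns, or may
   be a fresh pseudo initialized with convert_move as a last resort.  */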
771 \f
772 /* Return the largest alignment we can use for doing a move (or store)
773 of MAX_PIECES. ALIGN is the largest alignment we could use. */
774
775 static unsigned int
776 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
777 {
778 machine_mode tmode;
779
780 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
781 if (align >= GET_MODE_ALIGNMENT (tmode))
782 align = GET_MODE_ALIGNMENT (tmode);
783 else
784 {
785 machine_mode tmode, xmode;
786
787 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
788 tmode != VOIDmode;
789 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
790 if (GET_MODE_SIZE (tmode) > max_pieces
791 || SLOW_UNALIGNED_ACCESS (tmode, align))
792 break;
793
794 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
795 }
796
797 return align;
798 }
799
800 /* Return the widest integer mode whose byte size is strictly less than
801 SIZE. If no such mode can be found, return VOIDmode. */
802
803 static machine_mode
804 widest_int_mode_for_size (unsigned int size)
805 {
806 machine_mode tmode, mode = VOIDmode;
807
808 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
809 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
810 if (GET_MODE_SIZE (tmode) < size)
811 mode = tmode;
812
813 return mode;
814 }
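/* Annotation (not part of the original source): because the comparison above
   is strict, callers such as move_by_pieces pass MOVE_MAX_PIECES + 1 as the
   initial SIZE so that the first pieces are at most MOVE_MAX_PIECES bytes
   wide.  */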
815
816 /* Determine whether the LEN bytes can be moved by using several move
817 instructions. Return nonzero if a call to move_by_pieces should
818 succeed. */
819
820 int
821 can_move_by_pieces (unsigned HOST_WIDE_INT len,
822 unsigned int align)
823 {
824 return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
825 optimize_insn_for_speed_p ());
826 }
827
828 /* Generate several move instructions to copy LEN bytes from block FROM to
829 block TO. (These are MEM rtx's with BLKmode).
830
831 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
832 used to push FROM to the stack.
833
834 ALIGN is maximum stack alignment we can assume.
835
836 If ENDP is 0, return TO; if ENDP is 1, return memory at the end a la
837 mempcpy; and if ENDP is 2, return memory at the end minus one byte a la
838 stpcpy. */
839
840 rtx
841 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
842 unsigned int align, int endp)
843 {
844 struct move_by_pieces_d data;
845 machine_mode to_addr_mode;
846 machine_mode from_addr_mode = get_address_mode (from);
847 rtx to_addr, from_addr = XEXP (from, 0);
848 unsigned int max_size = MOVE_MAX_PIECES + 1;
849 enum insn_code icode;
850
851 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
852
853 data.offset = 0;
854 data.from_addr = from_addr;
855 if (to)
856 {
857 to_addr_mode = get_address_mode (to);
858 to_addr = XEXP (to, 0);
859 data.to = to;
860 data.autinc_to
861 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
862 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
863 data.reverse
864 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
865 }
866 else
867 {
868 to_addr_mode = VOIDmode;
869 to_addr = NULL_RTX;
870 data.to = NULL_RTX;
871 data.autinc_to = 1;
872 #ifdef STACK_GROWS_DOWNWARD
873 data.reverse = 1;
874 #else
875 data.reverse = 0;
876 #endif
877 }
878 data.to_addr = to_addr;
879 data.from = from;
880 data.autinc_from
881 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
882 || GET_CODE (from_addr) == POST_INC
883 || GET_CODE (from_addr) == POST_DEC);
884
885 data.explicit_inc_from = 0;
886 data.explicit_inc_to = 0;
887 if (data.reverse) data.offset = len;
888 data.len = len;
889
890 /* If copying requires more than two move insns,
891 copy addresses to registers (to make displacements shorter)
892 and use post-increment if available. */
893 if (!(data.autinc_from && data.autinc_to)
894 && move_by_pieces_ninsns (len, align, max_size) > 2)
895 {
896 /* Find the mode of the largest move...
897 MODE might not be used depending on the definitions of the
898 USE_* macros below. */
899 machine_mode mode ATTRIBUTE_UNUSED
900 = widest_int_mode_for_size (max_size);
901
902 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
903 {
904 data.from_addr = copy_to_mode_reg (from_addr_mode,
905 plus_constant (from_addr_mode,
906 from_addr, len));
907 data.autinc_from = 1;
908 data.explicit_inc_from = -1;
909 }
910 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
911 {
912 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
913 data.autinc_from = 1;
914 data.explicit_inc_from = 1;
915 }
916 if (!data.autinc_from && CONSTANT_P (from_addr))
917 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
918 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
919 {
920 data.to_addr = copy_to_mode_reg (to_addr_mode,
921 plus_constant (to_addr_mode,
922 to_addr, len));
923 data.autinc_to = 1;
924 data.explicit_inc_to = -1;
925 }
926 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
927 {
928 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
929 data.autinc_to = 1;
930 data.explicit_inc_to = 1;
931 }
932 if (!data.autinc_to && CONSTANT_P (to_addr))
933 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
934 }
935
936 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
937
938 /* First move what we can in the largest integer mode, then go to
939 successively smaller modes. */
940
941 while (max_size > 1 && data.len > 0)
942 {
943 machine_mode mode = widest_int_mode_for_size (max_size);
944
945 if (mode == VOIDmode)
946 break;
947
948 icode = optab_handler (mov_optab, mode);
949 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
950 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
951
952 max_size = GET_MODE_SIZE (mode);
953 }
954
955 /* The code above should have handled everything. */
956 gcc_assert (!data.len);
957
958 if (endp)
959 {
960 rtx to1;
961
962 gcc_assert (!data.reverse);
963 if (data.autinc_to)
964 {
965 if (endp == 2)
966 {
967 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
968 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
969 else
970 data.to_addr = copy_to_mode_reg (to_addr_mode,
971 plus_constant (to_addr_mode,
972 data.to_addr,
973 -1));
974 }
975 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
976 data.offset);
977 }
978 else
979 {
980 if (endp == 2)
981 --data.offset;
982 to1 = adjust_address (data.to, QImode, data.offset);
983 }
984 return to1;
985 }
986 else
987 return data.to;
988 }
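/* Worked example (annotation, not part of the original source): on a
   hypothetical target with MOVE_MAX_PIECES == 8, aligned operands and the
   usual integer move patterns, copying 7 bytes walks the loop above with
   max_size 9, 8, 4 and 2, emitting one SImode move, one HImode move and one
   QImode move; the companion move_by_pieces_ninsns reports 3 insns for the
   same arguments.  */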
989
990 /* Return number of insns required to move L bytes by pieces.
991 ALIGN (in bits) is maximum alignment we can assume. */
992
993 unsigned HOST_WIDE_INT
994 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
995 unsigned int max_size)
996 {
997 unsigned HOST_WIDE_INT n_insns = 0;
998
999 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1000
1001 while (max_size > 1 && l > 0)
1002 {
1003 machine_mode mode;
1004 enum insn_code icode;
1005
1006 mode = widest_int_mode_for_size (max_size);
1007
1008 if (mode == VOIDmode)
1009 break;
1010
1011 icode = optab_handler (mov_optab, mode);
1012 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1013 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1014
1015 max_size = GET_MODE_SIZE (mode);
1016 }
1017
1018 gcc_assert (!l);
1019 return n_insns;
1020 }
1021
1022 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1023 with move instructions for mode MODE. GENFUN is the gen_... function
1024 to make a move insn for that mode. DATA has all the other info. */
1025
1026 static void
1027 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1028 struct move_by_pieces_d *data)
1029 {
1030 unsigned int size = GET_MODE_SIZE (mode);
1031 rtx to1 = NULL_RTX, from1;
1032
1033 while (data->len >= size)
1034 {
1035 if (data->reverse)
1036 data->offset -= size;
1037
1038 if (data->to)
1039 {
1040 if (data->autinc_to)
1041 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1042 data->offset);
1043 else
1044 to1 = adjust_address (data->to, mode, data->offset);
1045 }
1046
1047 if (data->autinc_from)
1048 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1049 data->offset);
1050 else
1051 from1 = adjust_address (data->from, mode, data->offset);
1052
1053 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1054 emit_insn (gen_add2_insn (data->to_addr,
1055 gen_int_mode (-(HOST_WIDE_INT) size,
1056 GET_MODE (data->to_addr))));
1057 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1058 emit_insn (gen_add2_insn (data->from_addr,
1059 gen_int_mode (-(HOST_WIDE_INT) size,
1060 GET_MODE (data->from_addr))));
1061
1062 if (data->to)
1063 emit_insn ((*genfun) (to1, from1));
1064 else
1065 {
1066 #ifdef PUSH_ROUNDING
1067 emit_single_push_insn (mode, from1, NULL);
1068 #else
1069 gcc_unreachable ();
1070 #endif
1071 }
1072
1073 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1074 emit_insn (gen_add2_insn (data->to_addr,
1075 gen_int_mode (size,
1076 GET_MODE (data->to_addr))));
1077 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1078 emit_insn (gen_add2_insn (data->from_addr,
1079 gen_int_mode (size,
1080 GET_MODE (data->from_addr))));
1081
1082 if (! data->reverse)
1083 data->offset += size;
1084
1085 data->len -= size;
1086 }
1087 }
1088 \f
1089 /* Emit code to move a block Y to a block X. This may be done with
1090 string-move instructions, with multiple scalar move instructions,
1091 or with a library call.
1092
1093 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1094 SIZE is an rtx that says how long they are.
1095 ALIGN is the maximum alignment we can assume they have.
1096 METHOD describes what kind of copy this is, and what mechanisms may be used.
1097 MIN_SIZE is the minimal size of the block to move.
1098 MAX_SIZE is the maximal size of the block to move; if it cannot be represented
1099 in unsigned HOST_WIDE_INT, then it is a mask of all ones.
1100
1101 Return the address of the new block if memcpy is called and returns it,
1102 0 otherwise. */
1103
1104 rtx
1105 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1106 unsigned int expected_align, HOST_WIDE_INT expected_size,
1107 unsigned HOST_WIDE_INT min_size,
1108 unsigned HOST_WIDE_INT max_size,
1109 unsigned HOST_WIDE_INT probable_max_size)
1110 {
1111 bool may_use_call;
1112 rtx retval = 0;
1113 unsigned int align;
1114
1115 gcc_assert (size);
1116 if (CONST_INT_P (size)
1117 && INTVAL (size) == 0)
1118 return 0;
1119
1120 switch (method)
1121 {
1122 case BLOCK_OP_NORMAL:
1123 case BLOCK_OP_TAILCALL:
1124 may_use_call = true;
1125 break;
1126
1127 case BLOCK_OP_CALL_PARM:
1128 may_use_call = block_move_libcall_safe_for_call_parm ();
1129
1130 /* Make inhibit_defer_pop nonzero around the library call
1131 to force it to pop the arguments right away. */
1132 NO_DEFER_POP;
1133 break;
1134
1135 case BLOCK_OP_NO_LIBCALL:
1136 may_use_call = false;
1137 break;
1138
1139 default:
1140 gcc_unreachable ();
1141 }
1142
1143 gcc_assert (MEM_P (x) && MEM_P (y));
1144 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1145 gcc_assert (align >= BITS_PER_UNIT);
1146
1147 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1148 block copy is more efficient for other large modes, e.g. DCmode. */
1149 x = adjust_address (x, BLKmode, 0);
1150 y = adjust_address (y, BLKmode, 0);
1151
1152 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1153 can be incorrect is coming from __builtin_memcpy. */
1154 if (CONST_INT_P (size))
1155 {
1156 x = shallow_copy_rtx (x);
1157 y = shallow_copy_rtx (y);
1158 set_mem_size (x, INTVAL (size));
1159 set_mem_size (y, INTVAL (size));
1160 }
1161
1162 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1163 move_by_pieces (x, y, INTVAL (size), align, 0);
1164 else if (emit_block_move_via_movmem (x, y, size, align,
1165 expected_align, expected_size,
1166 min_size, max_size, probable_max_size))
1167 ;
1168 else if (may_use_call
1169 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1170 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1171 {
1172 /* Since x and y are passed to a libcall, mark the corresponding
1173 tree EXPR as addressable. */
1174 tree y_expr = MEM_EXPR (y);
1175 tree x_expr = MEM_EXPR (x);
1176 if (y_expr)
1177 mark_addressable (y_expr);
1178 if (x_expr)
1179 mark_addressable (x_expr);
1180 retval = emit_block_move_via_libcall (x, y, size,
1181 method == BLOCK_OP_TAILCALL);
1182 }
1183
1184 else
1185 emit_block_move_via_loop (x, y, size, align);
1186
1187 if (method == BLOCK_OP_CALL_PARM)
1188 OK_DEFER_POP;
1189
1190 return retval;
1191 }
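/* Annotation (not part of the original source): the strategies above are
   tried in a fixed order -- move_by_pieces for small constant sizes, then a
   target movmem expander, then a library call to memcpy (when METHOD permits
   it and both address spaces are generic), and finally an explicit QImode
   copy loop as the fallback of last resort.  */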
1192
1193 rtx
1194 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1195 {
1196 unsigned HOST_WIDE_INT max, min = 0;
1197 if (GET_CODE (size) == CONST_INT)
1198 min = max = UINTVAL (size);
1199 else
1200 max = GET_MODE_MASK (GET_MODE (size));
1201 return emit_block_move_hints (x, y, size, method, 0, -1,
1202 min, max, max);
1203 }
1204
1205 /* A subroutine of emit_block_move. Returns true if calling the
1206 block move libcall will not clobber any parameters which may have
1207 already been placed on the stack. */
1208
1209 static bool
1210 block_move_libcall_safe_for_call_parm (void)
1211 {
1212 #if defined (REG_PARM_STACK_SPACE)
1213 tree fn;
1214 #endif
1215
1216 /* If arguments are pushed on the stack, then they're safe. */
1217 if (PUSH_ARGS)
1218 return true;
1219
1220 /* If registers go on the stack anyway, any argument is sure to clobber
1221 an outgoing argument. */
1222 #if defined (REG_PARM_STACK_SPACE)
1223 fn = emit_block_move_libcall_fn (false);
1224 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1225 depend on its argument. */
1226 (void) fn;
1227 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1228 && REG_PARM_STACK_SPACE (fn) != 0)
1229 return false;
1230 #endif
1231
1232 /* If any argument goes in memory, then it might clobber an outgoing
1233 argument. */
1234 {
1235 CUMULATIVE_ARGS args_so_far_v;
1236 cumulative_args_t args_so_far;
1237 tree fn, arg;
1238
1239 fn = emit_block_move_libcall_fn (false);
1240 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1241 args_so_far = pack_cumulative_args (&args_so_far_v);
1242
1243 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1244 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1245 {
1246 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1247 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1248 NULL_TREE, true);
1249 if (!tmp || !REG_P (tmp))
1250 return false;
1251 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1252 return false;
1253 targetm.calls.function_arg_advance (args_so_far, mode,
1254 NULL_TREE, true);
1255 }
1256 }
1257 return true;
1258 }
1259
1260 /* A subroutine of emit_block_move. Expand a movmem pattern;
1261 return true if successful. */
1262
1263 static bool
1264 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1265 unsigned int expected_align, HOST_WIDE_INT expected_size,
1266 unsigned HOST_WIDE_INT min_size,
1267 unsigned HOST_WIDE_INT max_size,
1268 unsigned HOST_WIDE_INT probable_max_size)
1269 {
1270 int save_volatile_ok = volatile_ok;
1271 machine_mode mode;
1272
1273 if (expected_align < align)
1274 expected_align = align;
1275 if (expected_size != -1)
1276 {
1277 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1278 expected_size = probable_max_size;
1279 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1280 expected_size = min_size;
1281 }
1282
1283 /* Since this is a move insn, we don't care about volatility. */
1284 volatile_ok = 1;
1285
1286 /* Try the most limited insn first, because there's no point
1287 including more than one in the machine description unless
1288 the more limited one has some advantage. */
1289
1290 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1291 mode = GET_MODE_WIDER_MODE (mode))
1292 {
1293 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1294
1295 if (code != CODE_FOR_nothing
1296 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1297 here because if SIZE is less than the mode mask, as it is
1298 returned by the macro, it will definitely be less than the
1299 actual mode mask. Since SIZE is within the Pmode address
1300 space, we limit MODE to Pmode. */
1301 && ((CONST_INT_P (size)
1302 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1303 <= (GET_MODE_MASK (mode) >> 1)))
1304 || max_size <= (GET_MODE_MASK (mode) >> 1)
1305 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1306 {
1307 struct expand_operand ops[9];
1308 unsigned int nops;
1309
1310 /* ??? When called via emit_block_move_for_call, it'd be
1311 nice if there were some way to inform the backend, so
1312 that it doesn't fail the expansion because it thinks
1313 emitting the libcall would be more efficient. */
1314 nops = insn_data[(int) code].n_generator_args;
1315 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1316
1317 create_fixed_operand (&ops[0], x);
1318 create_fixed_operand (&ops[1], y);
1319 /* The check above guarantees that this size conversion is valid. */
1320 create_convert_operand_to (&ops[2], size, mode, true);
1321 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1322 if (nops >= 6)
1323 {
1324 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1325 create_integer_operand (&ops[5], expected_size);
1326 }
1327 if (nops >= 8)
1328 {
1329 create_integer_operand (&ops[6], min_size);
1330 /* If we cannot represent the maximal size,
1331 make the parameter NULL. */
1332 if ((HOST_WIDE_INT) max_size != -1)
1333 create_integer_operand (&ops[7], max_size);
1334 else
1335 create_fixed_operand (&ops[7], NULL);
1336 }
1337 if (nops == 9)
1338 {
1339 /* If we cannot represent the maximal size,
1340 make the parameter NULL. */
1341 if ((HOST_WIDE_INT) probable_max_size != -1)
1342 create_integer_operand (&ops[8], probable_max_size);
1343 else
1344 create_fixed_operand (&ops[8], NULL);
1345 }
1346 if (maybe_expand_insn (code, nops, ops))
1347 {
1348 volatile_ok = save_volatile_ok;
1349 return true;
1350 }
1351 }
1352 }
1353
1354 volatile_ok = save_volatile_ok;
1355 return false;
1356 }
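/* Annotation (not part of the original source): the 4-, 6-, 8- and 9-operand
   forms accepted above correspond to movmem patterns that take, in addition
   to the destination, source, length and alignment operands, the optional
   expected-alignment/expected-size hints and the min/max/probable-max size
   bounds.  */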
1357
1358 /* A subroutine of emit_block_move. Expand a call to memcpy.
1359 Return the return value from memcpy, 0 otherwise. */
1360
1361 rtx
1362 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1363 {
1364 rtx dst_addr, src_addr;
1365 tree call_expr, fn, src_tree, dst_tree, size_tree;
1366 machine_mode size_mode;
1367 rtx retval;
1368
1369 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1370 pseudos. We can then place those new pseudos into a VAR_DECL and
1371 use them later. */
1372
1373 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1374 src_addr = copy_addr_to_reg (XEXP (src, 0));
1375
1376 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1377 src_addr = convert_memory_address (ptr_mode, src_addr);
1378
1379 dst_tree = make_tree (ptr_type_node, dst_addr);
1380 src_tree = make_tree (ptr_type_node, src_addr);
1381
1382 size_mode = TYPE_MODE (sizetype);
1383
1384 size = convert_to_mode (size_mode, size, 1);
1385 size = copy_to_mode_reg (size_mode, size);
1386
1387 /* It is incorrect to use the libcall calling conventions to call
1388 memcpy in this context. This could be a user call to memcpy and
1389 the user may wish to examine the return value from memcpy. For
1390 targets where libcalls and normal calls have different conventions
1391 for returning pointers, we could end up generating incorrect code. */
1392
1393 size_tree = make_tree (sizetype, size);
1394
1395 fn = emit_block_move_libcall_fn (true);
1396 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1397 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1398
1399 retval = expand_normal (call_expr);
1400
1401 return retval;
1402 }
1403
1404 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1405 for the function we use for block copies. */
1406
1407 static GTY(()) tree block_move_fn;
1408
1409 void
1410 init_block_move_fn (const char *asmspec)
1411 {
1412 if (!block_move_fn)
1413 {
1414 tree args, fn, attrs, attr_args;
1415
1416 fn = get_identifier ("memcpy");
1417 args = build_function_type_list (ptr_type_node, ptr_type_node,
1418 const_ptr_type_node, sizetype,
1419 NULL_TREE);
1420
1421 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1422 DECL_EXTERNAL (fn) = 1;
1423 TREE_PUBLIC (fn) = 1;
1424 DECL_ARTIFICIAL (fn) = 1;
1425 TREE_NOTHROW (fn) = 1;
1426 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1427 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1428
1429 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1430 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1431
1432 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1433
1434 block_move_fn = fn;
1435 }
1436
1437 if (asmspec)
1438 set_user_assembler_name (block_move_fn, asmspec);
1439 }
1440
1441 static tree
1442 emit_block_move_libcall_fn (int for_call)
1443 {
1444 static bool emitted_extern;
1445
1446 if (!block_move_fn)
1447 init_block_move_fn (NULL);
1448
1449 if (for_call && !emitted_extern)
1450 {
1451 emitted_extern = true;
1452 make_decl_rtl (block_move_fn);
1453 }
1454
1455 return block_move_fn;
1456 }
1457
1458 /* A subroutine of emit_block_move. Copy the data via an explicit
1459 loop. This is used only when libcalls are forbidden. */
1460 /* ??? It'd be nice to copy in hunks larger than QImode. */
1461
1462 static void
1463 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1464 unsigned int align ATTRIBUTE_UNUSED)
1465 {
1466 rtx_code_label *cmp_label, *top_label;
1467 rtx iter, x_addr, y_addr, tmp;
1468 machine_mode x_addr_mode = get_address_mode (x);
1469 machine_mode y_addr_mode = get_address_mode (y);
1470 machine_mode iter_mode;
1471
1472 iter_mode = GET_MODE (size);
1473 if (iter_mode == VOIDmode)
1474 iter_mode = word_mode;
1475
1476 top_label = gen_label_rtx ();
1477 cmp_label = gen_label_rtx ();
1478 iter = gen_reg_rtx (iter_mode);
1479
1480 emit_move_insn (iter, const0_rtx);
1481
1482 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1483 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1484 do_pending_stack_adjust ();
1485
1486 emit_jump (cmp_label);
1487 emit_label (top_label);
1488
1489 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1490 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1491
1492 if (x_addr_mode != y_addr_mode)
1493 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1494 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1495
1496 x = change_address (x, QImode, x_addr);
1497 y = change_address (y, QImode, y_addr);
1498
1499 emit_move_insn (x, y);
1500
1501 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1502 true, OPTAB_LIB_WIDEN);
1503 if (tmp != iter)
1504 emit_move_insn (iter, tmp);
1505
1506 emit_label (cmp_label);
1507
1508 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1509 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1510 }
1511 \f
1512 /* Copy all or part of a value X into registers starting at REGNO.
1513 The number of registers to be filled is NREGS. */
1514
1515 void
1516 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1517 {
1518 int i;
1519 #ifdef HAVE_load_multiple
1520 rtx pat;
1521 rtx_insn *last;
1522 #endif
1523
1524 if (nregs == 0)
1525 return;
1526
1527 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1528 x = validize_mem (force_const_mem (mode, x));
1529
1530 /* See if the machine can do this with a load multiple insn. */
1531 #ifdef HAVE_load_multiple
1532 if (HAVE_load_multiple)
1533 {
1534 last = get_last_insn ();
1535 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1536 GEN_INT (nregs));
1537 if (pat)
1538 {
1539 emit_insn (pat);
1540 return;
1541 }
1542 else
1543 delete_insns_since (last);
1544 }
1545 #endif
1546
1547 for (i = 0; i < nregs; i++)
1548 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1549 operand_subword_force (x, i, mode));
1550 }
1551
1552 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1553 The number of registers to be filled is NREGS. */
1554
1555 void
1556 move_block_from_reg (int regno, rtx x, int nregs)
1557 {
1558 int i;
1559
1560 if (nregs == 0)
1561 return;
1562
1563 /* See if the machine can do this with a store multiple insn. */
1564 #ifdef HAVE_store_multiple
1565 if (HAVE_store_multiple)
1566 {
1567 rtx_insn *last = get_last_insn ();
1568 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1569 GEN_INT (nregs));
1570 if (pat)
1571 {
1572 emit_insn (pat);
1573 return;
1574 }
1575 else
1576 delete_insns_since (last);
1577 }
1578 #endif
1579
1580 for (i = 0; i < nregs; i++)
1581 {
1582 rtx tem = operand_subword (x, i, 1, BLKmode);
1583
1584 gcc_assert (tem);
1585
1586 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1587 }
1588 }
1589
1590 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1591 ORIG, where ORIG is a non-consecutive group of registers represented by
1592 a PARALLEL. The clone is identical to the original except in that the
1593 original set of registers is replaced by a new set of pseudo registers.
1594 The new set has the same modes as the original set. */
1595
1596 rtx
1597 gen_group_rtx (rtx orig)
1598 {
1599 int i, length;
1600 rtx *tmps;
1601
1602 gcc_assert (GET_CODE (orig) == PARALLEL);
1603
1604 length = XVECLEN (orig, 0);
1605 tmps = XALLOCAVEC (rtx, length);
1606
1607 /* Skip a NULL entry in first slot. */
1608 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1609
1610 if (i)
1611 tmps[0] = 0;
1612
1613 for (; i < length; i++)
1614 {
1615 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1616 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1617
1618 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1619 }
1620
1621 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1622 }
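/* Annotation (not part of the original source): in these PARALLELs each
   element is an EXPR_LIST whose operand 0 is a register and whose operand 1
   is the constant byte offset of that register's piece within the whole
   value; a NULL register in the first slot marks a parameter that is passed
   partly on the stack and partly in registers.  */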
1623
1624 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1625 except that values are placed in TMPS[i], and must later be moved
1626 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1627
1628 static void
1629 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1630 {
1631 rtx src;
1632 int start, i;
1633 machine_mode m = GET_MODE (orig_src);
1634
1635 gcc_assert (GET_CODE (dst) == PARALLEL);
1636
1637 if (m != VOIDmode
1638 && !SCALAR_INT_MODE_P (m)
1639 && !MEM_P (orig_src)
1640 && GET_CODE (orig_src) != CONCAT)
1641 {
1642 machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1643 if (imode == BLKmode)
1644 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1645 else
1646 src = gen_reg_rtx (imode);
1647 if (imode != BLKmode)
1648 src = gen_lowpart (GET_MODE (orig_src), src);
1649 emit_move_insn (src, orig_src);
1650 /* ...and back again. */
1651 if (imode != BLKmode)
1652 src = gen_lowpart (imode, src);
1653 emit_group_load_1 (tmps, dst, src, type, ssize);
1654 return;
1655 }
1656
1657 /* Check for a NULL entry, used to indicate that the parameter goes
1658 both on the stack and in registers. */
1659 if (XEXP (XVECEXP (dst, 0, 0), 0))
1660 start = 0;
1661 else
1662 start = 1;
1663
1664 /* Process the pieces. */
1665 for (i = start; i < XVECLEN (dst, 0); i++)
1666 {
1667 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1668 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1669 unsigned int bytelen = GET_MODE_SIZE (mode);
1670 int shift = 0;
1671
1672 /* Handle trailing fragments that run over the size of the struct. */
1673 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1674 {
1675 /* Arrange to shift the fragment to where it belongs.
1676 extract_bit_field loads to the lsb of the reg. */
1677 if (
1678 #ifdef BLOCK_REG_PADDING
1679 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1680 == (BYTES_BIG_ENDIAN ? upward : downward)
1681 #else
1682 BYTES_BIG_ENDIAN
1683 #endif
1684 )
1685 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1686 bytelen = ssize - bytepos;
1687 gcc_assert (bytelen > 0);
1688 }
1689
1690 /* If we won't be loading directly from memory, protect the real source
1691 from strange tricks we might play; but make sure that the source can
1692 be loaded directly into the destination. */
1693 src = orig_src;
1694 if (!MEM_P (orig_src)
1695 && (!CONSTANT_P (orig_src)
1696 || (GET_MODE (orig_src) != mode
1697 && GET_MODE (orig_src) != VOIDmode)))
1698 {
1699 if (GET_MODE (orig_src) == VOIDmode)
1700 src = gen_reg_rtx (mode);
1701 else
1702 src = gen_reg_rtx (GET_MODE (orig_src));
1703
1704 emit_move_insn (src, orig_src);
1705 }
1706
1707 /* Optimize the access just a bit. */
1708 if (MEM_P (src)
1709 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1710 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1711 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1712 && bytelen == GET_MODE_SIZE (mode))
1713 {
1714 tmps[i] = gen_reg_rtx (mode);
1715 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1716 }
1717 else if (COMPLEX_MODE_P (mode)
1718 && GET_MODE (src) == mode
1719 && bytelen == GET_MODE_SIZE (mode))
1720 /* Let emit_move_complex do the bulk of the work. */
1721 tmps[i] = src;
1722 else if (GET_CODE (src) == CONCAT)
1723 {
1724 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1725 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1726
1727 if ((bytepos == 0 && bytelen == slen0)
1728 || (bytepos != 0 && bytepos + bytelen <= slen))
1729 {
1730 /* The following assumes that the concatenated objects all
1731 have the same size. In this case, a simple calculation
1732 can be used to determine the object and the bit field
1733 to be extracted. */
1734 tmps[i] = XEXP (src, bytepos / slen0);
1735 if (! CONSTANT_P (tmps[i])
1736 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1737 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1738 (bytepos % slen0) * BITS_PER_UNIT,
1739 1, NULL_RTX, mode, mode);
1740 }
1741 else
1742 {
1743 rtx mem;
1744
1745 gcc_assert (!bytepos);
1746 mem = assign_stack_temp (GET_MODE (src), slen);
1747 emit_move_insn (mem, src);
1748 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1749 0, 1, NULL_RTX, mode, mode);
1750 }
1751 }
1752 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1753 SIMD register, which is currently broken. Until we get GCC
1754 to emit proper RTL for these cases, dump to memory. */
1755 else if (VECTOR_MODE_P (GET_MODE (dst))
1756 && REG_P (src))
1757 {
1758 int slen = GET_MODE_SIZE (GET_MODE (src));
1759 rtx mem;
1760
1761 mem = assign_stack_temp (GET_MODE (src), slen);
1762 emit_move_insn (mem, src);
1763 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1764 }
1765 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1766 && XVECLEN (dst, 0) > 1)
1767 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1768 else if (CONSTANT_P (src))
1769 {
1770 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1771
1772 if (len == ssize)
1773 tmps[i] = src;
1774 else
1775 {
1776 rtx first, second;
1777
1778 /* TODO: const_wide_int can have sizes other than this... */
1779 gcc_assert (2 * len == ssize);
1780 split_double (src, &first, &second);
1781 if (i)
1782 tmps[i] = second;
1783 else
1784 tmps[i] = first;
1785 }
1786 }
1787 else if (REG_P (src) && GET_MODE (src) == mode)
1788 tmps[i] = src;
1789 else
1790 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1791 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1792 mode, mode);
1793
1794 if (shift)
1795 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1796 shift, tmps[i], 0);
1797 }
1798 }
1799
1800 /* Emit code to move a block SRC of type TYPE to a block DST,
1801 where DST is non-consecutive registers represented by a PARALLEL.
1802 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1803 if not known. */
1804
1805 void
1806 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1807 {
1808 rtx *tmps;
1809 int i;
1810
1811 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1812 emit_group_load_1 (tmps, dst, src, type, ssize);
1813
1814 /* Copy the extracted pieces into the proper (probable) hard regs. */
1815 for (i = 0; i < XVECLEN (dst, 0); i++)
1816 {
1817 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1818 if (d == NULL)
1819 continue;
1820 emit_move_insn (d, tmps[i]);
1821 }
1822 }
1823
1824 /* Similar, but load SRC into new pseudos in a format that looks like
1825 PARALLEL. This can later be fed to emit_group_move to get things
1826 in the right place. */
1827
1828 rtx
1829 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1830 {
1831 rtvec vec;
1832 int i;
1833
1834 vec = rtvec_alloc (XVECLEN (parallel, 0));
1835 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1836
1837 /* Convert the vector to look just like the original PARALLEL, except
1838 with the computed values. */
1839 for (i = 0; i < XVECLEN (parallel, 0); i++)
1840 {
1841 rtx e = XVECEXP (parallel, 0, i);
1842 rtx d = XEXP (e, 0);
1843
1844 if (d)
1845 {
1846 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1847 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1848 }
1849 RTVEC_ELT (vec, i) = e;
1850 }
1851
1852 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1853 }
1854
1855 /* Emit code to move a block SRC to block DST, where SRC and DST are
1856 non-consecutive groups of registers, each represented by a PARALLEL. */
1857
1858 void
1859 emit_group_move (rtx dst, rtx src)
1860 {
1861 int i;
1862
1863 gcc_assert (GET_CODE (src) == PARALLEL
1864 && GET_CODE (dst) == PARALLEL
1865 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1866
1867 /* Skip first entry if NULL. */
1868 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1869 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1870 XEXP (XVECEXP (src, 0, i), 0));
1871 }
1872
1873 /* Move a group of registers represented by a PARALLEL into pseudos. */
1874
1875 rtx
1876 emit_group_move_into_temps (rtx src)
1877 {
1878 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1879 int i;
1880
1881 for (i = 0; i < XVECLEN (src, 0); i++)
1882 {
1883 rtx e = XVECEXP (src, 0, i);
1884 rtx d = XEXP (e, 0);
1885
1886 if (d)
1887 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1888 RTVEC_ELT (vec, i) = e;
1889 }
1890
1891 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1892 }
1893
1894 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1895 where SRC is non-consecutive registers represented by a PARALLEL.
1896 SSIZE represents the total size of block ORIG_DST, or -1 if not
1897 known. */
1898
1899 void
1900 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1901 {
1902 rtx *tmps, dst;
1903 int start, finish, i;
1904 machine_mode m = GET_MODE (orig_dst);
1905
1906 gcc_assert (GET_CODE (src) == PARALLEL);
1907
1908 if (!SCALAR_INT_MODE_P (m)
1909 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1910 {
1911 machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1912 if (imode == BLKmode)
1913 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1914 else
1915 dst = gen_reg_rtx (imode);
1916 emit_group_store (dst, src, type, ssize);
1917 if (imode != BLKmode)
1918 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1919 emit_move_insn (orig_dst, dst);
1920 return;
1921 }
1922
1923 /* Check for a NULL entry, used to indicate that the parameter goes
1924 both on the stack and in registers. */
1925 if (XEXP (XVECEXP (src, 0, 0), 0))
1926 start = 0;
1927 else
1928 start = 1;
1929 finish = XVECLEN (src, 0);
1930
1931 tmps = XALLOCAVEC (rtx, finish);
1932
1933 /* Copy the (probable) hard regs into pseudos. */
1934 for (i = start; i < finish; i++)
1935 {
1936 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1937 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1938 {
1939 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1940 emit_move_insn (tmps[i], reg);
1941 }
1942 else
1943 tmps[i] = reg;
1944 }
1945
1946 /* If we won't be storing directly into memory, protect the real destination
1947 from strange tricks we might play. */
1948 dst = orig_dst;
1949 if (GET_CODE (dst) == PARALLEL)
1950 {
1951 rtx temp;
1952
1953 /* We can get a PARALLEL dst if there is a conditional expression in
1954 a return statement. In that case, the dst and src are the same,
1955 so no action is necessary. */
1956 if (rtx_equal_p (dst, src))
1957 return;
1958
1959 /* It is unclear if we can ever reach here, but we may as well handle
1960 it. Allocate a temporary, and split this into a store/load to/from
1961 the temporary. */
1962 temp = assign_stack_temp (GET_MODE (dst), ssize);
1963 emit_group_store (temp, src, type, ssize);
1964 emit_group_load (dst, temp, type, ssize);
1965 return;
1966 }
1967 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1968 {
1969 machine_mode outer = GET_MODE (dst);
1970 machine_mode inner;
1971 HOST_WIDE_INT bytepos;
1972 bool done = false;
1973 rtx temp;
1974
1975 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1976 dst = gen_reg_rtx (outer);
1977
1978 /* Make life a bit easier for combine. */
1979 /* If the first element of the vector is the low part
1980 of the destination mode, use a paradoxical subreg to
1981 initialize the destination. */
1982 if (start < finish)
1983 {
1984 inner = GET_MODE (tmps[start]);
1985 bytepos = subreg_lowpart_offset (inner, outer);
1986 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1987 {
1988 temp = simplify_gen_subreg (outer, tmps[start],
1989 inner, 0);
1990 if (temp)
1991 {
1992 emit_move_insn (dst, temp);
1993 done = true;
1994 start++;
1995 }
1996 }
1997 }
1998
1999 /* If the first element wasn't the low part, try the last. */
2000 if (!done
2001 && start < finish - 1)
2002 {
2003 inner = GET_MODE (tmps[finish - 1]);
2004 bytepos = subreg_lowpart_offset (inner, outer);
2005 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2006 {
2007 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2008 inner, 0);
2009 if (temp)
2010 {
2011 emit_move_insn (dst, temp);
2012 done = true;
2013 finish--;
2014 }
2015 }
2016 }
2017
2018 /* Otherwise, simply initialize the result to zero. */
2019 if (!done)
2020 emit_move_insn (dst, CONST0_RTX (outer));
2021 }
2022
2023 /* Process the pieces. */
2024 for (i = start; i < finish; i++)
2025 {
2026 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2027 machine_mode mode = GET_MODE (tmps[i]);
2028 unsigned int bytelen = GET_MODE_SIZE (mode);
2029 unsigned int adj_bytelen;
2030 rtx dest = dst;
2031
2032 /* Handle trailing fragments that run over the size of the struct. */
2033 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2034 adj_bytelen = ssize - bytepos;
2035 else
2036 adj_bytelen = bytelen;
2037
2038 if (GET_CODE (dst) == CONCAT)
2039 {
2040 if (bytepos + adj_bytelen
2041 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2042 dest = XEXP (dst, 0);
2043 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2044 {
2045 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2046 dest = XEXP (dst, 1);
2047 }
2048 else
2049 {
2050 machine_mode dest_mode = GET_MODE (dest);
2051 machine_mode tmp_mode = GET_MODE (tmps[i]);
2052
2053 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2054
2055 if (GET_MODE_ALIGNMENT (dest_mode)
2056 >= GET_MODE_ALIGNMENT (tmp_mode))
2057 {
2058 dest = assign_stack_temp (dest_mode,
2059 GET_MODE_SIZE (dest_mode));
2060 emit_move_insn (adjust_address (dest,
2061 tmp_mode,
2062 bytepos),
2063 tmps[i]);
2064 dst = dest;
2065 }
2066 else
2067 {
2068 dest = assign_stack_temp (tmp_mode,
2069 GET_MODE_SIZE (tmp_mode));
2070 emit_move_insn (dest, tmps[i]);
2071 dst = adjust_address (dest, dest_mode, bytepos);
2072 }
2073 break;
2074 }
2075 }
2076
2077 /* Handle trailing fragments that run over the size of the struct. */
2078 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2079 {
2080 /* store_bit_field always takes its value from the lsb.
2081 Move the fragment to the lsb if it's not already there. */
2082 if (
2083 #ifdef BLOCK_REG_PADDING
2084 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2085 == (BYTES_BIG_ENDIAN ? upward : downward)
2086 #else
2087 BYTES_BIG_ENDIAN
2088 #endif
2089 )
2090 {
2091 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2092 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2093 shift, tmps[i], 0);
2094 }
2095
2096 /* Make sure not to write past the end of the struct. */
2097 store_bit_field (dest,
2098 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2099 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2100 VOIDmode, tmps[i]);
2101 }
2102
2103 /* Optimize the access just a bit. */
2104 else if (MEM_P (dest)
2105 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2106 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2107 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2108 && bytelen == GET_MODE_SIZE (mode))
2109 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2110
2111 else
2112 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2113 0, 0, mode, tmps[i]);
2114 }
2115
2116 /* Copy from the pseudo into the (probable) hard reg. */
2117 if (orig_dst != dst)
2118 emit_move_insn (orig_dst, dst);
2119 }
2120
2121 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2122 of the value stored in X. */
2123
2124 rtx
2125 maybe_emit_group_store (rtx x, tree type)
2126 {
2127 machine_mode mode = TYPE_MODE (type);
2128 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2129 if (GET_CODE (x) == PARALLEL)
2130 {
2131 rtx result = gen_reg_rtx (mode);
2132 emit_group_store (result, x, type, int_size_in_bytes (type));
2133 return result;
2134 }
2135 return x;
2136 }
2137
2138 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2139
2140 This is used on targets that return BLKmode values in registers. */
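/* A worked example of the padding correction computed below, under
   assumed (not target-specific) parameters: for a 6-byte structure on
   a target with UNITS_PER_WORD == 4, bytes % UNITS_PER_WORD == 2, so
   when the value is padded on the left PADDING_CORRECTION becomes
   BITS_PER_WORD - 2 * BITS_PER_UNIT == 32 - 16 == 16 and the copy loop
   skips the first 16 bits of SRCREG.  */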
2141
2142 void
2143 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2144 {
2145 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2146 rtx src = NULL, dst = NULL;
2147 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2148 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2149 machine_mode mode = GET_MODE (srcreg);
2150 machine_mode tmode = GET_MODE (target);
2151 machine_mode copy_mode;
2152
2153 /* BLKmode registers created in the back-end shouldn't have survived. */
2154 gcc_assert (mode != BLKmode);
2155
2156 /* If the structure doesn't take up a whole number of words, see whether
2157 SRCREG is padded on the left or on the right. If it's on the left,
2158 set PADDING_CORRECTION to the number of bits to skip.
2159
2160 In most ABIs, the structure will be returned at the least significant end of
2161 the register, which translates to right padding on little-endian
2162 targets and left padding on big-endian targets. The opposite
2163 holds if the structure is returned at the most significant
2164 end of the register. */
2165 if (bytes % UNITS_PER_WORD != 0
2166 && (targetm.calls.return_in_msb (type)
2167 ? !BYTES_BIG_ENDIAN
2168 : BYTES_BIG_ENDIAN))
2169 padding_correction
2170 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2171
2172 /* We can use a single move if we have an exact mode for the size. */
2173 else if (MEM_P (target)
2174 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2175 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2176 && bytes == GET_MODE_SIZE (mode))
2177 {
2178 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2179 return;
2180 }
2181
2182 /* Likewise if the target is a register of the same mode. */
2183 else if (REG_P (target)
2184 && GET_MODE (target) == mode
2185 && bytes == GET_MODE_SIZE (mode))
2186 {
2187 emit_move_insn (target, srcreg);
2188 return;
2189 }
2190
2191 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2192 into a new pseudo which is a full word. */
2193 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2194 {
2195 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2196 mode = word_mode;
2197 }
2198
2199 /* Copy the structure BITSIZE bits at a time. If the target lives in
2200 memory, take care of not reading/writing past its end by selecting
2201 a copy mode suited to BITSIZE. This should always be possible given
2202 how it is computed.
2203
2204 If the target lives in a register, make sure not to select a copy mode
2205 larger than the mode of the register.
2206
2207 We could probably emit more efficient code for machines which do not use
2208 strict alignment, but it doesn't seem worth the effort at the current
2209 time. */
2210
2211 copy_mode = word_mode;
2212 if (MEM_P (target))
2213 {
2214 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2215 if (mem_mode != BLKmode)
2216 copy_mode = mem_mode;
2217 }
2218 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2219 copy_mode = tmode;
2220
2221 for (bitpos = 0, xbitpos = padding_correction;
2222 bitpos < bytes * BITS_PER_UNIT;
2223 bitpos += bitsize, xbitpos += bitsize)
2224 {
2225 /* We need a new source operand each time xbitpos is on a
2226 word boundary and when xbitpos == padding_correction
2227 (the first time through). */
2228 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2229 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2230
2231 /* We need a new destination operand each time bitpos is on
2232 a word boundary. */
2233 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2234 dst = target;
2235 else if (bitpos % BITS_PER_WORD == 0)
2236 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2237
2238 /* Use xbitpos for the source extraction (right justified) and
2239 bitpos for the destination store (left justified). */
2240 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2241 extract_bit_field (src, bitsize,
2242 xbitpos % BITS_PER_WORD, 1,
2243 NULL_RTX, copy_mode, copy_mode));
2244 }
2245 }
2246
2247 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2248 register if it contains any data, otherwise return null.
2249
2250 This is used on targets that return BLKmode values in registers. */
2251
2252 rtx
2253 copy_blkmode_to_reg (machine_mode mode, tree src)
2254 {
2255 int i, n_regs;
2256 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2257 unsigned int bitsize;
2258 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2259 machine_mode dst_mode;
2260
2261 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2262
2263 x = expand_normal (src);
2264
2265 bytes = int_size_in_bytes (TREE_TYPE (src));
2266 if (bytes == 0)
2267 return NULL_RTX;
2268
2269 /* If the structure doesn't take up a whole number of words, see
2270 whether the register value should be padded on the left or on
2271 the right. Set PADDING_CORRECTION to the number of padding
2272 bits needed on the left side.
2273
2274 In most ABIs, the structure will be returned at the least significant end of
2275 the register, which translates to right padding on little-endian
2276 targets and left padding on big-endian targets. The opposite
2277 holds if the structure is returned at the most significant
2278 end of the register. */
2279 if (bytes % UNITS_PER_WORD != 0
2280 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2281 ? !BYTES_BIG_ENDIAN
2282 : BYTES_BIG_ENDIAN))
2283 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2284 * BITS_PER_UNIT));
2285
2286 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2287 dst_words = XALLOCAVEC (rtx, n_regs);
2288 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2289
2290 /* Copy the structure BITSIZE bits at a time. */
2291 for (bitpos = 0, xbitpos = padding_correction;
2292 bitpos < bytes * BITS_PER_UNIT;
2293 bitpos += bitsize, xbitpos += bitsize)
2294 {
2295 /* We need a new destination pseudo each time xbitpos is
2296 on a word boundary and when xbitpos == padding_correction
2297 (the first time through). */
2298 if (xbitpos % BITS_PER_WORD == 0
2299 || xbitpos == padding_correction)
2300 {
2301 /* Generate an appropriate register. */
2302 dst_word = gen_reg_rtx (word_mode);
2303 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2304
2305 /* Clear the destination before we move anything into it. */
2306 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2307 }
2308
2309 /* We need a new source operand each time bitpos is on a word
2310 boundary. */
2311 if (bitpos % BITS_PER_WORD == 0)
2312 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2313
2314 /* Use bitpos for the source extraction (left justified) and
2315 xbitpos for the destination store (right justified). */
2316 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2317 0, 0, word_mode,
2318 extract_bit_field (src_word, bitsize,
2319 bitpos % BITS_PER_WORD, 1,
2320 NULL_RTX, word_mode, word_mode));
2321 }
2322
2323 if (mode == BLKmode)
2324 {
2325 /* Find the smallest integer mode large enough to hold the
2326 entire structure. */
2327 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2328 mode != VOIDmode;
2329 mode = GET_MODE_WIDER_MODE (mode))
2330 /* Have we found a large enough mode? */
2331 if (GET_MODE_SIZE (mode) >= bytes)
2332 break;
2333
2334 /* A suitable mode should have been found. */
2335 gcc_assert (mode != VOIDmode);
2336 }
2337
2338 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2339 dst_mode = word_mode;
2340 else
2341 dst_mode = mode;
2342 dst = gen_reg_rtx (dst_mode);
2343
2344 for (i = 0; i < n_regs; i++)
2345 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2346
2347 if (mode != dst_mode)
2348 dst = gen_lowpart (mode, dst);
2349
2350 return dst;
2351 }
2352
2353 /* Add a USE expression for REG to the (possibly empty) list pointed
2354 to by CALL_FUSAGE. REG must denote a hard register. */
2355
2356 void
2357 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2358 {
2359 gcc_assert (REG_P (reg));
2360
2361 if (!HARD_REGISTER_P (reg))
2362 return;
2363
2364 *call_fusage
2365 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2366 }
2367
2368 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2369 to by CALL_FUSAGE. REG must denote a hard register. */
2370
2371 void
2372 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2373 {
2374 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2375
2376 *call_fusage
2377 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2378 }
2379
2380 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2381 starting at REGNO. All of these registers must be hard registers. */
2382
2383 void
2384 use_regs (rtx *call_fusage, int regno, int nregs)
2385 {
2386 int i;
2387
2388 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2389
2390 for (i = 0; i < nregs; i++)
2391 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2392 }
2393
2394 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2395 PARALLEL REGS. This is for calls that pass values in multiple
2396 non-contiguous locations. The Irix 6 ABI has examples of this. */
2397
2398 void
2399 use_group_regs (rtx *call_fusage, rtx regs)
2400 {
2401 int i;
2402
2403 for (i = 0; i < XVECLEN (regs, 0); i++)
2404 {
2405 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2406
2407 /* A NULL entry means the parameter goes both on the stack and in
2408 registers. This can also be a MEM for targets that pass values
2409 partially on the stack and partially in registers. */
2410 if (reg != 0 && REG_P (reg))
2411 use_reg (call_fusage, reg);
2412 }
2413 }
2414
2415 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2416 assignment and the code of the expression on the RHS is CODE. Return
2417 NULL otherwise. */
2418
2419 static gimple
2420 get_def_for_expr (tree name, enum tree_code code)
2421 {
2422 gimple def_stmt;
2423
2424 if (TREE_CODE (name) != SSA_NAME)
2425 return NULL;
2426
2427 def_stmt = get_gimple_for_ssa_name (name);
2428 if (!def_stmt
2429 || gimple_assign_rhs_code (def_stmt) != code)
2430 return NULL;
2431
2432 return def_stmt;
2433 }
2434
2435 #ifdef HAVE_conditional_move
2436 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2437 assignment and the class of the expression on the RHS is CLASS. Return
2438 NULL otherwise. */
2439
2440 static gimple
2441 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2442 {
2443 gimple def_stmt;
2444
2445 if (TREE_CODE (name) != SSA_NAME)
2446 return NULL;
2447
2448 def_stmt = get_gimple_for_ssa_name (name);
2449 if (!def_stmt
2450 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2451 return NULL;
2452
2453 return def_stmt;
2454 }
2455 #endif
2456 \f
2457
2458 /* Determine whether the LEN bytes generated by CONSTFUN can be
2459 stored to memory using several move instructions. CONSTFUNDATA is
2460 a pointer which will be passed as argument in every CONSTFUN call.
2461 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2462 a memset operation and false if it's a copy of a constant string.
2463 Return nonzero if a call to store_by_pieces should succeed. */
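/* A minimal CONSTFUN sketch, for illustration only; the function name
   below is made up, but it mirrors the string-reading callbacks used
   by the string and memory builtins, with c_readstr turning host bytes
   into an rtx constant of the requested mode:

     static rtx
     example_read_str (void *data, HOST_WIDE_INT offset, machine_mode mode)
     {
       return c_readstr ((const char *) data + offset, mode);
     }

   A caller would pass the host string as CONSTFUNDATA here to check
   whether the whole copy can then be expanded by store_by_pieces.  */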
2464
2465 int
2466 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2467 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2468 void *constfundata, unsigned int align, bool memsetp)
2469 {
2470 unsigned HOST_WIDE_INT l;
2471 unsigned int max_size;
2472 HOST_WIDE_INT offset = 0;
2473 machine_mode mode;
2474 enum insn_code icode;
2475 int reverse;
2476 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2477 rtx cst ATTRIBUTE_UNUSED;
2478
2479 if (len == 0)
2480 return 1;
2481
2482 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2483 memsetp
2484 ? SET_BY_PIECES
2485 : STORE_BY_PIECES,
2486 optimize_insn_for_speed_p ()))
2487 return 0;
2488
2489 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2490
2491 /* We would first store what we can in the largest integer mode, then go to
2492 successively smaller modes. */
2493
2494 for (reverse = 0;
2495 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2496 reverse++)
2497 {
2498 l = len;
2499 max_size = STORE_MAX_PIECES + 1;
2500 while (max_size > 1 && l > 0)
2501 {
2502 mode = widest_int_mode_for_size (max_size);
2503
2504 if (mode == VOIDmode)
2505 break;
2506
2507 icode = optab_handler (mov_optab, mode);
2508 if (icode != CODE_FOR_nothing
2509 && align >= GET_MODE_ALIGNMENT (mode))
2510 {
2511 unsigned int size = GET_MODE_SIZE (mode);
2512
2513 while (l >= size)
2514 {
2515 if (reverse)
2516 offset -= size;
2517
2518 cst = (*constfun) (constfundata, offset, mode);
2519 if (!targetm.legitimate_constant_p (mode, cst))
2520 return 0;
2521
2522 if (!reverse)
2523 offset += size;
2524
2525 l -= size;
2526 }
2527 }
2528
2529 max_size = GET_MODE_SIZE (mode);
2530 }
2531
2532 /* The code above should have handled everything. */
2533 gcc_assert (!l);
2534 }
2535
2536 return 1;
2537 }
2538
2539 /* Generate several move instructions to store LEN bytes generated by
2540 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2541 pointer which will be passed as argument in every CONSTFUN call.
2542 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2543 a memset operation and false if it's a copy of a constant string.
2544 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2545 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
2546 stpcpy. */
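/* For example (sizes chosen arbitrarily): after storing LEN == 5 bytes
   starting at TO, ENDP == 0 returns TO itself, ENDP == 1 returns a
   QImode MEM addressing byte TO + 5 (the mempcpy convention), and
   ENDP == 2 returns a QImode MEM addressing byte TO + 4, the last byte
   written (the stpcpy convention).  */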
2547
2548 rtx
2549 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2550 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2551 void *constfundata, unsigned int align, bool memsetp, int endp)
2552 {
2553 machine_mode to_addr_mode = get_address_mode (to);
2554 struct store_by_pieces_d data;
2555
2556 if (len == 0)
2557 {
2558 gcc_assert (endp != 2);
2559 return to;
2560 }
2561
2562 gcc_assert (targetm.use_by_pieces_infrastructure_p
2563 (len, align,
2564 memsetp
2565 ? SET_BY_PIECES
2566 : STORE_BY_PIECES,
2567 optimize_insn_for_speed_p ()));
2568
2569 data.constfun = constfun;
2570 data.constfundata = constfundata;
2571 data.len = len;
2572 data.to = to;
2573 store_by_pieces_1 (&data, align);
2574 if (endp)
2575 {
2576 rtx to1;
2577
2578 gcc_assert (!data.reverse);
2579 if (data.autinc_to)
2580 {
2581 if (endp == 2)
2582 {
2583 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2584 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2585 else
2586 data.to_addr = copy_to_mode_reg (to_addr_mode,
2587 plus_constant (to_addr_mode,
2588 data.to_addr,
2589 -1));
2590 }
2591 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2592 data.offset);
2593 }
2594 else
2595 {
2596 if (endp == 2)
2597 --data.offset;
2598 to1 = adjust_address (data.to, QImode, data.offset);
2599 }
2600 return to1;
2601 }
2602 else
2603 return data.to;
2604 }
2605
2606 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2607 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2608
2609 static void
2610 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2611 {
2612 struct store_by_pieces_d data;
2613
2614 if (len == 0)
2615 return;
2616
2617 data.constfun = clear_by_pieces_1;
2618 data.constfundata = NULL;
2619 data.len = len;
2620 data.to = to;
2621 store_by_pieces_1 (&data, align);
2622 }
2623
2624 /* Callback routine for clear_by_pieces.
2625 Return const0_rtx unconditionally. */
2626
2627 static rtx
2628 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2629 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2630 machine_mode mode ATTRIBUTE_UNUSED)
2631 {
2632 return const0_rtx;
2633 }
2634
2635 /* Subroutine of clear_by_pieces and store_by_pieces.
2636 Generate several move instructions to store LEN bytes of block TO. (A MEM
2637 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2638
2639 static void
2640 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2641 unsigned int align ATTRIBUTE_UNUSED)
2642 {
2643 machine_mode to_addr_mode = get_address_mode (data->to);
2644 rtx to_addr = XEXP (data->to, 0);
2645 unsigned int max_size = STORE_MAX_PIECES + 1;
2646 enum insn_code icode;
2647
2648 data->offset = 0;
2649 data->to_addr = to_addr;
2650 data->autinc_to
2651 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2652 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2653
2654 data->explicit_inc_to = 0;
2655 data->reverse
2656 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2657 if (data->reverse)
2658 data->offset = data->len;
2659
2660 /* If storing requires more than two move insns,
2661 copy addresses to registers (to make displacements shorter)
2662 and use post-increment if available. */
2663 if (!data->autinc_to
2664 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2665 {
2666 /* Determine the main mode we'll be using.
2667 MODE might not be used depending on the definitions of the
2668 USE_* macros below. */
2669 machine_mode mode ATTRIBUTE_UNUSED
2670 = widest_int_mode_for_size (max_size);
2671
2672 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2673 {
2674 data->to_addr = copy_to_mode_reg (to_addr_mode,
2675 plus_constant (to_addr_mode,
2676 to_addr,
2677 data->len));
2678 data->autinc_to = 1;
2679 data->explicit_inc_to = -1;
2680 }
2681
2682 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2683 && ! data->autinc_to)
2684 {
2685 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2686 data->autinc_to = 1;
2687 data->explicit_inc_to = 1;
2688 }
2689
2690 if ( !data->autinc_to && CONSTANT_P (to_addr))
2691 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2692 }
2693
2694 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2695
2696 /* First store what we can in the largest integer mode, then go to
2697 successively smaller modes. */
2698
2699 while (max_size > 1 && data->len > 0)
2700 {
2701 machine_mode mode = widest_int_mode_for_size (max_size);
2702
2703 if (mode == VOIDmode)
2704 break;
2705
2706 icode = optab_handler (mov_optab, mode);
2707 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2708 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2709
2710 max_size = GET_MODE_SIZE (mode);
2711 }
2712
2713 /* The code above should have handled everything. */
2714 gcc_assert (!data->len);
2715 }
2716
2717 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2718 with move instructions for mode MODE. GENFUN is the gen_... function
2719 to make a move insn for that mode. DATA has all the other info. */
2720
2721 static void
2722 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2723 struct store_by_pieces_d *data)
2724 {
2725 unsigned int size = GET_MODE_SIZE (mode);
2726 rtx to1, cst;
2727
2728 while (data->len >= size)
2729 {
2730 if (data->reverse)
2731 data->offset -= size;
2732
2733 if (data->autinc_to)
2734 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2735 data->offset);
2736 else
2737 to1 = adjust_address (data->to, mode, data->offset);
2738
2739 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2740 emit_insn (gen_add2_insn (data->to_addr,
2741 gen_int_mode (-(HOST_WIDE_INT) size,
2742 GET_MODE (data->to_addr))));
2743
2744 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2745 emit_insn ((*genfun) (to1, cst));
2746
2747 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2748 emit_insn (gen_add2_insn (data->to_addr,
2749 gen_int_mode (size,
2750 GET_MODE (data->to_addr))));
2751
2752 if (! data->reverse)
2753 data->offset += size;
2754
2755 data->len -= size;
2756 }
2757 }
2758 \f
2759 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2760 its length in bytes. */
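/* A minimal usage sketch -- the 32-byte length is arbitrary and OBJECT
   stands for some BLKmode MEM: the plain clear_storage wrapper below
   supplies default hints, so

     clear_storage (object, GEN_INT (32), BLOCK_OP_NORMAL);

   zeros 32 bytes through OBJECT, with this function choosing between
   clear_by_pieces, a setmem pattern and a memset libcall.  */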
2761
2762 rtx
2763 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2764 unsigned int expected_align, HOST_WIDE_INT expected_size,
2765 unsigned HOST_WIDE_INT min_size,
2766 unsigned HOST_WIDE_INT max_size,
2767 unsigned HOST_WIDE_INT probable_max_size)
2768 {
2769 machine_mode mode = GET_MODE (object);
2770 unsigned int align;
2771
2772 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2773
2774 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2775 just move a zero. Otherwise, do this a piece at a time. */
2776 if (mode != BLKmode
2777 && CONST_INT_P (size)
2778 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2779 {
2780 rtx zero = CONST0_RTX (mode);
2781 if (zero != NULL)
2782 {
2783 emit_move_insn (object, zero);
2784 return NULL;
2785 }
2786
2787 if (COMPLEX_MODE_P (mode))
2788 {
2789 zero = CONST0_RTX (GET_MODE_INNER (mode));
2790 if (zero != NULL)
2791 {
2792 write_complex_part (object, zero, 0);
2793 write_complex_part (object, zero, 1);
2794 return NULL;
2795 }
2796 }
2797 }
2798
2799 if (size == const0_rtx)
2800 return NULL;
2801
2802 align = MEM_ALIGN (object);
2803
2804 if (CONST_INT_P (size)
2805 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2806 CLEAR_BY_PIECES,
2807 optimize_insn_for_speed_p ()))
2808 clear_by_pieces (object, INTVAL (size), align);
2809 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2810 expected_align, expected_size,
2811 min_size, max_size, probable_max_size))
2812 ;
2813 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2814 return set_storage_via_libcall (object, size, const0_rtx,
2815 method == BLOCK_OP_TAILCALL);
2816 else
2817 gcc_unreachable ();
2818
2819 return NULL;
2820 }
2821
2822 rtx
2823 clear_storage (rtx object, rtx size, enum block_op_methods method)
2824 {
2825 unsigned HOST_WIDE_INT max, min = 0;
2826 if (GET_CODE (size) == CONST_INT)
2827 min = max = UINTVAL (size);
2828 else
2829 max = GET_MODE_MASK (GET_MODE (size));
2830 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2831 }
2832
2833
2834 /* A subroutine of clear_storage. Expand a call to memset.
2835 Return the return value of memset, 0 otherwise. */
2836
2837 rtx
2838 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2839 {
2840 tree call_expr, fn, object_tree, size_tree, val_tree;
2841 machine_mode size_mode;
2842 rtx retval;
2843
2844 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2845 place those pseudos into a VAR_DECL and use them later. */
2846
2847 object = copy_addr_to_reg (XEXP (object, 0));
2848
2849 size_mode = TYPE_MODE (sizetype);
2850 size = convert_to_mode (size_mode, size, 1);
2851 size = copy_to_mode_reg (size_mode, size);
2852
2853 /* It is incorrect to use the libcall calling conventions to call
2854 memset in this context. This could be a user call to memset and
2855 the user may wish to examine the return value from memset. For
2856 targets where libcalls and normal calls have different conventions
2857 for returning pointers, we could end up generating incorrect code. */
2858
2859 object_tree = make_tree (ptr_type_node, object);
2860 if (!CONST_INT_P (val))
2861 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2862 size_tree = make_tree (sizetype, size);
2863 val_tree = make_tree (integer_type_node, val);
2864
2865 fn = clear_storage_libcall_fn (true);
2866 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2867 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2868
2869 retval = expand_normal (call_expr);
2870
2871 return retval;
2872 }
2873
2874 /* A subroutine of set_storage_via_libcall. Create the tree node
2875 for the function we use for block clears. */
2876
2877 tree block_clear_fn;
2878
2879 void
2880 init_block_clear_fn (const char *asmspec)
2881 {
2882 if (!block_clear_fn)
2883 {
2884 tree fn, args;
2885
2886 fn = get_identifier ("memset");
2887 args = build_function_type_list (ptr_type_node, ptr_type_node,
2888 integer_type_node, sizetype,
2889 NULL_TREE);
2890
2891 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2892 DECL_EXTERNAL (fn) = 1;
2893 TREE_PUBLIC (fn) = 1;
2894 DECL_ARTIFICIAL (fn) = 1;
2895 TREE_NOTHROW (fn) = 1;
2896 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2897 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2898
2899 block_clear_fn = fn;
2900 }
2901
2902 if (asmspec)
2903 set_user_assembler_name (block_clear_fn, asmspec);
2904 }
2905
2906 static tree
2907 clear_storage_libcall_fn (int for_call)
2908 {
2909 static bool emitted_extern;
2910
2911 if (!block_clear_fn)
2912 init_block_clear_fn (NULL);
2913
2914 if (for_call && !emitted_extern)
2915 {
2916 emitted_extern = true;
2917 make_decl_rtl (block_clear_fn);
2918 }
2919
2920 return block_clear_fn;
2921 }
2922 \f
2923 /* Expand a setmem pattern; return true if successful. */
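/* The operand counts checked below correspond to the optional forms of
   a setmem pattern: 4 operands (destination, length, value, alignment),
   6 when the expected-alignment and expected-size hints are accepted,
   and 8 or 9 when the minimum, maximum and probable-maximum length
   hints are accepted as well.  This only summarizes the ops[] setup
   that follows.  */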
2924
2925 bool
2926 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2927 unsigned int expected_align, HOST_WIDE_INT expected_size,
2928 unsigned HOST_WIDE_INT min_size,
2929 unsigned HOST_WIDE_INT max_size,
2930 unsigned HOST_WIDE_INT probable_max_size)
2931 {
2932 /* Try the most limited insn first, because there's no point
2933 including more than one in the machine description unless
2934 the more limited one has some advantage. */
2935
2936 machine_mode mode;
2937
2938 if (expected_align < align)
2939 expected_align = align;
2940 if (expected_size != -1)
2941 {
2942 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2943 expected_size = max_size;
2944 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2945 expected_size = min_size;
2946 }
2947
2948 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2949 mode = GET_MODE_WIDER_MODE (mode))
2950 {
2951 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2952
2953 if (code != CODE_FOR_nothing
2954 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2955 here because if SIZE is less than the mode mask, as it is
2956 returned by the macro, it will definitely be less than the
2957 actual mode mask. Since SIZE is within the Pmode address
2958 space, we limit MODE to Pmode. */
2959 && ((CONST_INT_P (size)
2960 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2961 <= (GET_MODE_MASK (mode) >> 1)))
2962 || max_size <= (GET_MODE_MASK (mode) >> 1)
2963 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2964 {
2965 struct expand_operand ops[9];
2966 unsigned int nops;
2967
2968 nops = insn_data[(int) code].n_generator_args;
2969 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2970
2971 create_fixed_operand (&ops[0], object);
2972 /* The check above guarantees that this size conversion is valid. */
2973 create_convert_operand_to (&ops[1], size, mode, true);
2974 create_convert_operand_from (&ops[2], val, byte_mode, true);
2975 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2976 if (nops >= 6)
2977 {
2978 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2979 create_integer_operand (&ops[5], expected_size);
2980 }
2981 if (nops >= 8)
2982 {
2983 create_integer_operand (&ops[6], min_size);
2984 /* If we cannot represent the maximal size,
2985 make the parameter NULL. */
2986 if ((HOST_WIDE_INT) max_size != -1)
2987 create_integer_operand (&ops[7], max_size);
2988 else
2989 create_fixed_operand (&ops[7], NULL);
2990 }
2991 if (nops == 9)
2992 {
2993 /* If we cannot represent the maximal size,
2994 make the parameter NULL. */
2995 if ((HOST_WIDE_INT) probable_max_size != -1)
2996 create_integer_operand (&ops[8], probable_max_size);
2997 else
2998 create_fixed_operand (&ops[8], NULL);
2999 }
3000 if (maybe_expand_insn (code, nops, ops))
3001 return true;
3002 }
3003 }
3004
3005 return false;
3006 }
3007
3008 \f
3009 /* Write to one of the components of the complex value CPLX. Write VAL to
3010 the real part if IMAG_P is false, and the imaginary part if it's true. */
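/* For instance, when CPLX is a CONCAT of two SFmode registers (a
   hypothetical SCmode value), writing the imaginary part is simply a
   move into XEXP (cplx, 1); the store_bit_field fallback at the end is
   only needed when no suitable MEM adjustment or subreg exists.  */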
3011
3012 void
3013 write_complex_part (rtx cplx, rtx val, bool imag_p)
3014 {
3015 machine_mode cmode;
3016 machine_mode imode;
3017 unsigned ibitsize;
3018
3019 if (GET_CODE (cplx) == CONCAT)
3020 {
3021 emit_move_insn (XEXP (cplx, imag_p), val);
3022 return;
3023 }
3024
3025 cmode = GET_MODE (cplx);
3026 imode = GET_MODE_INNER (cmode);
3027 ibitsize = GET_MODE_BITSIZE (imode);
3028
3029 /* For MEMs simplify_gen_subreg may generate an invalid new address
3030 because, e.g., the original address is considered mode-dependent
3031 by the target, which restricts simplify_subreg from invoking
3032 adjust_address_nv. Instead of preparing fallback support for an
3033 invalid address, we call adjust_address_nv directly. */
3034 if (MEM_P (cplx))
3035 {
3036 emit_move_insn (adjust_address_nv (cplx, imode,
3037 imag_p ? GET_MODE_SIZE (imode) : 0),
3038 val);
3039 return;
3040 }
3041
3042 /* If the sub-object is at least word sized, then we know that subregging
3043 will work. This special case is important, since store_bit_field
3044 wants to operate on integer modes, and there's rarely an OImode to
3045 correspond to TCmode. */
3046 if (ibitsize >= BITS_PER_WORD
3047 /* For hard regs we have exact predicates. Assume we can split
3048 the original object if it spans an even number of hard regs.
3049 This special case is important for SCmode on 64-bit platforms
3050 where the natural size of floating-point regs is 32-bit. */
3051 || (REG_P (cplx)
3052 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3053 && REG_NREGS (cplx) % 2 == 0))
3054 {
3055 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3056 imag_p ? GET_MODE_SIZE (imode) : 0);
3057 if (part)
3058 {
3059 emit_move_insn (part, val);
3060 return;
3061 }
3062 else
3063 /* simplify_gen_subreg may fail for sub-word MEMs. */
3064 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3065 }
3066
3067 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3068 }
3069
3070 /* Extract one of the components of the complex value CPLX. Extract the
3071 real part if IMAG_P is false, and the imaginary part if it's true. */
3072
3073 static rtx
3074 read_complex_part (rtx cplx, bool imag_p)
3075 {
3076 machine_mode cmode, imode;
3077 unsigned ibitsize;
3078
3079 if (GET_CODE (cplx) == CONCAT)
3080 return XEXP (cplx, imag_p);
3081
3082 cmode = GET_MODE (cplx);
3083 imode = GET_MODE_INNER (cmode);
3084 ibitsize = GET_MODE_BITSIZE (imode);
3085
3086 /* Special case reads from complex constants that got spilled to memory. */
3087 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3088 {
3089 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3090 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3091 {
3092 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3093 if (CONSTANT_CLASS_P (part))
3094 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3095 }
3096 }
3097
3098 /* For MEMs simplify_gen_subreg may generate an invalid new address
3099 because, e.g., the original address is considered mode-dependent
3100 by the target, which restricts simplify_subreg from invoking
3101 adjust_address_nv. Instead of preparing fallback support for an
3102 invalid address, we call adjust_address_nv directly. */
3103 if (MEM_P (cplx))
3104 return adjust_address_nv (cplx, imode,
3105 imag_p ? GET_MODE_SIZE (imode) : 0);
3106
3107 /* If the sub-object is at least word sized, then we know that subregging
3108 will work. This special case is important, since extract_bit_field
3109 wants to operate on integer modes, and there's rarely an OImode to
3110 correspond to TCmode. */
3111 if (ibitsize >= BITS_PER_WORD
3112 /* For hard regs we have exact predicates. Assume we can split
3113 the original object if it spans an even number of hard regs.
3114 This special case is important for SCmode on 64-bit platforms
3115 where the natural size of floating-point regs is 32-bit. */
3116 || (REG_P (cplx)
3117 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3118 && REG_NREGS (cplx) % 2 == 0))
3119 {
3120 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3121 imag_p ? GET_MODE_SIZE (imode) : 0);
3122 if (ret)
3123 return ret;
3124 else
3125 /* simplify_gen_subreg may fail for sub-word MEMs. */
3126 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3127 }
3128
3129 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3130 true, NULL_RTX, imode, imode);
3131 }
3132 \f
3133 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3134 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3135 represented in NEW_MODE. If FORCE is true, this will never happen, as
3136 we'll force-create a SUBREG if needed. */
3137
3138 static rtx
3139 emit_move_change_mode (machine_mode new_mode,
3140 machine_mode old_mode, rtx x, bool force)
3141 {
3142 rtx ret;
3143
3144 if (push_operand (x, GET_MODE (x)))
3145 {
3146 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3147 MEM_COPY_ATTRIBUTES (ret, x);
3148 }
3149 else if (MEM_P (x))
3150 {
3151 /* We don't have to worry about changing the address since the
3152 size in bytes is supposed to be the same. */
3153 if (reload_in_progress)
3154 {
3155 /* Copy the MEM to change the mode and move any
3156 substitutions from the old MEM to the new one. */
3157 ret = adjust_address_nv (x, new_mode, 0);
3158 copy_replacements (x, ret);
3159 }
3160 else
3161 ret = adjust_address (x, new_mode, 0);
3162 }
3163 else
3164 {
3165 /* Note that we do want simplify_subreg's behavior of validating
3166 that the new mode is ok for a hard register. If we were to use
3167 simplify_gen_subreg, we would create the subreg, but would
3168 probably run into the target not being able to implement it. */
3169 /* Except, of course, when FORCE is true, when this is exactly what
3170 we want. Which is needed for CCmodes on some targets. */
3171 if (force)
3172 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3173 else
3174 ret = simplify_subreg (new_mode, x, old_mode, 0);
3175 }
3176
3177 return ret;
3178 }
3179
3180 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3181 an integer mode of the same size as MODE. Returns the instruction
3182 emitted, or NULL if such a move could not be generated. */
3183
3184 static rtx_insn *
3185 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3186 {
3187 machine_mode imode;
3188 enum insn_code code;
3189
3190 /* There must exist a mode of the exact size we require. */
3191 imode = int_mode_for_mode (mode);
3192 if (imode == BLKmode)
3193 return NULL;
3194
3195 /* The target must support moves in this mode. */
3196 code = optab_handler (mov_optab, imode);
3197 if (code == CODE_FOR_nothing)
3198 return NULL;
3199
3200 x = emit_move_change_mode (imode, mode, x, force);
3201 if (x == NULL_RTX)
3202 return NULL;
3203 y = emit_move_change_mode (imode, mode, y, force);
3204 if (y == NULL_RTX)
3205 return NULL;
3206 return emit_insn (GEN_FCN (code) (x, y));
3207 }
3208
3209 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3210 Return an equivalent MEM that does not use an auto-increment. */
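/* A worked example under assumed parameters (SImode, 4-byte pushes,
   PUSH_ROUNDING leaving the size unchanged): for
   X == (mem:SI (pre_dec (reg sp))) the code below computes ADJUST == -4,
   adjusts the stack pointer by hand and returns (mem:SI (reg sp)); for
   a post_dec push it would instead return a MEM addressing sp + 4, the
   slot the original auto-decrement form would have written.  */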
3211
3212 rtx
3213 emit_move_resolve_push (machine_mode mode, rtx x)
3214 {
3215 enum rtx_code code = GET_CODE (XEXP (x, 0));
3216 HOST_WIDE_INT adjust;
3217 rtx temp;
3218
3219 adjust = GET_MODE_SIZE (mode);
3220 #ifdef PUSH_ROUNDING
3221 adjust = PUSH_ROUNDING (adjust);
3222 #endif
3223 if (code == PRE_DEC || code == POST_DEC)
3224 adjust = -adjust;
3225 else if (code == PRE_MODIFY || code == POST_MODIFY)
3226 {
3227 rtx expr = XEXP (XEXP (x, 0), 1);
3228 HOST_WIDE_INT val;
3229
3230 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3231 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3232 val = INTVAL (XEXP (expr, 1));
3233 if (GET_CODE (expr) == MINUS)
3234 val = -val;
3235 gcc_assert (adjust == val || adjust == -val);
3236 adjust = val;
3237 }
3238
3239 /* Do not use anti_adjust_stack, since we don't want to update
3240 stack_pointer_delta. */
3241 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3242 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3243 0, OPTAB_LIB_WIDEN);
3244 if (temp != stack_pointer_rtx)
3245 emit_move_insn (stack_pointer_rtx, temp);
3246
3247 switch (code)
3248 {
3249 case PRE_INC:
3250 case PRE_DEC:
3251 case PRE_MODIFY:
3252 temp = stack_pointer_rtx;
3253 break;
3254 case POST_INC:
3255 case POST_DEC:
3256 case POST_MODIFY:
3257 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3258 break;
3259 default:
3260 gcc_unreachable ();
3261 }
3262
3263 return replace_equiv_address (x, temp);
3264 }
3265
3266 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3267 X is known to satisfy push_operand, and MODE is known to be complex.
3268 Returns the last instruction emitted. */
3269
3270 rtx_insn *
3271 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3272 {
3273 machine_mode submode = GET_MODE_INNER (mode);
3274 bool imag_first;
3275
3276 #ifdef PUSH_ROUNDING
3277 unsigned int submodesize = GET_MODE_SIZE (submode);
3278
3279 /* In case we output to the stack, but the size is smaller than the
3280 machine can push exactly, we need to use move instructions. */
3281 if (PUSH_ROUNDING (submodesize) != submodesize)
3282 {
3283 x = emit_move_resolve_push (mode, x);
3284 return emit_move_insn (x, y);
3285 }
3286 #endif
3287
3288 /* Note that the real part always precedes the imag part in memory
3289 regardless of machine's endianness. */
3290 switch (GET_CODE (XEXP (x, 0)))
3291 {
3292 case PRE_DEC:
3293 case POST_DEC:
3294 imag_first = true;
3295 break;
3296 case PRE_INC:
3297 case POST_INC:
3298 imag_first = false;
3299 break;
3300 default:
3301 gcc_unreachable ();
3302 }
3303
3304 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3305 read_complex_part (y, imag_first));
3306 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3307 read_complex_part (y, !imag_first));
3308 }
3309
3310 /* A subroutine of emit_move_complex. Perform the move from Y to X
3311 via two moves of the parts. Returns the last instruction emitted. */
3312
3313 rtx_insn *
3314 emit_move_complex_parts (rtx x, rtx y)
3315 {
3316 /* Show the output dies here. This is necessary for SUBREGs
3317 of pseudos since we cannot track their lifetimes correctly;
3318 hard regs shouldn't appear here except as return values. */
3319 if (!reload_completed && !reload_in_progress
3320 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3321 emit_clobber (x);
3322
3323 write_complex_part (x, read_complex_part (y, false), false);
3324 write_complex_part (x, read_complex_part (y, true), true);
3325
3326 return get_last_insn ();
3327 }
3328
3329 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3330 MODE is known to be complex. Returns the last instruction emitted. */
3331
3332 static rtx_insn *
3333 emit_move_complex (machine_mode mode, rtx x, rtx y)
3334 {
3335 bool try_int;
3336
3337 /* Need to take special care for pushes, to maintain proper ordering
3338 of the data, and possibly extra padding. */
3339 if (push_operand (x, mode))
3340 return emit_move_complex_push (mode, x, y);
3341
3342 /* See if we can coerce the target into moving both values at once, except
3343 for floating point where we favor moving as parts if this is easy. */
3344 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3345 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3346 && !(REG_P (x)
3347 && HARD_REGISTER_P (x)
3348 && REG_NREGS (x) == 1)
3349 && !(REG_P (y)
3350 && HARD_REGISTER_P (y)
3351 && REG_NREGS (y) == 1))
3352 try_int = false;
3353 /* Not possible if the values are inherently not adjacent. */
3354 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3355 try_int = false;
3356 /* Is possible if both are registers (or subregs of registers). */
3357 else if (register_operand (x, mode) && register_operand (y, mode))
3358 try_int = true;
3359 /* If one of the operands is a memory, and alignment constraints
3360 are friendly enough, we may be able to do combined memory operations.
3361 We do not attempt this if Y is a constant because that combination is
3362 usually better with the by-parts thing below. */
3363 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3364 && (!STRICT_ALIGNMENT
3365 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3366 try_int = true;
3367 else
3368 try_int = false;
3369
3370 if (try_int)
3371 {
3372 rtx_insn *ret;
3373
3374 /* For memory to memory moves, optimal behavior can be had with the
3375 existing block move logic. */
3376 if (MEM_P (x) && MEM_P (y))
3377 {
3378 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3379 BLOCK_OP_NO_LIBCALL);
3380 return get_last_insn ();
3381 }
3382
3383 ret = emit_move_via_integer (mode, x, y, true);
3384 if (ret)
3385 return ret;
3386 }
3387
3388 return emit_move_complex_parts (x, y);
3389 }
3390
3391 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3392 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3393
3394 static rtx_insn *
3395 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3396 {
3397 rtx_insn *ret;
3398
3399 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3400 if (mode != CCmode)
3401 {
3402 enum insn_code code = optab_handler (mov_optab, CCmode);
3403 if (code != CODE_FOR_nothing)
3404 {
3405 x = emit_move_change_mode (CCmode, mode, x, true);
3406 y = emit_move_change_mode (CCmode, mode, y, true);
3407 return emit_insn (GEN_FCN (code) (x, y));
3408 }
3409 }
3410
3411 /* Otherwise, find the MODE_INT mode of the same width. */
3412 ret = emit_move_via_integer (mode, x, y, false);
3413 gcc_assert (ret != NULL);
3414 return ret;
3415 }
3416
3417 /* Return true if word I of OP lies entirely in the
3418 undefined bits of a paradoxical subreg. */
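/* For example, in (subreg:TI (reg:DI r) 0) -- a hypothetical
   paradoxical subreg on a little-endian target with 64-bit words --
   word 0 overlaps the DImode register while word 1 lies entirely in
   the undefined upper half, so this predicate returns true for I == 1
   and emit_move_multi_word below skips that word's move.  */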
3419
3420 static bool
3421 undefined_operand_subword_p (const_rtx op, int i)
3422 {
3423 machine_mode innermode, innermostmode;
3424 int offset;
3425 if (GET_CODE (op) != SUBREG)
3426 return false;
3427 innermode = GET_MODE (op);
3428 innermostmode = GET_MODE (SUBREG_REG (op));
3429 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3430 /* The SUBREG_BYTE represents offset, as if the value were stored in
3431 memory, except for a paradoxical subreg where we define
3432 SUBREG_BYTE to be 0; undo this exception as in
3433 simplify_subreg. */
3434 if (SUBREG_BYTE (op) == 0
3435 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3436 {
3437 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3438 if (WORDS_BIG_ENDIAN)
3439 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3440 if (BYTES_BIG_ENDIAN)
3441 offset += difference % UNITS_PER_WORD;
3442 }
3443 if (offset >= GET_MODE_SIZE (innermostmode)
3444 || offset <= -GET_MODE_SIZE (word_mode))
3445 return true;
3446 return false;
3447 }
3448
3449 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3450 MODE is any multi-word or full-word mode that lacks a move_insn
3451 pattern. Note that you will get better code if you define such
3452 patterns, even if they must turn into multiple assembler instructions. */
3453
3454 static rtx_insn *
3455 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3456 {
3457 rtx_insn *last_insn = 0;
3458 rtx_insn *seq;
3459 rtx inner;
3460 bool need_clobber;
3461 int i;
3462
3463 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3464
3465 /* If X is a push on the stack, do the push now and replace
3466 X with a reference to the stack pointer. */
3467 if (push_operand (x, mode))
3468 x = emit_move_resolve_push (mode, x);
3469
3470 /* If we are in reload, see if either operand is a MEM whose address
3471 is scheduled for replacement. */
3472 if (reload_in_progress && MEM_P (x)
3473 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3474 x = replace_equiv_address_nv (x, inner);
3475 if (reload_in_progress && MEM_P (y)
3476 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3477 y = replace_equiv_address_nv (y, inner);
3478
3479 start_sequence ();
3480
3481 need_clobber = false;
3482 for (i = 0;
3483 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3484 i++)
3485 {
3486 rtx xpart = operand_subword (x, i, 1, mode);
3487 rtx ypart;
3488
3489 /* Do not generate code for a move if it would come entirely
3490 from the undefined bits of a paradoxical subreg. */
3491 if (undefined_operand_subword_p (y, i))
3492 continue;
3493
3494 ypart = operand_subword (y, i, 1, mode);
3495
3496 /* If we can't get a part of Y, put Y into memory if it is a
3497 constant. Otherwise, force it into a register. Then we must
3498 be able to get a part of Y. */
3499 if (ypart == 0 && CONSTANT_P (y))
3500 {
3501 y = use_anchored_address (force_const_mem (mode, y));
3502 ypart = operand_subword (y, i, 1, mode);
3503 }
3504 else if (ypart == 0)
3505 ypart = operand_subword_force (y, i, mode);
3506
3507 gcc_assert (xpart && ypart);
3508
3509 need_clobber |= (GET_CODE (xpart) == SUBREG);
3510
3511 last_insn = emit_move_insn (xpart, ypart);
3512 }
3513
3514 seq = get_insns ();
3515 end_sequence ();
3516
3517 /* Show the output dies here. This is necessary for SUBREGs
3518 of pseudos since we cannot track their lifetimes correctly;
3519 hard regs shouldn't appear here except as return values.
3520 We never want to emit such a clobber after reload. */
3521 if (x != y
3522 && ! (reload_in_progress || reload_completed)
3523 && need_clobber != 0)
3524 emit_clobber (x);
3525
3526 emit_insn (seq);
3527
3528 return last_insn;
3529 }
3530
3531 /* Low level part of emit_move_insn.
3532 Called just like emit_move_insn, but assumes X and Y
3533 are basically valid. */
3534
3535 rtx_insn *
3536 emit_move_insn_1 (rtx x, rtx y)
3537 {
3538 machine_mode mode = GET_MODE (x);
3539 enum insn_code code;
3540
3541 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3542
3543 code = optab_handler (mov_optab, mode);
3544 if (code != CODE_FOR_nothing)
3545 return emit_insn (GEN_FCN (code) (x, y));
3546
3547 /* Expand complex moves by moving real part and imag part. */
3548 if (COMPLEX_MODE_P (mode))
3549 return emit_move_complex (mode, x, y);
3550
3551 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3552 || ALL_FIXED_POINT_MODE_P (mode))
3553 {
3554 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3555
3556 /* If we can't find an integer mode, use multi words. */
3557 if (result)
3558 return result;
3559 else
3560 return emit_move_multi_word (mode, x, y);
3561 }
3562
3563 if (GET_MODE_CLASS (mode) == MODE_CC)
3564 return emit_move_ccmode (mode, x, y);
3565
3566 /* Try using a move pattern for the corresponding integer mode. This is
3567 only safe when simplify_subreg can convert MODE constants into integer
3568 constants. At present, it can only do this reliably if the value
3569 fits within a HOST_WIDE_INT. */
3570 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3571 {
3572 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3573
3574 if (ret)
3575 {
3576 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3577 return ret;
3578 }
3579 }
3580
3581 return emit_move_multi_word (mode, x, y);
3582 }
3583
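/* As a rough illustration of the dispatch in emit_move_insn_1: a
   DCmode (complex double) copy with no direct mov pattern might be
   lowered into two DFmode moves of the real and imaginary parts,
   while a decimal-float or fixed-point copy is first retried in an
   integer mode of the same size and only then falls back to a
   word-by-word move.  Which branch is taken depends entirely on the
   target's mov optab entries; the trace below is a sketch, not a
   guaranteed expansion:

     emit_move_insn_1 (x, y)               x, y in DCmode
       -> emit_move_complex (DCmode, x, y)
            -> DFmode move of the real part
            -> DFmode move of the imaginary part  */
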
3584 /* Generate code to copy Y into X.
3585 Both Y and X must have the same mode, except that
3586 Y can be a constant with VOIDmode.
3587 This mode cannot be BLKmode; use emit_block_move for that.
3588
3589 Return the last instruction emitted. */
3590
3591 rtx_insn *
3592 emit_move_insn (rtx x, rtx y)
3593 {
3594 machine_mode mode = GET_MODE (x);
3595 rtx y_cst = NULL_RTX;
3596 rtx_insn *last_insn;
3597 rtx set;
3598
3599 gcc_assert (mode != BLKmode
3600 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3601
3602 if (CONSTANT_P (y))
3603 {
3604 if (optimize
3605 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3606 && (last_insn = compress_float_constant (x, y)))
3607 return last_insn;
3608
3609 y_cst = y;
3610
3611 if (!targetm.legitimate_constant_p (mode, y))
3612 {
3613 y = force_const_mem (mode, y);
3614
3615 /* If the target's cannot_force_const_mem prevented the spill,
3616 assume that the target's move expanders will also take care
3617 of the non-legitimate constant. */
3618 if (!y)
3619 y = y_cst;
3620 else
3621 y = use_anchored_address (y);
3622 }
3623 }
3624
3625 /* If X or Y are memory references, verify that their addresses are valid
3626 for the machine. */
3627 if (MEM_P (x)
3628 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3629 MEM_ADDR_SPACE (x))
3630 && ! push_operand (x, GET_MODE (x))))
3631 x = validize_mem (x);
3632
3633 if (MEM_P (y)
3634 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3635 MEM_ADDR_SPACE (y)))
3636 y = validize_mem (y);
3637
3638 gcc_assert (mode != BLKmode);
3639
3640 last_insn = emit_move_insn_1 (x, y);
3641
3642 if (y_cst && REG_P (x)
3643 && (set = single_set (last_insn)) != NULL_RTX
3644 && SET_DEST (set) == x
3645 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3646 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3647
3648 return last_insn;
3649 }
3650
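/* For instance, loading a floating-point constant that the target
   cannot use as an immediate is typically spilled to the constant
   pool above, and the final insn gets a REG_EQUAL note recording the
   original constant.  Schematically (target-dependent, illustrative
   only):

     (set (reg:DF 100) (mem/u/c:DF (symbol_ref ("*.LC0"))))
        REG_EQUAL: (const_double:DF 2.5)

   The note lets later passes such as CSE recover the constant value
   even though the SET_SRC is a memory reference.  */
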
3651 /* Generate the body of an instruction to copy Y into X.
3652 It may be a list of insns, if one insn isn't enough. */
3653
3654 rtx
3655 gen_move_insn (rtx x, rtx y)
3656 {
3657 rtx_insn *seq;
3658
3659 start_sequence ();
3660 emit_move_insn_1 (x, y);
3661 seq = get_insns ();
3662 end_sequence ();
3663 return seq;
3664 }
3665
3666 /* If Y is representable exactly in a narrower mode, and the target can
3667 perform the extension directly from constant or memory, then emit the
3668 move as an extension. */
3669
3670 static rtx_insn *
3671 compress_float_constant (rtx x, rtx y)
3672 {
3673 machine_mode dstmode = GET_MODE (x);
3674 machine_mode orig_srcmode = GET_MODE (y);
3675 machine_mode srcmode;
3676 REAL_VALUE_TYPE r;
3677 int oldcost, newcost;
3678 bool speed = optimize_insn_for_speed_p ();
3679
3680 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3681
3682 if (targetm.legitimate_constant_p (dstmode, y))
3683 oldcost = set_src_cost (y, speed);
3684 else
3685 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3686
3687 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3688 srcmode != orig_srcmode;
3689 srcmode = GET_MODE_WIDER_MODE (srcmode))
3690 {
3691 enum insn_code ic;
3692 rtx trunc_y;
3693 rtx_insn *last_insn;
3694
3695 /* Skip if the target can't extend this way. */
3696 ic = can_extend_p (dstmode, srcmode, 0);
3697 if (ic == CODE_FOR_nothing)
3698 continue;
3699
3700 /* Skip if the narrowed value isn't exact. */
3701 if (! exact_real_truncate (srcmode, &r))
3702 continue;
3703
3704 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3705
3706 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3707 {
3708 /* Skip if the target needs extra instructions to perform
3709 the extension. */
3710 if (!insn_operand_matches (ic, 1, trunc_y))
3711 continue;
3712 /* This is valid, but may not be cheaper than the original. */
3713 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3714 speed);
3715 if (oldcost < newcost)
3716 continue;
3717 }
3718 else if (float_extend_from_mem[dstmode][srcmode])
3719 {
3720 trunc_y = force_const_mem (srcmode, trunc_y);
3721 /* This is valid, but may not be cheaper than the original. */
3722 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3723 speed);
3724 if (oldcost < newcost)
3725 continue;
3726 trunc_y = validize_mem (trunc_y);
3727 }
3728 else
3729 continue;
3730
3731 /* For CSE's benefit, force the compressed constant pool entry
3732 into a new pseudo. This constant may be used in different modes,
3733 and if not, combine will put things back together for us. */
3734 trunc_y = force_reg (srcmode, trunc_y);
3735
3736 /* If x is a hard register, perform the extension into a pseudo,
3737 so that e.g. stack realignment code is aware of it. */
3738 rtx target = x;
3739 if (REG_P (x) && HARD_REGISTER_P (x))
3740 target = gen_reg_rtx (dstmode);
3741
3742 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3743 last_insn = get_last_insn ();
3744
3745 if (REG_P (target))
3746 set_unique_reg_note (last_insn, REG_EQUAL, y);
3747
3748 if (target != x)
3749 return emit_move_insn (x, target);
3750 return last_insn;
3751 }
3752
3753 return NULL;
3754 }
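/* Example: storing the DFmode constant 1.0 into a register on a
   target where the value is exactly representable in SFmode and a
   float_extend from memory is supported.  compress_float_constant may
   then emit, schematically,

     (set (reg:DF 100)
          (float_extend:DF (mem/u/c:SF (symbol_ref ("*.LC1")))))

   instead of loading a full DFmode constant-pool entry, halving the
   pool footprint for that constant.  Whether this happens depends on
   the can_extend_p check and the rtx cost comparison above; the RTL
   shown is only a sketch.  */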
3755 \f
3756 /* Pushing data onto the stack. */
3757
3758 /* Push a block of length SIZE (perhaps variable)
3759 and return an rtx to address the beginning of the block.
3760 The value may be virtual_outgoing_args_rtx.
3761
3762 EXTRA is the number of bytes of padding to push in addition to SIZE.
3763 BELOW nonzero means this padding comes at low addresses;
3764 otherwise, the padding comes at high addresses. */
3765
3766 rtx
3767 push_block (rtx size, int extra, int below)
3768 {
3769 rtx temp;
3770
3771 size = convert_modes (Pmode, ptr_mode, size, 1);
3772 if (CONSTANT_P (size))
3773 anti_adjust_stack (plus_constant (Pmode, size, extra));
3774 else if (REG_P (size) && extra == 0)
3775 anti_adjust_stack (size);
3776 else
3777 {
3778 temp = copy_to_mode_reg (Pmode, size);
3779 if (extra != 0)
3780 temp = expand_binop (Pmode, add_optab, temp,
3781 gen_int_mode (extra, Pmode),
3782 temp, 0, OPTAB_LIB_WIDEN);
3783 anti_adjust_stack (temp);
3784 }
3785
3786 #ifndef STACK_GROWS_DOWNWARD
3787 if (0)
3788 #else
3789 if (1)
3790 #endif
3791 {
3792 temp = virtual_outgoing_args_rtx;
3793 if (extra != 0 && below)
3794 temp = plus_constant (Pmode, temp, extra);
3795 }
3796 else
3797 {
3798 if (CONST_INT_P (size))
3799 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3800 -INTVAL (size) - (below ? 0 : extra));
3801 else if (extra != 0 && !below)
3802 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3803 negate_rtx (Pmode, plus_constant (Pmode, size,
3804 extra)));
3805 else
3806 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3807 negate_rtx (Pmode, size));
3808 }
3809
3810 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3811 }
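
/* For example, pushing a 64-byte block with EXTRA == 8 and BELOW
   nonzero on a downward-growing stack boils down to

     anti_adjust_stack (GEN_INT (72));

   with the returned address being virtual_outgoing_args_rtx plus 8,
   so the 8 bytes of padding end up at the low end of the newly
   allocated space.  The numbers are illustrative; the non-constant
   and upward-growing cases take the other branches above.  */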
3812
3813 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3814
3815 static rtx
3816 mem_autoinc_base (rtx mem)
3817 {
3818 if (MEM_P (mem))
3819 {
3820 rtx addr = XEXP (mem, 0);
3821 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3822 return XEXP (addr, 0);
3823 }
3824 return NULL;
3825 }
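
/* For example, (mem:SI (pre_dec:SI (reg:SI sp))) yields the stack
   pointer register, while a plain (mem:SI (reg:SI sp)) yields NULL
   because its address is not an auto-increment.  */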
3826
3827 /* A utility routine used here, in reload, and in try_split. The insns
3828 after PREV up to and including LAST are known to adjust the stack,
3829 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3830 placing notes as appropriate. PREV may be NULL, indicating the
3831 entire insn sequence prior to LAST should be scanned.
3832
3833 The set of allowed stack pointer modifications is small:
3834 (1) One or more auto-inc style memory references (aka pushes),
3835 (2) One or more addition/subtraction with the SP as destination,
3836 (3) A single move insn with the SP as destination,
3837 (4) A call_pop insn,
3838 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3839
3840 Insns in the sequence that do not modify the SP are ignored,
3841 except for noreturn calls.
3842
3843 The return value is the amount of adjustment that can be trivially
3844 verified, via immediate operand or auto-inc. If the adjustment
3845 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
3846
3847 HOST_WIDE_INT
3848 find_args_size_adjust (rtx_insn *insn)
3849 {
3850 rtx dest, set, pat;
3851 int i;
3852
3853 pat = PATTERN (insn);
3854 set = NULL;
3855
3856 /* Look for a call_pop pattern. */
3857 if (CALL_P (insn))
3858 {
3859 /* We have to allow non-call_pop patterns for the case
3860 of emit_single_push_insn of a TLS address. */
3861 if (GET_CODE (pat) != PARALLEL)
3862 return 0;
3863
3864 /* All call_pop have a stack pointer adjust in the parallel.
3865 The call itself is always first, and the stack adjust is
3866 usually last, so search from the end. */
3867 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3868 {
3869 set = XVECEXP (pat, 0, i);
3870 if (GET_CODE (set) != SET)
3871 continue;
3872 dest = SET_DEST (set);
3873 if (dest == stack_pointer_rtx)
3874 break;
3875 }
3876 /* We'd better have found the stack pointer adjust. */
3877 if (i == 0)
3878 return 0;
3879 /* Fall through to process the extracted SET and DEST
3880 as if it were a standalone insn. */
3881 }
3882 else if (GET_CODE (pat) == SET)
3883 set = pat;
3884 else if ((set = single_set (insn)) != NULL)
3885 ;
3886 else if (GET_CODE (pat) == PARALLEL)
3887 {
3888 /* ??? Some older ports use a parallel with a stack adjust
3889 and a store for a PUSH_ROUNDING pattern, rather than a
3890 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3891 /* ??? See h8300 and m68k, pushqi1. */
3892 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3893 {
3894 set = XVECEXP (pat, 0, i);
3895 if (GET_CODE (set) != SET)
3896 continue;
3897 dest = SET_DEST (set);
3898 if (dest == stack_pointer_rtx)
3899 break;
3900
3901 /* We do not expect an auto-inc of the sp in the parallel. */
3902 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3903 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3904 != stack_pointer_rtx);
3905 }
3906 if (i < 0)
3907 return 0;
3908 }
3909 else
3910 return 0;
3911
3912 dest = SET_DEST (set);
3913
3914 /* Look for direct modifications of the stack pointer. */
3915 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3916 {
3917 /* Look for a trivial adjustment, otherwise assume nothing. */
3918 /* Note that the SPU restore_stack_block pattern refers to
3919 the stack pointer in V4SImode. Consider that non-trivial. */
3920 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3921 && GET_CODE (SET_SRC (set)) == PLUS
3922 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3923 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3924 return INTVAL (XEXP (SET_SRC (set), 1));
3925 /* ??? Reload can generate no-op moves, which will be cleaned
3926 up later. Recognize it and continue searching. */
3927 else if (rtx_equal_p (dest, SET_SRC (set)))
3928 return 0;
3929 else
3930 return HOST_WIDE_INT_MIN;
3931 }
3932 else
3933 {
3934 rtx mem, addr;
3935
3936 /* Otherwise only think about autoinc patterns. */
3937 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3938 {
3939 mem = dest;
3940 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3941 != stack_pointer_rtx);
3942 }
3943 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3944 mem = SET_SRC (set);
3945 else
3946 return 0;
3947
3948 addr = XEXP (mem, 0);
3949 switch (GET_CODE (addr))
3950 {
3951 case PRE_INC:
3952 case POST_INC:
3953 return GET_MODE_SIZE (GET_MODE (mem));
3954 case PRE_DEC:
3955 case POST_DEC:
3956 return -GET_MODE_SIZE (GET_MODE (mem));
3957 case PRE_MODIFY:
3958 case POST_MODIFY:
3959 addr = XEXP (addr, 1);
3960 gcc_assert (GET_CODE (addr) == PLUS);
3961 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3962 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3963 return INTVAL (XEXP (addr, 1));
3964 default:
3965 gcc_unreachable ();
3966 }
3967 }
3968 }
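
/* Two illustrative cases (modes and sizes are arbitrary):

     (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 100))
       => -GET_MODE_SIZE (SImode), i.e. -4 on a typical 32-bit target

     (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -16)))
       => -16

   A stack pointer adjustment whose amount is not a constant, e.g.
   adding a register to sp, yields HOST_WIDE_INT_MIN instead.  */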
3969
3970 int
3971 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3972 {
3973 int args_size = end_args_size;
3974 bool saw_unknown = false;
3975 rtx_insn *insn;
3976
3977 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3978 {
3979 HOST_WIDE_INT this_delta;
3980
3981 if (!NONDEBUG_INSN_P (insn))
3982 continue;
3983
3984 this_delta = find_args_size_adjust (insn);
3985 if (this_delta == 0)
3986 {
3987 if (!CALL_P (insn)
3988 || ACCUMULATE_OUTGOING_ARGS
3989 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3990 continue;
3991 }
3992
3993 gcc_assert (!saw_unknown);
3994 if (this_delta == HOST_WIDE_INT_MIN)
3995 saw_unknown = true;
3996
3997 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3998 #ifdef STACK_GROWS_DOWNWARD
3999 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
4000 #endif
4001 args_size -= this_delta;
4002 }
4003
4004 return saw_unknown ? INT_MIN : args_size;
4005 }
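
/* Sketch of the walk above: given three SImode pushes ending with
   END_ARGS_SIZE == 12 on a downward-growing stack, the loop annotates
   the insns from last to first with REG_ARGS_SIZE notes of 12, 8 and
   4, subtracting each insn's own adjustment as it walks backward.
   The numbers are illustrative only.  */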
4006
4007 #ifdef PUSH_ROUNDING
4008 /* Emit single push insn. */
4009
4010 static void
4011 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4012 {
4013 rtx dest_addr;
4014 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4015 rtx dest;
4016 enum insn_code icode;
4017
4018 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4019 /* If there is a push pattern, use it. Otherwise fall back to the old way
4020 of handing a MEM representing the push operation to the move expander. */
4021 icode = optab_handler (push_optab, mode);
4022 if (icode != CODE_FOR_nothing)
4023 {
4024 struct expand_operand ops[1];
4025
4026 create_input_operand (&ops[0], x, mode);
4027 if (maybe_expand_insn (icode, 1, ops))
4028 return;
4029 }
4030 if (GET_MODE_SIZE (mode) == rounded_size)
4031 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4032 /* If we are to pad downward, adjust the stack pointer first and
4033 then store X into the stack location using an offset. This is
4034 because emit_move_insn does not know how to pad; it does not have
4035 access to the type. */
4036 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4037 {
4038 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4039 HOST_WIDE_INT offset;
4040
4041 emit_move_insn (stack_pointer_rtx,
4042 expand_binop (Pmode,
4043 #ifdef STACK_GROWS_DOWNWARD
4044 sub_optab,
4045 #else
4046 add_optab,
4047 #endif
4048 stack_pointer_rtx,
4049 gen_int_mode (rounded_size, Pmode),
4050 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4051
4052 offset = (HOST_WIDE_INT) padding_size;
4053 #ifdef STACK_GROWS_DOWNWARD
4054 if (STACK_PUSH_CODE == POST_DEC)
4055 /* We have already decremented the stack pointer, so get the
4056 previous value. */
4057 offset += (HOST_WIDE_INT) rounded_size;
4058 #else
4059 if (STACK_PUSH_CODE == POST_INC)
4060 /* We have already incremented the stack pointer, so get the
4061 previous value. */
4062 offset -= (HOST_WIDE_INT) rounded_size;
4063 #endif
4064 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4065 gen_int_mode (offset, Pmode));
4066 }
4067 else
4068 {
4069 #ifdef STACK_GROWS_DOWNWARD
4070 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4071 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4072 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4073 Pmode));
4074 #else
4075 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4076 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4077 gen_int_mode (rounded_size, Pmode));
4078 #endif
4079 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4080 }
4081
4082 dest = gen_rtx_MEM (mode, dest_addr);
4083
4084 if (type != 0)
4085 {
4086 set_mem_attributes (dest, type, 1);
4087
4088 if (cfun->tail_call_marked)
4089 /* Function incoming arguments may overlap with sibling call
4090 outgoing arguments and we cannot allow reordering of reads
4091 from function arguments with stores to outgoing arguments
4092 of sibling calls. */
4093 set_mem_alias_set (dest, 0);
4094 }
4095 emit_move_insn (dest, x);
4096 }
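
/* The address built above takes one of a few schematic forms; for a
   downward-growing stack with no push pattern they would look roughly
   like

     exact-size SImode push:  (mem:SI (pre_dec:SI (reg:SI sp)))
     QImode, pad downward:    explicit sp adjustment by 4, then
                              (mem:QI (plus:SI (reg:SI sp) (const_int 3)))
     QImode, pad upward:      (mem:QI (pre_modify:SI (reg:SI sp)
                                        (plus:SI (reg:SI sp)
                                                 (const_int -4))))

   Modes and sizes here are made up for illustration; the real values
   come from Pmode, PUSH_ROUNDING and FUNCTION_ARG_PADDING.  */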
4097
4098 /* Emit and annotate a single push insn. */
4099
4100 static void
4101 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4102 {
4103 int delta, old_delta = stack_pointer_delta;
4104 rtx_insn *prev = get_last_insn ();
4105 rtx_insn *last;
4106
4107 emit_single_push_insn_1 (mode, x, type);
4108
4109 last = get_last_insn ();
4110
4111 /* Notice the common case where we emitted exactly one insn. */
4112 if (PREV_INSN (last) == prev)
4113 {
4114 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4115 return;
4116 }
4117
4118 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4119 gcc_assert (delta == INT_MIN || delta == old_delta);
4120 }
4121 #endif
4122
4123 /* Generate code to push X onto the stack, assuming it has mode MODE and
4124 type TYPE.
4125 MODE is redundant except when X is a CONST_INT (since they don't
4126 carry mode info).
4127 SIZE is an rtx for the size of data to be copied (in bytes),
4128 needed only if X is BLKmode.
4129
4130 ALIGN (in bits) is maximum alignment we can assume.
4131
4132 If PARTIAL and REG are both nonzero, then copy that many of the first
4133 bytes of X into registers starting with REG, and push the rest of X.
4134 The amount of space pushed is decreased by PARTIAL bytes.
4135 REG must be a hard register in this case.
4136 If REG is zero but PARTIAL is not, take all other actions for an
4137 argument partially in registers, but do not actually load any
4138 registers.
4139
4140 EXTRA is the amount in bytes of extra space to leave next to this arg.
4141 This is ignored if an argument block has already been allocated.
4142
4143 On a machine that lacks real push insns, ARGS_ADDR is the address of
4144 the bottom of the argument block for this call. We use indexing off there
4145 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4146 argument block has not been preallocated.
4147
4148 ARGS_SO_FAR is the size of args previously pushed for this call.
4149
4150 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4151 for arguments passed in registers. If nonzero, it will be the number
4152 of bytes required. */
4153
4154 void
4155 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4156 unsigned int align, int partial, rtx reg, int extra,
4157 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4158 rtx alignment_pad)
4159 {
4160 rtx xinner;
4161 enum direction stack_direction
4162 #ifdef STACK_GROWS_DOWNWARD
4163 = downward;
4164 #else
4165 = upward;
4166 #endif
4167
4168 /* Decide where to pad the argument: `downward' for below,
4169 `upward' for above, or `none' for don't pad it.
4170 Default is below for small data on big-endian machines; else above. */
4171 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4172
4173 /* Invert direction if stack is post-decrement.
4174 FIXME: why? */
4175 if (STACK_PUSH_CODE == POST_DEC)
4176 if (where_pad != none)
4177 where_pad = (where_pad == downward ? upward : downward);
4178
4179 xinner = x;
4180
4181 if (mode == BLKmode
4182 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4183 {
4184 /* Copy a block into the stack, entirely or partially. */
4185
4186 rtx temp;
4187 int used;
4188 int offset;
4189 int skip;
4190
4191 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4192 used = partial - offset;
4193
4194 if (mode != BLKmode)
4195 {
4196 /* A value is to be stored in an insufficiently aligned
4197 stack slot; copy via a suitably aligned slot if
4198 necessary. */
4199 size = GEN_INT (GET_MODE_SIZE (mode));
4200 if (!MEM_P (xinner))
4201 {
4202 temp = assign_temp (type, 1, 1);
4203 emit_move_insn (temp, xinner);
4204 xinner = temp;
4205 }
4206 }
4207
4208 gcc_assert (size);
4209
4210 /* USED is now the # of bytes we need not copy to the stack
4211 because registers will take care of them. */
4212
4213 if (partial != 0)
4214 xinner = adjust_address (xinner, BLKmode, used);
4215
4216 /* If the partial register-part of the arg counts in its stack size,
4217 skip the part of stack space corresponding to the registers.
4218 Otherwise, start copying to the beginning of the stack space,
4219 by setting SKIP to 0. */
4220 skip = (reg_parm_stack_space == 0) ? 0 : used;
4221
4222 #ifdef PUSH_ROUNDING
4223 /* Do it with several push insns if that doesn't take lots of insns
4224 and if there is no difficulty with push insns that skip bytes
4225 on the stack for alignment purposes. */
4226 if (args_addr == 0
4227 && PUSH_ARGS
4228 && CONST_INT_P (size)
4229 && skip == 0
4230 && MEM_ALIGN (xinner) >= align
4231 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4232 /* Here we avoid the case of a structure whose weak alignment
4233 forces many pushes of a small amount of data,
4234 and such small pushes do rounding that causes trouble. */
4235 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4236 || align >= BIGGEST_ALIGNMENT
4237 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4238 == (align / BITS_PER_UNIT)))
4239 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4240 {
4241 /* Push padding now if padding above and stack grows down,
4242 or if padding below and stack grows up.
4243 But if space already allocated, this has already been done. */
4244 if (extra && args_addr == 0
4245 && where_pad != none && where_pad != stack_direction)
4246 anti_adjust_stack (GEN_INT (extra));
4247
4248 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4249 }
4250 else
4251 #endif /* PUSH_ROUNDING */
4252 {
4253 rtx target;
4254
4255 /* Otherwise make space on the stack and copy the data
4256 to the address of that space. */
4257
4258 /* Deduct words put into registers from the size we must copy. */
4259 if (partial != 0)
4260 {
4261 if (CONST_INT_P (size))
4262 size = GEN_INT (INTVAL (size) - used);
4263 else
4264 size = expand_binop (GET_MODE (size), sub_optab, size,
4265 gen_int_mode (used, GET_MODE (size)),
4266 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4267 }
4268
4269 /* Get the address of the stack space.
4270 In this case, we do not deal with EXTRA separately.
4271 A single stack adjust will do. */
4272 if (! args_addr)
4273 {
4274 temp = push_block (size, extra, where_pad == downward);
4275 extra = 0;
4276 }
4277 else if (CONST_INT_P (args_so_far))
4278 temp = memory_address (BLKmode,
4279 plus_constant (Pmode, args_addr,
4280 skip + INTVAL (args_so_far)));
4281 else
4282 temp = memory_address (BLKmode,
4283 plus_constant (Pmode,
4284 gen_rtx_PLUS (Pmode,
4285 args_addr,
4286 args_so_far),
4287 skip));
4288
4289 if (!ACCUMULATE_OUTGOING_ARGS)
4290 {
4291 /* If the source is referenced relative to the stack pointer,
4292 copy it to another register to stabilize it. We do not need
4293 to do this if we know that we won't be changing sp. */
4294
4295 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4296 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4297 temp = copy_to_reg (temp);
4298 }
4299
4300 target = gen_rtx_MEM (BLKmode, temp);
4301
4302 /* We do *not* set_mem_attributes here, because incoming arguments
4303 may overlap with sibling call outgoing arguments and we cannot
4304 allow reordering of reads from function arguments with stores
4305 to outgoing arguments of sibling calls. We do, however, want
4306 to record the alignment of the stack slot. */
4307 /* ALIGN may well be better aligned than TYPE, e.g. due to
4308 PARM_BOUNDARY. Assume the caller isn't lying. */
4309 set_mem_align (target, align);
4310
4311 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4312 }
4313 }
4314 else if (partial > 0)
4315 {
4316 /* Scalar partly in registers. */
4317
4318 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4319 int i;
4320 int not_stack;
4321 /* # bytes of start of argument
4322 that we must make space for but need not store. */
4323 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4324 int args_offset = INTVAL (args_so_far);
4325 int skip;
4326
4327 /* Push padding now if padding above and stack grows down,
4328 or if padding below and stack grows up.
4329 But if space already allocated, this has already been done. */
4330 if (extra && args_addr == 0
4331 && where_pad != none && where_pad != stack_direction)
4332 anti_adjust_stack (GEN_INT (extra));
4333
4334 /* If we make space by pushing it, we might as well push
4335 the real data. Otherwise, we can leave OFFSET nonzero
4336 and leave the space uninitialized. */
4337 if (args_addr == 0)
4338 offset = 0;
4339
4340 /* Now NOT_STACK gets the number of words that we don't need to
4341 allocate on the stack. Convert OFFSET to words too. */
4342 not_stack = (partial - offset) / UNITS_PER_WORD;
4343 offset /= UNITS_PER_WORD;
4344
4345 /* If the partial register-part of the arg counts in its stack size,
4346 skip the part of stack space corresponding to the registers.
4347 Otherwise, start copying to the beginning of the stack space,
4348 by setting SKIP to 0. */
4349 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4350
4351 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4352 x = validize_mem (force_const_mem (mode, x));
4353
4354 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4355 SUBREGs of such registers are not allowed. */
4356 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4357 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4358 x = copy_to_reg (x);
4359
4360 /* Loop over all the words allocated on the stack for this arg. */
4361 /* We can do it by words, because any scalar bigger than a word
4362 has a size that is a multiple of a word. */
4363 for (i = size - 1; i >= not_stack; i--)
4364 if (i >= not_stack + offset)
4365 emit_push_insn (operand_subword_force (x, i, mode),
4366 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4367 0, args_addr,
4368 GEN_INT (args_offset + ((i - not_stack + skip)
4369 * UNITS_PER_WORD)),
4370 reg_parm_stack_space, alignment_pad);
4371 }
4372 else
4373 {
4374 rtx addr;
4375 rtx dest;
4376
4377 /* Push padding now if padding above and stack grows down,
4378 or if padding below and stack grows up.
4379 But if space already allocated, this has already been done. */
4380 if (extra && args_addr == 0
4381 && where_pad != none && where_pad != stack_direction)
4382 anti_adjust_stack (GEN_INT (extra));
4383
4384 #ifdef PUSH_ROUNDING
4385 if (args_addr == 0 && PUSH_ARGS)
4386 emit_single_push_insn (mode, x, type);
4387 else
4388 #endif
4389 {
4390 if (CONST_INT_P (args_so_far))
4391 addr
4392 = memory_address (mode,
4393 plus_constant (Pmode, args_addr,
4394 INTVAL (args_so_far)));
4395 else
4396 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4397 args_so_far));
4398 dest = gen_rtx_MEM (mode, addr);
4399
4400 /* We do *not* set_mem_attributes here, because incoming arguments
4401 may overlap with sibling call outgoing arguments and we cannot
4402 allow reordering of reads from function arguments with stores
4403 to outgoing arguments of sibling calls. We do, however, want
4404 to record the alignment of the stack slot. */
4405 /* ALIGN may well be better aligned than TYPE, e.g. due to
4406 PARM_BOUNDARY. Assume the caller isn't lying. */
4407 set_mem_align (dest, align);
4408
4409 emit_move_insn (dest, x);
4410 }
4411 }
4412
4413 /* If part should go in registers, copy that part
4414 into the appropriate registers. Do this now, at the end,
4415 since mem-to-mem copies above may do function calls. */
4416 if (partial > 0 && reg != 0)
4417 {
4418 /* Handle calls that pass values in multiple non-contiguous locations.
4419 The Irix 6 ABI has examples of this. */
4420 if (GET_CODE (reg) == PARALLEL)
4421 emit_group_load (reg, x, type, -1);
4422 else
4423 {
4424 gcc_assert (partial % UNITS_PER_WORD == 0);
4425 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4426 }
4427 }
4428
4429 if (extra && args_addr == 0 && where_pad == stack_direction)
4430 anti_adjust_stack (GEN_INT (extra));
4431
4432 if (alignment_pad && args_addr == 0)
4433 anti_adjust_stack (alignment_pad);
4434 }
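
/* Worked example for the "scalar partly in registers" branch above:
   pushing a 16-byte scalar with PARTIAL == 8 on a target with
   UNITS_PER_WORD == 8 gives SIZE == 2 words and NOT_STACK == 1
   (assuming OFFSET == 0), so only word 1 is pushed onto the stack
   here, while word 0 is copied into REG by move_block_to_reg at the
   end of the function.  The numbers are purely illustrative.  */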
4435 \f
4436 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4437 operations. */
4438
4439 static rtx
4440 get_subtarget (rtx x)
4441 {
4442 return (optimize
4443 || x == 0
4444 /* Only registers can be subtargets. */
4445 || !REG_P (x)
4446 /* Don't use hard regs to avoid extending their life. */
4447 || REGNO (x) < FIRST_PSEUDO_REGISTER
4448 ? 0 : x);
4449 }
4450
4451 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4452 FIELD is a bitfield. Returns true if the optimization was successful,
4453 and there's nothing else to do. */
4454
4455 static bool
4456 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4457 unsigned HOST_WIDE_INT bitpos,
4458 unsigned HOST_WIDE_INT bitregion_start,
4459 unsigned HOST_WIDE_INT bitregion_end,
4460 machine_mode mode1, rtx str_rtx,
4461 tree to, tree src)
4462 {
4463 machine_mode str_mode = GET_MODE (str_rtx);
4464 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4465 tree op0, op1;
4466 rtx value, result;
4467 optab binop;
4468 gimple srcstmt;
4469 enum tree_code code;
4470
4471 if (mode1 != VOIDmode
4472 || bitsize >= BITS_PER_WORD
4473 || str_bitsize > BITS_PER_WORD
4474 || TREE_SIDE_EFFECTS (to)
4475 || TREE_THIS_VOLATILE (to))
4476 return false;
4477
4478 STRIP_NOPS (src);
4479 if (TREE_CODE (src) != SSA_NAME)
4480 return false;
4481 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4482 return false;
4483
4484 srcstmt = get_gimple_for_ssa_name (src);
4485 if (!srcstmt
4486 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4487 return false;
4488
4489 code = gimple_assign_rhs_code (srcstmt);
4490
4491 op0 = gimple_assign_rhs1 (srcstmt);
4492
4493 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4494 to find its initialization. Hopefully the initialization will
4495 be from a bitfield load. */
4496 if (TREE_CODE (op0) == SSA_NAME)
4497 {
4498 gimple op0stmt = get_gimple_for_ssa_name (op0);
4499
4500 /* We want to eventually have OP0 be the same as TO, which
4501 should be a bitfield. */
4502 if (!op0stmt
4503 || !is_gimple_assign (op0stmt)
4504 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4505 return false;
4506 op0 = gimple_assign_rhs1 (op0stmt);
4507 }
4508
4509 op1 = gimple_assign_rhs2 (srcstmt);
4510
4511 if (!operand_equal_p (to, op0, 0))
4512 return false;
4513
4514 if (MEM_P (str_rtx))
4515 {
4516 unsigned HOST_WIDE_INT offset1;
4517
4518 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4519 str_mode = word_mode;
4520 str_mode = get_best_mode (bitsize, bitpos,
4521 bitregion_start, bitregion_end,
4522 MEM_ALIGN (str_rtx), str_mode, 0);
4523 if (str_mode == VOIDmode)
4524 return false;
4525 str_bitsize = GET_MODE_BITSIZE (str_mode);
4526
4527 offset1 = bitpos;
4528 bitpos %= str_bitsize;
4529 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4530 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4531 }
4532 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4533 return false;
4534
4535 /* If the bit field covers the whole REG/MEM, store_field
4536 will likely generate better code. */
4537 if (bitsize >= str_bitsize)
4538 return false;
4539
4540 /* We can't handle fields split across multiple entities. */
4541 if (bitpos + bitsize > str_bitsize)
4542 return false;
4543
4544 if (BYTES_BIG_ENDIAN)
4545 bitpos = str_bitsize - bitpos - bitsize;
4546
4547 switch (code)
4548 {
4549 case PLUS_EXPR:
4550 case MINUS_EXPR:
4551 /* For now, just optimize the case of the topmost bitfield
4552 where we don't need to do any masking and also
4553 1-bit bitfields where xor can be used.
4554 We might win by one instruction for the other bitfields
4555 too if insv/extv instructions aren't used, so that
4556 can be added later. */
4557 if (bitpos + bitsize != str_bitsize
4558 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4559 break;
4560
4561 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4562 value = convert_modes (str_mode,
4563 TYPE_MODE (TREE_TYPE (op1)), value,
4564 TYPE_UNSIGNED (TREE_TYPE (op1)));
4565
4566 /* We may be accessing data outside the field, which means
4567 we can alias adjacent data. */
4568 if (MEM_P (str_rtx))
4569 {
4570 str_rtx = shallow_copy_rtx (str_rtx);
4571 set_mem_alias_set (str_rtx, 0);
4572 set_mem_expr (str_rtx, 0);
4573 }
4574
4575 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4576 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4577 {
4578 value = expand_and (str_mode, value, const1_rtx, NULL);
4579 binop = xor_optab;
4580 }
4581 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4582 result = expand_binop (str_mode, binop, str_rtx,
4583 value, str_rtx, 1, OPTAB_WIDEN);
4584 if (result != str_rtx)
4585 emit_move_insn (str_rtx, result);
4586 return true;
4587
4588 case BIT_IOR_EXPR:
4589 case BIT_XOR_EXPR:
4590 if (TREE_CODE (op1) != INTEGER_CST)
4591 break;
4592 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4593 value = convert_modes (str_mode,
4594 TYPE_MODE (TREE_TYPE (op1)), value,
4595 TYPE_UNSIGNED (TREE_TYPE (op1)));
4596
4597 /* We may be accessing data outside the field, which means
4598 we can alias adjacent data. */
4599 if (MEM_P (str_rtx))
4600 {
4601 str_rtx = shallow_copy_rtx (str_rtx);
4602 set_mem_alias_set (str_rtx, 0);
4603 set_mem_expr (str_rtx, 0);
4604 }
4605
4606 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4607 if (bitpos + bitsize != str_bitsize)
4608 {
4609 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4610 str_mode);
4611 value = expand_and (str_mode, value, mask, NULL_RTX);
4612 }
4613 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4614 result = expand_binop (str_mode, binop, str_rtx,
4615 value, str_rtx, 1, OPTAB_WIDEN);
4616 if (result != str_rtx)
4617 emit_move_insn (str_rtx, result);
4618 return true;
4619
4620 default:
4621 break;
4622 }
4623
4624 return false;
4625 }
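
/* A case the PLUS_EXPR/MINUS_EXPR arm above is meant to catch: for

     struct s { unsigned int pad : 24; unsigned int top : 8; } *p;
     p->top += 1;

   the field occupies the topmost bits of its 32-bit word (after the
   BYTES_BIG_ENDIAN adjustment), so the addition can be applied
   directly to the containing word with a shifted constant, with no
   extract/mask/insert sequence.  Likewise a 1-bit field incremented
   or xor-ed with a constant becomes a single xor of the shifted bit.
   The layout is target-dependent; this is only an illustration.  */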
4626
4627 /* In the C++ memory model, consecutive bit fields in a structure are
4628 considered one memory location.
4629
4630 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4631 returns the bit range of consecutive bits in which this COMPONENT_REF
4632 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4633 and *OFFSET may be adjusted in the process.
4634
4635 If the access does not need to be restricted, 0 is returned in both
4636 *BITSTART and *BITEND. */
4637
4638 static void
4639 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4640 unsigned HOST_WIDE_INT *bitend,
4641 tree exp,
4642 HOST_WIDE_INT *bitpos,
4643 tree *offset)
4644 {
4645 HOST_WIDE_INT bitoffset;
4646 tree field, repr;
4647
4648 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4649
4650 field = TREE_OPERAND (exp, 1);
4651 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4652 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4653 need to limit the range we can access. */
4654 if (!repr)
4655 {
4656 *bitstart = *bitend = 0;
4657 return;
4658 }
4659
4660 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4661 part of a larger bit field, then the representative does not serve any
4662 useful purpose. This can occur in Ada. */
4663 if (handled_component_p (TREE_OPERAND (exp, 0)))
4664 {
4665 machine_mode rmode;
4666 HOST_WIDE_INT rbitsize, rbitpos;
4667 tree roffset;
4668 int unsignedp;
4669 int volatilep = 0;
4670 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4671 &roffset, &rmode, &unsignedp, &volatilep, false);
4672 if ((rbitpos % BITS_PER_UNIT) != 0)
4673 {
4674 *bitstart = *bitend = 0;
4675 return;
4676 }
4677 }
4678
4679 /* Compute the adjustment to bitpos from the offset of the field
4680 relative to the representative. DECL_FIELD_OFFSET of field and
4681 repr are the same by construction if they are not constants,
4682 see finish_bitfield_layout. */
4683 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4684 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4685 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4686 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4687 else
4688 bitoffset = 0;
4689 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4690 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4691
4692 /* If the adjustment is larger than bitpos, we would have a negative bit
4693 position for the lower bound and this may wreak havoc later. Adjust
4694 offset and bitpos to make the lower bound non-negative in that case. */
4695 if (bitoffset > *bitpos)
4696 {
4697 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4698 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4699
4700 *bitpos += adjust;
4701 if (*offset == NULL_TREE)
4702 *offset = size_int (-adjust / BITS_PER_UNIT);
4703 else
4704 *offset
4705 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4706 *bitstart = 0;
4707 }
4708 else
4709 *bitstart = *bitpos - bitoffset;
4710
4711 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4712 }
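
/* Illustration of the C++ memory model rule above: given

     struct s { char c; int a : 7; int b : 9; char d; };

   the two bit fields A and B share one DECL_BIT_FIELD_REPRESENTATIVE,
   so a store to either of them gets a bit range spanning both fields;
   the store may read-modify-write within that range but must not
   touch the bytes holding C or D.  The precise bit numbers depend on
   the ABI's struct layout, so none are given here.  */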
4713
4714 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4715 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4716 DECL_RTL was not set yet, return NORTL. */
4717
4718 static inline bool
4719 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4720 {
4721 if (TREE_CODE (addr) != ADDR_EXPR)
4722 return false;
4723
4724 tree base = TREE_OPERAND (addr, 0);
4725
4726 if (!DECL_P (base)
4727 || TREE_ADDRESSABLE (base)
4728 || DECL_MODE (base) == BLKmode)
4729 return false;
4730
4731 if (!DECL_RTL_SET_P (base))
4732 return nortl;
4733
4734 return (!MEM_P (DECL_RTL (base)));
4735 }
4736
4737 /* Returns true if the MEM_REF REF refers to an object that does not
4738 reside in memory and has non-BLKmode. */
4739
4740 static inline bool
4741 mem_ref_refers_to_non_mem_p (tree ref)
4742 {
4743 tree base = TREE_OPERAND (ref, 0);
4744 return addr_expr_of_non_mem_decl_p_1 (base, false);
4745 }
4746
4747 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4748 is true, try generating a nontemporal store. */
4749
4750 void
4751 expand_assignment (tree to, tree from, bool nontemporal)
4752 {
4753 rtx to_rtx = 0;
4754 rtx result;
4755 machine_mode mode;
4756 unsigned int align;
4757 enum insn_code icode;
4758
4759 /* Don't crash if the lhs of the assignment was erroneous. */
4760 if (TREE_CODE (to) == ERROR_MARK)
4761 {
4762 expand_normal (from);
4763 return;
4764 }
4765
4766 /* Optimize away no-op moves without side-effects. */
4767 if (operand_equal_p (to, from, 0))
4768 return;
4769
4770 /* Handle misaligned stores. */
4771 mode = TYPE_MODE (TREE_TYPE (to));
4772 if ((TREE_CODE (to) == MEM_REF
4773 || TREE_CODE (to) == TARGET_MEM_REF)
4774 && mode != BLKmode
4775 && !mem_ref_refers_to_non_mem_p (to)
4776 && ((align = get_object_alignment (to))
4777 < GET_MODE_ALIGNMENT (mode))
4778 && (((icode = optab_handler (movmisalign_optab, mode))
4779 != CODE_FOR_nothing)
4780 || SLOW_UNALIGNED_ACCESS (mode, align)))
4781 {
4782 rtx reg, mem;
4783
4784 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4785 reg = force_not_mem (reg);
4786 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4787
4788 if (icode != CODE_FOR_nothing)
4789 {
4790 struct expand_operand ops[2];
4791
4792 create_fixed_operand (&ops[0], mem);
4793 create_input_operand (&ops[1], reg, mode);
4794 /* The movmisalign<mode> pattern cannot fail, else the assignment
4795 would silently be omitted. */
4796 expand_insn (icode, 2, ops);
4797 }
4798 else
4799 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4800 return;
4801 }
4802
4803 /* Assignment of a structure component needs special treatment
4804 if the structure component's rtx is not simply a MEM.
4805 Assignment of an array element at a constant index, and assignment of
4806 an array element in an unaligned packed structure field, has the same
4807 problem. Same for (partially) storing into a non-memory object. */
4808 if (handled_component_p (to)
4809 || (TREE_CODE (to) == MEM_REF
4810 && mem_ref_refers_to_non_mem_p (to))
4811 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4812 {
4813 machine_mode mode1;
4814 HOST_WIDE_INT bitsize, bitpos;
4815 unsigned HOST_WIDE_INT bitregion_start = 0;
4816 unsigned HOST_WIDE_INT bitregion_end = 0;
4817 tree offset;
4818 int unsignedp;
4819 int volatilep = 0;
4820 tree tem;
4821
4822 push_temp_slots ();
4823 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4824 &unsignedp, &volatilep, true);
4825
4826 /* Make sure bitpos is not negative, it can wreak havoc later. */
4827 if (bitpos < 0)
4828 {
4829 gcc_assert (offset == NULL_TREE);
4830 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4831 ? 3 : exact_log2 (BITS_PER_UNIT)));
4832 bitpos &= BITS_PER_UNIT - 1;
4833 }
4834
4835 if (TREE_CODE (to) == COMPONENT_REF
4836 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4837 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4838 /* The C++ memory model naturally applies to byte-aligned fields.
4839 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4840 BITSIZE are not byte-aligned, there is no need to limit the range
4841 we can access. This can occur with packed structures in Ada. */
4842 else if (bitsize > 0
4843 && bitsize % BITS_PER_UNIT == 0
4844 && bitpos % BITS_PER_UNIT == 0)
4845 {
4846 bitregion_start = bitpos;
4847 bitregion_end = bitpos + bitsize - 1;
4848 }
4849
4850 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4851
4852 /* If the field has a mode, we want to access it in the
4853 field's mode, not the computed mode.
4854 If a MEM has VOIDmode (external with incomplete type),
4855 use BLKmode for it instead. */
4856 if (MEM_P (to_rtx))
4857 {
4858 if (mode1 != VOIDmode)
4859 to_rtx = adjust_address (to_rtx, mode1, 0);
4860 else if (GET_MODE (to_rtx) == VOIDmode)
4861 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4862 }
4863
4864 if (offset != 0)
4865 {
4866 machine_mode address_mode;
4867 rtx offset_rtx;
4868
4869 if (!MEM_P (to_rtx))
4870 {
4871 /* We can get constant negative offsets into arrays with broken
4872 user code. Translate this to a trap instead of ICEing. */
4873 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4874 expand_builtin_trap ();
4875 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4876 }
4877
4878 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4879 address_mode = get_address_mode (to_rtx);
4880 if (GET_MODE (offset_rtx) != address_mode)
4881 {
4882 /* We cannot be sure that the RTL in offset_rtx is valid outside
4883 of a memory address context, so force it into a register
4884 before attempting to convert it to the desired mode. */
4885 offset_rtx = force_operand (offset_rtx, NULL_RTX);
4886 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4887 }
4888
4889 /* If we have an expression in OFFSET_RTX and a non-zero
4890 byte offset in BITPOS, adding the byte offset before the
4891 OFFSET_RTX results in better intermediate code, which makes
4892 later rtl optimization passes perform better.
4893
4894 We prefer intermediate code like this:
4895
4896 r124:DI=r123:DI+0x18
4897 [r124:DI]=r121:DI
4898
4899 ... instead of ...
4900
4901 r124:DI=r123:DI+0x10
4902 [r124:DI+0x8]=r121:DI
4903
4904 This is only done for aligned data values, as these can
4905 be expected to result in single move instructions. */
4906 if (mode1 != VOIDmode
4907 && bitpos != 0
4908 && bitsize > 0
4909 && (bitpos % bitsize) == 0
4910 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4911 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4912 {
4913 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4914 bitregion_start = 0;
4915 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4916 bitregion_end -= bitpos;
4917 bitpos = 0;
4918 }
4919
4920 to_rtx = offset_address (to_rtx, offset_rtx,
4921 highest_pow2_factor_for_target (to,
4922 offset));
4923 }
4924
4925 /* No action is needed if the target is not a memory and the field
4926 lies completely outside that target. This can occur if the source
4927 code contains an out-of-bounds access to a small array. */
4928 if (!MEM_P (to_rtx)
4929 && GET_MODE (to_rtx) != BLKmode
4930 && (unsigned HOST_WIDE_INT) bitpos
4931 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4932 {
4933 expand_normal (from);
4934 result = NULL;
4935 }
4936 /* Handle expand_expr of a complex value returning a CONCAT. */
4937 else if (GET_CODE (to_rtx) == CONCAT)
4938 {
4939 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4940 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4941 && bitpos == 0
4942 && bitsize == mode_bitsize)
4943 result = store_expr (from, to_rtx, false, nontemporal);
4944 else if (bitsize == mode_bitsize / 2
4945 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4946 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4947 nontemporal);
4948 else if (bitpos + bitsize <= mode_bitsize / 2)
4949 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4950 bitregion_start, bitregion_end,
4951 mode1, from,
4952 get_alias_set (to), nontemporal);
4953 else if (bitpos >= mode_bitsize / 2)
4954 result = store_field (XEXP (to_rtx, 1), bitsize,
4955 bitpos - mode_bitsize / 2,
4956 bitregion_start, bitregion_end,
4957 mode1, from,
4958 get_alias_set (to), nontemporal);
4959 else if (bitpos == 0 && bitsize == mode_bitsize)
4960 {
4961 rtx from_rtx;
4962 result = expand_normal (from);
4963 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4964 TYPE_MODE (TREE_TYPE (from)), 0);
4965 emit_move_insn (XEXP (to_rtx, 0),
4966 read_complex_part (from_rtx, false));
4967 emit_move_insn (XEXP (to_rtx, 1),
4968 read_complex_part (from_rtx, true));
4969 }
4970 else
4971 {
4972 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4973 GET_MODE_SIZE (GET_MODE (to_rtx)));
4974 write_complex_part (temp, XEXP (to_rtx, 0), false);
4975 write_complex_part (temp, XEXP (to_rtx, 1), true);
4976 result = store_field (temp, bitsize, bitpos,
4977 bitregion_start, bitregion_end,
4978 mode1, from,
4979 get_alias_set (to), nontemporal);
4980 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4981 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4982 }
4983 }
4984 else
4985 {
4986 if (MEM_P (to_rtx))
4987 {
4988 /* If the field is at offset zero, we could have been given the
4989 DECL_RTX of the parent struct. Don't munge it. */
4990 to_rtx = shallow_copy_rtx (to_rtx);
4991 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4992 if (volatilep)
4993 MEM_VOLATILE_P (to_rtx) = 1;
4994 }
4995
4996 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4997 bitregion_start, bitregion_end,
4998 mode1,
4999 to_rtx, to, from))
5000 result = NULL;
5001 else
5002 result = store_field (to_rtx, bitsize, bitpos,
5003 bitregion_start, bitregion_end,
5004 mode1, from,
5005 get_alias_set (to), nontemporal);
5006 }
5007
5008 if (result)
5009 preserve_temp_slots (result);
5010 pop_temp_slots ();
5011 return;
5012 }
5013
5014 /* If the rhs is a function call and its value is not an aggregate,
5015 call the function before we start to compute the lhs.
5016 This is needed for correct code for cases such as
5017 val = setjmp (buf) on machines where reference to val
5018 requires loading up part of an address in a separate insn.
5019
5020 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is a REG,
5021 since it might be a promoted variable where the zero- or sign-extension
5022 needs to be done. Handling this in the normal way is safe because no
5023 computation is done before the call. The same is true for SSA names. */
5024 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5025 && COMPLETE_TYPE_P (TREE_TYPE (from))
5026 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5027 && ! (((TREE_CODE (to) == VAR_DECL
5028 || TREE_CODE (to) == PARM_DECL
5029 || TREE_CODE (to) == RESULT_DECL)
5030 && REG_P (DECL_RTL (to)))
5031 || TREE_CODE (to) == SSA_NAME))
5032 {
5033 rtx value;
5034 rtx bounds;
5035
5036 push_temp_slots ();
5037 value = expand_normal (from);
5038
5039 /* Split value and bounds to store them separately. */
5040 chkp_split_slot (value, &value, &bounds);
5041
5042 if (to_rtx == 0)
5043 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5044
5045 /* Handle calls that return values in multiple non-contiguous locations.
5046 The Irix 6 ABI has examples of this. */
5047 if (GET_CODE (to_rtx) == PARALLEL)
5048 {
5049 if (GET_CODE (value) == PARALLEL)
5050 emit_group_move (to_rtx, value);
5051 else
5052 emit_group_load (to_rtx, value, TREE_TYPE (from),
5053 int_size_in_bytes (TREE_TYPE (from)));
5054 }
5055 else if (GET_CODE (value) == PARALLEL)
5056 emit_group_store (to_rtx, value, TREE_TYPE (from),
5057 int_size_in_bytes (TREE_TYPE (from)));
5058 else if (GET_MODE (to_rtx) == BLKmode)
5059 {
5060 /* Handle calls that return BLKmode values in registers. */
5061 if (REG_P (value))
5062 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5063 else
5064 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5065 }
5066 else
5067 {
5068 if (POINTER_TYPE_P (TREE_TYPE (to)))
5069 value = convert_memory_address_addr_space
5070 (GET_MODE (to_rtx), value,
5071 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5072
5073 emit_move_insn (to_rtx, value);
5074 }
5075
5076 /* Store bounds if required. */
5077 if (bounds
5078 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5079 {
5080 gcc_assert (MEM_P (to_rtx));
5081 chkp_emit_bounds_store (bounds, value, to_rtx);
5082 }
5083
5084 preserve_temp_slots (to_rtx);
5085 pop_temp_slots ();
5086 return;
5087 }
5088
5089 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5090 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5091
5092 /* Don't move directly into a return register. */
5093 if (TREE_CODE (to) == RESULT_DECL
5094 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5095 {
5096 rtx temp;
5097
5098 push_temp_slots ();
5099
5100 /* If the source is itself a return value, it is still in a pseudo at
5101 this point, so we can move it back to the return register directly. */
5102 if (REG_P (to_rtx)
5103 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5104 && TREE_CODE (from) != CALL_EXPR)
5105 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5106 else
5107 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5108
5109 /* Handle calls that return values in multiple non-contiguous locations.
5110 The Irix 6 ABI has examples of this. */
5111 if (GET_CODE (to_rtx) == PARALLEL)
5112 {
5113 if (GET_CODE (temp) == PARALLEL)
5114 emit_group_move (to_rtx, temp);
5115 else
5116 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5117 int_size_in_bytes (TREE_TYPE (from)));
5118 }
5119 else if (temp)
5120 emit_move_insn (to_rtx, temp);
5121
5122 preserve_temp_slots (to_rtx);
5123 pop_temp_slots ();
5124 return;
5125 }
5126
5127 /* In case we are returning the contents of an object which overlaps
5128 the place the value is being stored, use a safe function when copying
5129 a value through a pointer into a structure value return block. */
5130 if (TREE_CODE (to) == RESULT_DECL
5131 && TREE_CODE (from) == INDIRECT_REF
5132 && ADDR_SPACE_GENERIC_P
5133 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5134 && refs_may_alias_p (to, from)
5135 && cfun->returns_struct
5136 && !cfun->returns_pcc_struct)
5137 {
5138 rtx from_rtx, size;
5139
5140 push_temp_slots ();
5141 size = expr_size (from);
5142 from_rtx = expand_normal (from);
5143
5144 emit_library_call (memmove_libfunc, LCT_NORMAL,
5145 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5146 XEXP (from_rtx, 0), Pmode,
5147 convert_to_mode (TYPE_MODE (sizetype),
5148 size, TYPE_UNSIGNED (sizetype)),
5149 TYPE_MODE (sizetype));
5150
5151 preserve_temp_slots (to_rtx);
5152 pop_temp_slots ();
5153 return;
5154 }
5155
5156 /* Compute FROM and store the value in the rtx we got. */
5157
5158 push_temp_slots ();
5159 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, to);
5160 preserve_temp_slots (result);
5161 pop_temp_slots ();
5162 return;
5163 }
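
/* An example of the misaligned-store path near the top of
   expand_assignment (GCC extension syntax; assumes the aligned
   attribute on a typedef reduces the type's alignment, purely
   illustrative):

     typedef int unaligned_int __attribute__ ((aligned (1)));
     void f (unaligned_int *p, int v) { *p = v; }

   Here the store target is a MEM_REF whose alignment is below the
   SImode requirement, so on a target where such accesses are slow or
   trapping the store goes through the movmisalign<mode> pattern if
   one exists, or through store_bit_field otherwise, instead of an
   ordinary SImode move.  */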
5164
5165 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5166 succeeded, false otherwise. */
5167
5168 bool
5169 emit_storent_insn (rtx to, rtx from)
5170 {
5171 struct expand_operand ops[2];
5172 machine_mode mode = GET_MODE (to);
5173 enum insn_code code = optab_handler (storent_optab, mode);
5174
5175 if (code == CODE_FOR_nothing)
5176 return false;
5177
5178 create_fixed_operand (&ops[0], to);
5179 create_input_operand (&ops[1], from, mode);
5180 return maybe_expand_insn (code, 2, ops);
5181 }
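
/* Usage sketch: on a target that provides a storent<mode> pattern
   (a streaming, non-temporal store), this succeeds and emits that
   store; on targets without such a pattern it simply returns false
   and the caller falls back to a normal move.  A hypothetical caller
   might do

     if (!emit_storent_insn (to_rtx, value))
       emit_move_insn (to_rtx, value);

   which mirrors how the failure case is expected to be handled.  */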
5182
5183 /* Generate code for computing expression EXP,
5184 and storing the value into TARGET.
5185
5186 If the mode is BLKmode then we may return TARGET itself.
5187 It turns out that in BLKmode it doesn't cause a problem,
5188 because C has no operators that could combine two different
5189 assignments into the same BLKmode object with different values
5190 with no sequence point. Will other languages need this to
5191 be more thorough?
5192
5193 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5194 stack, and block moves may need to be treated specially.
5195
5196 If NONTEMPORAL is true, try using a nontemporal store instruction.
5197
5198 If BTARGET is not NULL then computed bounds of EXP are
5199 associated with BTARGET. */
5200
5201 rtx
5202 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5203 bool nontemporal, tree btarget)
5204 {
5205 rtx temp;
5206 rtx alt_rtl = NULL_RTX;
5207 location_t loc = curr_insn_location ();
5208
5209 if (VOID_TYPE_P (TREE_TYPE (exp)))
5210 {
5211 /* C++ can generate ?: expressions with a throw expression in one
5212 branch and an rvalue in the other. Here, we resolve attempts to
5213 store the throw expression's nonexistent result. */
5214 gcc_assert (!call_param_p);
5215 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5216 return NULL_RTX;
5217 }
5218 if (TREE_CODE (exp) == COMPOUND_EXPR)
5219 {
5220 /* Perform first part of compound expression, then assign from second
5221 part. */
5222 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5223 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5224 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5225 call_param_p, nontemporal, btarget);
5226 }
5227 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5228 {
5229 /* For conditional expression, get safe form of the target. Then
5230 test the condition, doing the appropriate assignment on either
5231 side. This avoids the creation of unnecessary temporaries.
5232 For non-BLKmode, it is more efficient not to do this. */
5233
5234 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5235
5236 do_pending_stack_adjust ();
5237 NO_DEFER_POP;
5238 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5239 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5240 nontemporal, btarget);
5241 emit_jump_insn (gen_jump (lab2));
5242 emit_barrier ();
5243 emit_label (lab1);
5244 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5245 nontemporal, btarget);
5246 emit_label (lab2);
5247 OK_DEFER_POP;
5248
5249 return NULL_RTX;
5250 }
5251 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5252 /* If this is a scalar in a register that is stored in a wider mode
5253 than the declared mode, compute the result into its declared mode
5254 and then convert to the wider mode. Our value is the computed
5255 expression. */
5256 {
5257 rtx inner_target = 0;
5258
5259 /* We can do the conversion inside EXP, which will often result
5260 in some optimizations. Do the conversion in two steps: first
5261 change the signedness, if needed, then the extend. But don't
5262 do this if the type of EXP is a subtype of something else
5263 since then the conversion might involve more than just
5264 converting modes. */
5265 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5266 && TREE_TYPE (TREE_TYPE (exp)) == 0
5267 && GET_MODE_PRECISION (GET_MODE (target))
5268 == TYPE_PRECISION (TREE_TYPE (exp)))
5269 {
5270 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5271 TYPE_UNSIGNED (TREE_TYPE (exp))))
5272 {
5273 /* Some types, e.g. Fortran's logical*4, won't have a signed
5274 version, so use the mode instead. */
5275 tree ntype
5276 = (signed_or_unsigned_type_for
5277 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5278 if (ntype == NULL)
5279 ntype = lang_hooks.types.type_for_mode
5280 (TYPE_MODE (TREE_TYPE (exp)),
5281 SUBREG_PROMOTED_SIGN (target));
5282
5283 exp = fold_convert_loc (loc, ntype, exp);
5284 }
5285
5286 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5287 (GET_MODE (SUBREG_REG (target)),
5288 SUBREG_PROMOTED_SIGN (target)),
5289 exp);
5290
5291 inner_target = SUBREG_REG (target);
5292 }
5293
5294 temp = expand_expr (exp, inner_target, VOIDmode,
5295 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5296
5297 /* Handle bounds returned by call. */
5298 if (TREE_CODE (exp) == CALL_EXPR)
5299 {
5300 rtx bounds;
5301 chkp_split_slot (temp, &temp, &bounds);
5302 if (bounds && btarget)
5303 {
5304 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5305 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5306 chkp_set_rtl_bounds (btarget, tmp);
5307 }
5308 }
5309
5310 /* If TEMP is a VOIDmode constant, use convert_modes to make
5311 sure that we properly convert it. */
5312 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5313 {
5314 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5315 temp, SUBREG_PROMOTED_SIGN (target));
5316 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5317 GET_MODE (target), temp,
5318 SUBREG_PROMOTED_SIGN (target));
5319 }
5320
5321 convert_move (SUBREG_REG (target), temp,
5322 SUBREG_PROMOTED_SIGN (target));
5323
5324 return NULL_RTX;
5325 }
5326 else if ((TREE_CODE (exp) == STRING_CST
5327 || (TREE_CODE (exp) == MEM_REF
5328 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5329 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5330 == STRING_CST
5331 && integer_zerop (TREE_OPERAND (exp, 1))))
5332 && !nontemporal && !call_param_p
5333 && MEM_P (target))
5334 {
5335 /* Optimize initialization of an array with a STRING_CST. */
5336 HOST_WIDE_INT exp_len, str_copy_len;
5337 rtx dest_mem;
5338 tree str = TREE_CODE (exp) == STRING_CST
5339 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5340
5341 exp_len = int_expr_size (exp);
5342 if (exp_len <= 0)
5343 goto normal_expr;
5344
5345 if (TREE_STRING_LENGTH (str) <= 0)
5346 goto normal_expr;
5347
5348 str_copy_len = strlen (TREE_STRING_POINTER (str));
5349 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5350 goto normal_expr;
5351
5352 str_copy_len = TREE_STRING_LENGTH (str);
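/* Illustrative note, not part of the original sources: when
   STORE_MAX_PIECES is a power of two, say 8, the block below rounds
   the copy length up to the next multiple of it, e.g. 5 + 7 == 12 and
   12 & ~7 == 8, so store_by_pieces can work in full-width pieces; the
   MIN against EXP_LEN afterwards keeps the copy inside the
   destination object.  */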
5353 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5354 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5355 {
5356 str_copy_len += STORE_MAX_PIECES - 1;
5357 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5358 }
5359 str_copy_len = MIN (str_copy_len, exp_len);
5360 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5361 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5362 MEM_ALIGN (target), false))
5363 goto normal_expr;
5364
5365 dest_mem = target;
5366
5367 dest_mem = store_by_pieces (dest_mem,
5368 str_copy_len, builtin_strncpy_read_str,
5369 CONST_CAST (char *,
5370 TREE_STRING_POINTER (str)),
5371 MEM_ALIGN (target), false,
5372 exp_len > str_copy_len ? 1 : 0);
5373 if (exp_len > str_copy_len)
5374 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5375 GEN_INT (exp_len - str_copy_len),
5376 BLOCK_OP_NORMAL);
5377 return NULL_RTX;
5378 }
5379 else
5380 {
5381 rtx tmp_target;
5382
5383 normal_expr:
5384 /* If we want to use a nontemporal store, force the value to
5385 register first. */
5386 tmp_target = nontemporal ? NULL_RTX : target;
5387 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5388 (call_param_p
5389 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5390 &alt_rtl, false);
5391
5392 /* Handle bounds returned by call. */
5393 if (TREE_CODE (exp) == CALL_EXPR)
5394 {
5395 rtx bounds;
5396 chkp_split_slot (temp, &temp, &bounds);
5397 if (bounds && btarget)
5398 {
5399 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5400 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5401 chkp_set_rtl_bounds (btarget, tmp);
5402 }
5403 }
5404 }
5405
5406 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5407 the same as that of TARGET, adjust the constant. This is needed, for
5408 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5409 only a word-sized value. */
5410 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5411 && TREE_CODE (exp) != ERROR_MARK
5412 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5413 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5414 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5415
5416 /* If value was not generated in the target, store it there.
5417 Convert the value to TARGET's type first if necessary and emit the
5418 pending increments that have been queued while expanding EXP.
5419 Note that we cannot emit the whole queue blindly because this will
5420 effectively disable the POST_INC optimization later.
5421
5422 If TEMP and TARGET compare equal according to rtx_equal_p, but
5423 one or both of them are volatile memory refs, we have to distinguish
5424 two cases:
5425 - expand_expr has used TARGET. In this case, we must not generate
5426 another copy. This can be detected by TARGET being equal according
5427 to ==.
5428 - expand_expr has not used TARGET - that means that the source just
5429 happens to have the same RTX form. Since temp will have been created
5430 by expand_expr, it will compare unequal according to ==.
5431 We must generate a copy in this case, to reach the correct number
5432 of volatile memory references. */
5433
5434 if ((! rtx_equal_p (temp, target)
5435 || (temp != target && (side_effects_p (temp)
5436 || side_effects_p (target))))
5437 && TREE_CODE (exp) != ERROR_MARK
5438 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5439 but TARGET is not valid memory reference, TEMP will differ
5440 from TARGET although it is really the same location. */
5441 && !(alt_rtl
5442 && rtx_equal_p (alt_rtl, target)
5443 && !side_effects_p (alt_rtl)
5444 && !side_effects_p (target))
5445 /* If there's nothing to copy, don't bother. Don't call
5446 expr_size unless necessary, because some front-ends' (e.g. C++)
5447 expr_size hook must not be given objects that are not
5448 supposed to be bit-copied or bit-initialized. */
5449 && expr_size (exp) != const0_rtx)
5450 {
5451 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5452 {
5453 if (GET_MODE (target) == BLKmode)
5454 {
5455 /* Handle calls that return BLKmode values in registers. */
5456 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5457 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5458 else
5459 store_bit_field (target,
5460 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5461 0, 0, 0, GET_MODE (temp), temp);
5462 }
5463 else
5464 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5465 }
5466
5467 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5468 {
5469 /* Handle copying a string constant into an array. The string
5470 constant may be shorter than the array. So copy just the string's
5471 actual length, and clear the rest. First get the size of the data
5472 type of the string, which is actually the size of the target. */
5473 rtx size = expr_size (exp);
5474
5475 if (CONST_INT_P (size)
5476 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5477 emit_block_move (target, temp, size,
5478 (call_param_p
5479 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5480 else
5481 {
5482 machine_mode pointer_mode
5483 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5484 machine_mode address_mode = get_address_mode (target);
5485
5486 /* Compute the size of the data to copy from the string. */
5487 tree copy_size
5488 = size_binop_loc (loc, MIN_EXPR,
5489 make_tree (sizetype, size),
5490 size_int (TREE_STRING_LENGTH (exp)));
5491 rtx copy_size_rtx
5492 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5493 (call_param_p
5494 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5495 rtx_code_label *label = 0;
5496
5497 /* Copy that much. */
5498 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5499 TYPE_UNSIGNED (sizetype));
5500 emit_block_move (target, temp, copy_size_rtx,
5501 (call_param_p
5502 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5503
5504 /* Figure out how much is left in TARGET that we have to clear.
5505 Do all calculations in pointer_mode. */
5506 if (CONST_INT_P (copy_size_rtx))
5507 {
5508 size = plus_constant (address_mode, size,
5509 -INTVAL (copy_size_rtx));
5510 target = adjust_address (target, BLKmode,
5511 INTVAL (copy_size_rtx));
5512 }
5513 else
5514 {
5515 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5516 copy_size_rtx, NULL_RTX, 0,
5517 OPTAB_LIB_WIDEN);
5518
5519 if (GET_MODE (copy_size_rtx) != address_mode)
5520 copy_size_rtx = convert_to_mode (address_mode,
5521 copy_size_rtx,
5522 TYPE_UNSIGNED (sizetype));
5523
5524 target = offset_address (target, copy_size_rtx,
5525 highest_pow2_factor (copy_size));
5526 label = gen_label_rtx ();
5527 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5528 GET_MODE (size), 0, label);
5529 }
5530
5531 if (size != const0_rtx)
5532 clear_storage (target, size, BLOCK_OP_NORMAL);
5533
5534 if (label)
5535 emit_label (label);
5536 }
5537 }
5538 /* Handle calls that return values in multiple non-contiguous locations.
5539 The Irix 6 ABI has examples of this. */
5540 else if (GET_CODE (target) == PARALLEL)
5541 {
5542 if (GET_CODE (temp) == PARALLEL)
5543 emit_group_move (target, temp);
5544 else
5545 emit_group_load (target, temp, TREE_TYPE (exp),
5546 int_size_in_bytes (TREE_TYPE (exp)));
5547 }
5548 else if (GET_CODE (temp) == PARALLEL)
5549 emit_group_store (target, temp, TREE_TYPE (exp),
5550 int_size_in_bytes (TREE_TYPE (exp)));
5551 else if (GET_MODE (temp) == BLKmode)
5552 emit_block_move (target, temp, expr_size (exp),
5553 (call_param_p
5554 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5555 /* If we emit a nontemporal store, there is nothing else to do. */
5556 else if (nontemporal && emit_storent_insn (target, temp))
5557 ;
5558 else
5559 {
5560 temp = force_operand (temp, target);
5561 if (temp != target)
5562 emit_move_insn (target, temp);
5563 }
5564 }
5565
5566 return NULL_RTX;
5567 }
5568
5569 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5570 rtx
5571 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5572 {
5573 return store_expr_with_bounds (exp, target, call_param_p, nontemporal, NULL);
5574 }
5575 \f
5576 /* Return true if field F of structure TYPE is a flexible array. */
5577
5578 static bool
5579 flexible_array_member_p (const_tree f, const_tree type)
5580 {
5581 const_tree tf;
5582
5583 tf = TREE_TYPE (f);
5584 return (DECL_CHAIN (f) == NULL
5585 && TREE_CODE (tf) == ARRAY_TYPE
5586 && TYPE_DOMAIN (tf)
5587 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5588 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5589 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5590 && int_size_in_bytes (type) >= 0);
5591 }
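
/* For example (illustrative only), given

     struct packet { int len; char payload[]; };

   the field PAYLOAD satisfies flexible_array_member_p: it is the last
   field, its type is an ARRAY_TYPE whose domain has a zero minimum and
   no maximum value, and the enclosing struct still has a nonnegative
   constant size.  */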
5592
5593 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5594 must have in order for it to completely initialize a value of type TYPE.
5595 Return -1 if the number isn't known.
5596
5597 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5598
5599 static HOST_WIDE_INT
5600 count_type_elements (const_tree type, bool for_ctor_p)
5601 {
5602 switch (TREE_CODE (type))
5603 {
5604 case ARRAY_TYPE:
5605 {
5606 tree nelts;
5607
5608 nelts = array_type_nelts (type);
5609 if (nelts && tree_fits_uhwi_p (nelts))
5610 {
5611 unsigned HOST_WIDE_INT n;
5612
5613 n = tree_to_uhwi (nelts) + 1;
5614 if (n == 0 || for_ctor_p)
5615 return n;
5616 else
5617 return n * count_type_elements (TREE_TYPE (type), false);
5618 }
5619 return for_ctor_p ? -1 : 1;
5620 }
5621
5622 case RECORD_TYPE:
5623 {
5624 unsigned HOST_WIDE_INT n;
5625 tree f;
5626
5627 n = 0;
5628 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5629 if (TREE_CODE (f) == FIELD_DECL)
5630 {
5631 if (!for_ctor_p)
5632 n += count_type_elements (TREE_TYPE (f), false);
5633 else if (!flexible_array_member_p (f, type))
5634 /* Don't count flexible arrays, which are not supposed
5635 to be initialized. */
5636 n += 1;
5637 }
5638
5639 return n;
5640 }
5641
5642 case UNION_TYPE:
5643 case QUAL_UNION_TYPE:
5644 {
5645 tree f;
5646 HOST_WIDE_INT n, m;
5647
5648 gcc_assert (!for_ctor_p);
5649 /* Estimate the number of scalars in each field and pick the
5650 maximum. Other estimates would do instead; the idea is simply
5651 to make sure that the estimate is not sensitive to the ordering
5652 of the fields. */
5653 n = 1;
5654 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5655 if (TREE_CODE (f) == FIELD_DECL)
5656 {
5657 m = count_type_elements (TREE_TYPE (f), false);
5658 /* If the field doesn't span the whole union, add an extra
5659 scalar for the rest. */
5660 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5661 TYPE_SIZE (type)) != 1)
5662 m++;
5663 if (n < m)
5664 n = m;
5665 }
5666 return n;
5667 }
5668
5669 case COMPLEX_TYPE:
5670 return 2;
5671
5672 case VECTOR_TYPE:
5673 return TYPE_VECTOR_SUBPARTS (type);
5674
5675 case INTEGER_TYPE:
5676 case REAL_TYPE:
5677 case FIXED_POINT_TYPE:
5678 case ENUMERAL_TYPE:
5679 case BOOLEAN_TYPE:
5680 case POINTER_TYPE:
5681 case OFFSET_TYPE:
5682 case REFERENCE_TYPE:
5683 case NULLPTR_TYPE:
5684 return 1;
5685
5686 case ERROR_MARK:
5687 return 0;
5688
5689 case VOID_TYPE:
5690 case METHOD_TYPE:
5691 case FUNCTION_TYPE:
5692 case LANG_TYPE:
5693 default:
5694 gcc_unreachable ();
5695 }
5696 }
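
/* Worked example for count_type_elements (illustrative only): for

     struct s { int a; double d[3]; };

   the result is 2 when FOR_CTOR_P, since a complete constructor needs
   two top-level elements, and 1 + 3 * 1 == 4 when !FOR_CTOR_P, as an
   estimate of the number of scalars in the type.  */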
5697
5698 /* Helper for categorize_ctor_elements. Identical interface. */
5699
5700 static bool
5701 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5702 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5703 {
5704 unsigned HOST_WIDE_INT idx;
5705 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5706 tree value, purpose, elt_type;
5707
5708 /* Whether CTOR is a valid constant initializer, in accordance with what
5709 initializer_constant_valid_p does. If inferred from the constructor
5710 elements, true until proven otherwise. */
5711 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5712 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5713
5714 nz_elts = 0;
5715 init_elts = 0;
5716 num_fields = 0;
5717 elt_type = NULL_TREE;
5718
5719 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5720 {
5721 HOST_WIDE_INT mult = 1;
5722
5723 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5724 {
5725 tree lo_index = TREE_OPERAND (purpose, 0);
5726 tree hi_index = TREE_OPERAND (purpose, 1);
5727
5728 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5729 mult = (tree_to_uhwi (hi_index)
5730 - tree_to_uhwi (lo_index) + 1);
5731 }
5732 num_fields += mult;
5733 elt_type = TREE_TYPE (value);
5734
5735 switch (TREE_CODE (value))
5736 {
5737 case CONSTRUCTOR:
5738 {
5739 HOST_WIDE_INT nz = 0, ic = 0;
5740
5741 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5742 p_complete);
5743
5744 nz_elts += mult * nz;
5745 init_elts += mult * ic;
5746
5747 if (const_from_elts_p && const_p)
5748 const_p = const_elt_p;
5749 }
5750 break;
5751
5752 case INTEGER_CST:
5753 case REAL_CST:
5754 case FIXED_CST:
5755 if (!initializer_zerop (value))
5756 nz_elts += mult;
5757 init_elts += mult;
5758 break;
5759
5760 case STRING_CST:
5761 nz_elts += mult * TREE_STRING_LENGTH (value);
5762 init_elts += mult * TREE_STRING_LENGTH (value);
5763 break;
5764
5765 case COMPLEX_CST:
5766 if (!initializer_zerop (TREE_REALPART (value)))
5767 nz_elts += mult;
5768 if (!initializer_zerop (TREE_IMAGPART (value)))
5769 nz_elts += mult;
5770 init_elts += mult;
5771 break;
5772
5773 case VECTOR_CST:
5774 {
5775 unsigned i;
5776 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5777 {
5778 tree v = VECTOR_CST_ELT (value, i);
5779 if (!initializer_zerop (v))
5780 nz_elts += mult;
5781 init_elts += mult;
5782 }
5783 }
5784 break;
5785
5786 default:
5787 {
5788 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5789 nz_elts += mult * tc;
5790 init_elts += mult * tc;
5791
5792 if (const_from_elts_p && const_p)
5793 const_p = initializer_constant_valid_p (value, elt_type)
5794 != NULL_TREE;
5795 }
5796 break;
5797 }
5798 }
5799
5800 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5801 num_fields, elt_type))
5802 *p_complete = false;
5803
5804 *p_nz_elts += nz_elts;
5805 *p_init_elts += init_elts;
5806
5807 return const_p;
5808 }
5809
5810 /* Examine CTOR to discover:
5811 * how many scalar fields are set to nonzero values,
5812 and place it in *P_NZ_ELTS;
5813 * how many scalar fields in total are in CTOR,
5814 and place it in *P_INIT_ELTS;
5815 * whether the constructor is complete -- in the sense that every
5816 meaningful byte is explicitly given a value --
5817 and place it in *P_COMPLETE.
5818
5819 Return whether or not CTOR is a valid static constant initializer, the same
5820 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5821
5822 bool
5823 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5824 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5825 {
5826 *p_nz_elts = 0;
5827 *p_init_elts = 0;
5828 *p_complete = true;
5829
5830 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5831 }
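
/* Worked example for categorize_ctor_elements (illustrative only): for
   the constructor produced by

     int v[8] = { [2] = 5 };

   there is a single nonzero INTEGER_CST element, so *P_NZ_ELTS and
   *P_INIT_ELTS both end up as 1, and *P_COMPLETE becomes false because
   count_type_elements (int[8], true) == 8 does not match the single
   element present.  */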
5832
5833 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5834 of which had type LAST_TYPE. Each element was itself a complete
5835 initializer, in the sense that every meaningful byte was explicitly
5836 given a value. Return true if the same is true for the constructor
5837 as a whole. */
5838
5839 bool
5840 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5841 const_tree last_type)
5842 {
5843 if (TREE_CODE (type) == UNION_TYPE
5844 || TREE_CODE (type) == QUAL_UNION_TYPE)
5845 {
5846 if (num_elts == 0)
5847 return false;
5848
5849 gcc_assert (num_elts == 1 && last_type);
5850
5851 /* ??? We could look at each element of the union, and find the
5852 largest element, which would avoid comparing the size of the
5853 initialized element against any tail padding in the union.
5854 Doesn't seem worth the effort... */
5855 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5856 }
5857
5858 return count_type_elements (type, true) == num_elts;
5859 }
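
/* For instance (illustrative only), initializing

     union u { char c; double d; } x = { .c = 1 };

   is not complete at this level: the single initialized element has
   type char, and TYPE_SIZE (char) differs from TYPE_SIZE (union u), so
   the tail of the union is not covered by the explicit initializer.  */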
5860
5861 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
5862
5863 static int
5864 mostly_zeros_p (const_tree exp)
5865 {
5866 if (TREE_CODE (exp) == CONSTRUCTOR)
5867 {
5868 HOST_WIDE_INT nz_elts, init_elts;
5869 bool complete_p;
5870
5871 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5872 return !complete_p || nz_elts < init_elts / 4;
5873 }
5874
5875 return initializer_zerop (exp);
5876 }
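
/* Example (illustrative only): for

     int v[8] = { 0, 0, 7, 0, 0, 0, 0, 0 };

   categorize_ctor_elements reports nz_elts == 1 and init_elts == 8, so
   1 < 8 / 4 holds and mostly_zeros_p returns nonzero; store_constructor
   can then clear the whole object first and store only the single
   nonzero element.  */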
5877
5878 /* Return 1 if EXP contains all zeros. */
5879
5880 static int
5881 all_zeros_p (const_tree exp)
5882 {
5883 if (TREE_CODE (exp) == CONSTRUCTOR)
5884 {
5885 HOST_WIDE_INT nz_elts, init_elts;
5886 bool complete_p;
5887
5888 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5889 return nz_elts == 0;
5890 }
5891
5892 return initializer_zerop (exp);
5893 }
5894 \f
5895 /* Helper function for store_constructor.
5896 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5897 CLEARED is as for store_constructor.
5898 ALIAS_SET is the alias set to use for any stores.
5899
5900 This provides a recursive shortcut back to store_constructor when it isn't
5901 necessary to go through store_field. This is so that we can pass through
5902 the cleared field to let store_constructor know that we may not have to
5903 clear a substructure if the outer structure has already been cleared. */
5904
5905 static void
5906 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5907 HOST_WIDE_INT bitpos, machine_mode mode,
5908 tree exp, int cleared, alias_set_type alias_set)
5909 {
5910 if (TREE_CODE (exp) == CONSTRUCTOR
5911 /* We can only call store_constructor recursively if the size and
5912 bit position are on a byte boundary. */
5913 && bitpos % BITS_PER_UNIT == 0
5914 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5915 /* If we have a nonzero bitpos for a register target, then we just
5916 let store_field do the bitfield handling. This is unlikely to
5917 generate unnecessary clear instructions anyway. */
5918 && (bitpos == 0 || MEM_P (target)))
5919 {
5920 if (MEM_P (target))
5921 target
5922 = adjust_address (target,
5923 GET_MODE (target) == BLKmode
5924 || 0 != (bitpos
5925 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5926 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5927
5928
5929 /* Update the alias set, if required. */
5930 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5931 && MEM_ALIAS_SET (target) != 0)
5932 {
5933 target = copy_rtx (target);
5934 set_mem_alias_set (target, alias_set);
5935 }
5936
5937 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5938 }
5939 else
5940 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5941 }
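
/* For example (illustrative only), when expanding

     struct outer { struct inner { int a, b; } in; int c; } o = { { 5 } };

   the whole of O is cleared first because the constructor has fewer
   elements than the type has fields, and the element for O.IN, itself
   a CONSTRUCTOR at a byte-aligned position, is handed straight to
   store_constructor with CLEARED set so its zero fields are not
   cleared a second time.  */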
5942
5943
5944 /* Returns the number of FIELD_DECLs in TYPE. */
5945
5946 static int
5947 fields_length (const_tree type)
5948 {
5949 tree t = TYPE_FIELDS (type);
5950 int count = 0;
5951
5952 for (; t; t = DECL_CHAIN (t))
5953 if (TREE_CODE (t) == FIELD_DECL)
5954 ++count;
5955
5956 return count;
5957 }
5958
5959
5960 /* Store the value of constructor EXP into the rtx TARGET.
5961 TARGET is either a REG or a MEM; we know it cannot conflict, since
5962 safe_from_p has been called.
5963 CLEARED is true if TARGET is known to have been zero'd.
5964 SIZE is the number of bytes of TARGET we are allowed to modify: this
5965 may not be the same as the size of EXP if we are assigning to a field
5966 which has been packed to exclude padding bits. */
5967
5968 static void
5969 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5970 {
5971 tree type = TREE_TYPE (exp);
5972 #ifdef WORD_REGISTER_OPERATIONS
5973 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5974 #endif
5975
5976 switch (TREE_CODE (type))
5977 {
5978 case RECORD_TYPE:
5979 case UNION_TYPE:
5980 case QUAL_UNION_TYPE:
5981 {
5982 unsigned HOST_WIDE_INT idx;
5983 tree field, value;
5984
5985 /* If size is zero or the target is already cleared, do nothing. */
5986 if (size == 0 || cleared)
5987 cleared = 1;
5988 /* We either clear the aggregate or indicate the value is dead. */
5989 else if ((TREE_CODE (type) == UNION_TYPE
5990 || TREE_CODE (type) == QUAL_UNION_TYPE)
5991 && ! CONSTRUCTOR_ELTS (exp))
5992 /* If the constructor is empty, clear the union. */
5993 {
5994 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5995 cleared = 1;
5996 }
5997
5998 /* If we are building a static constructor into a register,
5999 set the initial value as zero so we can fold the value into
6000 a constant. But if more than one register is involved,
6001 this probably loses. */
6002 else if (REG_P (target) && TREE_STATIC (exp)
6003 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
6004 {
6005 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6006 cleared = 1;
6007 }
6008
6009 /* If the constructor has fewer fields than the structure or
6010 if we are initializing the structure to mostly zeros, clear
6011 the whole structure first. Don't do this if TARGET is a
6012 register whose mode size isn't equal to SIZE since
6013 clear_storage can't handle this case. */
6014 else if (size > 0
6015 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
6016 != fields_length (type))
6017 || mostly_zeros_p (exp))
6018 && (!REG_P (target)
6019 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6020 == size)))
6021 {
6022 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6023 cleared = 1;
6024 }
6025
6026 if (REG_P (target) && !cleared)
6027 emit_clobber (target);
6028
6029 /* Store each element of the constructor into the
6030 corresponding field of TARGET. */
6031 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6032 {
6033 machine_mode mode;
6034 HOST_WIDE_INT bitsize;
6035 HOST_WIDE_INT bitpos = 0;
6036 tree offset;
6037 rtx to_rtx = target;
6038
6039 /* Just ignore missing fields. We cleared the whole
6040 structure, above, if any fields are missing. */
6041 if (field == 0)
6042 continue;
6043
6044 if (cleared && initializer_zerop (value))
6045 continue;
6046
6047 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6048 bitsize = tree_to_uhwi (DECL_SIZE (field));
6049 else
6050 bitsize = -1;
6051
6052 mode = DECL_MODE (field);
6053 if (DECL_BIT_FIELD (field))
6054 mode = VOIDmode;
6055
6056 offset = DECL_FIELD_OFFSET (field);
6057 if (tree_fits_shwi_p (offset)
6058 && tree_fits_shwi_p (bit_position (field)))
6059 {
6060 bitpos = int_bit_position (field);
6061 offset = 0;
6062 }
6063 else
6064 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6065
6066 if (offset)
6067 {
6068 machine_mode address_mode;
6069 rtx offset_rtx;
6070
6071 offset
6072 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6073 make_tree (TREE_TYPE (exp),
6074 target));
6075
6076 offset_rtx = expand_normal (offset);
6077 gcc_assert (MEM_P (to_rtx));
6078
6079 address_mode = get_address_mode (to_rtx);
6080 if (GET_MODE (offset_rtx) != address_mode)
6081 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6082
6083 to_rtx = offset_address (to_rtx, offset_rtx,
6084 highest_pow2_factor (offset));
6085 }
6086
6087 #ifdef WORD_REGISTER_OPERATIONS
6088 /* If this initializes a field that is smaller than a
6089 word, at the start of a word, try to widen it to a full
6090 word. This special case allows us to output C++ member
6091 function initializations in a form that the optimizers
6092 can understand. */
6093 if (REG_P (target)
6094 && bitsize < BITS_PER_WORD
6095 && bitpos % BITS_PER_WORD == 0
6096 && GET_MODE_CLASS (mode) == MODE_INT
6097 && TREE_CODE (value) == INTEGER_CST
6098 && exp_size >= 0
6099 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6100 {
6101 tree type = TREE_TYPE (value);
6102
6103 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6104 {
6105 type = lang_hooks.types.type_for_mode
6106 (word_mode, TYPE_UNSIGNED (type));
6107 value = fold_convert (type, value);
6108 }
6109
6110 if (BYTES_BIG_ENDIAN)
6111 value
6112 = fold_build2 (LSHIFT_EXPR, type, value,
6113 build_int_cst (type,
6114 BITS_PER_WORD - bitsize));
6115 bitsize = BITS_PER_WORD;
6116 mode = word_mode;
6117 }
6118 #endif
6119
6120 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6121 && DECL_NONADDRESSABLE_P (field))
6122 {
6123 to_rtx = copy_rtx (to_rtx);
6124 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6125 }
6126
6127 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6128 value, cleared,
6129 get_alias_set (TREE_TYPE (field)));
6130 }
6131 break;
6132 }
6133 case ARRAY_TYPE:
6134 {
6135 tree value, index;
6136 unsigned HOST_WIDE_INT i;
6137 int need_to_clear;
6138 tree domain;
6139 tree elttype = TREE_TYPE (type);
6140 int const_bounds_p;
6141 HOST_WIDE_INT minelt = 0;
6142 HOST_WIDE_INT maxelt = 0;
6143
6144 domain = TYPE_DOMAIN (type);
6145 const_bounds_p = (TYPE_MIN_VALUE (domain)
6146 && TYPE_MAX_VALUE (domain)
6147 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6148 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6149
6150 /* If we have constant bounds for the range of the type, get them. */
6151 if (const_bounds_p)
6152 {
6153 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6154 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6155 }
6156
6157 /* If the constructor has fewer elements than the array, clear
6158 the whole array first. Similarly if this is a static
6159 constructor of a non-BLKmode object. */
6160 if (cleared)
6161 need_to_clear = 0;
6162 else if (REG_P (target) && TREE_STATIC (exp))
6163 need_to_clear = 1;
6164 else
6165 {
6166 unsigned HOST_WIDE_INT idx;
6167 tree index, value;
6168 HOST_WIDE_INT count = 0, zero_count = 0;
6169 need_to_clear = ! const_bounds_p;
6170
6171 /* This loop is a more accurate version of the loop in
6172 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6173 is also needed to check for missing elements. */
6174 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6175 {
6176 HOST_WIDE_INT this_node_count;
6177
6178 if (need_to_clear)
6179 break;
6180
6181 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6182 {
6183 tree lo_index = TREE_OPERAND (index, 0);
6184 tree hi_index = TREE_OPERAND (index, 1);
6185
6186 if (! tree_fits_uhwi_p (lo_index)
6187 || ! tree_fits_uhwi_p (hi_index))
6188 {
6189 need_to_clear = 1;
6190 break;
6191 }
6192
6193 this_node_count = (tree_to_uhwi (hi_index)
6194 - tree_to_uhwi (lo_index) + 1);
6195 }
6196 else
6197 this_node_count = 1;
6198
6199 count += this_node_count;
6200 if (mostly_zeros_p (value))
6201 zero_count += this_node_count;
6202 }
6203
6204 /* Clear the entire array first if there are any missing
6205 elements, or if the incidence of zero elements is >=
6206 75%. */
6207 if (! need_to_clear
6208 && (count < maxelt - minelt + 1
6209 || 4 * zero_count >= 3 * count))
6210 need_to_clear = 1;
6211 }
6212
6213 if (need_to_clear && size > 0)
6214 {
6215 if (REG_P (target))
6216 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6217 else
6218 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6219 cleared = 1;
6220 }
6221
6222 if (!cleared && REG_P (target))
6223 /* Inform later passes that the old value is dead. */
6224 emit_clobber (target);
6225
6226 /* Store each element of the constructor into the
6227 corresponding element of TARGET, determined by counting the
6228 elements. */
6229 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6230 {
6231 machine_mode mode;
6232 HOST_WIDE_INT bitsize;
6233 HOST_WIDE_INT bitpos;
6234 rtx xtarget = target;
6235
6236 if (cleared && initializer_zerop (value))
6237 continue;
6238
6239 mode = TYPE_MODE (elttype);
6240 if (mode == BLKmode)
6241 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6242 ? tree_to_uhwi (TYPE_SIZE (elttype))
6243 : -1);
6244 else
6245 bitsize = GET_MODE_BITSIZE (mode);
6246
6247 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6248 {
6249 tree lo_index = TREE_OPERAND (index, 0);
6250 tree hi_index = TREE_OPERAND (index, 1);
6251 rtx index_r, pos_rtx;
6252 HOST_WIDE_INT lo, hi, count;
6253 tree position;
6254
6255 /* If the range is constant and "small", unroll the loop. */
6256 if (const_bounds_p
6257 && tree_fits_shwi_p (lo_index)
6258 && tree_fits_shwi_p (hi_index)
6259 && (lo = tree_to_shwi (lo_index),
6260 hi = tree_to_shwi (hi_index),
6261 count = hi - lo + 1,
6262 (!MEM_P (target)
6263 || count <= 2
6264 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6265 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6266 <= 40 * 8)))))
6267 {
6268 lo -= minelt; hi -= minelt;
6269 for (; lo <= hi; lo++)
6270 {
6271 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6272
6273 if (MEM_P (target)
6274 && !MEM_KEEP_ALIAS_SET_P (target)
6275 && TREE_CODE (type) == ARRAY_TYPE
6276 && TYPE_NONALIASED_COMPONENT (type))
6277 {
6278 target = copy_rtx (target);
6279 MEM_KEEP_ALIAS_SET_P (target) = 1;
6280 }
6281
6282 store_constructor_field
6283 (target, bitsize, bitpos, mode, value, cleared,
6284 get_alias_set (elttype));
6285 }
6286 }
6287 else
6288 {
6289 rtx_code_label *loop_start = gen_label_rtx ();
6290 rtx_code_label *loop_end = gen_label_rtx ();
6291 tree exit_cond;
6292
6293 expand_normal (hi_index);
6294
6295 index = build_decl (EXPR_LOCATION (exp),
6296 VAR_DECL, NULL_TREE, domain);
6297 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6298 SET_DECL_RTL (index, index_r);
6299 store_expr (lo_index, index_r, 0, false);
6300
6301 /* Build the head of the loop. */
6302 do_pending_stack_adjust ();
6303 emit_label (loop_start);
6304
6305 /* Assign value to element index. */
6306 position =
6307 fold_convert (ssizetype,
6308 fold_build2 (MINUS_EXPR,
6309 TREE_TYPE (index),
6310 index,
6311 TYPE_MIN_VALUE (domain)));
6312
6313 position =
6314 size_binop (MULT_EXPR, position,
6315 fold_convert (ssizetype,
6316 TYPE_SIZE_UNIT (elttype)));
6317
6318 pos_rtx = expand_normal (position);
6319 xtarget = offset_address (target, pos_rtx,
6320 highest_pow2_factor (position));
6321 xtarget = adjust_address (xtarget, mode, 0);
6322 if (TREE_CODE (value) == CONSTRUCTOR)
6323 store_constructor (value, xtarget, cleared,
6324 bitsize / BITS_PER_UNIT);
6325 else
6326 store_expr (value, xtarget, 0, false);
6327
6328 /* Generate a conditional jump to exit the loop. */
6329 exit_cond = build2 (LT_EXPR, integer_type_node,
6330 index, hi_index);
6331 jumpif (exit_cond, loop_end, -1);
6332
6333 /* Update the loop counter, and jump to the head of
6334 the loop. */
6335 expand_assignment (index,
6336 build2 (PLUS_EXPR, TREE_TYPE (index),
6337 index, integer_one_node),
6338 false);
6339
6340 emit_jump (loop_start);
6341
6342 /* Build the end of the loop. */
6343 emit_label (loop_end);
6344 }
6345 }
6346 else if ((index != 0 && ! tree_fits_shwi_p (index))
6347 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6348 {
6349 tree position;
6350
6351 if (index == 0)
6352 index = ssize_int (1);
6353
6354 if (minelt)
6355 index = fold_convert (ssizetype,
6356 fold_build2 (MINUS_EXPR,
6357 TREE_TYPE (index),
6358 index,
6359 TYPE_MIN_VALUE (domain)));
6360
6361 position =
6362 size_binop (MULT_EXPR, index,
6363 fold_convert (ssizetype,
6364 TYPE_SIZE_UNIT (elttype)));
6365 xtarget = offset_address (target,
6366 expand_normal (position),
6367 highest_pow2_factor (position));
6368 xtarget = adjust_address (xtarget, mode, 0);
6369 store_expr (value, xtarget, 0, false);
6370 }
6371 else
6372 {
6373 if (index != 0)
6374 bitpos = ((tree_to_shwi (index) - minelt)
6375 * tree_to_uhwi (TYPE_SIZE (elttype)));
6376 else
6377 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6378
6379 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6380 && TREE_CODE (type) == ARRAY_TYPE
6381 && TYPE_NONALIASED_COMPONENT (type))
6382 {
6383 target = copy_rtx (target);
6384 MEM_KEEP_ALIAS_SET_P (target) = 1;
6385 }
6386 store_constructor_field (target, bitsize, bitpos, mode, value,
6387 cleared, get_alias_set (elttype));
6388 }
6389 }
6390 break;
6391 }
6392
6393 case VECTOR_TYPE:
6394 {
6395 unsigned HOST_WIDE_INT idx;
6396 constructor_elt *ce;
6397 int i;
6398 int need_to_clear;
6399 int icode = CODE_FOR_nothing;
6400 tree elttype = TREE_TYPE (type);
6401 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6402 machine_mode eltmode = TYPE_MODE (elttype);
6403 HOST_WIDE_INT bitsize;
6404 HOST_WIDE_INT bitpos;
6405 rtvec vector = NULL;
6406 unsigned n_elts;
6407 alias_set_type alias;
6408
6409 gcc_assert (eltmode != BLKmode);
6410
6411 n_elts = TYPE_VECTOR_SUBPARTS (type);
6412 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6413 {
6414 machine_mode mode = GET_MODE (target);
6415
6416 icode = (int) optab_handler (vec_init_optab, mode);
6417 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6418 if (icode != CODE_FOR_nothing)
6419 {
6420 tree value;
6421
6422 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6423 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6424 {
6425 icode = CODE_FOR_nothing;
6426 break;
6427 }
6428 }
6429 if (icode != CODE_FOR_nothing)
6430 {
6431 unsigned int i;
6432
6433 vector = rtvec_alloc (n_elts);
6434 for (i = 0; i < n_elts; i++)
6435 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6436 }
6437 }
6438
6439 /* If the constructor has fewer elements than the vector,
6440 clear the whole vector first. Similarly if this is a static
6441 constructor of a non-BLKmode object. */
6442 if (cleared)
6443 need_to_clear = 0;
6444 else if (REG_P (target) && TREE_STATIC (exp))
6445 need_to_clear = 1;
6446 else
6447 {
6448 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6449 tree value;
6450
6451 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6452 {
6453 int n_elts_here = tree_to_uhwi
6454 (int_const_binop (TRUNC_DIV_EXPR,
6455 TYPE_SIZE (TREE_TYPE (value)),
6456 TYPE_SIZE (elttype)));
6457
6458 count += n_elts_here;
6459 if (mostly_zeros_p (value))
6460 zero_count += n_elts_here;
6461 }
6462
6463 /* Clear the entire vector first if there are any missing elements,
6464 or if the incidence of zero elements is >= 75%. */
6465 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6466 }
6467
6468 if (need_to_clear && size > 0 && !vector)
6469 {
6470 if (REG_P (target))
6471 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6472 else
6473 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6474 cleared = 1;
6475 }
6476
6477 /* Inform later passes that the old value is dead. */
6478 if (!cleared && !vector && REG_P (target))
6479 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6480
6481 if (MEM_P (target))
6482 alias = MEM_ALIAS_SET (target);
6483 else
6484 alias = get_alias_set (elttype);
6485
6486 /* Store each element of the constructor into the corresponding
6487 element of TARGET, determined by counting the elements. */
6488 for (idx = 0, i = 0;
6489 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6490 idx++, i += bitsize / elt_size)
6491 {
6492 HOST_WIDE_INT eltpos;
6493 tree value = ce->value;
6494
6495 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6496 if (cleared && initializer_zerop (value))
6497 continue;
6498
6499 if (ce->index)
6500 eltpos = tree_to_uhwi (ce->index);
6501 else
6502 eltpos = i;
6503
6504 if (vector)
6505 {
6506 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6507 elements. */
6508 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6509 RTVEC_ELT (vector, eltpos)
6510 = expand_normal (value);
6511 }
6512 else
6513 {
6514 machine_mode value_mode =
6515 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6516 ? TYPE_MODE (TREE_TYPE (value))
6517 : eltmode;
6518 bitpos = eltpos * elt_size;
6519 store_constructor_field (target, bitsize, bitpos, value_mode,
6520 value, cleared, alias);
6521 }
6522 }
6523
6524 if (vector)
6525 emit_insn (GEN_FCN (icode)
6526 (target,
6527 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6528 break;
6529 }
6530
6531 default:
6532 gcc_unreachable ();
6533 }
6534 }
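
/* Illustrative example for the RANGE_EXPR handling above (not part of
   the original sources): for

     int v[16] = { [0 ... 3] = 9 };

   the range bounds are constant and the covered data (4 elements of 4
   bytes) is well under the 40-byte unrolling limit, so the range is
   expanded as four individual stores; a much larger range is instead
   expanded as the small counting loop built around LOOP_START and
   LOOP_END.  */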
6535
6536 /* Store the value of EXP (an expression tree)
6537 into a subfield of TARGET which has mode MODE and occupies
6538 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6539 If MODE is VOIDmode, it means that we are storing into a bit-field.
6540
6541 BITREGION_START is the bitpos of the first bitfield in this region.
6542 BITREGION_END is the bitpos of the ending bitfield in this region.
6543 These two fields are 0, if the C++ memory model does not apply,
6544 or we are not interested in keeping track of bitfield regions.
6545
6546 Always return const0_rtx unless we have something particular to
6547 return.
6548
6549 ALIAS_SET is the alias set for the destination. This value will
6550 (in general) be different from that for TARGET, since TARGET is a
6551 reference to the containing structure.
6552
6553 If NONTEMPORAL is true, try generating a nontemporal store. */
6554
6555 static rtx
6556 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6557 unsigned HOST_WIDE_INT bitregion_start,
6558 unsigned HOST_WIDE_INT bitregion_end,
6559 machine_mode mode, tree exp,
6560 alias_set_type alias_set, bool nontemporal)
6561 {
6562 if (TREE_CODE (exp) == ERROR_MARK)
6563 return const0_rtx;
6564
6565 /* If we have nothing to store, do nothing unless the expression has
6566 side-effects. */
6567 if (bitsize == 0)
6568 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6569
6570 if (GET_CODE (target) == CONCAT)
6571 {
6572 /* We're storing into a struct containing a single __complex. */
6573
6574 gcc_assert (!bitpos);
6575 return store_expr (exp, target, 0, nontemporal);
6576 }
6577
6578 /* If the structure is in a register or if the component
6579 is a bit field, we cannot use addressing to access it.
6580 Use bit-field techniques or SUBREG to store in it. */
6581
6582 if (mode == VOIDmode
6583 || (mode != BLKmode && ! direct_store[(int) mode]
6584 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6585 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6586 || REG_P (target)
6587 || GET_CODE (target) == SUBREG
6588 /* If the field isn't aligned enough to store as an ordinary memref,
6589 store it as a bit field. */
6590 || (mode != BLKmode
6591 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6592 || bitpos % GET_MODE_ALIGNMENT (mode))
6593 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6594 || (bitpos % BITS_PER_UNIT != 0)))
6595 || (bitsize >= 0 && mode != BLKmode
6596 && GET_MODE_BITSIZE (mode) > bitsize)
6597 /* If the RHS and field are a constant size and the size of the
6598 RHS isn't the same size as the bitfield, we must use bitfield
6599 operations. */
6600 || (bitsize >= 0
6601 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6602 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6603 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6604 decl we must use bitfield operations. */
6605 || (bitsize >= 0
6606 && TREE_CODE (exp) == MEM_REF
6607 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6608 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6609 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6610 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6611 {
6612 rtx temp;
6613 gimple nop_def;
6614
6615 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6616 implies a mask operation. If the precision is the same size as
6617 the field we're storing into, that mask is redundant. This is
6618 particularly common with bit field assignments generated by the
6619 C front end. */
6620 nop_def = get_def_for_expr (exp, NOP_EXPR);
6621 if (nop_def)
6622 {
6623 tree type = TREE_TYPE (exp);
6624 if (INTEGRAL_TYPE_P (type)
6625 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6626 && bitsize == TYPE_PRECISION (type))
6627 {
6628 tree op = gimple_assign_rhs1 (nop_def);
6629 type = TREE_TYPE (op);
6630 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6631 exp = op;
6632 }
6633 }
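/* Illustrative example (not part of the original sources): for a
   bit-field assignment such as

     struct { unsigned f : 3; } s;
     ... s.f = x; ...

   the front end converts X to the 3-bit bit-field type.  Since the
   precision of that conversion equals BITSIZE, the masking it implies
   is redundant with the masking done by store_bit_field below, so the
   unconverted X is stored directly.  */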
6634
6635 temp = expand_normal (exp);
6636
6637 /* If BITSIZE is narrower than the size of the type of EXP
6638 we will be narrowing TEMP. Normally, what's wanted are the
6639 low-order bits. However, if EXP's type is a record and this is
6640 a big-endian machine, we want the upper BITSIZE bits. */
6641 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6642 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6643 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6644 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6645 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6646 NULL_RTX, 1);
6647
6648 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6649 if (mode != VOIDmode && mode != BLKmode
6650 && mode != TYPE_MODE (TREE_TYPE (exp)))
6651 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6652
6653 /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6654 are both BLKmode, both must be in memory and BITPOS must be aligned
6655 on a byte boundary. If so, we simply do a block copy. Likewise for
6656 a BLKmode-like TARGET. */
6657 if (GET_CODE (temp) != PARALLEL
6658 && GET_MODE (temp) == BLKmode
6659 && (GET_MODE (target) == BLKmode
6660 || (MEM_P (target)
6661 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6662 && (bitpos % BITS_PER_UNIT) == 0
6663 && (bitsize % BITS_PER_UNIT) == 0)))
6664 {
6665 gcc_assert (MEM_P (target) && MEM_P (temp)
6666 && (bitpos % BITS_PER_UNIT) == 0);
6667
6668 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6669 emit_block_move (target, temp,
6670 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6671 / BITS_PER_UNIT),
6672 BLOCK_OP_NORMAL);
6673
6674 return const0_rtx;
6675 }
6676
6677 /* Handle calls that return values in multiple non-contiguous locations.
6678 The Irix 6 ABI has examples of this. */
6679 if (GET_CODE (temp) == PARALLEL)
6680 {
6681 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6682 rtx temp_target;
6683 if (mode == BLKmode || mode == VOIDmode)
6684 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6685 temp_target = gen_reg_rtx (mode);
6686 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6687 temp = temp_target;
6688 }
6689 else if (mode == BLKmode)
6690 {
6691 /* Handle calls that return BLKmode values in registers. */
6692 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6693 {
6694 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6695 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6696 temp = temp_target;
6697 }
6698 else
6699 {
6700 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6701 rtx temp_target;
6702 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6703 temp_target = gen_reg_rtx (mode);
6704 temp_target
6705 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6706 temp_target, mode, mode);
6707 temp = temp_target;
6708 }
6709 }
6710
6711 /* Store the value in the bitfield. */
6712 store_bit_field (target, bitsize, bitpos,
6713 bitregion_start, bitregion_end,
6714 mode, temp);
6715
6716 return const0_rtx;
6717 }
6718 else
6719 {
6720 /* Now build a reference to just the desired component. */
6721 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6722
6723 if (to_rtx == target)
6724 to_rtx = copy_rtx (to_rtx);
6725
6726 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6727 set_mem_alias_set (to_rtx, alias_set);
6728
6729 return store_expr (exp, to_rtx, 0, nontemporal);
6730 }
6731 }
6732 \f
6733 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6734 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6735 codes and find the ultimate containing object, which we return.
6736
6737 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6738 bit position, and *PUNSIGNEDP to the signedness of the field.
6739 If the position of the field is variable, we store a tree
6740 giving the variable offset (in units) in *POFFSET.
6741 This offset is in addition to the bit position.
6742 If the position is not variable, we store 0 in *POFFSET.
6743
6744 If any of the extraction expressions is volatile,
6745 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6746
6747 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6748 Otherwise, it is a mode that can be used to access the field.
6749
6750 If the field describes a variable-sized object, *PMODE is set to
6751 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6752 this case, but the address of the object can be found.
6753
6754 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6755 look through nodes that serve as markers of a greater alignment than
6756 the one that can be deduced from the expression. These nodes make it
6757 possible for front-ends to prevent temporaries from being created by
6758 the middle-end on alignment considerations. For that purpose, the
6759 normal operating mode at the high level is to always pass FALSE so that
6760 the ultimate containing object is really returned; moreover, the
6761 associated predicate handled_component_p will always return TRUE
6762 on these nodes, thus indicating that they are essentially handled
6763 by get_inner_reference. TRUE should only be passed when the caller
6764 is scanning the expression in order to build another representation
6765 and specifically knows how to handle these nodes; as such, this is
6766 the normal operating mode in the RTL expanders. */
6767
6768 tree
6769 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6770 HOST_WIDE_INT *pbitpos, tree *poffset,
6771 machine_mode *pmode, int *punsignedp,
6772 int *pvolatilep, bool keep_aligning)
6773 {
6774 tree size_tree = 0;
6775 machine_mode mode = VOIDmode;
6776 bool blkmode_bitfield = false;
6777 tree offset = size_zero_node;
6778 offset_int bit_offset = 0;
6779
6780 /* First get the mode, signedness, and size. We do this from just the
6781 outermost expression. */
6782 *pbitsize = -1;
6783 if (TREE_CODE (exp) == COMPONENT_REF)
6784 {
6785 tree field = TREE_OPERAND (exp, 1);
6786 size_tree = DECL_SIZE (field);
6787 if (flag_strict_volatile_bitfields > 0
6788 && TREE_THIS_VOLATILE (exp)
6789 && DECL_BIT_FIELD_TYPE (field)
6790 && DECL_MODE (field) != BLKmode)
6791 /* Volatile bitfields should be accessed in the mode of the
6792 field's type, not the mode computed based on the bit
6793 size. */
6794 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6795 else if (!DECL_BIT_FIELD (field))
6796 mode = DECL_MODE (field);
6797 else if (DECL_MODE (field) == BLKmode)
6798 blkmode_bitfield = true;
6799
6800 *punsignedp = DECL_UNSIGNED (field);
6801 }
6802 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6803 {
6804 size_tree = TREE_OPERAND (exp, 1);
6805 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6806 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6807
6808 /* For vector types, with the correct size of access, use the mode of
6809 inner type. */
6810 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6811 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6812 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6813 mode = TYPE_MODE (TREE_TYPE (exp));
6814 }
6815 else
6816 {
6817 mode = TYPE_MODE (TREE_TYPE (exp));
6818 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6819
6820 if (mode == BLKmode)
6821 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6822 else
6823 *pbitsize = GET_MODE_BITSIZE (mode);
6824 }
6825
6826 if (size_tree != 0)
6827 {
6828 if (! tree_fits_uhwi_p (size_tree))
6829 mode = BLKmode, *pbitsize = -1;
6830 else
6831 *pbitsize = tree_to_uhwi (size_tree);
6832 }
6833
6834 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6835 and find the ultimate containing object. */
6836 while (1)
6837 {
6838 switch (TREE_CODE (exp))
6839 {
6840 case BIT_FIELD_REF:
6841 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6842 break;
6843
6844 case COMPONENT_REF:
6845 {
6846 tree field = TREE_OPERAND (exp, 1);
6847 tree this_offset = component_ref_field_offset (exp);
6848
6849 /* If this field hasn't been filled in yet, don't go past it.
6850 This should only happen when folding expressions made during
6851 type construction. */
6852 if (this_offset == 0)
6853 break;
6854
6855 offset = size_binop (PLUS_EXPR, offset, this_offset);
6856 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6857
6858 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6859 }
6860 break;
6861
6862 case ARRAY_REF:
6863 case ARRAY_RANGE_REF:
6864 {
6865 tree index = TREE_OPERAND (exp, 1);
6866 tree low_bound = array_ref_low_bound (exp);
6867 tree unit_size = array_ref_element_size (exp);
6868
6869 /* We assume all arrays have sizes that are a multiple of a byte.
6870 First subtract the lower bound, if any, in the type of the
6871 index, then convert to sizetype and multiply by the size of
6872 the array element. */
6873 if (! integer_zerop (low_bound))
6874 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6875 index, low_bound);
6876
6877 offset = size_binop (PLUS_EXPR, offset,
6878 size_binop (MULT_EXPR,
6879 fold_convert (sizetype, index),
6880 unit_size));
6881 }
6882 break;
6883
6884 case REALPART_EXPR:
6885 break;
6886
6887 case IMAGPART_EXPR:
6888 bit_offset += *pbitsize;
6889 break;
6890
6891 case VIEW_CONVERT_EXPR:
6892 if (keep_aligning && STRICT_ALIGNMENT
6893 && (TYPE_ALIGN (TREE_TYPE (exp))
6894 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6895 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6896 < BIGGEST_ALIGNMENT)
6897 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6898 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6899 goto done;
6900 break;
6901
6902 case MEM_REF:
6903 /* Hand back the decl for MEM[&decl, off]. */
6904 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6905 {
6906 tree off = TREE_OPERAND (exp, 1);
6907 if (!integer_zerop (off))
6908 {
6909 offset_int boff, coff = mem_ref_offset (exp);
6910 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6911 bit_offset += boff;
6912 }
6913 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6914 }
6915 goto done;
6916
6917 default:
6918 goto done;
6919 }
6920
6921 /* If any reference in the chain is volatile, the effect is volatile. */
6922 if (TREE_THIS_VOLATILE (exp))
6923 *pvolatilep = 1;
6924
6925 exp = TREE_OPERAND (exp, 0);
6926 }
6927 done:
6928
6929 /* If OFFSET is constant, see if we can return the whole thing as a
6930 constant bit position. Make sure to handle overflow during
6931 this conversion. */
6932 if (TREE_CODE (offset) == INTEGER_CST)
6933 {
6934 offset_int tem = wi::sext (wi::to_offset (offset),
6935 TYPE_PRECISION (sizetype));
6936 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6937 tem += bit_offset;
6938 if (wi::fits_shwi_p (tem))
6939 {
6940 *pbitpos = tem.to_shwi ();
6941 *poffset = offset = NULL_TREE;
6942 }
6943 }
6944
6945 /* Otherwise, split it up. */
6946 if (offset)
6947 {
6948 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6949 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
6950 {
6951 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6952 offset_int tem = bit_offset.and_not (mask);
6953 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6954 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6955 bit_offset -= tem;
6956 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6957 offset = size_binop (PLUS_EXPR, offset,
6958 wide_int_to_tree (sizetype, tem));
6959 }
6960
6961 *pbitpos = bit_offset.to_shwi ();
6962 *poffset = offset;
6963 }
6964
6965 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6966 if (mode == VOIDmode
6967 && blkmode_bitfield
6968 && (*pbitpos % BITS_PER_UNIT) == 0
6969 && (*pbitsize % BITS_PER_UNIT) == 0)
6970 *pmode = BLKmode;
6971 else
6972 *pmode = mode;
6973
6974 return exp;
6975 }
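
/* Worked example for get_inner_reference (illustrative only, assuming
   a 4-byte int and 8-bit bytes): for

     struct s { int pad; struct { char c[10]; } in; } x;

   a reference to x.in.c[3] yields the containing object X with
   *PBITSIZE == 8, *POFFSET == NULL_TREE and *PBITPOS == (4 + 3) * 8,
   since the constant field and array offsets all fold into the single
   bit position.  */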
6976
6977 /* Return a tree of sizetype representing the size, in bytes, of the element
6978 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6979
6980 tree
6981 array_ref_element_size (tree exp)
6982 {
6983 tree aligned_size = TREE_OPERAND (exp, 3);
6984 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6985 location_t loc = EXPR_LOCATION (exp);
6986
6987 /* If a size was specified in the ARRAY_REF, it's the size measured
6988 in alignment units of the element type. So multiply by that value. */
6989 if (aligned_size)
6990 {
6991 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6992 sizetype from another type of the same width and signedness. */
6993 if (TREE_TYPE (aligned_size) != sizetype)
6994 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6995 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6996 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6997 }
6998
6999 /* Otherwise, take the size from that of the element type. Substitute
7000 any PLACEHOLDER_EXPR that we have. */
7001 else
7002 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
7003 }
7004
7005 /* Return a tree representing the lower bound of the array mentioned in
7006 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
7007
7008 tree
7009 array_ref_low_bound (tree exp)
7010 {
7011 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7012
7013 /* If a lower bound is specified in EXP, use it. */
7014 if (TREE_OPERAND (exp, 2))
7015 return TREE_OPERAND (exp, 2);
7016
7017 /* Otherwise, if there is a domain type and it has a lower bound, use it,
7018 substituting for a PLACEHOLDER_EXPR as needed. */
7019 if (domain_type && TYPE_MIN_VALUE (domain_type))
7020 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
7021
7022 /* Otherwise, return a zero of the appropriate type. */
7023 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
7024 }
7025
7026 /* Returns true if REF is an array reference to an array at the end of
7027 a structure. If this is the case, the array may be allocated larger
7028 than its upper bound implies. */
7029
7030 bool
7031 array_at_struct_end_p (tree ref)
7032 {
7033 if (TREE_CODE (ref) != ARRAY_REF
7034 && TREE_CODE (ref) != ARRAY_RANGE_REF)
7035 return false;
7036
7037 while (handled_component_p (ref))
7038 {
7039 /* If the reference chain contains a component reference to a
7040 non-union type and another field follows, then the reference
7041 is not at the end of a structure. */
7042 if (TREE_CODE (ref) == COMPONENT_REF
7043 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
7044 {
7045 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
7046 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
7047 nextf = DECL_CHAIN (nextf);
7048 if (nextf)
7049 return false;
7050 }
7051
7052 ref = TREE_OPERAND (ref, 0);
7053 }
7054
7055 /* If the reference is based on a declared entity, the size of the array
7056 is constrained by its given domain. */
7057 if (DECL_P (ref))
7058 return false;
7059
7060 return true;
7061 }
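
/* For instance (illustrative only), with

     struct msg { int len; char buf[1]; } *p;

   the reference p->buf[i] satisfies array_at_struct_end_p: BUF is the
   last field of MSG and the base of the access is a pointer
   dereference rather than a declared object, so the array may have
   been allocated larger than its nominal bound.  */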
7062
7063 /* Return a tree representing the upper bound of the array mentioned in
7064 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
7065
7066 tree
7067 array_ref_up_bound (tree exp)
7068 {
7069 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7070
7071 /* If there is a domain type and it has an upper bound, use it, substituting
7072 for a PLACEHOLDER_EXPR as needed. */
7073 if (domain_type && TYPE_MAX_VALUE (domain_type))
7074 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
7075
7076 /* Otherwise fail. */
7077 return NULL_TREE;
7078 }
7079
7080 /* Return a tree representing the offset, in bytes, of the field referenced
7081 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
7082
7083 tree
7084 component_ref_field_offset (tree exp)
7085 {
7086 tree aligned_offset = TREE_OPERAND (exp, 2);
7087 tree field = TREE_OPERAND (exp, 1);
7088 location_t loc = EXPR_LOCATION (exp);
7089
7090 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
7091 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
7092 value. */
7093 if (aligned_offset)
7094 {
7095 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
7096 sizetype from another type of the same width and signedness. */
7097 if (TREE_TYPE (aligned_offset) != sizetype)
7098 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
7099 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7100 size_int (DECL_OFFSET_ALIGN (field)
7101 / BITS_PER_UNIT));
7102 }
7103
7104 /* Otherwise, take the offset from that of the field. Substitute
7105 any PLACEHOLDER_EXPR that we have. */
7106 else
7107 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7108 }
7109
7110 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7111
7112 static unsigned HOST_WIDE_INT
7113 target_align (const_tree target)
7114 {
7115   /* We might have a chain of nested references with intermediate misaligning
7116      bitfield components, so we need to recurse to find out. */
7117
7118 unsigned HOST_WIDE_INT this_align, outer_align;
7119
7120 switch (TREE_CODE (target))
7121 {
7122 case BIT_FIELD_REF:
7123 return 1;
7124
7125 case COMPONENT_REF:
7126 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7127 outer_align = target_align (TREE_OPERAND (target, 0));
7128 return MIN (this_align, outer_align);
7129
7130 case ARRAY_REF:
7131 case ARRAY_RANGE_REF:
7132 this_align = TYPE_ALIGN (TREE_TYPE (target));
7133 outer_align = target_align (TREE_OPERAND (target, 0));
7134 return MIN (this_align, outer_align);
7135
7136 CASE_CONVERT:
7137 case NON_LVALUE_EXPR:
7138 case VIEW_CONVERT_EXPR:
7139 this_align = TYPE_ALIGN (TREE_TYPE (target));
7140 outer_align = target_align (TREE_OPERAND (target, 0));
7141 return MAX (this_align, outer_align);
7142
7143 default:
7144 return TYPE_ALIGN (TREE_TYPE (target));
7145 }
7146 }
7147
7148 \f
7149 /* Given an rtx VALUE that may contain additions and multiplications, return
7150 an equivalent value that just refers to a register, memory, or constant.
7151 This is done by generating instructions to perform the arithmetic and
7152 returning a pseudo-register containing the value.
7153
7154 The returned value may be a REG, SUBREG, MEM or constant. */
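/* Illustrative sketch, assuming a pseudo register 100 exists in SImode:
   for VALUE = (plus:SI (reg:SI 100) (const_int 4)) the code below emits an
   add instruction and returns a fresh pseudo holding the sum, so the
   caller sees a plain REG rather than the arithmetic expression.  */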
7155
7156 rtx
7157 force_operand (rtx value, rtx target)
7158 {
7159 rtx op1, op2;
7160 /* Use subtarget as the target for operand 0 of a binary operation. */
7161 rtx subtarget = get_subtarget (target);
7162 enum rtx_code code = GET_CODE (value);
7163
7164 /* Check for subreg applied to an expression produced by loop optimizer. */
7165 if (code == SUBREG
7166 && !REG_P (SUBREG_REG (value))
7167 && !MEM_P (SUBREG_REG (value)))
7168 {
7169 value
7170 = simplify_gen_subreg (GET_MODE (value),
7171 force_reg (GET_MODE (SUBREG_REG (value)),
7172 force_operand (SUBREG_REG (value),
7173 NULL_RTX)),
7174 GET_MODE (SUBREG_REG (value)),
7175 SUBREG_BYTE (value));
7176 code = GET_CODE (value);
7177 }
7178
7179 /* Check for a PIC address load. */
7180 if ((code == PLUS || code == MINUS)
7181 && XEXP (value, 0) == pic_offset_table_rtx
7182 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7183 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7184 || GET_CODE (XEXP (value, 1)) == CONST))
7185 {
7186 if (!subtarget)
7187 subtarget = gen_reg_rtx (GET_MODE (value));
7188 emit_move_insn (subtarget, value);
7189 return subtarget;
7190 }
7191
7192 if (ARITHMETIC_P (value))
7193 {
7194 op2 = XEXP (value, 1);
7195 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7196 subtarget = 0;
7197 if (code == MINUS && CONST_INT_P (op2))
7198 {
7199 code = PLUS;
7200 op2 = negate_rtx (GET_MODE (value), op2);
7201 }
7202
7203 /* Check for an addition with OP2 a constant integer and our first
7204 operand a PLUS of a virtual register and something else. In that
7205 case, we want to emit the sum of the virtual register and the
7206 constant first and then add the other value. This allows virtual
7207 register instantiation to simply modify the constant rather than
7208 creating another one around this addition. */
7209 if (code == PLUS && CONST_INT_P (op2)
7210 && GET_CODE (XEXP (value, 0)) == PLUS
7211 && REG_P (XEXP (XEXP (value, 0), 0))
7212 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7213 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7214 {
7215 rtx temp = expand_simple_binop (GET_MODE (value), code,
7216 XEXP (XEXP (value, 0), 0), op2,
7217 subtarget, 0, OPTAB_LIB_WIDEN);
7218 return expand_simple_binop (GET_MODE (value), code, temp,
7219 force_operand (XEXP (XEXP (value,
7220 0), 1), 0),
7221 target, 0, OPTAB_LIB_WIDEN);
7222 }
7223
7224 op1 = force_operand (XEXP (value, 0), subtarget);
7225 op2 = force_operand (op2, NULL_RTX);
7226 switch (code)
7227 {
7228 case MULT:
7229 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7230 case DIV:
7231 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7232 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7233 target, 1, OPTAB_LIB_WIDEN);
7234 else
7235 return expand_divmod (0,
7236 FLOAT_MODE_P (GET_MODE (value))
7237 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7238 GET_MODE (value), op1, op2, target, 0);
7239 case MOD:
7240 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7241 target, 0);
7242 case UDIV:
7243 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7244 target, 1);
7245 case UMOD:
7246 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7247 target, 1);
7248 case ASHIFTRT:
7249 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7250 target, 0, OPTAB_LIB_WIDEN);
7251 default:
7252 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7253 target, 1, OPTAB_LIB_WIDEN);
7254 }
7255 }
7256 if (UNARY_P (value))
7257 {
7258 if (!target)
7259 target = gen_reg_rtx (GET_MODE (value));
7260 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7261 switch (code)
7262 {
7263 case ZERO_EXTEND:
7264 case SIGN_EXTEND:
7265 case TRUNCATE:
7266 case FLOAT_EXTEND:
7267 case FLOAT_TRUNCATE:
7268 convert_move (target, op1, code == ZERO_EXTEND);
7269 return target;
7270
7271 case FIX:
7272 case UNSIGNED_FIX:
7273 expand_fix (target, op1, code == UNSIGNED_FIX);
7274 return target;
7275
7276 case FLOAT:
7277 case UNSIGNED_FLOAT:
7278 expand_float (target, op1, code == UNSIGNED_FLOAT);
7279 return target;
7280
7281 default:
7282 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7283 }
7284 }
7285
7286 #ifdef INSN_SCHEDULING
7287   /* On machines that have insn scheduling, we want all memory references to be
7288 explicit, so we need to deal with such paradoxical SUBREGs. */
7289 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7290 value
7291 = simplify_gen_subreg (GET_MODE (value),
7292 force_reg (GET_MODE (SUBREG_REG (value)),
7293 force_operand (SUBREG_REG (value),
7294 NULL_RTX)),
7295 GET_MODE (SUBREG_REG (value)),
7296 SUBREG_BYTE (value));
7297 #endif
7298
7299 return value;
7300 }
7301 \f
7302 /* Subroutine of expand_expr: return nonzero iff there is no way that
7303 EXP can reference X, which is being modified. TOP_P is nonzero if this
7304 call is going to be used to determine whether we need a temporary
7305 for EXP, as opposed to a recursive call to this function.
7306
7307 It is always safe for this routine to return zero since it merely
7308 searches for optimization opportunities. */
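/* For example (illustrative only): if X is the DECL_RTL of some variable
   and EXP mentions that same variable, the walk below finds the matching
   rtx and 0 is returned, telling the caller that a temporary is needed.  */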
7309
7310 int
7311 safe_from_p (const_rtx x, tree exp, int top_p)
7312 {
7313 rtx exp_rtl = 0;
7314 int i, nops;
7315
7316 if (x == 0
7317 /* If EXP has varying size, we MUST use a target since we currently
7318 have no way of allocating temporaries of variable size
7319 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7320 So we assume here that something at a higher level has prevented a
7321 clash. This is somewhat bogus, but the best we can do. Only
7322 do this when X is BLKmode and when we are at the top level. */
7323 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7324 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7325 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7326 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7327 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7328 != INTEGER_CST)
7329 && GET_MODE (x) == BLKmode)
7330 /* If X is in the outgoing argument area, it is always safe. */
7331 || (MEM_P (x)
7332 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7333 || (GET_CODE (XEXP (x, 0)) == PLUS
7334 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7335 return 1;
7336
7337 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7338 find the underlying pseudo. */
7339 if (GET_CODE (x) == SUBREG)
7340 {
7341 x = SUBREG_REG (x);
7342 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7343 return 0;
7344 }
7345
7346 /* Now look at our tree code and possibly recurse. */
7347 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7348 {
7349 case tcc_declaration:
7350 exp_rtl = DECL_RTL_IF_SET (exp);
7351 break;
7352
7353 case tcc_constant:
7354 return 1;
7355
7356 case tcc_exceptional:
7357 if (TREE_CODE (exp) == TREE_LIST)
7358 {
7359 while (1)
7360 {
7361 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7362 return 0;
7363 exp = TREE_CHAIN (exp);
7364 if (!exp)
7365 return 1;
7366 if (TREE_CODE (exp) != TREE_LIST)
7367 return safe_from_p (x, exp, 0);
7368 }
7369 }
7370 else if (TREE_CODE (exp) == CONSTRUCTOR)
7371 {
7372 constructor_elt *ce;
7373 unsigned HOST_WIDE_INT idx;
7374
7375 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7376 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7377 || !safe_from_p (x, ce->value, 0))
7378 return 0;
7379 return 1;
7380 }
7381 else if (TREE_CODE (exp) == ERROR_MARK)
7382 return 1; /* An already-visited SAVE_EXPR? */
7383 else
7384 return 0;
7385
7386 case tcc_statement:
7387 /* The only case we look at here is the DECL_INITIAL inside a
7388 DECL_EXPR. */
7389 return (TREE_CODE (exp) != DECL_EXPR
7390 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7391 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7392 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7393
7394 case tcc_binary:
7395 case tcc_comparison:
7396 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7397 return 0;
7398 /* Fall through. */
7399
7400 case tcc_unary:
7401 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7402
7403 case tcc_expression:
7404 case tcc_reference:
7405 case tcc_vl_exp:
7406 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7407 the expression. If it is set, we conflict iff we are that rtx or
7408 both are in memory. Otherwise, we check all operands of the
7409 expression recursively. */
7410
7411 switch (TREE_CODE (exp))
7412 {
7413 case ADDR_EXPR:
7414 /* If the operand is static or we are static, we can't conflict.
7415 Likewise if we don't conflict with the operand at all. */
7416 if (staticp (TREE_OPERAND (exp, 0))
7417 || TREE_STATIC (exp)
7418 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7419 return 1;
7420
7421 	  /* Otherwise, the only way this can conflict is if we are taking
7422 	     the address of a DECL whose address is part of X, which is
7423 	     very rare. */
7424 exp = TREE_OPERAND (exp, 0);
7425 if (DECL_P (exp))
7426 {
7427 if (!DECL_RTL_SET_P (exp)
7428 || !MEM_P (DECL_RTL (exp)))
7429 return 0;
7430 else
7431 exp_rtl = XEXP (DECL_RTL (exp), 0);
7432 }
7433 break;
7434
7435 case MEM_REF:
7436 if (MEM_P (x)
7437 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7438 get_alias_set (exp)))
7439 return 0;
7440 break;
7441
7442 case CALL_EXPR:
7443 /* Assume that the call will clobber all hard registers and
7444 all of memory. */
7445 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7446 || MEM_P (x))
7447 return 0;
7448 break;
7449
7450 case WITH_CLEANUP_EXPR:
7451 case CLEANUP_POINT_EXPR:
7452 /* Lowered by gimplify.c. */
7453 gcc_unreachable ();
7454
7455 case SAVE_EXPR:
7456 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7457
7458 default:
7459 break;
7460 }
7461
7462 /* If we have an rtx, we do not need to scan our operands. */
7463 if (exp_rtl)
7464 break;
7465
7466 nops = TREE_OPERAND_LENGTH (exp);
7467 for (i = 0; i < nops; i++)
7468 if (TREE_OPERAND (exp, i) != 0
7469 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7470 return 0;
7471
7472 break;
7473
7474 case tcc_type:
7475 /* Should never get a type here. */
7476 gcc_unreachable ();
7477 }
7478
7479 /* If we have an rtl, find any enclosed object. Then see if we conflict
7480 with it. */
7481 if (exp_rtl)
7482 {
7483 if (GET_CODE (exp_rtl) == SUBREG)
7484 {
7485 exp_rtl = SUBREG_REG (exp_rtl);
7486 if (REG_P (exp_rtl)
7487 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7488 return 0;
7489 }
7490
7491 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7492 are memory and they conflict. */
7493 return ! (rtx_equal_p (x, exp_rtl)
7494 || (MEM_P (x) && MEM_P (exp_rtl)
7495 && true_dependence (exp_rtl, VOIDmode, x)));
7496 }
7497
7498 /* If we reach here, it is safe. */
7499 return 1;
7500 }
7501
7502 \f
7503 /* Return the highest power of two that EXP is known to be a multiple of.
7504 This is used in updating alignment of MEMs in array references. */
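/* E.g. if EXP is the integer constant 24 (binary 11000), tree_ctz reports
   3 known trailing zero bits and 8 is returned; the result is capped at
   BIGGEST_ALIGNMENT below.  */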
7505
7506 unsigned HOST_WIDE_INT
7507 highest_pow2_factor (const_tree exp)
7508 {
7509 unsigned HOST_WIDE_INT ret;
7510 int trailing_zeros = tree_ctz (exp);
7511 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7512 return BIGGEST_ALIGNMENT;
7513 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7514 if (ret > BIGGEST_ALIGNMENT)
7515 return BIGGEST_ALIGNMENT;
7516 return ret;
7517 }
7518
7519 /* Similar, except that the alignment requirements of TARGET are
7520 taken into account. Assume it is at least as aligned as its
7521 type, unless it is a COMPONENT_REF in which case the layout of
7522 the structure gives the alignment. */
7523
7524 static unsigned HOST_WIDE_INT
7525 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7526 {
7527 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7528 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7529
7530 return MAX (factor, talign);
7531 }
7532 \f
7533 #ifdef HAVE_conditional_move
7534 /* Convert the tree comparison code TCODE to the rtl one where the
7535 signedness is UNSIGNEDP. */
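/* For instance, LT_EXPR maps to LTU when UNSIGNEDP is nonzero and to LT
   otherwise, while the floating-point-only codes such as UNLT_EXPR map
   unconditionally to their rtl counterparts.  */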
7536
7537 static enum rtx_code
7538 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7539 {
7540 enum rtx_code code;
7541 switch (tcode)
7542 {
7543 case EQ_EXPR:
7544 code = EQ;
7545 break;
7546 case NE_EXPR:
7547 code = NE;
7548 break;
7549 case LT_EXPR:
7550 code = unsignedp ? LTU : LT;
7551 break;
7552 case LE_EXPR:
7553 code = unsignedp ? LEU : LE;
7554 break;
7555 case GT_EXPR:
7556 code = unsignedp ? GTU : GT;
7557 break;
7558 case GE_EXPR:
7559 code = unsignedp ? GEU : GE;
7560 break;
7561 case UNORDERED_EXPR:
7562 code = UNORDERED;
7563 break;
7564 case ORDERED_EXPR:
7565 code = ORDERED;
7566 break;
7567 case UNLT_EXPR:
7568 code = UNLT;
7569 break;
7570 case UNLE_EXPR:
7571 code = UNLE;
7572 break;
7573 case UNGT_EXPR:
7574 code = UNGT;
7575 break;
7576 case UNGE_EXPR:
7577 code = UNGE;
7578 break;
7579 case UNEQ_EXPR:
7580 code = UNEQ;
7581 break;
7582 case LTGT_EXPR:
7583 code = LTGT;
7584 break;
7585
7586 default:
7587 gcc_unreachable ();
7588 }
7589 return code;
7590 }
7591 #endif
7592
7593 /* Subroutine of expand_expr. Expand the two operands of a binary
7594 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7595 The value may be stored in TARGET if TARGET is nonzero. The
7596 MODIFIER argument is as documented by expand_expr. */
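/* Note the shortcut visible below: if EXP0 and EXP1 are operand_equal_p
   (e.g. the two operands of "x * x"), EXP0 is expanded only once and *OP1
   becomes a copy of *OP0.  */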
7597
7598 void
7599 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7600 enum expand_modifier modifier)
7601 {
7602 if (! safe_from_p (target, exp1, 1))
7603 target = 0;
7604 if (operand_equal_p (exp0, exp1, 0))
7605 {
7606 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7607 *op1 = copy_rtx (*op0);
7608 }
7609 else
7610 {
7611 /* If we need to preserve evaluation order, copy exp0 into its own
7612 temporary variable so that it can't be clobbered by exp1. */
7613 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7614 exp0 = save_expr (exp0);
7615 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7616 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7617 }
7618 }
7619
7620 \f
7621 /* Return a MEM that contains constant EXP. DEFER is as for
7622 output_constant_def and MODIFIER is as for expand_expr. */
7623
7624 static rtx
7625 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7626 {
7627 rtx mem;
7628
7629 mem = output_constant_def (exp, defer);
7630 if (modifier != EXPAND_INITIALIZER)
7631 mem = use_anchored_address (mem);
7632 return mem;
7633 }
7634
7635 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7636 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7637
7638 static rtx
7639 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7640 enum expand_modifier modifier, addr_space_t as)
7641 {
7642 rtx result, subtarget;
7643 tree inner, offset;
7644 HOST_WIDE_INT bitsize, bitpos;
7645 int volatilep, unsignedp;
7646 machine_mode mode1;
7647
7648 /* If we are taking the address of a constant and are at the top level,
7649 we have to use output_constant_def since we can't call force_const_mem
7650 at top level. */
7651 /* ??? This should be considered a front-end bug. We should not be
7652 generating ADDR_EXPR of something that isn't an LVALUE. The only
7653 exception here is STRING_CST. */
7654 if (CONSTANT_CLASS_P (exp))
7655 {
7656 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7657 if (modifier < EXPAND_SUM)
7658 result = force_operand (result, target);
7659 return result;
7660 }
7661
7662 /* Everything must be something allowed by is_gimple_addressable. */
7663 switch (TREE_CODE (exp))
7664 {
7665 case INDIRECT_REF:
7666 /* This case will happen via recursion for &a->b. */
7667 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7668
7669 case MEM_REF:
7670 {
7671 tree tem = TREE_OPERAND (exp, 0);
7672 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7673 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7674 return expand_expr (tem, target, tmode, modifier);
7675 }
7676
7677 case CONST_DECL:
7678 /* Expand the initializer like constants above. */
7679 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7680 0, modifier), 0);
7681 if (modifier < EXPAND_SUM)
7682 result = force_operand (result, target);
7683 return result;
7684
7685 case REALPART_EXPR:
7686 /* The real part of the complex number is always first, therefore
7687 the address is the same as the address of the parent object. */
7688 offset = 0;
7689 bitpos = 0;
7690 inner = TREE_OPERAND (exp, 0);
7691 break;
7692
7693 case IMAGPART_EXPR:
7694 /* The imaginary part of the complex number is always second.
7695 The expression is therefore always offset by the size of the
7696 scalar type. */
7697 offset = 0;
7698 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7699 inner = TREE_OPERAND (exp, 0);
7700 break;
7701
7702 case COMPOUND_LITERAL_EXPR:
7703 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7704 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7705 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7706 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7707 the initializers aren't gimplified. */
7708 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7709 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7710 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7711 target, tmode, modifier, as);
7712 /* FALLTHRU */
7713 default:
7714 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7715 expand_expr, as that can have various side effects; LABEL_DECLs for
7716 example, may not have their DECL_RTL set yet. Expand the rtl of
7717 CONSTRUCTORs too, which should yield a memory reference for the
7718 constructor's contents. Assume language specific tree nodes can
7719 be expanded in some interesting way. */
7720 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7721 if (DECL_P (exp)
7722 || TREE_CODE (exp) == CONSTRUCTOR
7723 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7724 {
7725 result = expand_expr (exp, target, tmode,
7726 modifier == EXPAND_INITIALIZER
7727 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7728
7729 /* If the DECL isn't in memory, then the DECL wasn't properly
7730 marked TREE_ADDRESSABLE, which will be either a front-end
7731 or a tree optimizer bug. */
7732
7733 if (TREE_ADDRESSABLE (exp)
7734 && ! MEM_P (result)
7735 && ! targetm.calls.allocate_stack_slots_for_args ())
7736 {
7737 error ("local frame unavailable (naked function?)");
7738 return result;
7739 }
7740 else
7741 gcc_assert (MEM_P (result));
7742 result = XEXP (result, 0);
7743
7744 /* ??? Is this needed anymore? */
7745 if (DECL_P (exp))
7746 TREE_USED (exp) = 1;
7747
7748 if (modifier != EXPAND_INITIALIZER
7749 && modifier != EXPAND_CONST_ADDRESS
7750 && modifier != EXPAND_SUM)
7751 result = force_operand (result, target);
7752 return result;
7753 }
7754
7755 /* Pass FALSE as the last argument to get_inner_reference although
7756 we are expanding to RTL. The rationale is that we know how to
7757 handle "aligning nodes" here: we can just bypass them because
7758 they won't change the final object whose address will be returned
7759 (they actually exist only for that purpose). */
7760 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7761 &mode1, &unsignedp, &volatilep, false);
7762 break;
7763 }
7764
7765 /* We must have made progress. */
7766 gcc_assert (inner != exp);
7767
7768 subtarget = offset || bitpos ? NULL_RTX : target;
7769 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7770 inner alignment, force the inner to be sufficiently aligned. */
7771 if (CONSTANT_CLASS_P (inner)
7772 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7773 {
7774 inner = copy_node (inner);
7775 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7776 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7777 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7778 }
7779 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7780
7781 if (offset)
7782 {
7783 rtx tmp;
7784
7785 if (modifier != EXPAND_NORMAL)
7786 result = force_operand (result, NULL);
7787 tmp = expand_expr (offset, NULL_RTX, tmode,
7788 modifier == EXPAND_INITIALIZER
7789 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7790
7791 /* expand_expr is allowed to return an object in a mode other
7792 than TMODE. If it did, we need to convert. */
7793 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7794 tmp = convert_modes (tmode, GET_MODE (tmp),
7795 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7796 result = convert_memory_address_addr_space (tmode, result, as);
7797 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7798
7799 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7800 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7801 else
7802 {
7803 subtarget = bitpos ? NULL_RTX : target;
7804 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7805 1, OPTAB_LIB_WIDEN);
7806 }
7807 }
7808
7809 if (bitpos)
7810 {
7811 /* Someone beforehand should have rejected taking the address
7812 of such an object. */
7813 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7814
7815 result = convert_memory_address_addr_space (tmode, result, as);
7816 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7817 if (modifier < EXPAND_SUM)
7818 result = force_operand (result, target);
7819 }
7820
7821 return result;
7822 }
7823
7824 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7825 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7826
7827 static rtx
7828 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7829 enum expand_modifier modifier)
7830 {
7831 addr_space_t as = ADDR_SPACE_GENERIC;
7832 machine_mode address_mode = Pmode;
7833 machine_mode pointer_mode = ptr_mode;
7834 machine_mode rmode;
7835 rtx result;
7836
7837 /* Target mode of VOIDmode says "whatever's natural". */
7838 if (tmode == VOIDmode)
7839 tmode = TYPE_MODE (TREE_TYPE (exp));
7840
7841 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7842 {
7843 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7844 address_mode = targetm.addr_space.address_mode (as);
7845 pointer_mode = targetm.addr_space.pointer_mode (as);
7846 }
7847
7848 /* We can get called with some Weird Things if the user does silliness
7849 like "(short) &a". In that case, convert_memory_address won't do
7850 the right thing, so ignore the given target mode. */
7851 if (tmode != address_mode && tmode != pointer_mode)
7852 tmode = address_mode;
7853
7854 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7855 tmode, modifier, as);
7856
7857   /* Despite expand_expr's claims concerning ignoring TMODE when not
7858 strictly convenient, stuff breaks if we don't honor it. Note
7859 that combined with the above, we only do this for pointer modes. */
7860 rmode = GET_MODE (result);
7861 if (rmode == VOIDmode)
7862 rmode = tmode;
7863 if (rmode != tmode)
7864 result = convert_memory_address_addr_space (tmode, result, as);
7865
7866 return result;
7867 }
7868
7869 /* Generate code for computing CONSTRUCTOR EXP.
7870 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7871 is TRUE, instead of creating a temporary variable in memory
7872 NULL is returned and the caller needs to handle it differently. */
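/* As an illustration of the fast path below: a constructor whose elements
   are all known to be zero, expanded to a BLKmode target, is handled with
   a single clear_storage call instead of building a temporary in memory.  */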
7873
7874 static rtx
7875 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7876 bool avoid_temp_mem)
7877 {
7878 tree type = TREE_TYPE (exp);
7879 machine_mode mode = TYPE_MODE (type);
7880
7881 /* Try to avoid creating a temporary at all. This is possible
7882 if all of the initializer is zero.
7883 FIXME: try to handle all [0..255] initializers we can handle
7884 with memset. */
7885 if (TREE_STATIC (exp)
7886 && !TREE_ADDRESSABLE (exp)
7887 && target != 0 && mode == BLKmode
7888 && all_zeros_p (exp))
7889 {
7890 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7891 return target;
7892 }
7893
7894 /* All elts simple constants => refer to a constant in memory. But
7895 if this is a non-BLKmode mode, let it store a field at a time
7896 since that should make a CONST_INT, CONST_WIDE_INT or
7897 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7898 use, it is best to store directly into the target unless the type
7899 is large enough that memcpy will be used. If we are making an
7900 initializer and all operands are constant, put it in memory as
7901 well.
7902
7903 FIXME: Avoid trying to fill vector constructors piece-meal.
7904 Output them with output_constant_def below unless we're sure
7905 they're zeros. This should go away when vector initializers
7906 are treated like VECTOR_CST instead of arrays. */
7907 if ((TREE_STATIC (exp)
7908 && ((mode == BLKmode
7909 && ! (target != 0 && safe_from_p (target, exp, 1)))
7910 || TREE_ADDRESSABLE (exp)
7911 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7912 && (! can_move_by_pieces
7913 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7914 TYPE_ALIGN (type)))
7915 && ! mostly_zeros_p (exp))))
7916 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7917 && TREE_CONSTANT (exp)))
7918 {
7919 rtx constructor;
7920
7921 if (avoid_temp_mem)
7922 return NULL_RTX;
7923
7924 constructor = expand_expr_constant (exp, 1, modifier);
7925
7926 if (modifier != EXPAND_CONST_ADDRESS
7927 && modifier != EXPAND_INITIALIZER
7928 && modifier != EXPAND_SUM)
7929 constructor = validize_mem (constructor);
7930
7931 return constructor;
7932 }
7933
7934 /* Handle calls that pass values in multiple non-contiguous
7935 locations. The Irix 6 ABI has examples of this. */
7936 if (target == 0 || ! safe_from_p (target, exp, 1)
7937 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7938 {
7939 if (avoid_temp_mem)
7940 return NULL_RTX;
7941
7942 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7943 }
7944
7945 store_constructor (exp, target, 0, int_expr_size (exp));
7946 return target;
7947 }
7948
7949
7950 /* expand_expr: generate code for computing expression EXP.
7951 An rtx for the computed value is returned. The value is never null.
7952 In the case of a void EXP, const0_rtx is returned.
7953
7954 The value may be stored in TARGET if TARGET is nonzero.
7955 TARGET is just a suggestion; callers must assume that
7956 the rtx returned may not be the same as TARGET.
7957
7958 If TARGET is CONST0_RTX, it means that the value will be ignored.
7959
7960 If TMODE is not VOIDmode, it suggests generating the
7961 result in mode TMODE. But this is done only when convenient.
7962    Otherwise, TMODE is ignored and the value is generated in its natural mode.
7963 TMODE is just a suggestion; callers must assume that
7964 the rtx returned may not have mode TMODE.
7965
7966 Note that TARGET may have neither TMODE nor MODE. In that case, it
7967 probably will not be used.
7968
7969 If MODIFIER is EXPAND_SUM then when EXP is an addition
7970 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7971 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7972 products as above, or REG or MEM, or constant.
7973 Ordinarily in such cases we would output mul or add instructions
7974 and then return a pseudo reg containing the sum.
7975
7976 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7977 it also marks a label as absolutely required (it can't be dead).
7978 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7979 This is used for outputting expressions used in initializers.
7980
7981 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7982 with a constant address even if that address is not normally legitimate.
7983 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7984
7985 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7986 a call parameter. Such targets require special care as we haven't yet
7987 marked TARGET so that it's safe from being trashed by libcalls. We
7988 don't want to use TARGET for anything but the final result;
7989    intermediate values must go elsewhere. Additionally, calls to
7990 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7991
7992 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7993 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7994 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7995 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7996 recursively.
7997
7998 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7999 In this case, we don't adjust a returned MEM rtx that wouldn't be
8000 sufficiently aligned for its mode; instead, it's up to the caller
8001 to deal with it afterwards. This is used to make sure that unaligned
8002 base objects for which out-of-bounds accesses are supported, for
8003 example record types with trailing arrays, aren't realigned behind
8004 the back of the caller.
8005 The normal operating mode is to pass FALSE for this parameter. */
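/* A concrete illustration of EXPAND_SUM (not from a particular testcase):
   expanding "p + i * 4" with that modifier may legitimately return
   (plus (reg) (mult (reg) (const_int 4))) rather than a pseudo holding the
   final sum, leaving the caller free to fold the whole expression into an
   addressing mode.  */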
8006
8007 rtx
8008 expand_expr_real (tree exp, rtx target, machine_mode tmode,
8009 enum expand_modifier modifier, rtx *alt_rtl,
8010 bool inner_reference_p)
8011 {
8012 rtx ret;
8013
8014 /* Handle ERROR_MARK before anybody tries to access its type. */
8015 if (TREE_CODE (exp) == ERROR_MARK
8016 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
8017 {
8018 ret = CONST0_RTX (tmode);
8019 return ret ? ret : const0_rtx;
8020 }
8021
8022 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
8023 inner_reference_p);
8024 return ret;
8025 }
8026
8027 /* Try to expand the conditional expression which is represented by
8028    TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
8029    return the rtl reg which represents the result. Otherwise return
8030    NULL_RTX. */
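/* Illustrative example (hypothetical source, assuming the target provides
   a conditional-move pattern for the mode involved): for "x < y ? a : b"
   on integral operands, the comparison is translated with
   convert_tree_comp_to_rtx and emit_conditional_move is asked for a single
   conditional-move insn; if that fails, NULL_RTX is returned and the
   caller expands the COND_EXPR with branches instead.  */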
8031
8032 static rtx
8033 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8034 tree treeop1 ATTRIBUTE_UNUSED,
8035 tree treeop2 ATTRIBUTE_UNUSED)
8036 {
8037 #ifdef HAVE_conditional_move
8038 rtx insn;
8039 rtx op00, op01, op1, op2;
8040 enum rtx_code comparison_code;
8041 machine_mode comparison_mode;
8042 gimple srcstmt;
8043 rtx temp;
8044 tree type = TREE_TYPE (treeop1);
8045 int unsignedp = TYPE_UNSIGNED (type);
8046 machine_mode mode = TYPE_MODE (type);
8047 machine_mode orig_mode = mode;
8048
8049 /* If we cannot do a conditional move on the mode, try doing it
8050 with the promoted mode. */
8051 if (!can_conditionally_move_p (mode))
8052 {
8053 mode = promote_mode (type, mode, &unsignedp);
8054 if (!can_conditionally_move_p (mode))
8055 return NULL_RTX;
8056 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
8057 }
8058 else
8059 temp = assign_temp (type, 0, 1);
8060
8061 start_sequence ();
8062 expand_operands (treeop1, treeop2,
8063 temp, &op1, &op2, EXPAND_NORMAL);
8064
8065 if (TREE_CODE (treeop0) == SSA_NAME
8066 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8067 {
8068 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8069 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8070 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8071 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8072 comparison_mode = TYPE_MODE (type);
8073 unsignedp = TYPE_UNSIGNED (type);
8074 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8075 }
8076 else if (COMPARISON_CLASS_P (treeop0))
8077 {
8078 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8079 enum tree_code cmpcode = TREE_CODE (treeop0);
8080 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8081 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8082 unsignedp = TYPE_UNSIGNED (type);
8083 comparison_mode = TYPE_MODE (type);
8084 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8085 }
8086 else
8087 {
8088 op00 = expand_normal (treeop0);
8089 op01 = const0_rtx;
8090 comparison_code = NE;
8091 comparison_mode = GET_MODE (op00);
8092 if (comparison_mode == VOIDmode)
8093 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8094 }
8095
8096 if (GET_MODE (op1) != mode)
8097 op1 = gen_lowpart (mode, op1);
8098
8099 if (GET_MODE (op2) != mode)
8100 op2 = gen_lowpart (mode, op2);
8101
8102 /* Try to emit the conditional move. */
8103 insn = emit_conditional_move (temp, comparison_code,
8104 op00, op01, comparison_mode,
8105 op1, op2, mode,
8106 unsignedp);
8107
8108 /* If we could do the conditional move, emit the sequence,
8109 and return. */
8110 if (insn)
8111 {
8112 rtx_insn *seq = get_insns ();
8113 end_sequence ();
8114 emit_insn (seq);
8115 return convert_modes (orig_mode, mode, temp, 0);
8116 }
8117
8118 /* Otherwise discard the sequence and fall back to code with
8119 branches. */
8120 end_sequence ();
8121 #endif
8122 return NULL_RTX;
8123 }
8124
8125 rtx
8126 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8127 enum expand_modifier modifier)
8128 {
8129 rtx op0, op1, op2, temp;
8130 tree type;
8131 int unsignedp;
8132 machine_mode mode;
8133 enum tree_code code = ops->code;
8134 optab this_optab;
8135 rtx subtarget, original_target;
8136 int ignore;
8137 bool reduce_bit_field;
8138 location_t loc = ops->location;
8139 tree treeop0, treeop1, treeop2;
8140 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8141 ? reduce_to_bit_field_precision ((expr), \
8142 target, \
8143 type) \
8144 : (expr))
8145
8146 type = ops->type;
8147 mode = TYPE_MODE (type);
8148 unsignedp = TYPE_UNSIGNED (type);
8149
8150 treeop0 = ops->op0;
8151 treeop1 = ops->op1;
8152 treeop2 = ops->op2;
8153
8154 /* We should be called only on simple (binary or unary) expressions,
8155 exactly those that are valid in gimple expressions that aren't
8156 GIMPLE_SINGLE_RHS (or invalid). */
8157 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8158 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8159 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8160
8161 ignore = (target == const0_rtx
8162 || ((CONVERT_EXPR_CODE_P (code)
8163 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8164 && TREE_CODE (type) == VOID_TYPE));
8165
8166 /* We should be called only if we need the result. */
8167 gcc_assert (!ignore);
8168
8169 /* An operation in what may be a bit-field type needs the
8170 result to be reduced to the precision of the bit-field type,
8171 which is narrower than that of the type's mode. */
8172 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8173 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8174
8175 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8176 target = 0;
8177
8178 /* Use subtarget as the target for operand 0 of a binary operation. */
8179 subtarget = get_subtarget (target);
8180 original_target = target;
8181
8182 switch (code)
8183 {
8184 case NON_LVALUE_EXPR:
8185 case PAREN_EXPR:
8186 CASE_CONVERT:
8187 if (treeop0 == error_mark_node)
8188 return const0_rtx;
8189
8190 if (TREE_CODE (type) == UNION_TYPE)
8191 {
8192 tree valtype = TREE_TYPE (treeop0);
8193
8194 /* If both input and output are BLKmode, this conversion isn't doing
8195 	     anything except possibly changing memory attributes. */
8196 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8197 {
8198 rtx result = expand_expr (treeop0, target, tmode,
8199 modifier);
8200
8201 result = copy_rtx (result);
8202 set_mem_attributes (result, type, 0);
8203 return result;
8204 }
8205
8206 if (target == 0)
8207 {
8208 if (TYPE_MODE (type) != BLKmode)
8209 target = gen_reg_rtx (TYPE_MODE (type));
8210 else
8211 target = assign_temp (type, 1, 1);
8212 }
8213
8214 if (MEM_P (target))
8215 /* Store data into beginning of memory target. */
8216 store_expr (treeop0,
8217 adjust_address (target, TYPE_MODE (valtype), 0),
8218 modifier == EXPAND_STACK_PARM,
8219 false);
8220
8221 else
8222 {
8223 gcc_assert (REG_P (target));
8224
8225 /* Store this field into a union of the proper type. */
8226 store_field (target,
8227 MIN ((int_size_in_bytes (TREE_TYPE
8228 (treeop0))
8229 * BITS_PER_UNIT),
8230 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8231 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8232 }
8233
8234 /* Return the entire union. */
8235 return target;
8236 }
8237
8238 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8239 {
8240 op0 = expand_expr (treeop0, target, VOIDmode,
8241 modifier);
8242
8243 /* If the signedness of the conversion differs and OP0 is
8244 a promoted SUBREG, clear that indication since we now
8245 have to do the proper extension. */
8246 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8247 && GET_CODE (op0) == SUBREG)
8248 SUBREG_PROMOTED_VAR_P (op0) = 0;
8249
8250 return REDUCE_BIT_FIELD (op0);
8251 }
8252
8253 op0 = expand_expr (treeop0, NULL_RTX, mode,
8254 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8255 if (GET_MODE (op0) == mode)
8256 ;
8257
8258 /* If OP0 is a constant, just convert it into the proper mode. */
8259 else if (CONSTANT_P (op0))
8260 {
8261 tree inner_type = TREE_TYPE (treeop0);
8262 machine_mode inner_mode = GET_MODE (op0);
8263
8264 if (inner_mode == VOIDmode)
8265 inner_mode = TYPE_MODE (inner_type);
8266
8267 if (modifier == EXPAND_INITIALIZER)
8268 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8269 subreg_lowpart_offset (mode,
8270 inner_mode));
8271 else
8272 	    op0 = convert_modes (mode, inner_mode, op0,
8273 TYPE_UNSIGNED (inner_type));
8274 }
8275
8276 else if (modifier == EXPAND_INITIALIZER)
8277 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8278
8279 else if (target == 0)
8280 op0 = convert_to_mode (mode, op0,
8281 TYPE_UNSIGNED (TREE_TYPE
8282 (treeop0)));
8283 else
8284 {
8285 convert_move (target, op0,
8286 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8287 op0 = target;
8288 }
8289
8290 return REDUCE_BIT_FIELD (op0);
8291
8292 case ADDR_SPACE_CONVERT_EXPR:
8293 {
8294 tree treeop0_type = TREE_TYPE (treeop0);
8295 addr_space_t as_to;
8296 addr_space_t as_from;
8297
8298 gcc_assert (POINTER_TYPE_P (type));
8299 gcc_assert (POINTER_TYPE_P (treeop0_type));
8300
8301 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8302 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8303
8304 /* Conversions between pointers to the same address space should
8305 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8306 gcc_assert (as_to != as_from);
8307
8308 /* Ask target code to handle conversion between pointers
8309 to overlapping address spaces. */
8310 if (targetm.addr_space.subset_p (as_to, as_from)
8311 || targetm.addr_space.subset_p (as_from, as_to))
8312 {
8313 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8314 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8315 gcc_assert (op0);
8316 return op0;
8317 }
8318
8319 /* For disjoint address spaces, converting anything but
8320 a null pointer invokes undefined behaviour. We simply
8321 always return a null pointer here. */
8322 return CONST0_RTX (mode);
8323 }
8324
8325 case POINTER_PLUS_EXPR:
8326       /* Even though the sizetype mode and the pointer's mode can be different,
8327 expand is able to handle this correctly and get the correct result out
8328 of the PLUS_EXPR code. */
8329 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8330 if sizetype precision is smaller than pointer precision. */
8331 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8332 treeop1 = fold_convert_loc (loc, type,
8333 fold_convert_loc (loc, ssizetype,
8334 treeop1));
8335 /* If sizetype precision is larger than pointer precision, truncate the
8336 offset to have matching modes. */
8337 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8338 treeop1 = fold_convert_loc (loc, type, treeop1);
8339
8340 case PLUS_EXPR:
8341 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8342 something else, make sure we add the register to the constant and
8343 then to the other thing. This case can occur during strength
8344 reduction and doing it this way will produce better code if the
8345 frame pointer or argument pointer is eliminated.
8346
8347 fold-const.c will ensure that the constant is always in the inner
8348 PLUS_EXPR, so the only case we need to do anything about is if
8349 sp, ap, or fp is our second argument, in which case we must swap
8350 the innermost first argument and our second argument. */
8351
8352 if (TREE_CODE (treeop0) == PLUS_EXPR
8353 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8354 && TREE_CODE (treeop1) == VAR_DECL
8355 && (DECL_RTL (treeop1) == frame_pointer_rtx
8356 || DECL_RTL (treeop1) == stack_pointer_rtx
8357 || DECL_RTL (treeop1) == arg_pointer_rtx))
8358 {
8359 gcc_unreachable ();
8360 }
8361
8362 /* If the result is to be ptr_mode and we are adding an integer to
8363 something, we might be forming a constant. So try to use
8364 plus_constant. If it produces a sum and we can't accept it,
8365 use force_operand. This allows P = &ARR[const] to generate
8366 efficient code on machines where a SYMBOL_REF is not a valid
8367 address.
8368
8369 If this is an EXPAND_SUM call, always return the sum. */
8370 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8371 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8372 {
8373 if (modifier == EXPAND_STACK_PARM)
8374 target = 0;
8375 if (TREE_CODE (treeop0) == INTEGER_CST
8376 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8377 && TREE_CONSTANT (treeop1))
8378 {
8379 rtx constant_part;
8380 HOST_WIDE_INT wc;
8381 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8382
8383 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8384 EXPAND_SUM);
8385 /* Use wi::shwi to ensure that the constant is
8386 truncated according to the mode of OP1, then sign extended
8387 to a HOST_WIDE_INT. Using the constant directly can result
8388 in non-canonical RTL in a 64x32 cross compile. */
8389 wc = TREE_INT_CST_LOW (treeop0);
8390 constant_part =
8391 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8392 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8393 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8394 op1 = force_operand (op1, target);
8395 return REDUCE_BIT_FIELD (op1);
8396 }
8397
8398 else if (TREE_CODE (treeop1) == INTEGER_CST
8399 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8400 && TREE_CONSTANT (treeop0))
8401 {
8402 rtx constant_part;
8403 HOST_WIDE_INT wc;
8404 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8405
8406 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8407 (modifier == EXPAND_INITIALIZER
8408 ? EXPAND_INITIALIZER : EXPAND_SUM));
8409 if (! CONSTANT_P (op0))
8410 {
8411 op1 = expand_expr (treeop1, NULL_RTX,
8412 VOIDmode, modifier);
8413 /* Return a PLUS if modifier says it's OK. */
8414 if (modifier == EXPAND_SUM
8415 || modifier == EXPAND_INITIALIZER)
8416 return simplify_gen_binary (PLUS, mode, op0, op1);
8417 goto binop2;
8418 }
8419 /* Use wi::shwi to ensure that the constant is
8420 truncated according to the mode of OP1, then sign extended
8421 to a HOST_WIDE_INT. Using the constant directly can result
8422 in non-canonical RTL in a 64x32 cross compile. */
8423 wc = TREE_INT_CST_LOW (treeop1);
8424 constant_part
8425 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8426 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8427 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8428 op0 = force_operand (op0, target);
8429 return REDUCE_BIT_FIELD (op0);
8430 }
8431 }
8432
8433 /* Use TER to expand pointer addition of a negated value
8434 as pointer subtraction. */
8435 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8436 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8437 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8438 && TREE_CODE (treeop1) == SSA_NAME
8439 && TYPE_MODE (TREE_TYPE (treeop0))
8440 == TYPE_MODE (TREE_TYPE (treeop1)))
8441 {
8442 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8443 if (def)
8444 {
8445 treeop1 = gimple_assign_rhs1 (def);
8446 code = MINUS_EXPR;
8447 goto do_minus;
8448 }
8449 }
8450
8451 /* No sense saving up arithmetic to be done
8452 if it's all in the wrong mode to form part of an address.
8453 And force_operand won't know whether to sign-extend or
8454 zero-extend. */
8455 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8456 || mode != ptr_mode)
8457 {
8458 expand_operands (treeop0, treeop1,
8459 subtarget, &op0, &op1, EXPAND_NORMAL);
8460 if (op0 == const0_rtx)
8461 return op1;
8462 if (op1 == const0_rtx)
8463 return op0;
8464 goto binop2;
8465 }
8466
8467 expand_operands (treeop0, treeop1,
8468 subtarget, &op0, &op1, modifier);
8469 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8470
8471 case MINUS_EXPR:
8472 do_minus:
8473 /* For initializers, we are allowed to return a MINUS of two
8474 symbolic constants. Here we handle all cases when both operands
8475 are constant. */
8476 /* Handle difference of two symbolic constants,
8477 for the sake of an initializer. */
8478 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8479 && really_constant_p (treeop0)
8480 && really_constant_p (treeop1))
8481 {
8482 expand_operands (treeop0, treeop1,
8483 NULL_RTX, &op0, &op1, modifier);
8484
8485 /* If the last operand is a CONST_INT, use plus_constant of
8486 the negated constant. Else make the MINUS. */
8487 if (CONST_INT_P (op1))
8488 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8489 -INTVAL (op1)));
8490 else
8491 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8492 }
8493
8494 /* No sense saving up arithmetic to be done
8495 if it's all in the wrong mode to form part of an address.
8496 And force_operand won't know whether to sign-extend or
8497 zero-extend. */
8498 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8499 || mode != ptr_mode)
8500 goto binop;
8501
8502 expand_operands (treeop0, treeop1,
8503 subtarget, &op0, &op1, modifier);
8504
8505 /* Convert A - const to A + (-const). */
8506 if (CONST_INT_P (op1))
8507 {
8508 op1 = negate_rtx (mode, op1);
8509 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8510 }
8511
8512 goto binop2;
8513
8514 case WIDEN_MULT_PLUS_EXPR:
8515 case WIDEN_MULT_MINUS_EXPR:
8516 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8517 op2 = expand_normal (treeop2);
8518 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8519 target, unsignedp);
8520 return target;
8521
8522 case WIDEN_MULT_EXPR:
8523 /* If first operand is constant, swap them.
8524 Thus the following special case checks need only
8525 check the second operand. */
8526 if (TREE_CODE (treeop0) == INTEGER_CST)
8527 {
8528 tree t1 = treeop0;
8529 treeop0 = treeop1;
8530 treeop1 = t1;
8531 }
8532
8533 /* First, check if we have a multiplication of one signed and one
8534 unsigned operand. */
8535 if (TREE_CODE (treeop1) != INTEGER_CST
8536 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8537 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8538 {
8539 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8540 this_optab = usmul_widen_optab;
8541 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8542 != CODE_FOR_nothing)
8543 {
8544 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8545 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8546 EXPAND_NORMAL);
8547 else
8548 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8549 EXPAND_NORMAL);
8550 /* op0 and op1 might still be constant, despite the above
8551 != INTEGER_CST check. Handle it. */
8552 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8553 {
8554 op0 = convert_modes (innermode, mode, op0, true);
8555 op1 = convert_modes (innermode, mode, op1, false);
8556 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8557 target, unsignedp));
8558 }
8559 goto binop3;
8560 }
8561 }
8562 /* Check for a multiplication with matching signedness. */
8563 else if ((TREE_CODE (treeop1) == INTEGER_CST
8564 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8565 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8566 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8567 {
8568 tree op0type = TREE_TYPE (treeop0);
8569 machine_mode innermode = TYPE_MODE (op0type);
8570 bool zextend_p = TYPE_UNSIGNED (op0type);
8571 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8572 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8573
8574 if (TREE_CODE (treeop0) != INTEGER_CST)
8575 {
8576 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8577 != CODE_FOR_nothing)
8578 {
8579 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8580 EXPAND_NORMAL);
8581 /* op0 and op1 might still be constant, despite the above
8582 != INTEGER_CST check. Handle it. */
8583 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8584 {
8585 widen_mult_const:
8586 op0 = convert_modes (innermode, mode, op0, zextend_p);
8587 op1
8588 = convert_modes (innermode, mode, op1,
8589 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8590 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8591 target,
8592 unsignedp));
8593 }
8594 temp = expand_widening_mult (mode, op0, op1, target,
8595 unsignedp, this_optab);
8596 return REDUCE_BIT_FIELD (temp);
8597 }
8598 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8599 != CODE_FOR_nothing
8600 && innermode == word_mode)
8601 {
8602 rtx htem, hipart;
8603 op0 = expand_normal (treeop0);
8604 if (TREE_CODE (treeop1) == INTEGER_CST)
8605 op1 = convert_modes (innermode, mode,
8606 expand_normal (treeop1),
8607 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8608 else
8609 op1 = expand_normal (treeop1);
8610 /* op0 and op1 might still be constant, despite the above
8611 != INTEGER_CST check. Handle it. */
8612 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8613 goto widen_mult_const;
8614 temp = expand_binop (mode, other_optab, op0, op1, target,
8615 unsignedp, OPTAB_LIB_WIDEN);
8616 hipart = gen_highpart (innermode, temp);
8617 htem = expand_mult_highpart_adjust (innermode, hipart,
8618 op0, op1, hipart,
8619 zextend_p);
8620 if (htem != hipart)
8621 emit_move_insn (hipart, htem);
8622 return REDUCE_BIT_FIELD (temp);
8623 }
8624 }
8625 }
8626 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8627 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8628 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8629 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8630
8631 case FMA_EXPR:
8632 {
8633 optab opt = fma_optab;
8634 gimple def0, def2;
8635
8636 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8637 call. */
8638 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8639 {
8640 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8641 tree call_expr;
8642
8643 gcc_assert (fn != NULL_TREE);
8644 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8645 return expand_builtin (call_expr, target, subtarget, mode, false);
8646 }
8647
8648 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8649 /* The multiplication is commutative - look at its 2nd operand
8650 if the first isn't fed by a negate. */
8651 if (!def0)
8652 {
8653 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8654 /* Swap operands if the 2nd operand is fed by a negate. */
8655 if (def0)
8656 {
8657 tree tem = treeop0;
8658 treeop0 = treeop1;
8659 treeop1 = tem;
8660 }
8661 }
8662 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8663
8664 op0 = op2 = NULL;
8665
8666 if (def0 && def2
8667 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8668 {
8669 opt = fnms_optab;
8670 op0 = expand_normal (gimple_assign_rhs1 (def0));
8671 op2 = expand_normal (gimple_assign_rhs1 (def2));
8672 }
8673 else if (def0
8674 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8675 {
8676 opt = fnma_optab;
8677 op0 = expand_normal (gimple_assign_rhs1 (def0));
8678 }
8679 else if (def2
8680 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8681 {
8682 opt = fms_optab;
8683 op2 = expand_normal (gimple_assign_rhs1 (def2));
8684 }
8685
8686 if (op0 == NULL)
8687 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8688 if (op2 == NULL)
8689 op2 = expand_normal (treeop2);
8690 op1 = expand_normal (treeop1);
8691
8692 return expand_ternary_op (TYPE_MODE (type), opt,
8693 op0, op1, op2, target, 0);
8694 }
8695
8696 case MULT_EXPR:
8697 /* If this is a fixed-point operation, then we cannot use the code
8698 below because "expand_mult" doesn't support sat/no-sat fixed-point
8699 multiplications. */
8700 if (ALL_FIXED_POINT_MODE_P (mode))
8701 goto binop;
8702
8703 /* If first operand is constant, swap them.
8704 Thus the following special case checks need only
8705 check the second operand. */
8706 if (TREE_CODE (treeop0) == INTEGER_CST)
8707 {
8708 tree t1 = treeop0;
8709 treeop0 = treeop1;
8710 treeop1 = t1;
8711 }
8712
8713 /* Attempt to return something suitable for generating an
8714 indexed address, for machines that support that. */
8715
8716 if (modifier == EXPAND_SUM && mode == ptr_mode
8717 && tree_fits_shwi_p (treeop1))
8718 {
8719 tree exp1 = treeop1;
8720
8721 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8722 EXPAND_SUM);
8723
8724 if (!REG_P (op0))
8725 op0 = force_operand (op0, NULL_RTX);
8726 if (!REG_P (op0))
8727 op0 = copy_to_mode_reg (mode, op0);
8728
8729 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8730 gen_int_mode (tree_to_shwi (exp1),
8731 TYPE_MODE (TREE_TYPE (exp1)))));
8732 }
8733
8734 if (modifier == EXPAND_STACK_PARM)
8735 target = 0;
8736
8737 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8738 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8739
8740 case TRUNC_DIV_EXPR:
8741 case FLOOR_DIV_EXPR:
8742 case CEIL_DIV_EXPR:
8743 case ROUND_DIV_EXPR:
8744 case EXACT_DIV_EXPR:
8745 /* If this is a fixed-point operation, then we cannot use the code
8746 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8747 divisions. */
8748 if (ALL_FIXED_POINT_MODE_P (mode))
8749 goto binop;
8750
8751 if (modifier == EXPAND_STACK_PARM)
8752 target = 0;
8753 /* Possible optimization: compute the dividend with EXPAND_SUM
8754 then if the divisor is constant can optimize the case
8755 where some terms of the dividend have coeffs divisible by it. */
8756 expand_operands (treeop0, treeop1,
8757 subtarget, &op0, &op1, EXPAND_NORMAL);
8758 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8759
8760 case RDIV_EXPR:
8761 goto binop;
8762
8763 case MULT_HIGHPART_EXPR:
8764 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8765 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8766 gcc_assert (temp);
8767 return temp;
8768
8769 case TRUNC_MOD_EXPR:
8770 case FLOOR_MOD_EXPR:
8771 case CEIL_MOD_EXPR:
8772 case ROUND_MOD_EXPR:
8773 if (modifier == EXPAND_STACK_PARM)
8774 target = 0;
8775 expand_operands (treeop0, treeop1,
8776 subtarget, &op0, &op1, EXPAND_NORMAL);
8777 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8778
8779 case FIXED_CONVERT_EXPR:
8780 op0 = expand_normal (treeop0);
8781 if (target == 0 || modifier == EXPAND_STACK_PARM)
8782 target = gen_reg_rtx (mode);
8783
8784 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8785 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8786 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8787 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8788 else
8789 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8790 return target;
8791
8792 case FIX_TRUNC_EXPR:
8793 op0 = expand_normal (treeop0);
8794 if (target == 0 || modifier == EXPAND_STACK_PARM)
8795 target = gen_reg_rtx (mode);
8796 expand_fix (target, op0, unsignedp);
8797 return target;
8798
8799 case FLOAT_EXPR:
8800 op0 = expand_normal (treeop0);
8801 if (target == 0 || modifier == EXPAND_STACK_PARM)
8802 target = gen_reg_rtx (mode);
8803 /* expand_float can't figure out what to do if FROM has VOIDmode.
8804 So give it the correct mode. With -O, cse will optimize this. */
8805 if (GET_MODE (op0) == VOIDmode)
8806 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8807 op0);
8808 expand_float (target, op0,
8809 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8810 return target;
8811
8812 case NEGATE_EXPR:
8813 op0 = expand_expr (treeop0, subtarget,
8814 VOIDmode, EXPAND_NORMAL);
8815 if (modifier == EXPAND_STACK_PARM)
8816 target = 0;
8817 temp = expand_unop (mode,
8818 optab_for_tree_code (NEGATE_EXPR, type,
8819 optab_default),
8820 op0, target, 0);
8821 gcc_assert (temp);
8822 return REDUCE_BIT_FIELD (temp);
8823
8824 case ABS_EXPR:
8825 op0 = expand_expr (treeop0, subtarget,
8826 VOIDmode, EXPAND_NORMAL);
8827 if (modifier == EXPAND_STACK_PARM)
8828 target = 0;
8829
8830 /* ABS_EXPR is not valid for complex arguments. */
8831 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8832 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8833
8834 /* Unsigned abs is simply the operand. Testing here means we don't
8835 risk generating incorrect code below. */
8836 if (TYPE_UNSIGNED (type))
8837 return op0;
8838
8839 return expand_abs (mode, op0, target, unsignedp,
8840 safe_from_p (target, treeop0, 1));
8841
8842 case MAX_EXPR:
8843 case MIN_EXPR:
8844 target = original_target;
8845 if (target == 0
8846 || modifier == EXPAND_STACK_PARM
8847 || (MEM_P (target) && MEM_VOLATILE_P (target))
8848 || GET_MODE (target) != mode
8849 || (REG_P (target)
8850 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8851 target = gen_reg_rtx (mode);
8852 expand_operands (treeop0, treeop1,
8853 target, &op0, &op1, EXPAND_NORMAL);
8854
8855 /* First try to do it with a special MIN or MAX instruction.
8856 If that does not win, use a conditional jump to select the proper
8857 value. */
8858 this_optab = optab_for_tree_code (code, type, optab_default);
8859 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8860 OPTAB_WIDEN);
8861 if (temp != 0)
8862 return temp;
8863
8864 /* At this point, a MEM target is no longer useful; we will get better
8865 code without it. */
8866
8867 if (! REG_P (target))
8868 target = gen_reg_rtx (mode);
8869
8870 /* If op1 was placed in target, swap op0 and op1. */
8871 if (target != op0 && target == op1)
8872 std::swap (op0, op1);
8873
8874 /* We generate better code and avoid problems with op1 mentioning
8875 target by forcing op1 into a pseudo if it isn't a constant. */
8876 if (! CONSTANT_P (op1))
8877 op1 = force_reg (mode, op1);
8878
8879 {
8880 enum rtx_code comparison_code;
8881 rtx cmpop1 = op1;
8882
8883 if (code == MAX_EXPR)
8884 comparison_code = unsignedp ? GEU : GE;
8885 else
8886 comparison_code = unsignedp ? LEU : LE;
8887
8888 /* Canonicalize to comparisons against 0. */
8889 if (op1 == const1_rtx)
8890 {
8891 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8892 or (a != 0 ? a : 1) for unsigned.
8893 For MIN we are safe converting (a <= 1 ? a : 1)
8894 into (a <= 0 ? a : 1) */
8895 cmpop1 = const0_rtx;
8896 if (code == MAX_EXPR)
8897 comparison_code = unsignedp ? NE : GT;
8898 }
8899 if (op1 == constm1_rtx && !unsignedp)
8900 {
8901 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8902 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8903 cmpop1 = const0_rtx;
8904 if (code == MIN_EXPR)
8905 comparison_code = LT;
8906 }
8907 #ifdef HAVE_conditional_move
8908 /* Use a conditional move if possible. */
8909 if (can_conditionally_move_p (mode))
8910 {
8911 rtx insn;
8912
8913 start_sequence ();
8914
8915 /* Try to emit the conditional move. */
8916 insn = emit_conditional_move (target, comparison_code,
8917 op0, cmpop1, mode,
8918 op0, op1, mode,
8919 unsignedp);
8920
8921 /* If we could do the conditional move, emit the sequence,
8922 and return. */
8923 if (insn)
8924 {
8925 rtx_insn *seq = get_insns ();
8926 end_sequence ();
8927 emit_insn (seq);
8928 return target;
8929 }
8930
8931 /* Otherwise discard the sequence and fall back to code with
8932 branches. */
8933 end_sequence ();
8934 }
8935 #endif
8936 if (target != op0)
8937 emit_move_insn (target, op0);
8938
8939 temp = gen_label_rtx ();
8940 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8941 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8942 -1);
8943 }
8944 emit_move_insn (target, op1);
8945 emit_label (temp);
8946 return target;
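/* Editor's note: a minimal illustrative sketch (not part of expr.c) of the
   MIN/MAX canonicalizations against zero performed above, written as plain
   C equivalents for a signed/unsigned int.  */
#if 0
static int max_with_1 (int a)            { return a >= 1 ? a : 1; }    /* original form  */
static int max_with_1_canon (int a)      { return a > 0 ? a : 1; }     /* GT against 0   */
static unsigned umax_with_1 (unsigned a)       { return a >= 1 ? a : 1; }  /* original   */
static unsigned umax_with_1_canon (unsigned a) { return a != 0 ? a : 1; }  /* NE against 0 */
static int max_with_m1 (int a)           { return a >= -1 ? a : -1; }  /* original form  */
static int max_with_m1_canon (int a)     { return a >= 0 ? a : -1; }   /* GE against 0   */
static int min_with_m1 (int a)           { return a <= -1 ? a : -1; }  /* original form  */
static int min_with_m1_canon (int a)     { return a < 0 ? a : -1; }    /* LT against 0   */
#endif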
8947
8948 case BIT_NOT_EXPR:
8949 op0 = expand_expr (treeop0, subtarget,
8950 VOIDmode, EXPAND_NORMAL);
8951 if (modifier == EXPAND_STACK_PARM)
8952 target = 0;
8953 /* In case we have to reduce the result to bitfield precision
8954 for unsigned bitfield expand this as XOR with a proper constant
8955 instead. */
8956 if (reduce_bit_field && TYPE_UNSIGNED (type))
8957 {
8958 wide_int mask = wi::mask (TYPE_PRECISION (type),
8959 false, GET_MODE_PRECISION (mode));
8960
8961 temp = expand_binop (mode, xor_optab, op0,
8962 immed_wide_int_const (mask, mode),
8963 target, 1, OPTAB_LIB_WIDEN);
8964 }
8965 else
8966 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8967 gcc_assert (temp);
8968 return temp;
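/* Editor's note: an illustrative sketch (not part of expr.c) of the identity
   used above: for an unsigned bit-field of precision PREC whose operand is
   already truncated, one's complement reduced to PREC bits equals XOR with
   the all-ones mask of that width.  */
#if 0
static unsigned not_reduced (unsigned x, int prec)
{
  unsigned mask = (1u << prec) - 1;       /* assumes 0 < prec < 32 */
  return (~x) & mask;                     /* BIT_NOT then reduce   */
}
static unsigned not_as_xor (unsigned x, int prec)
{
  unsigned mask = (1u << prec) - 1;       /* same mask             */
  return x ^ mask;                        /* single XOR, as above  */
}
#endif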
8969
8970 /* ??? Can optimize bitwise operations with one arg constant.
8971 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8972 and (a bitwise1 b) bitwise2 b (etc)
8973 but that is probably not worthwhile. */
8974
8975 case BIT_AND_EXPR:
8976 case BIT_IOR_EXPR:
8977 case BIT_XOR_EXPR:
8978 goto binop;
8979
8980 case LROTATE_EXPR:
8981 case RROTATE_EXPR:
8982 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8983 || (GET_MODE_PRECISION (TYPE_MODE (type))
8984 == TYPE_PRECISION (type)));
8985 /* fall through */
8986
8987 case LSHIFT_EXPR:
8988 case RSHIFT_EXPR:
8989 /* If this is a fixed-point operation, then we cannot use the code
8990 below because "expand_shift" doesn't support sat/no-sat fixed-point
8991 shifts. */
8992 if (ALL_FIXED_POINT_MODE_P (mode))
8993 goto binop;
8994
8995 if (! safe_from_p (subtarget, treeop1, 1))
8996 subtarget = 0;
8997 if (modifier == EXPAND_STACK_PARM)
8998 target = 0;
8999 op0 = expand_expr (treeop0, subtarget,
9000 VOIDmode, EXPAND_NORMAL);
9001 temp = expand_variable_shift (code, mode, op0, treeop1, target,
9002 unsignedp);
9003 if (code == LSHIFT_EXPR)
9004 temp = REDUCE_BIT_FIELD (temp);
9005 return temp;
9006
9007 /* Could determine the answer when only additive constants differ. Also,
9008 the addition of one can be handled by changing the condition. */
9009 case LT_EXPR:
9010 case LE_EXPR:
9011 case GT_EXPR:
9012 case GE_EXPR:
9013 case EQ_EXPR:
9014 case NE_EXPR:
9015 case UNORDERED_EXPR:
9016 case ORDERED_EXPR:
9017 case UNLT_EXPR:
9018 case UNLE_EXPR:
9019 case UNGT_EXPR:
9020 case UNGE_EXPR:
9021 case UNEQ_EXPR:
9022 case LTGT_EXPR:
9023 temp = do_store_flag (ops,
9024 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9025 tmode != VOIDmode ? tmode : mode);
9026 if (temp)
9027 return temp;
9028
9029 /* Use a compare and a jump for BLKmode comparisons, or for function
9030 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
9031
9032 if ((target == 0
9033 || modifier == EXPAND_STACK_PARM
9034 || ! safe_from_p (target, treeop0, 1)
9035 || ! safe_from_p (target, treeop1, 1)
9036 /* Make sure we don't have a hard reg (such as function's return
9037 value) live across basic blocks, if not optimizing. */
9038 || (!optimize && REG_P (target)
9039 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9040 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9041
9042 emit_move_insn (target, const0_rtx);
9043
9044 op1 = gen_label_rtx ();
9045 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9046
9047 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9048 emit_move_insn (target, constm1_rtx);
9049 else
9050 emit_move_insn (target, const1_rtx);
9051
9052 emit_label (op1);
9053 return target;
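/* Editor's note: a rough sketch (not part of expr.c) of the branchy fallback
   emitted above when do_store_flag cannot produce the result directly: clear
   the target, then jump around the store of 1 (or -1 for a signed 1-bit
   type) when the comparison is false.  The comparison shown is just one of
   the codes handled above.  */
#if 0
static int store_flag_fallback (int a, int b)
{
  int target = 0;
  if (!(a < b))        /* jumpifnot_1 on the comparison            */
    goto over;
  target = 1;          /* constm1_rtx instead for signed 1-bit types */
 over:
  return target;
}
#endif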
9054
9055 case COMPLEX_EXPR:
9056 /* Get the rtx code of the operands. */
9057 op0 = expand_normal (treeop0);
9058 op1 = expand_normal (treeop1);
9059
9060 if (!target)
9061 target = gen_reg_rtx (TYPE_MODE (type));
9062 else
9063 /* If target overlaps with op1, then either we need to force
9064 op1 into a pseudo (if target also overlaps with op0),
9065 or write the complex parts in reverse order. */
9066 switch (GET_CODE (target))
9067 {
9068 case CONCAT:
9069 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9070 {
9071 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9072 {
9073 complex_expr_force_op1:
9074 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9075 emit_move_insn (temp, op1);
9076 op1 = temp;
9077 break;
9078 }
9079 complex_expr_swap_order:
9080 /* Move the imaginary (op1) and real (op0) parts to their
9081 location. */
9082 write_complex_part (target, op1, true);
9083 write_complex_part (target, op0, false);
9084
9085 return target;
9086 }
9087 break;
9088 case MEM:
9089 temp = adjust_address_nv (target,
9090 GET_MODE_INNER (GET_MODE (target)), 0);
9091 if (reg_overlap_mentioned_p (temp, op1))
9092 {
9093 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9094 temp = adjust_address_nv (target, imode,
9095 GET_MODE_SIZE (imode));
9096 if (reg_overlap_mentioned_p (temp, op0))
9097 goto complex_expr_force_op1;
9098 goto complex_expr_swap_order;
9099 }
9100 break;
9101 default:
9102 if (reg_overlap_mentioned_p (target, op1))
9103 {
9104 if (reg_overlap_mentioned_p (target, op0))
9105 goto complex_expr_force_op1;
9106 goto complex_expr_swap_order;
9107 }
9108 break;
9109 }
9110
9111 /* Move the real (op0) and imaginary (op1) parts to their location. */
9112 write_complex_part (target, op0, false);
9113 write_complex_part (target, op1, true);
9114
9115 return target;
9116
9117 case WIDEN_SUM_EXPR:
9118 {
9119 tree oprnd0 = treeop0;
9120 tree oprnd1 = treeop1;
9121
9122 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9123 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9124 target, unsignedp);
9125 return target;
9126 }
9127
9128 case REDUC_MAX_EXPR:
9129 case REDUC_MIN_EXPR:
9130 case REDUC_PLUS_EXPR:
9131 {
9132 op0 = expand_normal (treeop0);
9133 this_optab = optab_for_tree_code (code, type, optab_default);
9134 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9135
9136 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9137 {
9138 struct expand_operand ops[2];
9139 enum insn_code icode = optab_handler (this_optab, vec_mode);
9140
9141 create_output_operand (&ops[0], target, mode);
9142 create_input_operand (&ops[1], op0, vec_mode);
9143 if (maybe_expand_insn (icode, 2, ops))
9144 {
9145 target = ops[0].value;
9146 if (GET_MODE (target) != mode)
9147 return gen_lowpart (tmode, target);
9148 return target;
9149 }
9150 }
9151 /* Fall back to optab with vector result, and then extract scalar. */
9152 this_optab = scalar_reduc_to_vector (this_optab, type);
9153 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9154 gcc_assert (temp);
9155 /* The tree code produces a scalar result, but (somewhat by convention)
9156 the optab produces a vector with the result in element 0 if
9157 little-endian, or element N-1 if big-endian. So pull the scalar
9158 result out of that element. */
9159 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9160 int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9161 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9162 target, mode, mode);
9163 gcc_assert (temp);
9164 return temp;
9165 }
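/* Editor's note: an illustrative sketch (not part of expr.c) of the fallback
   reduction epilogue above: the vector optab leaves the scalar result in
   lane 0 on little-endian targets and in lane N-1 on big-endian targets, so
   the scalar is pulled out of that lane.  */
#if 0
static int extract_reduction_result (const int *lanes, int nunits, int big_endian)
{
  int index = big_endian ? nunits - 1 : 0;   /* mirrors the BYTES_BIG_ENDIAN test */
  return lanes[index];
}
#endif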
9166
9167 case VEC_UNPACK_HI_EXPR:
9168 case VEC_UNPACK_LO_EXPR:
9169 {
9170 op0 = expand_normal (treeop0);
9171 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9172 target, unsignedp);
9173 gcc_assert (temp);
9174 return temp;
9175 }
9176
9177 case VEC_UNPACK_FLOAT_HI_EXPR:
9178 case VEC_UNPACK_FLOAT_LO_EXPR:
9179 {
9180 op0 = expand_normal (treeop0);
9181 /* The signedness is determined from the input operand. */
9182 temp = expand_widen_pattern_expr
9183 (ops, op0, NULL_RTX, NULL_RTX,
9184 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9185
9186 gcc_assert (temp);
9187 return temp;
9188 }
9189
9190 case VEC_WIDEN_MULT_HI_EXPR:
9191 case VEC_WIDEN_MULT_LO_EXPR:
9192 case VEC_WIDEN_MULT_EVEN_EXPR:
9193 case VEC_WIDEN_MULT_ODD_EXPR:
9194 case VEC_WIDEN_LSHIFT_HI_EXPR:
9195 case VEC_WIDEN_LSHIFT_LO_EXPR:
9196 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9197 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9198 target, unsignedp);
9199 gcc_assert (target);
9200 return target;
9201
9202 case VEC_PACK_TRUNC_EXPR:
9203 case VEC_PACK_SAT_EXPR:
9204 case VEC_PACK_FIX_TRUNC_EXPR:
9205 mode = TYPE_MODE (TREE_TYPE (treeop0));
9206 goto binop;
9207
9208 case VEC_PERM_EXPR:
9209 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9210 op2 = expand_normal (treeop2);
9211
9212 /* Careful here: if the target doesn't support integral vector modes,
9213 a constant selection vector could wind up smooshed into a normal
9214 integral constant. */
9215 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9216 {
9217 tree sel_type = TREE_TYPE (treeop2);
9218 machine_mode vmode
9219 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9220 TYPE_VECTOR_SUBPARTS (sel_type));
9221 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9222 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9223 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9224 }
9225 else
9226 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9227
9228 temp = expand_vec_perm (mode, op0, op1, op2, target);
9229 gcc_assert (temp);
9230 return temp;
9231
9232 case DOT_PROD_EXPR:
9233 {
9234 tree oprnd0 = treeop0;
9235 tree oprnd1 = treeop1;
9236 tree oprnd2 = treeop2;
9237 rtx op2;
9238
9239 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9240 op2 = expand_normal (oprnd2);
9241 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9242 target, unsignedp);
9243 return target;
9244 }
9245
9246 case SAD_EXPR:
9247 {
9248 tree oprnd0 = treeop0;
9249 tree oprnd1 = treeop1;
9250 tree oprnd2 = treeop2;
9251 rtx op2;
9252
9253 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9254 op2 = expand_normal (oprnd2);
9255 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9256 target, unsignedp);
9257 return target;
9258 }
9259
9260 case REALIGN_LOAD_EXPR:
9261 {
9262 tree oprnd0 = treeop0;
9263 tree oprnd1 = treeop1;
9264 tree oprnd2 = treeop2;
9265 rtx op2;
9266
9267 this_optab = optab_for_tree_code (code, type, optab_default);
9268 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9269 op2 = expand_normal (oprnd2);
9270 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9271 target, unsignedp);
9272 gcc_assert (temp);
9273 return temp;
9274 }
9275
9276 case COND_EXPR:
9277 /* A COND_EXPR with its type being VOID_TYPE represents a
9278 conditional jump and is handled in
9279 expand_gimple_cond_expr. */
9280 gcc_assert (!VOID_TYPE_P (type));
9281
9282 /* Note that COND_EXPRs whose type is a structure or union
9283 are required to be constructed to contain assignments of
9284 a temporary variable, so that we can evaluate them here
9285 for side effect only. If type is void, we must do likewise. */
9286
9287 gcc_assert (!TREE_ADDRESSABLE (type)
9288 && !ignore
9289 && TREE_TYPE (treeop1) != void_type_node
9290 && TREE_TYPE (treeop2) != void_type_node);
9291
9292 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9293 if (temp)
9294 return temp;
9295
9296 /* If we are not to produce a result, we have no target. Otherwise,
9297 if a target was specified use it; it will not be used as an
9298 intermediate target unless it is safe. If no target, use a
9299 temporary. */
9300
9301 if (modifier != EXPAND_STACK_PARM
9302 && original_target
9303 && safe_from_p (original_target, treeop0, 1)
9304 && GET_MODE (original_target) == mode
9305 && !MEM_P (original_target))
9306 temp = original_target;
9307 else
9308 temp = assign_temp (type, 0, 1);
9309
9310 do_pending_stack_adjust ();
9311 NO_DEFER_POP;
9312 op0 = gen_label_rtx ();
9313 op1 = gen_label_rtx ();
9314 jumpifnot (treeop0, op0, -1);
9315 store_expr (treeop1, temp,
9316 modifier == EXPAND_STACK_PARM,
9317 false);
9318
9319 emit_jump_insn (gen_jump (op1));
9320 emit_barrier ();
9321 emit_label (op0);
9322 store_expr (treeop2, temp,
9323 modifier == EXPAND_STACK_PARM,
9324 false);
9325
9326 emit_label (op1);
9327 OK_DEFER_POP;
9328 return temp;
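/* Editor's note: a minimal sketch (not part of expr.c) of the control flow
   generated above when no conditional move is available: evaluate the
   selected arm into the temporary and jump over the other arm.  */
#if 0
static int cond_expr_fallback (int cond, int a, int b)
{
  int temp;
  if (!cond)           /* jumpifnot (treeop0, op0)   */
    goto else_arm;
  temp = a;            /* store_expr (treeop1, temp) */
  goto done;           /* gen_jump (op1)             */
 else_arm:
  temp = b;            /* store_expr (treeop2, temp) */
 done:
  return temp;
}
#endif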
9329
9330 case VEC_COND_EXPR:
9331 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9332 return target;
9333
9334 default:
9335 gcc_unreachable ();
9336 }
9337
9338 /* Here to do an ordinary binary operator. */
9339 binop:
9340 expand_operands (treeop0, treeop1,
9341 subtarget, &op0, &op1, EXPAND_NORMAL);
9342 binop2:
9343 this_optab = optab_for_tree_code (code, type, optab_default);
9344 binop3:
9345 if (modifier == EXPAND_STACK_PARM)
9346 target = 0;
9347 temp = expand_binop (mode, this_optab, op0, op1, target,
9348 unsignedp, OPTAB_LIB_WIDEN);
9349 gcc_assert (temp);
9350 /* Bitwise operations do not need bitfield reduction as we expect their
9351 operands being properly truncated. */
9352 if (code == BIT_XOR_EXPR
9353 || code == BIT_AND_EXPR
9354 || code == BIT_IOR_EXPR)
9355 return temp;
9356 return REDUCE_BIT_FIELD (temp);
9357 }
9358 #undef REDUCE_BIT_FIELD
9359
9360
9361 /* Return TRUE if expression STMT is suitable for replacement.
9362 Never consider memory loads as replaceable, because those don't ever lead
9363 into constant expressions. */
9364
9365 static bool
9366 stmt_is_replaceable_p (gimple stmt)
9367 {
9368 if (ssa_is_replaceable_p (stmt))
9369 {
9370 /* Don't move around loads. */
9371 if (!gimple_assign_single_p (stmt)
9372 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9373 return true;
9374 }
9375 return false;
9376 }
9377
9378 rtx
9379 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9380 enum expand_modifier modifier, rtx *alt_rtl,
9381 bool inner_reference_p)
9382 {
9383 rtx op0, op1, temp, decl_rtl;
9384 tree type;
9385 int unsignedp;
9386 machine_mode mode;
9387 enum tree_code code = TREE_CODE (exp);
9388 rtx subtarget, original_target;
9389 int ignore;
9390 tree context;
9391 bool reduce_bit_field;
9392 location_t loc = EXPR_LOCATION (exp);
9393 struct separate_ops ops;
9394 tree treeop0, treeop1, treeop2;
9395 tree ssa_name = NULL_TREE;
9396 gimple g;
9397
9398 type = TREE_TYPE (exp);
9399 mode = TYPE_MODE (type);
9400 unsignedp = TYPE_UNSIGNED (type);
9401
9402 treeop0 = treeop1 = treeop2 = NULL_TREE;
9403 if (!VL_EXP_CLASS_P (exp))
9404 switch (TREE_CODE_LENGTH (code))
9405 {
9406 default:
9407 case 3: treeop2 = TREE_OPERAND (exp, 2);
9408 case 2: treeop1 = TREE_OPERAND (exp, 1);
9409 case 1: treeop0 = TREE_OPERAND (exp, 0);
9410 case 0: break;
9411 }
9412 ops.code = code;
9413 ops.type = type;
9414 ops.op0 = treeop0;
9415 ops.op1 = treeop1;
9416 ops.op2 = treeop2;
9417 ops.location = loc;
9418
9419 ignore = (target == const0_rtx
9420 || ((CONVERT_EXPR_CODE_P (code)
9421 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9422 && TREE_CODE (type) == VOID_TYPE));
9423
9424 /* An operation in what may be a bit-field type needs the
9425 result to be reduced to the precision of the bit-field type,
9426 which is narrower than that of the type's mode. */
9427 reduce_bit_field = (!ignore
9428 && INTEGRAL_TYPE_P (type)
9429 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9430
9431 /* If we are going to ignore this result, we need only do something
9432 if there is a side-effect somewhere in the expression. If there
9433 is, short-circuit the most common cases here. Note that we must
9434 not call expand_expr with anything but const0_rtx in case this
9435 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9436
9437 if (ignore)
9438 {
9439 if (! TREE_SIDE_EFFECTS (exp))
9440 return const0_rtx;
9441
9442 /* Ensure we reference a volatile object even if value is ignored, but
9443 don't do this if all we are doing is taking its address. */
9444 if (TREE_THIS_VOLATILE (exp)
9445 && TREE_CODE (exp) != FUNCTION_DECL
9446 && mode != VOIDmode && mode != BLKmode
9447 && modifier != EXPAND_CONST_ADDRESS)
9448 {
9449 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9450 if (MEM_P (temp))
9451 copy_to_reg (temp);
9452 return const0_rtx;
9453 }
9454
9455 if (TREE_CODE_CLASS (code) == tcc_unary
9456 || code == BIT_FIELD_REF
9457 || code == COMPONENT_REF
9458 || code == INDIRECT_REF)
9459 return expand_expr (treeop0, const0_rtx, VOIDmode,
9460 modifier);
9461
9462 else if (TREE_CODE_CLASS (code) == tcc_binary
9463 || TREE_CODE_CLASS (code) == tcc_comparison
9464 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9465 {
9466 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9467 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9468 return const0_rtx;
9469 }
9470
9471 target = 0;
9472 }
9473
9474 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9475 target = 0;
9476
9477 /* Use subtarget as the target for operand 0 of a binary operation. */
9478 subtarget = get_subtarget (target);
9479 original_target = target;
9480
9481 switch (code)
9482 {
9483 case LABEL_DECL:
9484 {
9485 tree function = decl_function_context (exp);
9486
9487 temp = label_rtx (exp);
9488 temp = gen_rtx_LABEL_REF (Pmode, temp);
9489
9490 if (function != current_function_decl
9491 && function != 0)
9492 LABEL_REF_NONLOCAL_P (temp) = 1;
9493
9494 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9495 return temp;
9496 }
9497
9498 case SSA_NAME:
9499 /* ??? ivopts calls expander, without any preparation from
9500 out-of-ssa. So fake instructions as if this was an access to the
9501 base variable. This unnecessarily allocates a pseudo, see how we can
9502 reuse it, if partition base vars have it set already. */
9503 if (!currently_expanding_to_rtl)
9504 {
9505 tree var = SSA_NAME_VAR (exp);
9506 if (var && DECL_RTL_SET_P (var))
9507 return DECL_RTL (var);
9508 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9509 LAST_VIRTUAL_REGISTER + 1);
9510 }
9511
9512 g = get_gimple_for_ssa_name (exp);
9513 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9514 if (g == NULL
9515 && modifier == EXPAND_INITIALIZER
9516 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9517 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9518 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9519 g = SSA_NAME_DEF_STMT (exp);
9520 if (g)
9521 {
9522 rtx r;
9523 ops.code = gimple_assign_rhs_code (g);
9524 switch (get_gimple_rhs_class (ops.code))
9525 {
9526 case GIMPLE_TERNARY_RHS:
9527 ops.op2 = gimple_assign_rhs3 (g);
9528 /* Fallthru */
9529 case GIMPLE_BINARY_RHS:
9530 ops.op1 = gimple_assign_rhs2 (g);
9531
9532 /* Try to expand conditional compare. */
9533 if (targetm.gen_ccmp_first)
9534 {
9535 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9536 r = expand_ccmp_expr (g);
9537 if (r)
9538 break;
9539 }
9540 /* Fallthru */
9541 case GIMPLE_UNARY_RHS:
9542 ops.op0 = gimple_assign_rhs1 (g);
9543 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9544 ops.location = gimple_location (g);
9545 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9546 break;
9547 case GIMPLE_SINGLE_RHS:
9548 {
9549 location_t saved_loc = curr_insn_location ();
9550 set_curr_insn_location (gimple_location (g));
9551 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9552 tmode, modifier, NULL, inner_reference_p);
9553 set_curr_insn_location (saved_loc);
9554 break;
9555 }
9556 default:
9557 gcc_unreachable ();
9558 }
9559 if (REG_P (r) && !REG_EXPR (r))
9560 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9561 return r;
9562 }
9563
9564 ssa_name = exp;
9565 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9566 exp = SSA_NAME_VAR (ssa_name);
9567 goto expand_decl_rtl;
9568
9569 case PARM_DECL:
9570 case VAR_DECL:
9571 /* If a static var's type was incomplete when the decl was written,
9572 but the type is complete now, lay out the decl now. */
9573 if (DECL_SIZE (exp) == 0
9574 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9575 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9576 layout_decl (exp, 0);
9577
9578 /* ... fall through ... */
9579
9580 case FUNCTION_DECL:
9581 case RESULT_DECL:
9582 decl_rtl = DECL_RTL (exp);
9583 expand_decl_rtl:
9584 gcc_assert (decl_rtl);
9585 decl_rtl = copy_rtx (decl_rtl);
9586 /* Record writes to register variables. */
9587 if (modifier == EXPAND_WRITE
9588 && REG_P (decl_rtl)
9589 && HARD_REGISTER_P (decl_rtl))
9590 add_to_hard_reg_set (&crtl->asm_clobbers,
9591 GET_MODE (decl_rtl), REGNO (decl_rtl));
9592
9593 /* Ensure variable marked as used even if it doesn't go through
9594 a parser. If it hasn't been used yet, write out an external
9595 definition. */
9596 TREE_USED (exp) = 1;
9597
9598 /* Show we haven't gotten RTL for this yet. */
9599 temp = 0;
9600
9601 /* Variables inherited from containing functions should have
9602 been lowered by this point. */
9603 context = decl_function_context (exp);
9604 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9605 || context == current_function_decl
9606 || TREE_STATIC (exp)
9607 || DECL_EXTERNAL (exp)
9608 /* ??? C++ creates functions that are not TREE_STATIC. */
9609 || TREE_CODE (exp) == FUNCTION_DECL);
9610
9611 /* This is the case of an array whose size is to be determined
9612 from its initializer, while the initializer is still being parsed.
9613 ??? We aren't parsing while expanding anymore. */
9614
9615 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9616 temp = validize_mem (decl_rtl);
9617
9618 /* If DECL_RTL is memory, we are in the normal case and the
9619 address is not valid, get the address into a register. */
9620
9621 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9622 {
9623 if (alt_rtl)
9624 *alt_rtl = decl_rtl;
9625 decl_rtl = use_anchored_address (decl_rtl);
9626 if (modifier != EXPAND_CONST_ADDRESS
9627 && modifier != EXPAND_SUM
9628 && !memory_address_addr_space_p (DECL_MODE (exp),
9629 XEXP (decl_rtl, 0),
9630 MEM_ADDR_SPACE (decl_rtl)))
9631 temp = replace_equiv_address (decl_rtl,
9632 copy_rtx (XEXP (decl_rtl, 0)));
9633 }
9634
9635 /* If we got something, return it. But first, set the alignment
9636 if the address is a register. */
9637 if (temp != 0)
9638 {
9639 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9640 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9641
9642 return temp;
9643 }
9644
9645 /* If the mode of DECL_RTL does not match that of the decl,
9646 there are two cases: we are dealing with a BLKmode value
9647 that is returned in a register, or we are dealing with
9648 a promoted value. In the latter case, return a SUBREG
9649 of the wanted mode, but mark it so that we know that it
9650 was already extended. */
9651 if (REG_P (decl_rtl)
9652 && DECL_MODE (exp) != BLKmode
9653 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9654 {
9655 machine_mode pmode;
9656
9657 /* Get the signedness to be used for this variable. Ensure we get
9658 the same mode we got when the variable was declared. */
9659 if (code == SSA_NAME
9660 && (g = SSA_NAME_DEF_STMT (ssa_name))
9661 && gimple_code (g) == GIMPLE_CALL
9662 && !gimple_call_internal_p (g))
9663 pmode = promote_function_mode (type, mode, &unsignedp,
9664 gimple_call_fntype (g),
9665 2);
9666 else
9667 pmode = promote_decl_mode (exp, &unsignedp);
9668 gcc_assert (GET_MODE (decl_rtl) == pmode);
9669
9670 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9671 SUBREG_PROMOTED_VAR_P (temp) = 1;
9672 SUBREG_PROMOTED_SET (temp, unsignedp);
9673 return temp;
9674 }
9675
9676 return decl_rtl;
9677
9678 case INTEGER_CST:
9679 /* Given that TYPE_PRECISION (type) is not always equal to
9680 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9681 the former to the latter according to the signedness of the
9682 type. */
9683 temp = immed_wide_int_const (wide_int::from
9684 (exp,
9685 GET_MODE_PRECISION (TYPE_MODE (type)),
9686 TYPE_SIGN (type)),
9687 TYPE_MODE (type));
9688 return temp;
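/* Editor's note: an illustrative sketch (not part of expr.c) of the extension
   performed above when TYPE_PRECISION is narrower than the mode: the constant
   is sign- or zero-extended into the wider mode according to the type.  */
#if 0
static int extend_3_bit_value (int low_bits, int is_signed)
{
  /* A 3-bit pattern 0b101 is 5 when zero-extended but -3 when
     sign-extended into a wider integer mode.  */
  int value = low_bits & 7;
  if (is_signed && (value & 4))
    value -= 8;
  return value;
}
#endif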
9689
9690 case VECTOR_CST:
9691 {
9692 tree tmp = NULL_TREE;
9693 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9694 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9695 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9696 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9697 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9698 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9699 return const_vector_from_tree (exp);
9700 if (GET_MODE_CLASS (mode) == MODE_INT)
9701 {
9702 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9703 if (type_for_mode)
9704 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9705 }
9706 if (!tmp)
9707 {
9708 vec<constructor_elt, va_gc> *v;
9709 unsigned i;
9710 vec_alloc (v, VECTOR_CST_NELTS (exp));
9711 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9712 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9713 tmp = build_constructor (type, v);
9714 }
9715 return expand_expr (tmp, ignore ? const0_rtx : target,
9716 tmode, modifier);
9717 }
9718
9719 case CONST_DECL:
9720 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9721
9722 case REAL_CST:
9723 /* If optimized, generate immediate CONST_DOUBLE
9724 which will be turned into memory by reload if necessary.
9725
9726 We used to force a register so that loop.c could see it. But
9727 this does not allow gen_* patterns to perform optimizations with
9728 the constants. It also produces two insns in cases like "x = 1.0;".
9729 On most machines, floating-point constants are not permitted in
9730 many insns, so we'd end up copying it to a register in any case.
9731
9732 Now, we do the copying in expand_binop, if appropriate. */
9733 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9734 TYPE_MODE (TREE_TYPE (exp)));
9735
9736 case FIXED_CST:
9737 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9738 TYPE_MODE (TREE_TYPE (exp)));
9739
9740 case COMPLEX_CST:
9741 /* Handle evaluating a complex constant in a CONCAT target. */
9742 if (original_target && GET_CODE (original_target) == CONCAT)
9743 {
9744 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9745 rtx rtarg, itarg;
9746
9747 rtarg = XEXP (original_target, 0);
9748 itarg = XEXP (original_target, 1);
9749
9750 /* Move the real and imaginary parts separately. */
9751 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9752 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9753
9754 if (op0 != rtarg)
9755 emit_move_insn (rtarg, op0);
9756 if (op1 != itarg)
9757 emit_move_insn (itarg, op1);
9758
9759 return original_target;
9760 }
9761
9762 /* ... fall through ... */
9763
9764 case STRING_CST:
9765 temp = expand_expr_constant (exp, 1, modifier);
9766
9767 /* temp contains a constant address.
9768 On RISC machines where a constant address isn't valid,
9769 make some insns to get that address into a register. */
9770 if (modifier != EXPAND_CONST_ADDRESS
9771 && modifier != EXPAND_INITIALIZER
9772 && modifier != EXPAND_SUM
9773 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9774 MEM_ADDR_SPACE (temp)))
9775 return replace_equiv_address (temp,
9776 copy_rtx (XEXP (temp, 0)));
9777 return temp;
9778
9779 case SAVE_EXPR:
9780 {
9781 tree val = treeop0;
9782 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9783 inner_reference_p);
9784
9785 if (!SAVE_EXPR_RESOLVED_P (exp))
9786 {
9787 /* We can indeed still hit this case, typically via builtin
9788 expanders calling save_expr immediately before expanding
9789 something. Assume this means that we only have to deal
9790 with non-BLKmode values. */
9791 gcc_assert (GET_MODE (ret) != BLKmode);
9792
9793 val = build_decl (curr_insn_location (),
9794 VAR_DECL, NULL, TREE_TYPE (exp));
9795 DECL_ARTIFICIAL (val) = 1;
9796 DECL_IGNORED_P (val) = 1;
9797 treeop0 = val;
9798 TREE_OPERAND (exp, 0) = treeop0;
9799 SAVE_EXPR_RESOLVED_P (exp) = 1;
9800
9801 if (!CONSTANT_P (ret))
9802 ret = copy_to_reg (ret);
9803 SET_DECL_RTL (val, ret);
9804 }
9805
9806 return ret;
9807 }
9808
9809
9810 case CONSTRUCTOR:
9811 /* If we don't need the result, just ensure we evaluate any
9812 subexpressions. */
9813 if (ignore)
9814 {
9815 unsigned HOST_WIDE_INT idx;
9816 tree value;
9817
9818 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9819 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9820
9821 return const0_rtx;
9822 }
9823
9824 return expand_constructor (exp, target, modifier, false);
9825
9826 case TARGET_MEM_REF:
9827 {
9828 addr_space_t as
9829 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9830 enum insn_code icode;
9831 unsigned int align;
9832
9833 op0 = addr_for_mem_ref (exp, as, true);
9834 op0 = memory_address_addr_space (mode, op0, as);
9835 temp = gen_rtx_MEM (mode, op0);
9836 set_mem_attributes (temp, exp, 0);
9837 set_mem_addr_space (temp, as);
9838 align = get_object_alignment (exp);
9839 if (modifier != EXPAND_WRITE
9840 && modifier != EXPAND_MEMORY
9841 && mode != BLKmode
9842 && align < GET_MODE_ALIGNMENT (mode)
9843 /* If the target does not have special handling for unaligned
9844 loads of this mode, it can use regular moves for them. */
9845 && ((icode = optab_handler (movmisalign_optab, mode))
9846 != CODE_FOR_nothing))
9847 {
9848 struct expand_operand ops[2];
9849
9850 /* We've already validated the memory, and we're creating a
9851 new pseudo destination. The predicates really can't fail,
9852 nor can the generator. */
9853 create_output_operand (&ops[0], NULL_RTX, mode);
9854 create_fixed_operand (&ops[1], temp);
9855 expand_insn (icode, 2, ops);
9856 temp = ops[0].value;
9857 }
9858 return temp;
9859 }
9860
9861 case MEM_REF:
9862 {
9863 addr_space_t as
9864 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9865 machine_mode address_mode;
9866 tree base = TREE_OPERAND (exp, 0);
9867 gimple def_stmt;
9868 enum insn_code icode;
9869 unsigned align;
9870 /* Handle expansion of non-aliased memory with non-BLKmode. That
9871 might end up in a register. */
9872 if (mem_ref_refers_to_non_mem_p (exp))
9873 {
9874 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9875 base = TREE_OPERAND (base, 0);
9876 if (offset == 0
9877 && tree_fits_uhwi_p (TYPE_SIZE (type))
9878 && (GET_MODE_BITSIZE (DECL_MODE (base))
9879 == tree_to_uhwi (TYPE_SIZE (type))))
9880 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9881 target, tmode, modifier);
9882 if (TYPE_MODE (type) == BLKmode)
9883 {
9884 temp = assign_stack_temp (DECL_MODE (base),
9885 GET_MODE_SIZE (DECL_MODE (base)));
9886 store_expr (base, temp, 0, false);
9887 temp = adjust_address (temp, BLKmode, offset);
9888 set_mem_size (temp, int_size_in_bytes (type));
9889 return temp;
9890 }
9891 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9892 bitsize_int (offset * BITS_PER_UNIT));
9893 return expand_expr (exp, target, tmode, modifier);
9894 }
9895 address_mode = targetm.addr_space.address_mode (as);
9896 base = TREE_OPERAND (exp, 0);
9897 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9898 {
9899 tree mask = gimple_assign_rhs2 (def_stmt);
9900 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9901 gimple_assign_rhs1 (def_stmt), mask);
9902 TREE_OPERAND (exp, 0) = base;
9903 }
9904 align = get_object_alignment (exp);
9905 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9906 op0 = memory_address_addr_space (mode, op0, as);
9907 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9908 {
9909 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9910 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9911 op0 = memory_address_addr_space (mode, op0, as);
9912 }
9913 temp = gen_rtx_MEM (mode, op0);
9914 set_mem_attributes (temp, exp, 0);
9915 set_mem_addr_space (temp, as);
9916 if (TREE_THIS_VOLATILE (exp))
9917 MEM_VOLATILE_P (temp) = 1;
9918 if (modifier != EXPAND_WRITE
9919 && modifier != EXPAND_MEMORY
9920 && !inner_reference_p
9921 && mode != BLKmode
9922 && align < GET_MODE_ALIGNMENT (mode))
9923 {
9924 if ((icode = optab_handler (movmisalign_optab, mode))
9925 != CODE_FOR_nothing)
9926 {
9927 struct expand_operand ops[2];
9928
9929 /* We've already validated the memory, and we're creating a
9930 new pseudo destination. The predicates really can't fail,
9931 nor can the generator. */
9932 create_output_operand (&ops[0], NULL_RTX, mode);
9933 create_fixed_operand (&ops[1], temp);
9934 expand_insn (icode, 2, ops);
9935 temp = ops[0].value;
9936 }
9937 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9938 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9939 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9940 (modifier == EXPAND_STACK_PARM
9941 ? NULL_RTX : target),
9942 mode, mode);
9943 }
9944 return temp;
9945 }
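/* Editor's note: a rough sketch (not part of expr.c) of the two strategies
   above for a misaligned non-BLKmode load: prefer the target's movmisalign
   pattern, otherwise extract the value as a bit field when plain unaligned
   accesses would be slow.  A plain C analogue of a safe unaligned load is
   shown below.  */
#if 0
#include <string.h>
static int load_misaligned_int (const unsigned char *p)
{
  int value;
  memcpy (&value, p, sizeof value);   /* byte copy avoids the unaligned access */
  return value;
}
#endif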
9946
9947 case ARRAY_REF:
9948
9949 {
9950 tree array = treeop0;
9951 tree index = treeop1;
9952 tree init;
9953
9954 /* Fold an expression like: "foo"[2].
9955 This is not done in fold so it won't happen inside &.
9956 Don't fold if this is for wide characters since it's too
9957 difficult to do correctly and this is a very rare case. */
9958
9959 if (modifier != EXPAND_CONST_ADDRESS
9960 && modifier != EXPAND_INITIALIZER
9961 && modifier != EXPAND_MEMORY)
9962 {
9963 tree t = fold_read_from_constant_string (exp);
9964
9965 if (t)
9966 return expand_expr (t, target, tmode, modifier);
9967 }
9968
9969 /* If this is a constant index into a constant array,
9970 just get the value from the array. Handle both the cases when
9971 we have an explicit constructor and when our operand is a variable
9972 that was declared const. */
9973
9974 if (modifier != EXPAND_CONST_ADDRESS
9975 && modifier != EXPAND_INITIALIZER
9976 && modifier != EXPAND_MEMORY
9977 && TREE_CODE (array) == CONSTRUCTOR
9978 && ! TREE_SIDE_EFFECTS (array)
9979 && TREE_CODE (index) == INTEGER_CST)
9980 {
9981 unsigned HOST_WIDE_INT ix;
9982 tree field, value;
9983
9984 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9985 field, value)
9986 if (tree_int_cst_equal (field, index))
9987 {
9988 if (!TREE_SIDE_EFFECTS (value))
9989 return expand_expr (fold (value), target, tmode, modifier);
9990 break;
9991 }
9992 }
9993
9994 else if (optimize >= 1
9995 && modifier != EXPAND_CONST_ADDRESS
9996 && modifier != EXPAND_INITIALIZER
9997 && modifier != EXPAND_MEMORY
9998 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9999 && TREE_CODE (index) == INTEGER_CST
10000 && (TREE_CODE (array) == VAR_DECL
10001 || TREE_CODE (array) == CONST_DECL)
10002 && (init = ctor_for_folding (array)) != error_mark_node)
10003 {
10004 if (init == NULL_TREE)
10005 {
10006 tree value = build_zero_cst (type);
10007 if (TREE_CODE (value) == CONSTRUCTOR)
10008 {
10009 /* If VALUE is a CONSTRUCTOR, this optimization is only
10010 useful if this doesn't store the CONSTRUCTOR into
10011 memory. If it does, it is more efficient to just
10012 load the data from the array directly. */
10013 rtx ret = expand_constructor (value, target,
10014 modifier, true);
10015 if (ret == NULL_RTX)
10016 value = NULL_TREE;
10017 }
10018
10019 if (value)
10020 return expand_expr (value, target, tmode, modifier);
10021 }
10022 else if (TREE_CODE (init) == CONSTRUCTOR)
10023 {
10024 unsigned HOST_WIDE_INT ix;
10025 tree field, value;
10026
10027 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10028 field, value)
10029 if (tree_int_cst_equal (field, index))
10030 {
10031 if (TREE_SIDE_EFFECTS (value))
10032 break;
10033
10034 if (TREE_CODE (value) == CONSTRUCTOR)
10035 {
10036 /* If VALUE is a CONSTRUCTOR, this
10037 optimization is only useful if
10038 this doesn't store the CONSTRUCTOR
10039 into memory. If it does, it is more
10040 efficient to just load the data from
10041 the array directly. */
10042 rtx ret = expand_constructor (value, target,
10043 modifier, true);
10044 if (ret == NULL_RTX)
10045 break;
10046 }
10047
10048 return
10049 expand_expr (fold (value), target, tmode, modifier);
10050 }
10051 }
10052 else if (TREE_CODE (init) == STRING_CST)
10053 {
10054 tree low_bound = array_ref_low_bound (exp);
10055 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10056
10057 /* Optimize the special case of a zero lower bound.
10058
10059 We convert the lower bound to sizetype to avoid problems
10060 with constant folding. E.g. suppose the lower bound is
10061 1 and its mode is QI. Without the conversion
10062 (ARRAY + (INDEX - (unsigned char)1))
10063 becomes
10064 (ARRAY + (-(unsigned char)1) + INDEX)
10065 which becomes
10066 (ARRAY + 255 + INDEX). Oops! */
10067 if (!integer_zerop (low_bound))
10068 index1 = size_diffop_loc (loc, index1,
10069 fold_convert_loc (loc, sizetype,
10070 low_bound));
10071
10072 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10073 {
10074 tree type = TREE_TYPE (TREE_TYPE (init));
10075 machine_mode mode = TYPE_MODE (type);
10076
10077 if (GET_MODE_CLASS (mode) == MODE_INT
10078 && GET_MODE_SIZE (mode) == 1)
10079 return gen_int_mode (TREE_STRING_POINTER (init)
10080 [TREE_INT_CST_LOW (index1)],
10081 mode);
10082 }
10083 }
10084 }
10085 }
10086 goto normal_inner_ref;
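/* Editor's note: a small sketch (not part of expr.c) of the constant string
   folding and of the lower-bound pitfall described above: the index is
   brought to sizetype before subtracting the lower bound, so a narrow bound
   such as (unsigned char) 1 cannot reappear as an addend of 255.  */
#if 0
static char fold_string_index (void)
{
  return "foo"[2];                            /* folds to 'o' at compile time */
}
static unsigned long rebase_index (unsigned long index, unsigned char low_bound)
{
  return index - (unsigned long) low_bound;   /* widen the bound first        */
}
#endif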
10087
10088 case COMPONENT_REF:
10089 /* If the operand is a CONSTRUCTOR, we can just extract the
10090 appropriate field if it is present. */
10091 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10092 {
10093 unsigned HOST_WIDE_INT idx;
10094 tree field, value;
10095
10096 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10097 idx, field, value)
10098 if (field == treeop1
10099 /* We can normally use the value of the field in the
10100 CONSTRUCTOR. However, if this is a bitfield in
10101 an integral mode that we can fit in a HOST_WIDE_INT,
10102 we must mask only the number of bits in the bitfield,
10103 since this is done implicitly by the constructor. If
10104 the bitfield does not meet either of those conditions,
10105 we can't do this optimization. */
10106 && (! DECL_BIT_FIELD (field)
10107 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10108 && (GET_MODE_PRECISION (DECL_MODE (field))
10109 <= HOST_BITS_PER_WIDE_INT))))
10110 {
10111 if (DECL_BIT_FIELD (field)
10112 && modifier == EXPAND_STACK_PARM)
10113 target = 0;
10114 op0 = expand_expr (value, target, tmode, modifier);
10115 if (DECL_BIT_FIELD (field))
10116 {
10117 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10118 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10119
10120 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10121 {
10122 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10123 imode);
10124 op0 = expand_and (imode, op0, op1, target);
10125 }
10126 else
10127 {
10128 int count = GET_MODE_PRECISION (imode) - bitsize;
10129
10130 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10131 target, 0);
10132 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10133 target, 0);
10134 }
10135 }
10136
10137 return op0;
10138 }
10139 }
10140 goto normal_inner_ref;
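/* Editor's note: an illustrative sketch (not part of expr.c) of the bit-field
   adjustment above when the value comes straight from a CONSTRUCTOR: unsigned
   fields are masked to their width, signed fields are sign-extended with a
   shift-left/shift-right pair.  The shifts below rely on the usual
   two's-complement arithmetic right shift, as the generated code does.  */
#if 0
static unsigned mask_unsigned_field (unsigned value, int bitsize)
{
  return value & ((1u << bitsize) - 1);        /* expand_and with the mask     */
}
static int sign_extend_field (int value, int bitsize)
{
  int count = 32 - bitsize;                    /* mode precision minus bitsize */
  return (int) ((unsigned) value << count) >> count;  /* LSHIFT then RSHIFT    */
}
#endif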
10141
10142 case BIT_FIELD_REF:
10143 case ARRAY_RANGE_REF:
10144 normal_inner_ref:
10145 {
10146 machine_mode mode1, mode2;
10147 HOST_WIDE_INT bitsize, bitpos;
10148 tree offset;
10149 int volatilep = 0, must_force_mem;
10150 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10151 &mode1, &unsignedp, &volatilep, true);
10152 rtx orig_op0, memloc;
10153 bool clear_mem_expr = false;
10154
10155 /* If we got back the original object, something is wrong. Perhaps
10156 we are evaluating an expression too early. In any event, don't
10157 infinitely recurse. */
10158 gcc_assert (tem != exp);
10159
10160 /* If TEM's type is a union of variable size, pass TARGET to the inner
10161 computation, since it will need a temporary and TARGET is known
10162 to be suitable. This occurs in unchecked conversion in Ada. */
10163 orig_op0 = op0
10164 = expand_expr_real (tem,
10165 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10166 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10167 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10168 != INTEGER_CST)
10169 && modifier != EXPAND_STACK_PARM
10170 ? target : NULL_RTX),
10171 VOIDmode,
10172 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10173 NULL, true);
10174
10175 /* If the field has a mode, we want to access it in the
10176 field's mode, not the computed mode.
10177 If a MEM has VOIDmode (external with incomplete type),
10178 use BLKmode for it instead. */
10179 if (MEM_P (op0))
10180 {
10181 if (mode1 != VOIDmode)
10182 op0 = adjust_address (op0, mode1, 0);
10183 else if (GET_MODE (op0) == VOIDmode)
10184 op0 = adjust_address (op0, BLKmode, 0);
10185 }
10186
10187 mode2
10188 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10189
10190 /* If we have either an offset, a BLKmode result, or a reference
10191 outside the underlying object, we must force it to memory.
10192 Such a case can occur in Ada if we have unchecked conversion
10193 of an expression from a scalar type to an aggregate type or
10194 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10195 passed a partially uninitialized object or a view-conversion
10196 to a larger size. */
10197 must_force_mem = (offset
10198 || mode1 == BLKmode
10199 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10200
10201 /* Handle CONCAT first. */
10202 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10203 {
10204 if (bitpos == 0
10205 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10206 return op0;
10207 if (bitpos == 0
10208 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10209 && bitsize)
10210 {
10211 op0 = XEXP (op0, 0);
10212 mode2 = GET_MODE (op0);
10213 }
10214 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10215 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10216 && bitpos
10217 && bitsize)
10218 {
10219 op0 = XEXP (op0, 1);
10220 bitpos = 0;
10221 mode2 = GET_MODE (op0);
10222 }
10223 else
10224 /* Otherwise force into memory. */
10225 must_force_mem = 1;
10226 }
10227
10228 /* If this is a constant, put it in a register if it is a legitimate
10229 constant and we don't need a memory reference. */
10230 if (CONSTANT_P (op0)
10231 && mode2 != BLKmode
10232 && targetm.legitimate_constant_p (mode2, op0)
10233 && !must_force_mem)
10234 op0 = force_reg (mode2, op0);
10235
10236 /* Otherwise, if this is a constant, try to force it to the constant
10237 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10238 is a legitimate constant. */
10239 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10240 op0 = validize_mem (memloc);
10241
10242 /* Otherwise, if this is a constant or the object is not in memory
10243 and need be, put it there. */
10244 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10245 {
10246 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10247 emit_move_insn (memloc, op0);
10248 op0 = memloc;
10249 clear_mem_expr = true;
10250 }
10251
10252 if (offset)
10253 {
10254 machine_mode address_mode;
10255 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10256 EXPAND_SUM);
10257
10258 gcc_assert (MEM_P (op0));
10259
10260 address_mode = get_address_mode (op0);
10261 if (GET_MODE (offset_rtx) != address_mode)
10262 {
10263 /* We cannot be sure that the RTL in offset_rtx is valid outside
10264 of a memory address context, so force it into a register
10265 before attempting to convert it to the desired mode. */
10266 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10267 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10268 }
10269
10270 /* See the comment in expand_assignment for the rationale. */
10271 if (mode1 != VOIDmode
10272 && bitpos != 0
10273 && bitsize > 0
10274 && (bitpos % bitsize) == 0
10275 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10276 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10277 {
10278 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10279 bitpos = 0;
10280 }
10281
10282 op0 = offset_address (op0, offset_rtx,
10283 highest_pow2_factor (offset));
10284 }
10285
10286 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10287 record its alignment as BIGGEST_ALIGNMENT. */
10288 if (MEM_P (op0) && bitpos == 0 && offset != 0
10289 && is_aligning_offset (offset, tem))
10290 set_mem_align (op0, BIGGEST_ALIGNMENT);
10291
10292 /* Don't forget about volatility even if this is a bitfield. */
10293 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10294 {
10295 if (op0 == orig_op0)
10296 op0 = copy_rtx (op0);
10297
10298 MEM_VOLATILE_P (op0) = 1;
10299 }
10300
10301 /* In cases where an aligned union has an unaligned object
10302 as a field, we might be extracting a BLKmode value from
10303 an integer-mode (e.g., SImode) object. Handle this case
10304 by doing the extract into an object as wide as the field
10305 (which we know to be the width of a basic mode), then
10306 storing into memory, and changing the mode to BLKmode. */
10307 if (mode1 == VOIDmode
10308 || REG_P (op0) || GET_CODE (op0) == SUBREG
10309 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10310 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10311 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10312 && modifier != EXPAND_CONST_ADDRESS
10313 && modifier != EXPAND_INITIALIZER
10314 && modifier != EXPAND_MEMORY)
10315 /* If the bitfield is volatile and the bitsize
10316 is narrower than the access size of the bitfield,
10317 we need to extract bitfields from the access. */
10318 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10319 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10320 && mode1 != BLKmode
10321 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10322 /* If the field isn't aligned enough to fetch as a memref,
10323 fetch it as a bit field. */
10324 || (mode1 != BLKmode
10325 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10326 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10327 || (MEM_P (op0)
10328 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10329 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10330 && modifier != EXPAND_MEMORY
10331 && ((modifier == EXPAND_CONST_ADDRESS
10332 || modifier == EXPAND_INITIALIZER)
10333 ? STRICT_ALIGNMENT
10334 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10335 || (bitpos % BITS_PER_UNIT != 0)))
10336 /* If the type and the field are a constant size and the
10337 size of the type isn't the same size as the bitfield,
10338 we must use bitfield operations. */
10339 || (bitsize >= 0
10340 && TYPE_SIZE (TREE_TYPE (exp))
10341 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10342 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10343 bitsize)))
10344 {
10345 machine_mode ext_mode = mode;
10346
10347 if (ext_mode == BLKmode
10348 && ! (target != 0 && MEM_P (op0)
10349 && MEM_P (target)
10350 && bitpos % BITS_PER_UNIT == 0))
10351 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10352
10353 if (ext_mode == BLKmode)
10354 {
10355 if (target == 0)
10356 target = assign_temp (type, 1, 1);
10357
10358 /* ??? Unlike the similar test a few lines below, this one is
10359 very likely obsolete. */
10360 if (bitsize == 0)
10361 return target;
10362
10363 /* In this case, BITPOS must start at a byte boundary and
10364 TARGET, if specified, must be a MEM. */
10365 gcc_assert (MEM_P (op0)
10366 && (!target || MEM_P (target))
10367 && !(bitpos % BITS_PER_UNIT));
10368
10369 emit_block_move (target,
10370 adjust_address (op0, VOIDmode,
10371 bitpos / BITS_PER_UNIT),
10372 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10373 / BITS_PER_UNIT),
10374 (modifier == EXPAND_STACK_PARM
10375 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10376
10377 return target;
10378 }
10379
10380 /* If we have nothing to extract, the result will be 0 for targets
10381 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10382 return 0 for the sake of consistency, as reading a zero-sized
10383 bitfield is valid in Ada and the value is fully specified. */
10384 if (bitsize == 0)
10385 return const0_rtx;
10386
10387 op0 = validize_mem (op0);
10388
10389 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10390 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10391
10392 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10393 (modifier == EXPAND_STACK_PARM
10394 ? NULL_RTX : target),
10395 ext_mode, ext_mode);
10396
10397 /* If the result is a record type and BITSIZE is narrower than
10398 the mode of OP0, an integral mode, and this is a big endian
10399 machine, we must put the field into the high-order bits. */
10400 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10401 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10402 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10403 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10404 GET_MODE_BITSIZE (GET_MODE (op0))
10405 - bitsize, op0, 1);
10406
10407 /* If the result type is BLKmode, store the data into a temporary
10408 of the appropriate type, but with the mode corresponding to the
10409 mode for the data we have (op0's mode). */
10410 if (mode == BLKmode)
10411 {
10412 rtx new_rtx
10413 = assign_stack_temp_for_type (ext_mode,
10414 GET_MODE_BITSIZE (ext_mode),
10415 type);
10416 emit_move_insn (new_rtx, op0);
10417 op0 = copy_rtx (new_rtx);
10418 PUT_MODE (op0, BLKmode);
10419 }
10420
10421 return op0;
10422 }
10423
10424 /* If the result is BLKmode, use that to access the object
10425 now as well. */
10426 if (mode == BLKmode)
10427 mode1 = BLKmode;
10428
10429 /* Get a reference to just this component. */
10430 if (modifier == EXPAND_CONST_ADDRESS
10431 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10432 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10433 else
10434 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10435
10436 if (op0 == orig_op0)
10437 op0 = copy_rtx (op0);
10438
10439 set_mem_attributes (op0, exp, 0);
10440
10441 if (REG_P (XEXP (op0, 0)))
10442 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10443
10444 /* If op0 is a temporary because the original expression was forced
10445 to memory, clear MEM_EXPR so that the original expression cannot
10446 be marked as addressable through MEM_EXPR of the temporary. */
10447 if (clear_mem_expr)
10448 set_mem_expr (op0, NULL_TREE);
10449
10450 MEM_VOLATILE_P (op0) |= volatilep;
10451 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10452 || modifier == EXPAND_CONST_ADDRESS
10453 || modifier == EXPAND_INITIALIZER)
10454 return op0;
10455
10456 if (target == 0)
10457 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10458
10459 convert_move (target, op0, unsignedp);
10460 return target;
10461 }
10462
10463 case OBJ_TYPE_REF:
10464 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10465
10466 case CALL_EXPR:
10467 /* All valid uses of __builtin_va_arg_pack () are removed during
10468 inlining. */
10469 if (CALL_EXPR_VA_ARG_PACK (exp))
10470 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10471 {
10472 tree fndecl = get_callee_fndecl (exp), attr;
10473
10474 if (fndecl
10475 && (attr = lookup_attribute ("error",
10476 DECL_ATTRIBUTES (fndecl))) != NULL)
10477 error ("%Kcall to %qs declared with attribute error: %s",
10478 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10479 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10480 if (fndecl
10481 && (attr = lookup_attribute ("warning",
10482 DECL_ATTRIBUTES (fndecl))) != NULL)
10483 warning_at (tree_nonartificial_location (exp),
10484 0, "%Kcall to %qs declared with attribute warning: %s",
10485 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10486 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10487
10488 /* Check for a built-in function. */
10489 if (fndecl && DECL_BUILT_IN (fndecl))
10490 {
10491 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10492 if (CALL_WITH_BOUNDS_P (exp))
10493 return expand_builtin_with_bounds (exp, target, subtarget,
10494 tmode, ignore);
10495 else
10496 return expand_builtin (exp, target, subtarget, tmode, ignore);
10497 }
10498 }
10499 return expand_call (exp, target, ignore);
10500
10501 case VIEW_CONVERT_EXPR:
10502 op0 = NULL_RTX;
10503
10504 /* If we are converting to BLKmode, try to avoid an intermediate
10505 temporary by fetching an inner memory reference. */
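/* E.g. a VIEW_CONVERT_EXPR from a DFmode structure field to a small
   character array type can then read the bytes directly from the
   field's location in memory rather than going through a stack
   temporary.  */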
10506 if (mode == BLKmode
10507 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10508 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10509 && handled_component_p (treeop0))
10510 {
10511 machine_mode mode1;
10512 HOST_WIDE_INT bitsize, bitpos;
10513 tree offset;
10514 int unsignedp;
10515 int volatilep = 0;
10516 tree tem
10517 = get_inner_reference (treeop0, &bitsize, &bitpos,
10518 &offset, &mode1, &unsignedp, &volatilep,
10519 true);
10520 rtx orig_op0;
10521
10522 /* ??? We should work harder and deal with non-zero offsets. */
10523 if (!offset
10524 && (bitpos % BITS_PER_UNIT) == 0
10525 && bitsize >= 0
10526 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10527 {
10528 /* See the normal_inner_ref case for the rationale. */
10529 orig_op0
10530 = expand_expr_real (tem,
10531 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10532 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10533 != INTEGER_CST)
10534 && modifier != EXPAND_STACK_PARM
10535 ? target : NULL_RTX),
10536 VOIDmode,
10537 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10538 NULL, true);
10539
10540 if (MEM_P (orig_op0))
10541 {
10542 op0 = orig_op0;
10543
10544 /* Get a reference to just this component. */
10545 if (modifier == EXPAND_CONST_ADDRESS
10546 || modifier == EXPAND_SUM
10547 || modifier == EXPAND_INITIALIZER)
10548 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10549 else
10550 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10551
10552 if (op0 == orig_op0)
10553 op0 = copy_rtx (op0);
10554
10555 set_mem_attributes (op0, treeop0, 0);
10556 if (REG_P (XEXP (op0, 0)))
10557 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10558
10559 MEM_VOLATILE_P (op0) |= volatilep;
10560 }
10561 }
10562 }
10563
10564 if (!op0)
10565 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10566 NULL, inner_reference_p);
10567
10568 /* If the input and output modes are both the same, we are done. */
10569 if (mode == GET_MODE (op0))
10570 ;
10571 /* If neither mode is BLKmode, and both modes are the same size
10572 then we can use gen_lowpart. */
10573 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10574 && (GET_MODE_PRECISION (mode)
10575 == GET_MODE_PRECISION (GET_MODE (op0)))
10576 && !COMPLEX_MODE_P (GET_MODE (op0)))
10577 {
10578 if (GET_CODE (op0) == SUBREG)
10579 op0 = force_reg (GET_MODE (op0), op0);
10580 temp = gen_lowpart_common (mode, op0);
10581 if (temp)
10582 op0 = temp;
10583 else
10584 {
10585 if (!REG_P (op0) && !MEM_P (op0))
10586 op0 = force_reg (GET_MODE (op0), op0);
10587 op0 = gen_lowpart (mode, op0);
10588 }
10589 }
10590 /* If both types are integral, convert from one mode to the other. */
10591 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10592 op0 = convert_modes (mode, GET_MODE (op0), op0,
10593 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10594 /* If the output type is a bit-field type, do an extraction. */
10595 else if (reduce_bit_field)
10596 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10597 TYPE_UNSIGNED (type), NULL_RTX,
10598 mode, mode);
10599 /* As a last resort, spill op0 to memory, and reload it in a
10600 different mode. */
10601 else if (!MEM_P (op0))
10602 {
10603 /* If the operand is not a MEM, force it into memory. Since we
10604 are going to be changing the mode of the MEM, don't call
10605 force_const_mem for constants because we don't allow pool
10606 constants to change mode. */
10607 tree inner_type = TREE_TYPE (treeop0);
10608
10609 gcc_assert (!TREE_ADDRESSABLE (exp));
10610
10611 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10612 target
10613 = assign_stack_temp_for_type
10614 (TYPE_MODE (inner_type),
10615 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10616
10617 emit_move_insn (target, op0);
10618 op0 = target;
10619 }
10620
10621 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10622 output type is such that the operand is known to be aligned, indicate
10623 that it is. Otherwise, we need only be concerned about alignment for
10624 non-BLKmode results. */
10625 if (MEM_P (op0))
10626 {
10627 enum insn_code icode;
10628
10629 if (TYPE_ALIGN_OK (type))
10630 {
10631 /* ??? Copying the MEM without substantially changing it might
10632 run afoul of the code handling volatile memory references in
10633 store_expr, which assumes that TARGET is returned unmodified
10634 if it has been used. */
10635 op0 = copy_rtx (op0);
10636 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10637 }
10638 else if (modifier != EXPAND_WRITE
10639 && modifier != EXPAND_MEMORY
10640 && !inner_reference_p
10641 && mode != BLKmode
10642 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10643 {
10644 /* If the target does have special handling for unaligned
10645 loads of this mode, then use them.  */
10646 if ((icode = optab_handler (movmisalign_optab, mode))
10647 != CODE_FOR_nothing)
10648 {
10649 rtx reg, insn;
10650
10651 op0 = adjust_address (op0, mode, 0);
10652 /* We've already validated the memory, and we're creating a
10653 new pseudo destination. The predicates really can't
10654 fail. */
10655 reg = gen_reg_rtx (mode);
10656
10657 /* Nor can the insn generator. */
10658 insn = GEN_FCN (icode) (reg, op0);
10659 emit_insn (insn);
10660 return reg;
10661 }
10662 else if (STRICT_ALIGNMENT)
10663 {
10664 tree inner_type = TREE_TYPE (treeop0);
10665 HOST_WIDE_INT temp_size
10666 = MAX (int_size_in_bytes (inner_type),
10667 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10668 rtx new_rtx
10669 = assign_stack_temp_for_type (mode, temp_size, type);
10670 rtx new_with_op0_mode
10671 = adjust_address (new_rtx, GET_MODE (op0), 0);
10672
10673 gcc_assert (!TREE_ADDRESSABLE (exp));
10674
10675 if (GET_MODE (op0) == BLKmode)
10676 emit_block_move (new_with_op0_mode, op0,
10677 GEN_INT (GET_MODE_SIZE (mode)),
10678 (modifier == EXPAND_STACK_PARM
10679 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10680 else
10681 emit_move_insn (new_with_op0_mode, op0);
10682
10683 op0 = new_rtx;
10684 }
10685 }
10686
10687 op0 = adjust_address (op0, mode, 0);
10688 }
10689
10690 return op0;
10691
10692 case MODIFY_EXPR:
10693 {
10694 tree lhs = treeop0;
10695 tree rhs = treeop1;
10696 gcc_assert (ignore);
10697
10698 /* Check for |= or &= of a bitfield of size 1 into another bitfield
10699 of size 1. In this case, (unless we need the result of the
10700 assignment) we can do this more efficiently with a
10701 test followed by an assignment, if necessary.
10702
10703 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10704 things change so we do, this code should be enhanced to
10705 support it. */
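/* For instance, with one-bit bitfields A and B in the same structure S,
   the statement S.A |= S.B is expanded roughly as if it were written
     if (S.B) S.A = 1;
   and S.A &= S.B as if it were
     if (!S.B) S.A = 0;
   avoiding a read-modify-write of the destination field.  */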
10706 if (TREE_CODE (lhs) == COMPONENT_REF
10707 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10708 || TREE_CODE (rhs) == BIT_AND_EXPR)
10709 && TREE_OPERAND (rhs, 0) == lhs
10710 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10711 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10712 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10713 {
10714 rtx_code_label *label = gen_label_rtx ();
10715 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10716 do_jump (TREE_OPERAND (rhs, 1),
10717 value ? label : 0,
10718 value ? 0 : label, -1);
10719 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10720 false);
10721 do_pending_stack_adjust ();
10722 emit_label (label);
10723 return const0_rtx;
10724 }
10725
10726 expand_assignment (lhs, rhs, false);
10727 return const0_rtx;
10728 }
10729
10730 case ADDR_EXPR:
10731 return expand_expr_addr_expr (exp, target, tmode, modifier);
10732
10733 case REALPART_EXPR:
10734 op0 = expand_normal (treeop0);
10735 return read_complex_part (op0, false);
10736
10737 case IMAGPART_EXPR:
10738 op0 = expand_normal (treeop0);
10739 return read_complex_part (op0, true);
10740
10741 case RETURN_EXPR:
10742 case LABEL_EXPR:
10743 case GOTO_EXPR:
10744 case SWITCH_EXPR:
10745 case ASM_EXPR:
10746 /* Expanded in cfgexpand.c. */
10747 gcc_unreachable ();
10748
10749 case TRY_CATCH_EXPR:
10750 case CATCH_EXPR:
10751 case EH_FILTER_EXPR:
10752 case TRY_FINALLY_EXPR:
10753 /* Lowered by tree-eh.c. */
10754 gcc_unreachable ();
10755
10756 case WITH_CLEANUP_EXPR:
10757 case CLEANUP_POINT_EXPR:
10758 case TARGET_EXPR:
10759 case CASE_LABEL_EXPR:
10760 case VA_ARG_EXPR:
10761 case BIND_EXPR:
10762 case INIT_EXPR:
10763 case CONJ_EXPR:
10764 case COMPOUND_EXPR:
10765 case PREINCREMENT_EXPR:
10766 case PREDECREMENT_EXPR:
10767 case POSTINCREMENT_EXPR:
10768 case POSTDECREMENT_EXPR:
10769 case LOOP_EXPR:
10770 case EXIT_EXPR:
10771 case COMPOUND_LITERAL_EXPR:
10772 /* Lowered by gimplify.c. */
10773 gcc_unreachable ();
10774
10775 case FDESC_EXPR:
10776 /* Function descriptors are not valid except for as
10777 initialization constants, and should not be expanded. */
10778 gcc_unreachable ();
10779
10780 case WITH_SIZE_EXPR:
10781 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10782 have pulled out the size to use in whatever context it needed. */
10783 return expand_expr_real (treeop0, original_target, tmode,
10784 modifier, alt_rtl, inner_reference_p);
10785
10786 default:
10787 return expand_expr_real_2 (&ops, target, tmode, modifier);
10788 }
10789 }
10790 \f
10791 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10792 signedness of TYPE), possibly returning the result in TARGET. */
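/* For an unsigned TYPE this is a mask: e.g. reducing to a precision of
   3 bits ANDs the value with 7.  For a signed TYPE the value is shifted
   left and then arithmetically right by the number of excess bits, so
   the field's sign bit is propagated.  Constant values are simply
   rebuilt with build_int_cst_type.  */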
10793 static rtx
10794 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10795 {
10796 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10797 if (target && GET_MODE (target) != GET_MODE (exp))
10798 target = 0;
10799 /* For constant values, reduce using build_int_cst_type. */
10800 if (CONST_INT_P (exp))
10801 {
10802 HOST_WIDE_INT value = INTVAL (exp);
10803 tree t = build_int_cst_type (type, value);
10804 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10805 }
10806 else if (TYPE_UNSIGNED (type))
10807 {
10808 machine_mode mode = GET_MODE (exp);
10809 rtx mask = immed_wide_int_const
10810 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10811 return expand_and (mode, exp, mask, target);
10812 }
10813 else
10814 {
10815 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10816 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10817 exp, count, target, 0);
10818 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10819 exp, count, target, 0);
10820 }
10821 }
10822 \f
10823 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10824 when applied to the address of EXP produces an address known to be
10825 aligned more than BIGGEST_ALIGNMENT. */
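/* Concretely, the offsets recognized here look like
     (-(T) &EXP) & (ALIGN - 1)
   for some integer type T, possibly wrapped in conversions, where ALIGN
   is a power of two and ALIGN - 1 exceeds BIGGEST_ALIGNMENT measured in
   bytes; adding such an offset to &EXP rounds the address up to an
   ALIGN-byte boundary.  */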
10826
10827 static int
10828 is_aligning_offset (const_tree offset, const_tree exp)
10829 {
10830 /* Strip off any conversions. */
10831 while (CONVERT_EXPR_P (offset))
10832 offset = TREE_OPERAND (offset, 0);
10833
10834 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10835 a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
10836 if (TREE_CODE (offset) != BIT_AND_EXPR
10837 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10838 || compare_tree_int (TREE_OPERAND (offset, 1),
10839 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10840 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10841 return 0;
10842
10843 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10844 It must be NEGATE_EXPR. Then strip any more conversions. */
10845 offset = TREE_OPERAND (offset, 0);
10846 while (CONVERT_EXPR_P (offset))
10847 offset = TREE_OPERAND (offset, 0);
10848
10849 if (TREE_CODE (offset) != NEGATE_EXPR)
10850 return 0;
10851
10852 offset = TREE_OPERAND (offset, 0);
10853 while (CONVERT_EXPR_P (offset))
10854 offset = TREE_OPERAND (offset, 0);
10855
10856 /* This must now be the address of EXP. */
10857 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10858 }
10859 \f
10860 /* Return the tree node if ARG corresponds to a string constant, or zero
10861 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10862 in bytes within the string that ARG is accessing. The type of the
10863 offset will be `sizetype'. */
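/* For example, given &"hello"[2] this returns the STRING_CST "hello"
   and sets *PTR_OFFSET to 2; given the POINTER_PLUS_EXPR form
   &"hello" + N it returns the STRING_CST with *PTR_OFFSET set to N.  */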
10864
10865 tree
10866 string_constant (tree arg, tree *ptr_offset)
10867 {
10868 tree array, offset, lower_bound;
10869 STRIP_NOPS (arg);
10870
10871 if (TREE_CODE (arg) == ADDR_EXPR)
10872 {
10873 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10874 {
10875 *ptr_offset = size_zero_node;
10876 return TREE_OPERAND (arg, 0);
10877 }
10878 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10879 {
10880 array = TREE_OPERAND (arg, 0);
10881 offset = size_zero_node;
10882 }
10883 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10884 {
10885 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10886 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10887 if (TREE_CODE (array) != STRING_CST
10888 && TREE_CODE (array) != VAR_DECL)
10889 return 0;
10890
10891 /* Check if the array has a nonzero lower bound. */
10892 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10893 if (!integer_zerop (lower_bound))
10894 {
10895 /* If the offset and base aren't both constants, return 0. */
10896 if (TREE_CODE (lower_bound) != INTEGER_CST)
10897 return 0;
10898 if (TREE_CODE (offset) != INTEGER_CST)
10899 return 0;
10900 /* Adjust offset by the lower bound. */
10901 offset = size_diffop (fold_convert (sizetype, offset),
10902 fold_convert (sizetype, lower_bound));
10903 }
10904 }
10905 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10906 {
10907 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10908 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10909 if (TREE_CODE (array) != ADDR_EXPR)
10910 return 0;
10911 array = TREE_OPERAND (array, 0);
10912 if (TREE_CODE (array) != STRING_CST
10913 && TREE_CODE (array) != VAR_DECL)
10914 return 0;
10915 }
10916 else
10917 return 0;
10918 }
10919 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10920 {
10921 tree arg0 = TREE_OPERAND (arg, 0);
10922 tree arg1 = TREE_OPERAND (arg, 1);
10923
10924 STRIP_NOPS (arg0);
10925 STRIP_NOPS (arg1);
10926
10927 if (TREE_CODE (arg0) == ADDR_EXPR
10928 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10929 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10930 {
10931 array = TREE_OPERAND (arg0, 0);
10932 offset = arg1;
10933 }
10934 else if (TREE_CODE (arg1) == ADDR_EXPR
10935 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10936 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10937 {
10938 array = TREE_OPERAND (arg1, 0);
10939 offset = arg0;
10940 }
10941 else
10942 return 0;
10943 }
10944 else
10945 return 0;
10946
10947 if (TREE_CODE (array) == STRING_CST)
10948 {
10949 *ptr_offset = fold_convert (sizetype, offset);
10950 return array;
10951 }
10952 else if (TREE_CODE (array) == VAR_DECL
10953 || TREE_CODE (array) == CONST_DECL)
10954 {
10955 int length;
10956 tree init = ctor_for_folding (array);
10957
10958 /* Variables initialized to string literals can be handled too. */
10959 if (init == error_mark_node
10960 || !init
10961 || TREE_CODE (init) != STRING_CST)
10962 return 0;
10963
10964 /* Avoid const char foo[4] = "abcde"; */
10965 if (DECL_SIZE_UNIT (array) == NULL_TREE
10966 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10967 || (length = TREE_STRING_LENGTH (init)) <= 0
10968 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10969 return 0;
10970
10971 /* If the variable is bigger than the string literal, OFFSET must be
10972 constant and inside the bounds of the string literal.  */
10973 offset = fold_convert (sizetype, offset);
10974 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10975 && (! tree_fits_uhwi_p (offset)
10976 || compare_tree_int (offset, length) >= 0))
10977 return 0;
10978
10979 *ptr_offset = offset;
10980 return init;
10981 }
10982
10983 return 0;
10984 }
10985 \f
10986 /* Generate code to calculate OPS, an exploded expression,
10987 using a store-flag instruction and return an rtx for the result.
10988 OPS reflects a comparison.
10989
10990 If TARGET is nonzero, store the result there if convenient.
10991
10992 Return zero if there is no suitable set-flag instruction
10993 available on this machine.
10994
10995 Once expand_expr has been called on the arguments of the comparison,
10996 we are committed to doing the store flag, since it is not safe to
10997 re-evaluate the expression. We emit the store-flag insn by calling
10998 emit_store_flag, but only expand the arguments if we have a reason
10999 to believe that emit_store_flag will be successful. If we think that
11000 it will, but it isn't, we have to simulate the store-flag with a
11001 set/jump/set sequence. */
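/* For example, on a target with a suitable cstore pattern, R = (A < B)
   can be emitted as a single store-flag insn that sets R to 0 or 1,
   rather than the set/jump/set fallback described above.  */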
11002
11003 static rtx
11004 do_store_flag (sepops ops, rtx target, machine_mode mode)
11005 {
11006 enum rtx_code code;
11007 tree arg0, arg1, type;
11008 tree tem;
11009 machine_mode operand_mode;
11010 int unsignedp;
11011 rtx op0, op1;
11012 rtx subtarget = target;
11013 location_t loc = ops->location;
11014
11015 arg0 = ops->op0;
11016 arg1 = ops->op1;
11017
11018 /* Don't crash if the comparison was erroneous. */
11019 if (arg0 == error_mark_node || arg1 == error_mark_node)
11020 return const0_rtx;
11021
11022 type = TREE_TYPE (arg0);
11023 operand_mode = TYPE_MODE (type);
11024 unsignedp = TYPE_UNSIGNED (type);
11025
11026 /* We won't bother with BLKmode store-flag operations because it would mean
11027 passing a lot of information to emit_store_flag. */
11028 if (operand_mode == BLKmode)
11029 return 0;
11030
11031 /* We won't bother with store-flag operations involving function pointers
11032 when function pointers must be canonicalized before comparisons. */
11033 #ifdef HAVE_canonicalize_funcptr_for_compare
11034 if (HAVE_canonicalize_funcptr_for_compare
11035 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
11036 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
11037 == FUNCTION_TYPE))
11038 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
11039 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
11040 == FUNCTION_TYPE))))
11041 return 0;
11042 #endif
11043
11044 STRIP_NOPS (arg0);
11045 STRIP_NOPS (arg1);
11046
11047 /* For vector-typed comparisons, emit code to generate the desired
11048 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
11049 expander for this. */
11050 if (TREE_CODE (ops->type) == VECTOR_TYPE)
11051 {
11052 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11053 tree if_true = constant_boolean_node (true, ops->type);
11054 tree if_false = constant_boolean_node (false, ops->type);
11055 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
11056 }
11057
11058 /* Get the rtx comparison code to use.  We know that OPS is a comparison
11059 operation of some type. Some comparisons against 1 and -1 can be
11060 converted to comparisons with zero. Do so here so that the tests
11061 below will be aware that we have a comparison with zero. These
11062 tests will not catch constants in the first operand, but constants
11063 are rarely passed as the first operand. */
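/* For example, a signed X < 1 is handled as X <= 0, an unsigned X >= 1
   as X > 0 (GTU), and a signed X > -1 as X >= 0.  */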
11064
11065 switch (ops->code)
11066 {
11067 case EQ_EXPR:
11068 code = EQ;
11069 break;
11070 case NE_EXPR:
11071 code = NE;
11072 break;
11073 case LT_EXPR:
11074 if (integer_onep (arg1))
11075 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11076 else
11077 code = unsignedp ? LTU : LT;
11078 break;
11079 case LE_EXPR:
11080 if (! unsignedp && integer_all_onesp (arg1))
11081 arg1 = integer_zero_node, code = LT;
11082 else
11083 code = unsignedp ? LEU : LE;
11084 break;
11085 case GT_EXPR:
11086 if (! unsignedp && integer_all_onesp (arg1))
11087 arg1 = integer_zero_node, code = GE;
11088 else
11089 code = unsignedp ? GTU : GT;
11090 break;
11091 case GE_EXPR:
11092 if (integer_onep (arg1))
11093 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11094 else
11095 code = unsignedp ? GEU : GE;
11096 break;
11097
11098 case UNORDERED_EXPR:
11099 code = UNORDERED;
11100 break;
11101 case ORDERED_EXPR:
11102 code = ORDERED;
11103 break;
11104 case UNLT_EXPR:
11105 code = UNLT;
11106 break;
11107 case UNLE_EXPR:
11108 code = UNLE;
11109 break;
11110 case UNGT_EXPR:
11111 code = UNGT;
11112 break;
11113 case UNGE_EXPR:
11114 code = UNGE;
11115 break;
11116 case UNEQ_EXPR:
11117 code = UNEQ;
11118 break;
11119 case LTGT_EXPR:
11120 code = LTGT;
11121 break;
11122
11123 default:
11124 gcc_unreachable ();
11125 }
11126
11127 /* Put a constant second. */
11128 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11129 || TREE_CODE (arg0) == FIXED_CST)
11130 {
11131 tem = arg0; arg0 = arg1; arg1 = tem;
11132 code = swap_condition (code);
11133 }
11134
11135 /* If this is an equality or inequality test of a single bit, we can
11136 do this by shifting the bit being tested to the low-order bit and
11137 masking the result with the constant 1. If the condition was EQ,
11138 we xor it with 1. This does not require an scc insn and is faster
11139 than an scc insn even if we have it.
11140
11141 The code to make this transformation was moved into fold_single_bit_test,
11142 so we just call into the folder and expand its result. */
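/* For example, (X & 8) != 0 is expanded as (X >> 3) & 1, and
   (X & 8) == 0 as ((X >> 3) & 1) ^ 1; the shift count depends on which
   single bit the mask selects.  */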
11143
11144 if ((code == NE || code == EQ)
11145 && integer_zerop (arg1)
11146 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11147 {
11148 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11149 if (srcstmt
11150 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11151 {
11152 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11153 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11154 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11155 gimple_assign_rhs1 (srcstmt),
11156 gimple_assign_rhs2 (srcstmt));
11157 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11158 if (temp)
11159 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11160 }
11161 }
11162
11163 if (! get_subtarget (target)
11164 || GET_MODE (subtarget) != operand_mode)
11165 subtarget = 0;
11166
11167 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11168
11169 if (target == 0)
11170 target = gen_reg_rtx (mode);
11171
11172 /* Try a cstore if possible. */
11173 return emit_store_flag_force (target, code, op0, op1,
11174 operand_mode, unsignedp,
11175 (TYPE_PRECISION (ops->type) == 1
11176 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11177 }
11178 \f
11179
11180 /* Stubs in case we haven't got a casesi insn. */
11181 #ifndef HAVE_casesi
11182 # define HAVE_casesi 0
11183 # define gen_casesi(a, b, c, d, e) (0)
11184 # define CODE_FOR_casesi CODE_FOR_nothing
11185 #endif
11186
11187 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11188 0 otherwise (i.e. if there is no casesi instruction).
11189
11190 DEFAULT_PROBABILITY is the probability of jumping to the default
11191 label. */
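/* Note that when the index is wider than SImode and a default label is
   given, the range check is done in the original mode before the index
   is truncated to SImode, so e.g. a 64-bit index of 0x100000001 is not
   mistaken for the in-range value 1.  */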
11192 int
11193 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11194 rtx table_label, rtx default_label, rtx fallback_label,
11195 int default_probability)
11196 {
11197 struct expand_operand ops[5];
11198 machine_mode index_mode = SImode;
11199 rtx op1, op2, index;
11200
11201 if (! HAVE_casesi)
11202 return 0;
11203
11204 /* Convert the index to SImode. */
11205 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11206 {
11207 machine_mode omode = TYPE_MODE (index_type);
11208 rtx rangertx = expand_normal (range);
11209
11210 /* We must handle the endpoints in the original mode. */
11211 index_expr = build2 (MINUS_EXPR, index_type,
11212 index_expr, minval);
11213 minval = integer_zero_node;
11214 index = expand_normal (index_expr);
11215 if (default_label)
11216 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11217 omode, 1, default_label,
11218 default_probability);
11219 /* Now we can safely truncate. */
11220 index = convert_to_mode (index_mode, index, 0);
11221 }
11222 else
11223 {
11224 if (TYPE_MODE (index_type) != index_mode)
11225 {
11226 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11227 index_expr = fold_convert (index_type, index_expr);
11228 }
11229
11230 index = expand_normal (index_expr);
11231 }
11232
11233 do_pending_stack_adjust ();
11234
11235 op1 = expand_normal (minval);
11236 op2 = expand_normal (range);
11237
11238 create_input_operand (&ops[0], index, index_mode);
11239 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11240 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11241 create_fixed_operand (&ops[3], table_label);
11242 create_fixed_operand (&ops[4], (default_label
11243 ? default_label
11244 : fallback_label));
11245 expand_jump_insn (CODE_FOR_casesi, 5, ops);
11246 return 1;
11247 }
11248
11249 /* Attempt to generate a tablejump instruction; same concept. */
11250 #ifndef HAVE_tablejump
11251 #define HAVE_tablejump 0
11252 #define gen_tablejump(x, y) (0)
11253 #endif
11254
11255 /* Subroutine of the next function.
11256
11257 INDEX is the value being switched on, with the lowest value
11258 in the table already subtracted.
11259 MODE is its expected mode (needed if INDEX is constant).
11260 RANGE is the length of the jump table.
11261 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11262
11263 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11264 index value is out of range.
11265 DEFAULT_PROBABILITY is the probability of jumping to
11266 the default label. */
11267
11268 static void
11269 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11270 rtx default_label, int default_probability)
11271 {
11272 rtx temp, vector;
11273
11274 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11275 cfun->cfg->max_jumptable_ents = INTVAL (range);
11276
11277 /* Do an unsigned comparison (in the proper mode) between the index
11278 expression and the value which represents the length of the range.
11279 Since we just finished subtracting the lower bound of the range
11280 from the index expression, this comparison allows us to simultaneously
11281 check that the original index expression value is both greater than
11282 or equal to the minimum value of the range and less than or equal to
11283 the maximum value of the range. */
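/* For example, with case values 5 through 9 the caller has already
   subtracted 5, so the single unsigned INDEX > 4 test routes both
   original values below 5 (which wrapped around to large unsigned
   values) and values above 9 to the default label.  */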
11284
11285 if (default_label)
11286 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11287 default_label, default_probability);
11288
11289
11290 /* If index is in range, it must fit in Pmode.
11291 Convert to Pmode so we can index with it. */
11292 if (mode != Pmode)
11293 index = convert_to_mode (Pmode, index, 1);
11294
11295 /* Don't let a MEM slip through, because then the INDEX that comes
11296 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11297 and break_out_memory_refs will go to work on it and mess it up. */
11298 #ifdef PIC_CASE_VECTOR_ADDRESS
11299 if (flag_pic && !REG_P (index))
11300 index = copy_to_mode_reg (Pmode, index);
11301 #endif
11302
11303 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11304 GET_MODE_SIZE, because this indicates how large insns are. The other
11305 uses should all be Pmode, because they are addresses. This code
11306 could fail if addresses and insns are not the same size. */
11307 index = simplify_gen_binary (MULT, Pmode, index,
11308 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11309 Pmode));
11310 index = simplify_gen_binary (PLUS, Pmode, index,
11311 gen_rtx_LABEL_REF (Pmode, table_label));
11312
11313 #ifdef PIC_CASE_VECTOR_ADDRESS
11314 if (flag_pic)
11315 index = PIC_CASE_VECTOR_ADDRESS (index);
11316 else
11317 #endif
11318 index = memory_address (CASE_VECTOR_MODE, index);
11319 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11320 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11321 convert_move (temp, vector, 0);
11322
11323 emit_jump_insn (gen_tablejump (temp, table_label));
11324
11325 /* If we are generating PIC code or if the table is PC-relative, the
11326 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11327 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11328 emit_barrier ();
11329 }
11330
11331 int
11332 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11333 rtx table_label, rtx default_label, int default_probability)
11334 {
11335 rtx index;
11336
11337 if (! HAVE_tablejump)
11338 return 0;
11339
11340 index_expr = fold_build2 (MINUS_EXPR, index_type,
11341 fold_convert (index_type, index_expr),
11342 fold_convert (index_type, minval));
11343 index = expand_normal (index_expr);
11344 do_pending_stack_adjust ();
11345
11346 do_tablejump (index, TYPE_MODE (index_type),
11347 convert_modes (TYPE_MODE (index_type),
11348 TYPE_MODE (TREE_TYPE (range)),
11349 expand_normal (range),
11350 TYPE_UNSIGNED (TREE_TYPE (range))),
11351 table_label, default_label, default_probability);
11352 return 1;
11353 }
11354
11355 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11356 static rtx
11357 const_vector_from_tree (tree exp)
11358 {
11359 rtvec v;
11360 unsigned i;
11361 int units;
11362 tree elt;
11363 machine_mode inner, mode;
11364
11365 mode = TYPE_MODE (TREE_TYPE (exp));
11366
11367 if (initializer_zerop (exp))
11368 return CONST0_RTX (mode);
11369
11370 units = GET_MODE_NUNITS (mode);
11371 inner = GET_MODE_INNER (mode);
11372
11373 v = rtvec_alloc (units);
11374
11375 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11376 {
11377 elt = VECTOR_CST_ELT (exp, i);
11378
11379 if (TREE_CODE (elt) == REAL_CST)
11380 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11381 inner);
11382 else if (TREE_CODE (elt) == FIXED_CST)
11383 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11384 inner);
11385 else
11386 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11387 }
11388
11389 return gen_rtx_CONST_VECTOR (mode, v);
11390 }
11391
11392 /* Build a decl for a personality function given a language prefix. */
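/* For example, with LANG "gxx" and DWARF-2 unwind info this declares
   __gxx_personality_v0; with SJLJ unwinding it declares
   __gxx_personality_sj0.  */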
11393
11394 tree
11395 build_personality_function (const char *lang)
11396 {
11397 const char *unwind_and_version;
11398 tree decl, type;
11399 char *name;
11400
11401 switch (targetm_common.except_unwind_info (&global_options))
11402 {
11403 case UI_NONE:
11404 return NULL;
11405 case UI_SJLJ:
11406 unwind_and_version = "_sj0";
11407 break;
11408 case UI_DWARF2:
11409 case UI_TARGET:
11410 unwind_and_version = "_v0";
11411 break;
11412 case UI_SEH:
11413 unwind_and_version = "_seh0";
11414 break;
11415 default:
11416 gcc_unreachable ();
11417 }
11418
11419 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11420
11421 type = build_function_type_list (integer_type_node, integer_type_node,
11422 long_long_unsigned_type_node,
11423 ptr_type_node, ptr_type_node, NULL_TREE);
11424 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11425 get_identifier (name), type);
11426 DECL_ARTIFICIAL (decl) = 1;
11427 DECL_EXTERNAL (decl) = 1;
11428 TREE_PUBLIC (decl) = 1;
11429
11430 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11431 are the flags assigned by targetm.encode_section_info. */
11432 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11433
11434 return decl;
11435 }
11436
11437 /* Extracts the personality function of DECL and returns the corresponding
11438 libfunc. */
11439
11440 rtx
11441 get_personality_function (tree decl)
11442 {
11443 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11444 enum eh_personality_kind pk;
11445
11446 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11447 if (pk == eh_personality_none)
11448 return NULL;
11449
11450 if (!personality
11451 && pk == eh_personality_any)
11452 personality = lang_hooks.eh_personality ();
11453
11454 if (pk == eh_personality_lang)
11455 gcc_assert (personality != NULL_TREE);
11456
11457 return XEXP (DECL_RTL (personality), 0);
11458 }
11459
11460 /* Returns a tree for the size of EXP in bytes. */
11461
11462 static tree
11463 tree_expr_size (const_tree exp)
11464 {
11465 if (DECL_P (exp)
11466 && DECL_SIZE_UNIT (exp) != 0)
11467 return DECL_SIZE_UNIT (exp);
11468 else
11469 return size_in_bytes (TREE_TYPE (exp));
11470 }
11471
11472 /* Return an rtx for the size in bytes of the value of EXP. */
11473
11474 rtx
11475 expr_size (tree exp)
11476 {
11477 tree size;
11478
11479 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11480 size = TREE_OPERAND (exp, 1);
11481 else
11482 {
11483 size = tree_expr_size (exp);
11484 gcc_assert (size);
11485 gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11486 }
11487
11488 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11489 }
11490
11491 /* Return a wide integer for the size in bytes of the value of EXP, or -1
11492 if the size can vary or is larger than an integer. */
11493
11494 static HOST_WIDE_INT
11495 int_expr_size (tree exp)
11496 {
11497 tree size;
11498
11499 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11500 size = TREE_OPERAND (exp, 1);
11501 else
11502 {
11503 size = tree_expr_size (exp);
11504 gcc_assert (size);
11505 }
11506
11507 if (size == 0 || !tree_fits_shwi_p (size))
11508 return -1;
11509
11510 return tree_to_shwi (size);
11511 }
11512
11513 #include "gt-expr.h"