[gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "hash-set.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stringpool.h"
37 #include "stor-layout.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "flags.h"
41 #include "regs.h"
42 #include "hard-reg-set.h"
43 #include "except.h"
44 #include "function.h"
45 #include "insn-config.h"
46 #include "insn-attr.h"
47 #include "hashtab.h"
48 #include "statistics.h"
49 #include "real.h"
50 #include "fixed-value.h"
51 #include "expmed.h"
52 #include "dojump.h"
53 #include "explow.h"
54 #include "calls.h"
55 #include "emit-rtl.h"
56 #include "stmt.h"
57 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
58 #include "expr.h"
59 #include "insn-codes.h"
60 #include "optabs.h"
61 #include "libfuncs.h"
62 #include "recog.h"
63 #include "reload.h"
64 #include "typeclass.h"
65 #include "toplev.h"
66 #include "langhooks.h"
67 #include "intl.h"
68 #include "tm_p.h"
69 #include "tree-iterator.h"
70 #include "predict.h"
71 #include "dominance.h"
72 #include "cfg.h"
73 #include "basic-block.h"
74 #include "tree-ssa-alias.h"
75 #include "internal-fn.h"
76 #include "gimple-expr.h"
77 #include "is-a.h"
78 #include "gimple.h"
79 #include "gimple-ssa.h"
80 #include "hash-map.h"
81 #include "plugin-api.h"
82 #include "ipa-ref.h"
83 #include "cgraph.h"
84 #include "tree-ssanames.h"
85 #include "target.h"
86 #include "common/common-target.h"
87 #include "timevar.h"
88 #include "df.h"
89 #include "diagnostic.h"
90 #include "tree-ssa-live.h"
91 #include "tree-outof-ssa.h"
92 #include "target-globals.h"
93 #include "params.h"
94 #include "tree-ssa-address.h"
95 #include "cfgexpand.h"
96 #include "builtins.h"
97 #include "tree-chkp.h"
98 #include "rtl-chkp.h"
99 #include "ccmp.h"
100
101 #ifndef STACK_PUSH_CODE
102 #if STACK_GROWS_DOWNWARD
103 #define STACK_PUSH_CODE PRE_DEC
104 #else
105 #define STACK_PUSH_CODE PRE_INC
106 #endif
107 #endif
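/* Editorial note, not part of the original source: on a target whose
   stack grows downward a single-word push therefore expands to RTL
   along the lines of

     (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI <src>))

   i.e. the stack pointer is decremented before the store; an
   upward-growing stack uses PRE_INC instead.  */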
108
109
110 /* If this is nonzero, we do not bother generating VOLATILE
111 around volatile memory references, and we are willing to
112 output indirect addresses. If cse is to follow, we reject
113 indirect addresses so a useful potential cse is generated;
114 if it is used only once, instruction combination will produce
115 the same indirect address eventually. */
116 int cse_not_expected;
117
118 /* This structure is used by move_by_pieces to describe the move to
119 be performed. */
120 struct move_by_pieces_d
121 {
122 rtx to;			/* Destination MEM, or NULL_RTX when pushing onto the stack.  */
123 rtx to_addr;			/* Address currently used to store into TO.  */
124 int autinc_to;		/* Nonzero if TO_ADDR is an autoincrement address.  */
125 int explicit_inc_to;	/* -1 or +1 if explicit add insns step TO_ADDR.  */
126 rtx from;			/* Source MEM.  */
127 rtx from_addr;		/* Address currently used to load from FROM.  */
128 int autinc_from;		/* Nonzero if FROM_ADDR is an autoincrement address.  */
129 int explicit_inc_from;	/* -1 or +1 if explicit add insns step FROM_ADDR.  */
130 unsigned HOST_WIDE_INT len;	/* Number of bytes remaining to move.  */
131 HOST_WIDE_INT offset;	/* Current byte offset within the blocks.  */
132 int reverse;			/* Nonzero to move from the end towards the start.  */
133 };
134
135 /* This structure is used by store_by_pieces to describe the clear to
136 be performed. */
137
138 struct store_by_pieces_d
139 {
140 rtx to;			/* Destination MEM.  */
141 rtx to_addr;			/* Address currently used to store into TO.  */
142 int autinc_to;		/* Nonzero if TO_ADDR is an autoincrement address.  */
143 int explicit_inc_to;	/* -1 or +1 if explicit add insns step TO_ADDR.  */
144 unsigned HOST_WIDE_INT len;	/* Number of bytes remaining to store.  */
145 HOST_WIDE_INT offset;	/* Current byte offset within the block.  */
146 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);  /* Returns the value to store at a given offset, in a given mode.  */
147 void *constfundata;		/* Opaque argument passed to CONSTFUN.  */
148 int reverse;			/* Nonzero to store from the end towards the start.  */
149 };
150
151 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
152 struct move_by_pieces_d *);
153 static bool block_move_libcall_safe_for_call_parm (void);
154 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
155 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
156 unsigned HOST_WIDE_INT);
157 static tree emit_block_move_libcall_fn (int);
158 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
159 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
160 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
161 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
162 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
163 struct store_by_pieces_d *);
164 static tree clear_storage_libcall_fn (int);
165 static rtx_insn *compress_float_constant (rtx, rtx);
166 static rtx get_subtarget (rtx);
167 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
168 HOST_WIDE_INT, machine_mode,
169 tree, int, alias_set_type);
170 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
171 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
172 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
173 machine_mode, tree, alias_set_type, bool);
174
175 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
176
177 static int is_aligning_offset (const_tree, const_tree);
178 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
179 static rtx do_store_flag (sepops, rtx, machine_mode);
180 #ifdef PUSH_ROUNDING
181 static void emit_single_push_insn (machine_mode, rtx, tree);
182 #endif
183 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
184 static rtx const_vector_from_tree (tree);
185 static tree tree_expr_size (const_tree);
186 static HOST_WIDE_INT int_expr_size (tree);
187
188 \f
189 /* This is run to set up which modes can be used
190 directly in memory and to initialize the block move optab. It is run
191 at the beginning of compilation and when the target is reinitialized. */
192
193 void
194 init_expr_target (void)
195 {
196 rtx insn, pat;
197 machine_mode mode;
198 int num_clobbers;
199 rtx mem, mem1;
200 rtx reg;
201
202 /* Try indexing by frame ptr and try by stack ptr.
203 It is known that on the Convex the stack ptr isn't a valid index.
204 With luck, one or the other is valid on any machine. */
205 mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
206 mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
207
208 /* A scratch register we can modify in-place below to avoid
209 useless RTL allocations. */
210 reg = gen_rtx_REG (word_mode, FIRST_PSEUDO_REGISTER);
211
212 insn = rtx_alloc (INSN);
213 pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
214 PATTERN (insn) = pat;
215
216 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
217 mode = (machine_mode) ((int) mode + 1))
218 {
219 int regno;
220
221 direct_load[(int) mode] = direct_store[(int) mode] = 0;
222 PUT_MODE (mem, mode);
223 PUT_MODE (mem1, mode);
224
225 /* See if there is some register that can be used in this mode and
226 directly loaded or stored from memory. */
227
228 if (mode != VOIDmode && mode != BLKmode)
229 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
230 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
231 regno++)
232 {
233 if (! HARD_REGNO_MODE_OK (regno, mode))
234 continue;
235
236 set_mode_and_regno (reg, mode, regno);
237
238 SET_SRC (pat) = mem;
239 SET_DEST (pat) = reg;
240 if (recog (pat, insn, &num_clobbers) >= 0)
241 direct_load[(int) mode] = 1;
242
243 SET_SRC (pat) = mem1;
244 SET_DEST (pat) = reg;
245 if (recog (pat, insn, &num_clobbers) >= 0)
246 direct_load[(int) mode] = 1;
247
248 SET_SRC (pat) = reg;
249 SET_DEST (pat) = mem;
250 if (recog (pat, insn, &num_clobbers) >= 0)
251 direct_store[(int) mode] = 1;
252
253 SET_SRC (pat) = reg;
254 SET_DEST (pat) = mem1;
255 if (recog (pat, insn, &num_clobbers) >= 0)
256 direct_store[(int) mode] = 1;
257 }
258 }
259
260 mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, FIRST_PSEUDO_REGISTER));
261
262 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
263 mode = GET_MODE_WIDER_MODE (mode))
264 {
265 machine_mode srcmode;
266 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
267 srcmode = GET_MODE_WIDER_MODE (srcmode))
268 {
269 enum insn_code ic;
270
271 ic = can_extend_p (mode, srcmode, 0);
272 if (ic == CODE_FOR_nothing)
273 continue;
274
275 PUT_MODE (mem, srcmode);
276
277 if (insn_operand_matches (ic, 1, mem))
278 float_extend_from_mem[mode][srcmode] = true;
279 }
280 }
281 }
282
283 /* This is run at the start of compiling a function. */
284
285 void
286 init_expr (void)
287 {
288 memset (&crtl->expr, 0, sizeof (crtl->expr));
289 }
290 \f
291 /* Copy data from FROM to TO, where the machine modes are not the same.
292 Both modes may be integer, or both may be floating, or both may be
293 fixed-point.
294 UNSIGNEDP should be nonzero if FROM is an unsigned type.
295 This causes zero-extension instead of sign-extension. */
296
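/* Editorial usage sketch, not part of the original file: a caller that
   widens a QImode pseudo SRC into a fresh SImode pseudo with
   zero-extension would write

     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, src, 1);

   passing 0 for UNSIGNEDP makes the same call emit a sign-extension.  */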
297 void
298 convert_move (rtx to, rtx from, int unsignedp)
299 {
300 machine_mode to_mode = GET_MODE (to);
301 machine_mode from_mode = GET_MODE (from);
302 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
303 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
304 enum insn_code code;
305 rtx libcall;
306
307 /* rtx code for making an equivalent value. */
308 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
309 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
310
311
312 gcc_assert (to_real == from_real);
313 gcc_assert (to_mode != BLKmode);
314 gcc_assert (from_mode != BLKmode);
315
316 /* If the source and destination are already the same, then there's
317 nothing to do. */
318 if (to == from)
319 return;
320
321 /* If FROM is a SUBREG that indicates that we have already done at least
322 the required extension, strip it. We don't handle such SUBREGs as
323 TO here. */
324
325 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
326 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
327 >= GET_MODE_PRECISION (to_mode))
328 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
329 from = gen_lowpart (to_mode, from), from_mode = to_mode;
330
331 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
332
333 if (to_mode == from_mode
334 || (from_mode == VOIDmode && CONSTANT_P (from)))
335 {
336 emit_move_insn (to, from);
337 return;
338 }
339
340 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
341 {
342 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
343
344 if (VECTOR_MODE_P (to_mode))
345 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
346 else
347 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
348
349 emit_move_insn (to, from);
350 return;
351 }
352
353 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
354 {
355 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
356 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
357 return;
358 }
359
360 if (to_real)
361 {
362 rtx value;
363 rtx_insn *insns;
364 convert_optab tab;
365
366 gcc_assert ((GET_MODE_PRECISION (from_mode)
367 != GET_MODE_PRECISION (to_mode))
368 || (DECIMAL_FLOAT_MODE_P (from_mode)
369 != DECIMAL_FLOAT_MODE_P (to_mode)));
370
371 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
372 /* Conversion between decimal float and binary float, same size. */
373 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
374 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
375 tab = sext_optab;
376 else
377 tab = trunc_optab;
378
379 /* Try converting directly if the insn is supported. */
380
381 code = convert_optab_handler (tab, to_mode, from_mode);
382 if (code != CODE_FOR_nothing)
383 {
384 emit_unop_insn (code, to, from,
385 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
386 return;
387 }
388
389 /* Otherwise use a libcall. */
390 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
391
392 /* Is this conversion implemented yet? */
393 gcc_assert (libcall);
394
395 start_sequence ();
396 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
397 1, from, from_mode);
398 insns = get_insns ();
399 end_sequence ();
400 emit_libcall_block (insns, to, value,
401 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
402 from)
403 : gen_rtx_FLOAT_EXTEND (to_mode, from));
404 return;
405 }
406
407 /* Handle pointer conversion. */ /* SPEE 900220. */
408 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
409 {
410 convert_optab ctab;
411
412 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
413 ctab = trunc_optab;
414 else if (unsignedp)
415 ctab = zext_optab;
416 else
417 ctab = sext_optab;
418
419 if (convert_optab_handler (ctab, to_mode, from_mode)
420 != CODE_FOR_nothing)
421 {
422 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
423 to, from, UNKNOWN);
424 return;
425 }
426 }
427
428 /* Targets are expected to provide conversion insns between PxImode and
429 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
430 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
431 {
432 machine_mode full_mode
433 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
434
435 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
436 != CODE_FOR_nothing);
437
438 if (full_mode != from_mode)
439 from = convert_to_mode (full_mode, from, unsignedp);
440 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
441 to, from, UNKNOWN);
442 return;
443 }
444 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
445 {
446 rtx new_from;
447 machine_mode full_mode
448 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
449 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
450 enum insn_code icode;
451
452 icode = convert_optab_handler (ctab, full_mode, from_mode);
453 gcc_assert (icode != CODE_FOR_nothing);
454
455 if (to_mode == full_mode)
456 {
457 emit_unop_insn (icode, to, from, UNKNOWN);
458 return;
459 }
460
461 new_from = gen_reg_rtx (full_mode);
462 emit_unop_insn (icode, new_from, from, UNKNOWN);
463
464 /* else proceed to integer conversions below. */
465 from_mode = full_mode;
466 from = new_from;
467 }
468
469 /* Make sure both are fixed-point modes or both are not. */
470 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
471 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
472 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
473 {
474 /* If we widen from_mode to to_mode and they are in the same class,
475 we won't saturate the result.
476 Otherwise, always saturate the result to play safe. */
477 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
478 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
479 expand_fixed_convert (to, from, 0, 0);
480 else
481 expand_fixed_convert (to, from, 0, 1);
482 return;
483 }
484
485 /* Now both modes are integers. */
486
487 /* Handle expanding beyond a word. */
488 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
489 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
490 {
491 rtx_insn *insns;
492 rtx lowpart;
493 rtx fill_value;
494 rtx lowfrom;
495 int i;
496 machine_mode lowpart_mode;
497 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
498
499 /* Try converting directly if the insn is supported. */
500 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
501 != CODE_FOR_nothing)
502 {
503 /* If FROM is a SUBREG, put it into a register. Do this
504 so that we always generate the same set of insns for
505 better cse'ing; if an intermediate assignment occurred,
506 we won't be doing the operation directly on the SUBREG. */
507 if (optimize > 0 && GET_CODE (from) == SUBREG)
508 from = force_reg (from_mode, from);
509 emit_unop_insn (code, to, from, equiv_code);
510 return;
511 }
512 /* Next, try converting via full word. */
513 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
514 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
515 != CODE_FOR_nothing))
516 {
517 rtx word_to = gen_reg_rtx (word_mode);
518 if (REG_P (to))
519 {
520 if (reg_overlap_mentioned_p (to, from))
521 from = force_reg (from_mode, from);
522 emit_clobber (to);
523 }
524 convert_move (word_to, from, unsignedp);
525 emit_unop_insn (code, to, word_to, equiv_code);
526 return;
527 }
528
529 /* No special multiword conversion insn; do it by hand. */
530 start_sequence ();
531
532 /* Since we will turn this into a no conflict block, we must ensure that
533 the source does not overlap the target, so force it into an isolated
534 register if it might overlap.  Likewise for any MEM input, since the
535 conversion sequence might require several references to it and we
536 must ensure we're getting the same value every time. */
537
538 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
539 from = force_reg (from_mode, from);
540
541 /* Get a copy of FROM widened to a word, if necessary. */
542 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
543 lowpart_mode = word_mode;
544 else
545 lowpart_mode = from_mode;
546
547 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
548
549 lowpart = gen_lowpart (lowpart_mode, to);
550 emit_move_insn (lowpart, lowfrom);
551
552 /* Compute the value to put in each remaining word. */
553 if (unsignedp)
554 fill_value = const0_rtx;
555 else
556 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
557 LT, lowfrom, const0_rtx,
558 lowpart_mode, 0, -1);
559
560 /* Fill the remaining words. */
561 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
562 {
563 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
564 rtx subword = operand_subword (to, index, 1, to_mode);
565
566 gcc_assert (subword);
567
568 if (fill_value != subword)
569 emit_move_insn (subword, fill_value);
570 }
571
572 insns = get_insns ();
573 end_sequence ();
574
575 emit_insn (insns);
576 return;
577 }
578
579 /* Truncating multi-word to a word or less. */
580 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
581 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
582 {
583 if (!((MEM_P (from)
584 && ! MEM_VOLATILE_P (from)
585 && direct_load[(int) to_mode]
586 && ! mode_dependent_address_p (XEXP (from, 0),
587 MEM_ADDR_SPACE (from)))
588 || REG_P (from)
589 || GET_CODE (from) == SUBREG))
590 from = force_reg (from_mode, from);
591 convert_move (to, gen_lowpart (word_mode, from), 0);
592 return;
593 }
594
595 /* Now follow all the conversions between integers
596 no more than a word long. */
597
598 /* For truncation, usually we can just refer to FROM in a narrower mode. */
599 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
600 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
601 {
602 if (!((MEM_P (from)
603 && ! MEM_VOLATILE_P (from)
604 && direct_load[(int) to_mode]
605 && ! mode_dependent_address_p (XEXP (from, 0),
606 MEM_ADDR_SPACE (from)))
607 || REG_P (from)
608 || GET_CODE (from) == SUBREG))
609 from = force_reg (from_mode, from);
610 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
611 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
612 from = copy_to_reg (from);
613 emit_move_insn (to, gen_lowpart (to_mode, from));
614 return;
615 }
616
617 /* Handle extension. */
618 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
619 {
620 /* Convert directly if that works. */
621 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
622 != CODE_FOR_nothing)
623 {
624 emit_unop_insn (code, to, from, equiv_code);
625 return;
626 }
627 else
628 {
629 machine_mode intermediate;
630 rtx tmp;
631 int shift_amount;
632
633 /* Search for a mode to convert via. */
634 for (intermediate = from_mode; intermediate != VOIDmode;
635 intermediate = GET_MODE_WIDER_MODE (intermediate))
636 if (((can_extend_p (to_mode, intermediate, unsignedp)
637 != CODE_FOR_nothing)
638 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
639 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
640 && (can_extend_p (intermediate, from_mode, unsignedp)
641 != CODE_FOR_nothing))
642 {
643 convert_move (to, convert_to_mode (intermediate, from,
644 unsignedp), unsignedp);
645 return;
646 }
647
648 /* No suitable intermediate mode.
649 Generate what we need with shifts. */
650 shift_amount = (GET_MODE_PRECISION (to_mode)
651 - GET_MODE_PRECISION (from_mode));
652 from = gen_lowpart (to_mode, force_reg (from_mode, from));
653 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
654 to, unsignedp);
655 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
656 to, unsignedp);
657 if (tmp != to)
658 emit_move_insn (to, tmp);
659 return;
660 }
661 }
662
663 /* Support special truncate insns for certain modes. */
664 if (convert_optab_handler (trunc_optab, to_mode,
665 from_mode) != CODE_FOR_nothing)
666 {
667 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
668 to, from, UNKNOWN);
669 return;
670 }
671
672 /* Handle truncation of volatile memrefs, and so on;
673 the things that couldn't be truncated directly,
674 and for which there was no special instruction.
675
676 ??? Code above formerly short-circuited this, for most integer
677 mode pairs, with a force_reg in from_mode followed by a recursive
678 call to this routine. Appears always to have been wrong. */
679 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
680 {
681 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
682 emit_move_insn (to, temp);
683 return;
684 }
685
686 /* Mode combination is not recognized. */
687 gcc_unreachable ();
688 }
689
690 /* Return an rtx for a value that would result
691 from converting X to mode MODE.
692 Both X and MODE may be floating, or both integer.
693 UNSIGNEDP is nonzero if X is an unsigned value.
694 This can be done by referring to a part of X in place
695 or by copying to a new temporary with conversion. */
696
697 rtx
698 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
699 {
700 return convert_modes (mode, VOIDmode, x, unsignedp);
701 }
702
703 /* Return an rtx for a value that would result
704 from converting X from mode OLDMODE to mode MODE.
705 Both modes may be floating, or both integer.
706 UNSIGNEDP is nonzero if X is an unsigned value.
707
708 This can be done by referring to a part of X in place
709 or by copying to a new temporary with conversion.
710
711 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
712
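/* Editorial usage sketch, not from this file: when X is a constant,
   supplying OLDMODE lets the value be canonicalized correctly, e.g.

     rtx r = convert_modes (SImode, QImode, GEN_INT (0xff), 1);

   yields (const_int 255), whereas passing 0 for UNSIGNEDP would
   sign-extend the QImode value and yield (const_int -1).  */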
713 rtx
714 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
715 {
716 rtx temp;
717
718 /* If FROM is a SUBREG that indicates that we have already done at least
719 the required extension, strip it. */
720
721 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
722 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
723 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
724 x = gen_lowpart (mode, SUBREG_REG (x));
725
726 if (GET_MODE (x) != VOIDmode)
727 oldmode = GET_MODE (x);
728
729 if (mode == oldmode)
730 return x;
731
732 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
733 {
734 /* If the caller did not tell us the old mode, then there is not
735 much to do with respect to canonicalization. We have to
736 assume that all the bits are significant. */
737 if (GET_MODE_CLASS (oldmode) != MODE_INT)
738 oldmode = MAX_MODE_INT;
739 wide_int w = wide_int::from (std::make_pair (x, oldmode),
740 GET_MODE_PRECISION (mode),
741 unsignedp ? UNSIGNED : SIGNED);
742 return immed_wide_int_const (w, mode);
743 }
744
745 /* We can do this with a gen_lowpart if both desired and current modes
746 are integer, and this is either a constant integer, a register, or a
747 non-volatile MEM. */
748 if (GET_MODE_CLASS (mode) == MODE_INT
749 && GET_MODE_CLASS (oldmode) == MODE_INT
750 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
751 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
752 || (REG_P (x)
753 && (!HARD_REGISTER_P (x)
754 || HARD_REGNO_MODE_OK (REGNO (x), mode))
755 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
756
757 return gen_lowpart (mode, x);
758
759 /* Converting from an integer constant into MODE is always equivalent to a
760 subreg operation.  */
761 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
762 {
763 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
764 return simplify_gen_subreg (mode, x, oldmode, 0);
765 }
766
767 temp = gen_reg_rtx (mode);
768 convert_move (temp, x, unsignedp);
769 return temp;
770 }
771 \f
772 /* Return the largest alignment we can use for doing a move (or store)
773 of MAX_PIECES. ALIGN is the largest alignment we could use. */
774
775 static unsigned int
776 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
777 {
778 machine_mode tmode;
779
780 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
781 if (align >= GET_MODE_ALIGNMENT (tmode))
782 align = GET_MODE_ALIGNMENT (tmode);
783 else
784 {
785 machine_mode tmode, xmode;
786
787 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
788 tmode != VOIDmode;
789 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
790 if (GET_MODE_SIZE (tmode) > max_pieces
791 || SLOW_UNALIGNED_ACCESS (tmode, align))
792 break;
793
794 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
795 }
796
797 return align;
798 }
799
800 /* Return the widest integer mode no wider than SIZE. If no such mode
801 can be found, return VOIDmode. */
802
803 static machine_mode
804 widest_int_mode_for_size (unsigned int size)
805 {
806 machine_mode tmode, mode = VOIDmode;
807
808 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
809 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
810 if (GET_MODE_SIZE (tmode) < size)
811 mode = tmode;
812
813 return mode;
814 }
815
816 /* Determine whether the LEN bytes can be moved by using several move
817 instructions. Return nonzero if a call to move_by_pieces should
818 succeed. */
819
820 int
821 can_move_by_pieces (unsigned HOST_WIDE_INT len,
822 unsigned int align)
823 {
824 return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
825 optimize_insn_for_speed_p ());
826 }
827
828 /* Generate several move instructions to copy LEN bytes from block FROM to
829 block TO. (These are MEM rtx's with BLKmode).
830
831 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
832 used to push FROM to the stack.
833
834 ALIGN is maximum stack alignment we can assume.
835
836 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
837 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
838 stpcpy. */
839
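/* Editorial usage sketch, names are placeholders and not from this
   file: a caller expanding a mempcpy-style copy of a known constant
   length might do

     if (can_move_by_pieces (INTVAL (len_rtx), align))
       ret = move_by_pieces (dst_mem, src_mem, INTVAL (len_rtx), align, 1);

   where ENDP == 1 requests the address just past the last byte copied,
   matching mempcpy's return value.  */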
840 rtx
841 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
842 unsigned int align, int endp)
843 {
844 struct move_by_pieces_d data;
845 machine_mode to_addr_mode;
846 machine_mode from_addr_mode = get_address_mode (from);
847 rtx to_addr, from_addr = XEXP (from, 0);
848 unsigned int max_size = MOVE_MAX_PIECES + 1;
849 enum insn_code icode;
850
851 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
852
853 data.offset = 0;
854 data.from_addr = from_addr;
855 if (to)
856 {
857 to_addr_mode = get_address_mode (to);
858 to_addr = XEXP (to, 0);
859 data.to = to;
860 data.autinc_to
861 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
862 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
863 data.reverse
864 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
865 }
866 else
867 {
868 to_addr_mode = VOIDmode;
869 to_addr = NULL_RTX;
870 data.to = NULL_RTX;
871 data.autinc_to = 1;
872 if (STACK_GROWS_DOWNWARD)
873 data.reverse = 1;
874 else
875 data.reverse = 0;
876 }
877 data.to_addr = to_addr;
878 data.from = from;
879 data.autinc_from
880 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
881 || GET_CODE (from_addr) == POST_INC
882 || GET_CODE (from_addr) == POST_DEC);
883
884 data.explicit_inc_from = 0;
885 data.explicit_inc_to = 0;
886 if (data.reverse) data.offset = len;
887 data.len = len;
888
889 /* If copying requires more than two move insns,
890 copy addresses to registers (to make displacements shorter)
891 and use post-increment if available. */
892 if (!(data.autinc_from && data.autinc_to)
893 && move_by_pieces_ninsns (len, align, max_size) > 2)
894 {
895 /* Find the mode of the largest move...
896 MODE might not be used depending on the definitions of the
897 USE_* macros below. */
898 machine_mode mode ATTRIBUTE_UNUSED
899 = widest_int_mode_for_size (max_size);
900
901 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
902 {
903 data.from_addr = copy_to_mode_reg (from_addr_mode,
904 plus_constant (from_addr_mode,
905 from_addr, len));
906 data.autinc_from = 1;
907 data.explicit_inc_from = -1;
908 }
909 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
910 {
911 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
912 data.autinc_from = 1;
913 data.explicit_inc_from = 1;
914 }
915 if (!data.autinc_from && CONSTANT_P (from_addr))
916 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
917 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
918 {
919 data.to_addr = copy_to_mode_reg (to_addr_mode,
920 plus_constant (to_addr_mode,
921 to_addr, len));
922 data.autinc_to = 1;
923 data.explicit_inc_to = -1;
924 }
925 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
926 {
927 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
928 data.autinc_to = 1;
929 data.explicit_inc_to = 1;
930 }
931 if (!data.autinc_to && CONSTANT_P (to_addr))
932 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
933 }
934
935 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
936
937 /* First move what we can in the largest integer mode, then go to
938 successively smaller modes. */
939
940 while (max_size > 1 && data.len > 0)
941 {
942 machine_mode mode = widest_int_mode_for_size (max_size);
943
944 if (mode == VOIDmode)
945 break;
946
947 icode = optab_handler (mov_optab, mode);
948 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
949 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
950
951 max_size = GET_MODE_SIZE (mode);
952 }
953
954 /* The code above should have handled everything. */
955 gcc_assert (!data.len);
956
957 if (endp)
958 {
959 rtx to1;
960
961 gcc_assert (!data.reverse);
962 if (data.autinc_to)
963 {
964 if (endp == 2)
965 {
966 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
967 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
968 else
969 data.to_addr = copy_to_mode_reg (to_addr_mode,
970 plus_constant (to_addr_mode,
971 data.to_addr,
972 -1));
973 }
974 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
975 data.offset);
976 }
977 else
978 {
979 if (endp == 2)
980 --data.offset;
981 to1 = adjust_address (data.to, QImode, data.offset);
982 }
983 return to1;
984 }
985 else
986 return data.to;
987 }
988
989 /* Return number of insns required to move L bytes by pieces.
990 ALIGN (in bits) is maximum alignment we can assume. */
991
992 unsigned HOST_WIDE_INT
993 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
994 unsigned int max_size)
995 {
996 unsigned HOST_WIDE_INT n_insns = 0;
997
998 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
999
1000 while (max_size > 1 && l > 0)
1001 {
1002 machine_mode mode;
1003 enum insn_code icode;
1004
1005 mode = widest_int_mode_for_size (max_size);
1006
1007 if (mode == VOIDmode)
1008 break;
1009
1010 icode = optab_handler (mov_optab, mode);
1011 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1012 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1013
1014 max_size = GET_MODE_SIZE (mode);
1015 }
1016
1017 gcc_assert (!l);
1018 return n_insns;
1019 }
1020
1021 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1022 with move instructions for mode MODE. GENFUN is the gen_... function
1023 to make a move insn for that mode. DATA has all the other info. */
1024
1025 static void
1026 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1027 struct move_by_pieces_d *data)
1028 {
1029 unsigned int size = GET_MODE_SIZE (mode);
1030 rtx to1 = NULL_RTX, from1;
1031
1032 while (data->len >= size)
1033 {
1034 if (data->reverse)
1035 data->offset -= size;
1036
1037 if (data->to)
1038 {
1039 if (data->autinc_to)
1040 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1041 data->offset);
1042 else
1043 to1 = adjust_address (data->to, mode, data->offset);
1044 }
1045
1046 if (data->autinc_from)
1047 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1048 data->offset);
1049 else
1050 from1 = adjust_address (data->from, mode, data->offset);
1051
1052 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1053 emit_insn (gen_add2_insn (data->to_addr,
1054 gen_int_mode (-(HOST_WIDE_INT) size,
1055 GET_MODE (data->to_addr))));
1056 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1057 emit_insn (gen_add2_insn (data->from_addr,
1058 gen_int_mode (-(HOST_WIDE_INT) size,
1059 GET_MODE (data->from_addr))));
1060
1061 if (data->to)
1062 emit_insn ((*genfun) (to1, from1));
1063 else
1064 {
1065 #ifdef PUSH_ROUNDING
1066 emit_single_push_insn (mode, from1, NULL);
1067 #else
1068 gcc_unreachable ();
1069 #endif
1070 }
1071
1072 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1073 emit_insn (gen_add2_insn (data->to_addr,
1074 gen_int_mode (size,
1075 GET_MODE (data->to_addr))));
1076 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1077 emit_insn (gen_add2_insn (data->from_addr,
1078 gen_int_mode (size,
1079 GET_MODE (data->from_addr))));
1080
1081 if (! data->reverse)
1082 data->offset += size;
1083
1084 data->len -= size;
1085 }
1086 }
1087 \f
1088 /* Emit code to move a block Y to a block X. This may be done with
1089 string-move instructions, with multiple scalar move instructions,
1090 or with a library call.
1091
1092 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1093 SIZE is an rtx that says how long they are.
1094 ALIGN is the maximum alignment we can assume they have.
1095 METHOD describes what kind of copy this is, and what mechanisms may be used.
1096 MIN_SIZE is the minimal size of the block to move.
1097 MAX_SIZE is the maximal size of the block to move; if it cannot be
1098 represented in unsigned HOST_WIDE_INT, it is a mask of all ones.
1099
1100 Return the address of the new block, if memcpy is called and returns it,
1101 0 otherwise. */
1102
1103 rtx
1104 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1105 unsigned int expected_align, HOST_WIDE_INT expected_size,
1106 unsigned HOST_WIDE_INT min_size,
1107 unsigned HOST_WIDE_INT max_size,
1108 unsigned HOST_WIDE_INT probable_max_size)
1109 {
1110 bool may_use_call;
1111 rtx retval = 0;
1112 unsigned int align;
1113
1114 gcc_assert (size);
1115 if (CONST_INT_P (size)
1116 && INTVAL (size) == 0)
1117 return 0;
1118
1119 switch (method)
1120 {
1121 case BLOCK_OP_NORMAL:
1122 case BLOCK_OP_TAILCALL:
1123 may_use_call = true;
1124 break;
1125
1126 case BLOCK_OP_CALL_PARM:
1127 may_use_call = block_move_libcall_safe_for_call_parm ();
1128
1129 /* Make inhibit_defer_pop nonzero around the library call
1130 to force it to pop the arguments right away. */
1131 NO_DEFER_POP;
1132 break;
1133
1134 case BLOCK_OP_NO_LIBCALL:
1135 may_use_call = false;
1136 break;
1137
1138 default:
1139 gcc_unreachable ();
1140 }
1141
1142 gcc_assert (MEM_P (x) && MEM_P (y));
1143 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1144 gcc_assert (align >= BITS_PER_UNIT);
1145
1146 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1147 block copy is more efficient for other large modes, e.g. DCmode. */
1148 x = adjust_address (x, BLKmode, 0);
1149 y = adjust_address (y, BLKmode, 0);
1150
1151 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1152 can be incorrect is coming from __builtin_memcpy. */
1153 if (CONST_INT_P (size))
1154 {
1155 x = shallow_copy_rtx (x);
1156 y = shallow_copy_rtx (y);
1157 set_mem_size (x, INTVAL (size));
1158 set_mem_size (y, INTVAL (size));
1159 }
1160
1161 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1162 move_by_pieces (x, y, INTVAL (size), align, 0);
1163 else if (emit_block_move_via_movmem (x, y, size, align,
1164 expected_align, expected_size,
1165 min_size, max_size, probable_max_size))
1166 ;
1167 else if (may_use_call
1168 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1169 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1170 {
1171 /* Since x and y are passed to a libcall, mark the corresponding
1172 tree EXPR as addressable. */
1173 tree y_expr = MEM_EXPR (y);
1174 tree x_expr = MEM_EXPR (x);
1175 if (y_expr)
1176 mark_addressable (y_expr);
1177 if (x_expr)
1178 mark_addressable (x_expr);
1179 retval = emit_block_move_via_libcall (x, y, size,
1180 method == BLOCK_OP_TAILCALL);
1181 }
1182
1183 else
1184 emit_block_move_via_loop (x, y, size, align);
1185
1186 if (method == BLOCK_OP_CALL_PARM)
1187 OK_DEFER_POP;
1188
1189 return retval;
1190 }
1191
1192 rtx
1193 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1194 {
1195 unsigned HOST_WIDE_INT max, min = 0;
1196 if (GET_CODE (size) == CONST_INT)
1197 min = max = UINTVAL (size);
1198 else
1199 max = GET_MODE_MASK (GET_MODE (size));
1200 return emit_block_move_hints (x, y, size, method, 0, -1,
1201 min, max, max);
1202 }
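/* Editorial usage sketch, not from this file: a typical aggregate copy
   during expansion looks like

     emit_block_move (dst_mem, src_mem, GEN_INT (nbytes), BLOCK_OP_NORMAL);

   which lets emit_block_move_hints choose between move_by_pieces, a
   movmem pattern, a memcpy libcall, or the explicit loop further below.
   dst_mem, src_mem and nbytes are placeholders.  */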
1203
1204 /* A subroutine of emit_block_move. Returns true if calling the
1205 block move libcall will not clobber any parameters which may have
1206 already been placed on the stack. */
1207
1208 static bool
1209 block_move_libcall_safe_for_call_parm (void)
1210 {
1211 #if defined (REG_PARM_STACK_SPACE)
1212 tree fn;
1213 #endif
1214
1215 /* If arguments are pushed on the stack, then they're safe. */
1216 if (PUSH_ARGS)
1217 return true;
1218
1219 /* If registers go on the stack anyway, any argument is sure to clobber
1220 an outgoing argument. */
1221 #if defined (REG_PARM_STACK_SPACE)
1222 fn = emit_block_move_libcall_fn (false);
1223 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1224 depend on its argument. */
1225 (void) fn;
1226 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1227 && REG_PARM_STACK_SPACE (fn) != 0)
1228 return false;
1229 #endif
1230
1231 /* If any argument goes in memory, then it might clobber an outgoing
1232 argument. */
1233 {
1234 CUMULATIVE_ARGS args_so_far_v;
1235 cumulative_args_t args_so_far;
1236 tree fn, arg;
1237
1238 fn = emit_block_move_libcall_fn (false);
1239 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1240 args_so_far = pack_cumulative_args (&args_so_far_v);
1241
1242 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1243 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1244 {
1245 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1246 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1247 NULL_TREE, true);
1248 if (!tmp || !REG_P (tmp))
1249 return false;
1250 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1251 return false;
1252 targetm.calls.function_arg_advance (args_so_far, mode,
1253 NULL_TREE, true);
1254 }
1255 }
1256 return true;
1257 }
1258
1259 /* A subroutine of emit_block_move. Expand a movmem pattern;
1260 return true if successful. */
1261
1262 static bool
1263 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1264 unsigned int expected_align, HOST_WIDE_INT expected_size,
1265 unsigned HOST_WIDE_INT min_size,
1266 unsigned HOST_WIDE_INT max_size,
1267 unsigned HOST_WIDE_INT probable_max_size)
1268 {
1269 int save_volatile_ok = volatile_ok;
1270 machine_mode mode;
1271
1272 if (expected_align < align)
1273 expected_align = align;
1274 if (expected_size != -1)
1275 {
1276 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1277 expected_size = probable_max_size;
1278 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1279 expected_size = min_size;
1280 }
1281
1282 /* Since this is a move insn, we don't care about volatility. */
1283 volatile_ok = 1;
1284
1285 /* Try the most limited insn first, because there's no point
1286 including more than one in the machine description unless
1287 the more limited one has some advantage. */
1288
1289 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1290 mode = GET_MODE_WIDER_MODE (mode))
1291 {
1292 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1293
1294 if (code != CODE_FOR_nothing
1295 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1296 here because if SIZE is less than the mode mask, as it is
1297 returned by the macro, it will definitely be less than the
1298 actual mode mask. Since SIZE is within the Pmode address
1299 space, we limit MODE to Pmode. */
1300 && ((CONST_INT_P (size)
1301 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1302 <= (GET_MODE_MASK (mode) >> 1)))
1303 || max_size <= (GET_MODE_MASK (mode) >> 1)
1304 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1305 {
1306 struct expand_operand ops[9];
1307 unsigned int nops;
1308
1309 /* ??? When called via emit_block_move_for_call, it'd be
1310 nice if there were some way to inform the backend, so
1311 that it doesn't fail the expansion because it thinks
1312 emitting the libcall would be more efficient. */
1313 nops = insn_data[(int) code].n_generator_args;
1314 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1315
1316 create_fixed_operand (&ops[0], x);
1317 create_fixed_operand (&ops[1], y);
1318 /* The check above guarantees that this size conversion is valid. */
1319 create_convert_operand_to (&ops[2], size, mode, true);
1320 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1321 if (nops >= 6)
1322 {
1323 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1324 create_integer_operand (&ops[5], expected_size);
1325 }
1326 if (nops >= 8)
1327 {
1328 create_integer_operand (&ops[6], min_size);
1329 /* If we can not represent the maximal size,
1330 make parameter NULL. */
1331 if ((HOST_WIDE_INT) max_size != -1)
1332 create_integer_operand (&ops[7], max_size);
1333 else
1334 create_fixed_operand (&ops[7], NULL);
1335 }
1336 if (nops == 9)
1337 {
1338 /* If we can not represent the maximal size,
1339 make parameter NULL. */
1340 if ((HOST_WIDE_INT) probable_max_size != -1)
1341 create_integer_operand (&ops[8], probable_max_size);
1342 else
1343 create_fixed_operand (&ops[8], NULL);
1344 }
1345 if (maybe_expand_insn (code, nops, ops))
1346 {
1347 volatile_ok = save_volatile_ok;
1348 return true;
1349 }
1350 }
1351 }
1352
1353 volatile_ok = save_volatile_ok;
1354 return false;
1355 }
1356
1357 /* A subroutine of emit_block_move. Expand a call to memcpy.
1358 Return the return value from memcpy, 0 otherwise. */
1359
1360 rtx
1361 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1362 {
1363 rtx dst_addr, src_addr;
1364 tree call_expr, fn, src_tree, dst_tree, size_tree;
1365 machine_mode size_mode;
1366 rtx retval;
1367
1368 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1369 pseudos. We can then place those new pseudos into a VAR_DECL and
1370 use them later. */
1371
1372 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1373 src_addr = copy_addr_to_reg (XEXP (src, 0));
1374
1375 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1376 src_addr = convert_memory_address (ptr_mode, src_addr);
1377
1378 dst_tree = make_tree (ptr_type_node, dst_addr);
1379 src_tree = make_tree (ptr_type_node, src_addr);
1380
1381 size_mode = TYPE_MODE (sizetype);
1382
1383 size = convert_to_mode (size_mode, size, 1);
1384 size = copy_to_mode_reg (size_mode, size);
1385
1386 /* It is incorrect to use the libcall calling conventions to call
1387 memcpy in this context. This could be a user call to memcpy and
1388 the user may wish to examine the return value from memcpy. For
1389 targets where libcalls and normal calls have different conventions
1390 for returning pointers, we could end up generating incorrect code. */
1391
1392 size_tree = make_tree (sizetype, size);
1393
1394 fn = emit_block_move_libcall_fn (true);
1395 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1396 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1397
1398 retval = expand_normal (call_expr);
1399
1400 return retval;
1401 }
1402
1403 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1404 for the function we use for block copies. */
1405
1406 static GTY(()) tree block_move_fn;
1407
1408 void
1409 init_block_move_fn (const char *asmspec)
1410 {
1411 if (!block_move_fn)
1412 {
1413 tree args, fn, attrs, attr_args;
1414
1415 fn = get_identifier ("memcpy");
1416 args = build_function_type_list (ptr_type_node, ptr_type_node,
1417 const_ptr_type_node, sizetype,
1418 NULL_TREE);
1419
1420 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1421 DECL_EXTERNAL (fn) = 1;
1422 TREE_PUBLIC (fn) = 1;
1423 DECL_ARTIFICIAL (fn) = 1;
1424 TREE_NOTHROW (fn) = 1;
1425 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1426 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1427
1428 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1429 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1430
1431 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1432
1433 block_move_fn = fn;
1434 }
1435
1436 if (asmspec)
1437 set_user_assembler_name (block_move_fn, asmspec);
1438 }
1439
1440 static tree
1441 emit_block_move_libcall_fn (int for_call)
1442 {
1443 static bool emitted_extern;
1444
1445 if (!block_move_fn)
1446 init_block_move_fn (NULL);
1447
1448 if (for_call && !emitted_extern)
1449 {
1450 emitted_extern = true;
1451 make_decl_rtl (block_move_fn);
1452 }
1453
1454 return block_move_fn;
1455 }
1456
1457 /* A subroutine of emit_block_move. Copy the data via an explicit
1458 loop. This is used only when libcalls are forbidden. */
1459 /* ??? It'd be nice to copy in hunks larger than QImode. */
1460
1461 static void
1462 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1463 unsigned int align ATTRIBUTE_UNUSED)
1464 {
1465 rtx_code_label *cmp_label, *top_label;
1466 rtx iter, x_addr, y_addr, tmp;
1467 machine_mode x_addr_mode = get_address_mode (x);
1468 machine_mode y_addr_mode = get_address_mode (y);
1469 machine_mode iter_mode;
1470
1471 iter_mode = GET_MODE (size);
1472 if (iter_mode == VOIDmode)
1473 iter_mode = word_mode;
1474
1475 top_label = gen_label_rtx ();
1476 cmp_label = gen_label_rtx ();
1477 iter = gen_reg_rtx (iter_mode);
1478
1479 emit_move_insn (iter, const0_rtx);
1480
1481 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1482 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1483 do_pending_stack_adjust ();
1484
1485 emit_jump (cmp_label);
1486 emit_label (top_label);
1487
1488 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1489 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1490
1491 if (x_addr_mode != y_addr_mode)
1492 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1493 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1494
1495 x = change_address (x, QImode, x_addr);
1496 y = change_address (y, QImode, y_addr);
1497
1498 emit_move_insn (x, y);
1499
1500 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1501 true, OPTAB_LIB_WIDEN);
1502 if (tmp != iter)
1503 emit_move_insn (iter, tmp);
1504
1505 emit_label (cmp_label);
1506
1507 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1508 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1509 }
1510 \f
1511 /* Copy all or part of a value X into registers starting at REGNO.
1512 The number of registers to be filled is NREGS. */
1513
1514 void
1515 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1516 {
1517 int i;
1518 #ifdef HAVE_load_multiple
1519 rtx pat;
1520 rtx_insn *last;
1521 #endif
1522
1523 if (nregs == 0)
1524 return;
1525
1526 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1527 x = validize_mem (force_const_mem (mode, x));
1528
1529 /* See if the machine can do this with a load multiple insn. */
1530 #ifdef HAVE_load_multiple
1531 if (HAVE_load_multiple)
1532 {
1533 last = get_last_insn ();
1534 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1535 GEN_INT (nregs));
1536 if (pat)
1537 {
1538 emit_insn (pat);
1539 return;
1540 }
1541 else
1542 delete_insns_since (last);
1543 }
1544 #endif
1545
1546 for (i = 0; i < nregs; i++)
1547 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1548 operand_subword_force (x, i, mode));
1549 }
1550
1551 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1552 The number of registers to be filled is NREGS. */
1553
1554 void
1555 move_block_from_reg (int regno, rtx x, int nregs)
1556 {
1557 int i;
1558
1559 if (nregs == 0)
1560 return;
1561
1562 /* See if the machine can do this with a store multiple insn. */
1563 #ifdef HAVE_store_multiple
1564 if (HAVE_store_multiple)
1565 {
1566 rtx_insn *last = get_last_insn ();
1567 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1568 GEN_INT (nregs));
1569 if (pat)
1570 {
1571 emit_insn (pat);
1572 return;
1573 }
1574 else
1575 delete_insns_since (last);
1576 }
1577 #endif
1578
1579 for (i = 0; i < nregs; i++)
1580 {
1581 rtx tem = operand_subword (x, i, 1, BLKmode);
1582
1583 gcc_assert (tem);
1584
1585 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1586 }
1587 }
1588
1589 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1590 ORIG, where ORIG is a non-consecutive group of registers represented by
1591 a PARALLEL. The clone is identical to the original except in that the
1592 original set of registers is replaced by a new set of pseudo registers.
1593 The new set has the same modes as the original set. */
1594
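/* Editorial sketch of the PARALLEL format handled here, not from this
   file: a value living in two DImode hard registers might be described
   as

     (parallel [(expr_list (reg:DI 0) (const_int 0))
                (expr_list (reg:DI 1) (const_int 8))])

   where each EXPR_LIST pairs a register with its byte offset into the
   whole value; the first slot may instead be a NULL entry when part of
   the value also lives on the stack.  */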
1595 rtx
1596 gen_group_rtx (rtx orig)
1597 {
1598 int i, length;
1599 rtx *tmps;
1600
1601 gcc_assert (GET_CODE (orig) == PARALLEL);
1602
1603 length = XVECLEN (orig, 0);
1604 tmps = XALLOCAVEC (rtx, length);
1605
1606 /* Skip a NULL entry in first slot. */
1607 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1608
1609 if (i)
1610 tmps[0] = 0;
1611
1612 for (; i < length; i++)
1613 {
1614 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1615 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1616
1617 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1618 }
1619
1620 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1621 }
1622
1623 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1624 except that values are placed in TMPS[i], and must later be moved
1625 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1626
1627 static void
1628 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1629 {
1630 rtx src;
1631 int start, i;
1632 machine_mode m = GET_MODE (orig_src);
1633
1634 gcc_assert (GET_CODE (dst) == PARALLEL);
1635
1636 if (m != VOIDmode
1637 && !SCALAR_INT_MODE_P (m)
1638 && !MEM_P (orig_src)
1639 && GET_CODE (orig_src) != CONCAT)
1640 {
1641 machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1642 if (imode == BLKmode)
1643 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1644 else
1645 src = gen_reg_rtx (imode);
1646 if (imode != BLKmode)
1647 src = gen_lowpart (GET_MODE (orig_src), src);
1648 emit_move_insn (src, orig_src);
1649 /* ...and back again. */
1650 if (imode != BLKmode)
1651 src = gen_lowpart (imode, src);
1652 emit_group_load_1 (tmps, dst, src, type, ssize);
1653 return;
1654 }
1655
1656 /* Check for a NULL entry, used to indicate that the parameter goes
1657 both on the stack and in registers. */
1658 if (XEXP (XVECEXP (dst, 0, 0), 0))
1659 start = 0;
1660 else
1661 start = 1;
1662
1663 /* Process the pieces. */
1664 for (i = start; i < XVECLEN (dst, 0); i++)
1665 {
1666 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1667 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1668 unsigned int bytelen = GET_MODE_SIZE (mode);
1669 int shift = 0;
1670
1671 /* Handle trailing fragments that run over the size of the struct. */
1672 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1673 {
1674 /* Arrange to shift the fragment to where it belongs.
1675 extract_bit_field loads to the lsb of the reg. */
1676 if (
1677 #ifdef BLOCK_REG_PADDING
1678 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1679 == (BYTES_BIG_ENDIAN ? upward : downward)
1680 #else
1681 BYTES_BIG_ENDIAN
1682 #endif
1683 )
1684 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1685 bytelen = ssize - bytepos;
1686 gcc_assert (bytelen > 0);
1687 }
1688
1689 /* If we won't be loading directly from memory, protect the real source
1690 from strange tricks we might play; but make sure that the source can
1691 be loaded directly into the destination. */
1692 src = orig_src;
1693 if (!MEM_P (orig_src)
1694 && (!CONSTANT_P (orig_src)
1695 || (GET_MODE (orig_src) != mode
1696 && GET_MODE (orig_src) != VOIDmode)))
1697 {
1698 if (GET_MODE (orig_src) == VOIDmode)
1699 src = gen_reg_rtx (mode);
1700 else
1701 src = gen_reg_rtx (GET_MODE (orig_src));
1702
1703 emit_move_insn (src, orig_src);
1704 }
1705
1706 /* Optimize the access just a bit. */
1707 if (MEM_P (src)
1708 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1709 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1710 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1711 && bytelen == GET_MODE_SIZE (mode))
1712 {
1713 tmps[i] = gen_reg_rtx (mode);
1714 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1715 }
1716 else if (COMPLEX_MODE_P (mode)
1717 && GET_MODE (src) == mode
1718 && bytelen == GET_MODE_SIZE (mode))
1719 /* Let emit_move_complex do the bulk of the work. */
1720 tmps[i] = src;
1721 else if (GET_CODE (src) == CONCAT)
1722 {
1723 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1724 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1725
1726 if ((bytepos == 0 && bytelen == slen0)
1727 || (bytepos != 0 && bytepos + bytelen <= slen))
1728 {
1729 /* The following assumes that the concatenated objects all
1730 have the same size. In this case, a simple calculation
1731 can be used to determine the object and the bit field
1732 to be extracted. */
1733 tmps[i] = XEXP (src, bytepos / slen0);
1734 if (! CONSTANT_P (tmps[i])
1735 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1736 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1737 (bytepos % slen0) * BITS_PER_UNIT,
1738 1, NULL_RTX, mode, mode);
1739 }
1740 else
1741 {
1742 rtx mem;
1743
1744 gcc_assert (!bytepos);
1745 mem = assign_stack_temp (GET_MODE (src), slen);
1746 emit_move_insn (mem, src);
1747 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1748 0, 1, NULL_RTX, mode, mode);
1749 }
1750 }
1751 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1752 SIMD register, which is currently broken.  Until we get GCC
1753 to emit proper RTL for these cases, let's dump to memory. */
1754 else if (VECTOR_MODE_P (GET_MODE (dst))
1755 && REG_P (src))
1756 {
1757 int slen = GET_MODE_SIZE (GET_MODE (src));
1758 rtx mem;
1759
1760 mem = assign_stack_temp (GET_MODE (src), slen);
1761 emit_move_insn (mem, src);
1762 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1763 }
1764 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1765 && XVECLEN (dst, 0) > 1)
1766 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1767 else if (CONSTANT_P (src))
1768 {
1769 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1770
1771 if (len == ssize)
1772 tmps[i] = src;
1773 else
1774 {
1775 rtx first, second;
1776
1777 /* TODO: const_wide_int can have sizes other than this... */
1778 gcc_assert (2 * len == ssize);
1779 split_double (src, &first, &second);
1780 if (i)
1781 tmps[i] = second;
1782 else
1783 tmps[i] = first;
1784 }
1785 }
1786 else if (REG_P (src) && GET_MODE (src) == mode)
1787 tmps[i] = src;
1788 else
1789 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1790 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1791 mode, mode);
1792
1793 if (shift)
1794 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1795 shift, tmps[i], 0);
1796 }
1797 }
1798
1799 /* Emit code to move a block SRC of type TYPE to a block DST,
1800 where DST is non-consecutive registers represented by a PARALLEL.
1801 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1802 if not known. */
1803
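/* Editorial usage sketch, placeholders only and not from this file:
   given DST built as a PARALLEL like the one illustrated above, loading
   a 16-byte structure SRC_MEM into it is simply

     emit_group_load (dst, src_mem, type, 16);

   after which the hard registers named in DST hold the pieces of the
   structure.  */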
1804 void
1805 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1806 {
1807 rtx *tmps;
1808 int i;
1809
1810 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1811 emit_group_load_1 (tmps, dst, src, type, ssize);
1812
1813 /* Copy the extracted pieces into the proper (probable) hard regs. */
1814 for (i = 0; i < XVECLEN (dst, 0); i++)
1815 {
1816 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1817 if (d == NULL)
1818 continue;
1819 emit_move_insn (d, tmps[i]);
1820 }
1821 }
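/* Illustrative sketch only, not code used by this file: a target that
   returns a 16-byte structure in two DImode hard registers (the register
   numbers below are hypothetical) might describe DST as

     (parallel [(expr_list (reg:DI 10) (const_int 0))
                (expr_list (reg:DI 11) (const_int 8))])

   and a caller holding the structure in a BLKmode MEM could then emit the
   copy with

     emit_group_load (dst, src_mem, type, 16);

   where SRC_MEM and TYPE name the memory and its tree type.  */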
1822
1823 /* Similar, but load SRC into new pseudos in a format that looks like
1824 PARALLEL. This can later be fed to emit_group_move to get things
1825 in the right place. */
1826
1827 rtx
1828 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1829 {
1830 rtvec vec;
1831 int i;
1832
1833 vec = rtvec_alloc (XVECLEN (parallel, 0));
1834 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1835
1836 /* Convert the vector to look just like the original PARALLEL, except
1837 with the computed values. */
1838 for (i = 0; i < XVECLEN (parallel, 0); i++)
1839 {
1840 rtx e = XVECEXP (parallel, 0, i);
1841 rtx d = XEXP (e, 0);
1842
1843 if (d)
1844 {
1845 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1846 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1847 }
1848 RTVEC_ELT (vec, i) = e;
1849 }
1850
1851 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1852 }
1853
1854 /* Emit code to move a block SRC to block DST, where SRC and DST are
1855 non-consecutive groups of registers, each represented by a PARALLEL. */
1856
1857 void
1858 emit_group_move (rtx dst, rtx src)
1859 {
1860 int i;
1861
1862 gcc_assert (GET_CODE (src) == PARALLEL
1863 && GET_CODE (dst) == PARALLEL
1864 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1865
1866 /* Skip first entry if NULL. */
1867 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1868 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1869 XEXP (XVECEXP (src, 0, i), 0));
1870 }
1871
1872 /* Move a group of registers represented by a PARALLEL into pseudos. */
1873
1874 rtx
1875 emit_group_move_into_temps (rtx src)
1876 {
1877 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1878 int i;
1879
1880 for (i = 0; i < XVECLEN (src, 0); i++)
1881 {
1882 rtx e = XVECEXP (src, 0, i);
1883 rtx d = XEXP (e, 0);
1884
1885 if (d)
1886 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1887 RTVEC_ELT (vec, i) = e;
1888 }
1889
1890 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1891 }
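/* A hedged usage sketch for the temp-based helpers above (the variable
   names are hypothetical).  When the hard registers named in a PARALLEL
   must not be clobbered too early, the value can first be loaded into
   pseudos and only later moved into place:

     rtx tmp = emit_group_load_into_temps (parallel, src_mem, type, ssize);
     ... expansion that might clobber the hard registers ...
     emit_group_move (parallel, tmp);

   emit_group_move requires both operands to be PARALLELs of the same
   length, which the value returned above satisfies by construction.  */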
1892
1893 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1894 where SRC is non-consecutive registers represented by a PARALLEL.
1895 SSIZE represents the total size of block ORIG_DST, or -1 if not
1896 known. */
1897
1898 void
1899 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1900 {
1901 rtx *tmps, dst;
1902 int start, finish, i;
1903 machine_mode m = GET_MODE (orig_dst);
1904
1905 gcc_assert (GET_CODE (src) == PARALLEL);
1906
1907 if (!SCALAR_INT_MODE_P (m)
1908 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1909 {
1910 machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1911 if (imode == BLKmode)
1912 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1913 else
1914 dst = gen_reg_rtx (imode);
1915 emit_group_store (dst, src, type, ssize);
1916 if (imode != BLKmode)
1917 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1918 emit_move_insn (orig_dst, dst);
1919 return;
1920 }
1921
1922 /* Check for a NULL entry, used to indicate that the parameter goes
1923 both on the stack and in registers. */
1924 if (XEXP (XVECEXP (src, 0, 0), 0))
1925 start = 0;
1926 else
1927 start = 1;
1928 finish = XVECLEN (src, 0);
1929
1930 tmps = XALLOCAVEC (rtx, finish);
1931
1932 /* Copy the (probable) hard regs into pseudos. */
1933 for (i = start; i < finish; i++)
1934 {
1935 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1936 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1937 {
1938 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1939 emit_move_insn (tmps[i], reg);
1940 }
1941 else
1942 tmps[i] = reg;
1943 }
1944
1945 /* If we won't be storing directly into memory, protect the real destination
1946 from strange tricks we might play. */
1947 dst = orig_dst;
1948 if (GET_CODE (dst) == PARALLEL)
1949 {
1950 rtx temp;
1951
1952 /* We can get a PARALLEL dst if there is a conditional expression in
1953 a return statement. In that case, the dst and src are the same,
1954 so no action is necessary. */
1955 if (rtx_equal_p (dst, src))
1956 return;
1957
1958 /* It is unclear if we can ever reach here, but we may as well handle
1959 it. Allocate a temporary, and split this into a store/load to/from
1960 the temporary. */
1961 temp = assign_stack_temp (GET_MODE (dst), ssize);
1962 emit_group_store (temp, src, type, ssize);
1963 emit_group_load (dst, temp, type, ssize);
1964 return;
1965 }
1966 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1967 {
1968 machine_mode outer = GET_MODE (dst);
1969 machine_mode inner;
1970 HOST_WIDE_INT bytepos;
1971 bool done = false;
1972 rtx temp;
1973
1974 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1975 dst = gen_reg_rtx (outer);
1976
1977 /* Make life a bit easier for combine. */
1978 /* If the first element of the vector is the low part
1979 of the destination mode, use a paradoxical subreg to
1980 initialize the destination. */
1981 if (start < finish)
1982 {
1983 inner = GET_MODE (tmps[start]);
1984 bytepos = subreg_lowpart_offset (inner, outer);
1985 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1986 {
1987 temp = simplify_gen_subreg (outer, tmps[start],
1988 inner, 0);
1989 if (temp)
1990 {
1991 emit_move_insn (dst, temp);
1992 done = true;
1993 start++;
1994 }
1995 }
1996 }
1997
1998 /* If the first element wasn't the low part, try the last. */
1999 if (!done
2000 && start < finish - 1)
2001 {
2002 inner = GET_MODE (tmps[finish - 1]);
2003 bytepos = subreg_lowpart_offset (inner, outer);
2004 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2005 {
2006 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2007 inner, 0);
2008 if (temp)
2009 {
2010 emit_move_insn (dst, temp);
2011 done = true;
2012 finish--;
2013 }
2014 }
2015 }
2016
2017 /* Otherwise, simply initialize the result to zero. */
2018 if (!done)
2019 emit_move_insn (dst, CONST0_RTX (outer));
2020 }
2021
2022 /* Process the pieces. */
2023 for (i = start; i < finish; i++)
2024 {
2025 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2026 machine_mode mode = GET_MODE (tmps[i]);
2027 unsigned int bytelen = GET_MODE_SIZE (mode);
2028 unsigned int adj_bytelen;
2029 rtx dest = dst;
2030
2031 /* Handle trailing fragments that run over the size of the struct. */
2032 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2033 adj_bytelen = ssize - bytepos;
2034 else
2035 adj_bytelen = bytelen;
2036
2037 if (GET_CODE (dst) == CONCAT)
2038 {
2039 if (bytepos + adj_bytelen
2040 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2041 dest = XEXP (dst, 0);
2042 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2043 {
2044 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2045 dest = XEXP (dst, 1);
2046 }
2047 else
2048 {
2049 machine_mode dest_mode = GET_MODE (dest);
2050 machine_mode tmp_mode = GET_MODE (tmps[i]);
2051
2052 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2053
2054 if (GET_MODE_ALIGNMENT (dest_mode)
2055 >= GET_MODE_ALIGNMENT (tmp_mode))
2056 {
2057 dest = assign_stack_temp (dest_mode,
2058 GET_MODE_SIZE (dest_mode));
2059 emit_move_insn (adjust_address (dest,
2060 tmp_mode,
2061 bytepos),
2062 tmps[i]);
2063 dst = dest;
2064 }
2065 else
2066 {
2067 dest = assign_stack_temp (tmp_mode,
2068 GET_MODE_SIZE (tmp_mode));
2069 emit_move_insn (dest, tmps[i]);
2070 dst = adjust_address (dest, dest_mode, bytepos);
2071 }
2072 break;
2073 }
2074 }
2075
2076 /* Handle trailing fragments that run over the size of the struct. */
2077 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2078 {
2079 /* store_bit_field always takes its value from the lsb.
2080 Move the fragment to the lsb if it's not already there. */
2081 if (
2082 #ifdef BLOCK_REG_PADDING
2083 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2084 == (BYTES_BIG_ENDIAN ? upward : downward)
2085 #else
2086 BYTES_BIG_ENDIAN
2087 #endif
2088 )
2089 {
2090 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2091 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2092 shift, tmps[i], 0);
2093 }
2094
2095 /* Make sure not to write past the end of the struct. */
2096 store_bit_field (dest,
2097 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2098 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2099 VOIDmode, tmps[i]);
2100 }
2101
2102 /* Optimize the access just a bit. */
2103 else if (MEM_P (dest)
2104 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2105 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2106 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2107 && bytelen == GET_MODE_SIZE (mode))
2108 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2109
2110 else
2111 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2112 0, 0, mode, tmps[i]);
2113 }
2114
2115 /* Copy from the pseudo into the (probable) hard reg. */
2116 if (orig_dst != dst)
2117 emit_move_insn (orig_dst, dst);
2118 }
2119
2120 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2121 of the value stored in X. */
2122
2123 rtx
2124 maybe_emit_group_store (rtx x, tree type)
2125 {
2126 machine_mode mode = TYPE_MODE (type);
2127 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2128 if (GET_CODE (x) == PARALLEL)
2129 {
2130 rtx result = gen_reg_rtx (mode);
2131 emit_group_store (result, x, type, int_size_in_bytes (type));
2132 return result;
2133 }
2134 return x;
2135 }
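/* For example (a sketch; CALL_RESULT and VALTYPE are hypothetical names),
   a caller that needs the value in a single rtx can write

     rtx val = maybe_emit_group_store (call_result, valtype);

   If CALL_RESULT was a PARALLEL, VAL is a fresh pseudo in TYPE_MODE
   (VALTYPE); otherwise CALL_RESULT is returned unchanged.  */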
2136
2137 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2138
2139 This is used on targets that return BLKmode values in registers. */
2140
2141 void
2142 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2143 {
2144 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2145 rtx src = NULL, dst = NULL;
2146 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2147 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2148 machine_mode mode = GET_MODE (srcreg);
2149 machine_mode tmode = GET_MODE (target);
2150 machine_mode copy_mode;
2151
2152 /* BLKmode registers created in the back-end shouldn't have survived. */
2153 gcc_assert (mode != BLKmode);
2154
2155 /* If the structure doesn't take up a whole number of words, see whether
2156 SRCREG is padded on the left or on the right. If it's on the left,
2157 set PADDING_CORRECTION to the number of bits to skip.
2158
2159 In most ABIs, the structure will be returned at the least significant end of
2160 the register, which translates to right padding on little-endian
2161 targets and left padding on big-endian targets. The opposite
2162 holds if the structure is returned at the most significant
2163 end of the register. */
2164 if (bytes % UNITS_PER_WORD != 0
2165 && (targetm.calls.return_in_msb (type)
2166 ? !BYTES_BIG_ENDIAN
2167 : BYTES_BIG_ENDIAN))
2168 padding_correction
2169 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2170
2171 /* We can use a single move if we have an exact mode for the size. */
2172 else if (MEM_P (target)
2173 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2174 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2175 && bytes == GET_MODE_SIZE (mode))
2176 {
2177 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2178 return;
2179 }
2180
2181 /* And if we additionally have the same mode for a register. */
2182 else if (REG_P (target)
2183 && GET_MODE (target) == mode
2184 && bytes == GET_MODE_SIZE (mode))
2185 {
2186 emit_move_insn (target, srcreg);
2187 return;
2188 }
2189
2190 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2191 into a new pseudo which is a full word. */
2192 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2193 {
2194 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2195 mode = word_mode;
2196 }
2197
2198 /* Copy the structure BITSIZE bits at a time. If the target lives in
2199 memory, take care of not reading/writing past its end by selecting
2200 a copy mode suited to BITSIZE. This should always be possible given
2201 how it is computed.
2202
2203 If the target lives in a register, make sure not to select a copy mode
2204 larger than the mode of the register.
2205
2206 We could probably emit more efficient code for machines which do not use
2207 strict alignment, but it doesn't seem worth the effort at the current
2208 time. */
2209
2210 copy_mode = word_mode;
2211 if (MEM_P (target))
2212 {
2213 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2214 if (mem_mode != BLKmode)
2215 copy_mode = mem_mode;
2216 }
2217 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2218 copy_mode = tmode;
2219
2220 for (bitpos = 0, xbitpos = padding_correction;
2221 bitpos < bytes * BITS_PER_UNIT;
2222 bitpos += bitsize, xbitpos += bitsize)
2223 {
2224 /* We need a new source operand each time xbitpos is on a
2225 word boundary and when xbitpos == padding_correction
2226 (the first time through). */
2227 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2228 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2229
2230 /* We need a new destination operand each time bitpos is on
2231 a word boundary. */
2232 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2233 dst = target;
2234 else if (bitpos % BITS_PER_WORD == 0)
2235 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2236
2237 /* Use xbitpos for the source extraction (right justified) and
2238 bitpos for the destination store (left justified). */
2239 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2240 extract_bit_field (src, bitsize,
2241 xbitpos % BITS_PER_WORD, 1,
2242 NULL_RTX, copy_mode, copy_mode));
2243 }
2244 }
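/* A minimal sketch of a typical use (RET_REG is a hypothetical
   return-value hard register): spill a BLKmode value returned in
   registers into a stack slot so that it can be addressed bytewise.

     rtx mem = assign_stack_temp (BLKmode, int_size_in_bytes (type));
     copy_blkmode_from_reg (mem, ret_reg, type);

   assign_stack_temp and int_size_in_bytes are the same helpers already
   used elsewhere in this file.  */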
2245
2246 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2247 register if it contains any data, otherwise return null.
2248
2249 This is used on targets that return BLKmode values in registers. */
2250
2251 rtx
2252 copy_blkmode_to_reg (machine_mode mode, tree src)
2253 {
2254 int i, n_regs;
2255 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2256 unsigned int bitsize;
2257 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2258 machine_mode dst_mode;
2259
2260 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2261
2262 x = expand_normal (src);
2263
2264 bytes = int_size_in_bytes (TREE_TYPE (src));
2265 if (bytes == 0)
2266 return NULL_RTX;
2267
2268 /* If the structure doesn't take up a whole number of words, see
2269 whether the register value should be padded on the left or on
2270 the right. Set PADDING_CORRECTION to the number of padding
2271 bits needed on the left side.
2272
2273 In most ABIs, the structure will be returned at the least significant end of
2274 the register, which translates to right padding on little-endian
2275 targets and left padding on big-endian targets. The opposite
2276 holds if the structure is returned at the most significant
2277 end of the register. */
2278 if (bytes % UNITS_PER_WORD != 0
2279 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2280 ? !BYTES_BIG_ENDIAN
2281 : BYTES_BIG_ENDIAN))
2282 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2283 * BITS_PER_UNIT));
2284
2285 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2286 dst_words = XALLOCAVEC (rtx, n_regs);
2287 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2288
2289 /* Copy the structure BITSIZE bits at a time. */
2290 for (bitpos = 0, xbitpos = padding_correction;
2291 bitpos < bytes * BITS_PER_UNIT;
2292 bitpos += bitsize, xbitpos += bitsize)
2293 {
2294 /* We need a new destination pseudo each time xbitpos is
2295 on a word boundary and when xbitpos == padding_correction
2296 (the first time through). */
2297 if (xbitpos % BITS_PER_WORD == 0
2298 || xbitpos == padding_correction)
2299 {
2300 /* Generate an appropriate register. */
2301 dst_word = gen_reg_rtx (word_mode);
2302 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2303
2304 /* Clear the destination before we move anything into it. */
2305 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2306 }
2307
2308 /* We need a new source operand each time bitpos is on a word
2309 boundary. */
2310 if (bitpos % BITS_PER_WORD == 0)
2311 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2312
2313 /* Use bitpos for the source extraction (left justified) and
2314 xbitpos for the destination store (right justified). */
2315 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2316 0, 0, word_mode,
2317 extract_bit_field (src_word, bitsize,
2318 bitpos % BITS_PER_WORD, 1,
2319 NULL_RTX, word_mode, word_mode));
2320 }
2321
2322 if (mode == BLKmode)
2323 {
2324 /* Find the smallest integer mode large enough to hold the
2325 entire structure. */
2326 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2327 mode != VOIDmode;
2328 mode = GET_MODE_WIDER_MODE (mode))
2329 /* Have we found a large enough mode? */
2330 if (GET_MODE_SIZE (mode) >= bytes)
2331 break;
2332
2333 /* A suitable mode should have been found. */
2334 gcc_assert (mode != VOIDmode);
2335 }
2336
2337 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2338 dst_mode = word_mode;
2339 else
2340 dst_mode = mode;
2341 dst = gen_reg_rtx (dst_mode);
2342
2343 for (i = 0; i < n_regs; i++)
2344 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2345
2346 if (mode != dst_mode)
2347 dst = gen_lowpart (mode, dst);
2348
2349 return dst;
2350 }
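/* One plausible call pattern, sketched with hypothetical names RESULT_RTL
   and RETEXPR; the PUT_MODE step reflects that the function may pick its
   own integer mode when MODE is BLKmode:

     rtx val = copy_blkmode_to_reg (GET_MODE (result_rtl), retexpr);
     if (val)
       {
         PUT_MODE (result_rtl, GET_MODE (val));
         emit_move_insn (result_rtl, val);
       }

   A null result means the value occupies no bytes and nothing needs to be
   copied.  */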
2351
2352 /* Add a USE expression for REG to the (possibly empty) list pointed
2353 to by CALL_FUSAGE. REG must denote a hard register. */
2354
2355 void
2356 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2357 {
2358 gcc_assert (REG_P (reg));
2359
2360 if (!HARD_REGISTER_P (reg))
2361 return;
2362
2363 *call_fusage
2364 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2365 }
2366
2367 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2368 to by CALL_FUSAGE. REG must denote a hard register. */
2369
2370 void
2371 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2372 {
2373 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2374
2375 *call_fusage
2376 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2377 }
2378
2379 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2380 starting at REGNO. All of these registers must be hard registers. */
2381
2382 void
2383 use_regs (rtx *call_fusage, int regno, int nregs)
2384 {
2385 int i;
2386
2387 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2388
2389 for (i = 0; i < nregs; i++)
2390 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2391 }
2392
2393 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2394 PARALLEL REGS. This is for calls that pass values in multiple
2395 non-contiguous locations. The Irix 6 ABI has examples of this. */
2396
2397 void
2398 use_group_regs (rtx *call_fusage, rtx regs)
2399 {
2400 int i;
2401
2402 for (i = 0; i < XVECLEN (regs, 0); i++)
2403 {
2404 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2405
2406 /* A NULL entry means the parameter goes both on the stack and in
2407 registers. This can also be a MEM for targets that pass values
2408 partially on the stack and partially in registers. */
2409 if (reg != 0 && REG_P (reg))
2410 use_reg (call_fusage, reg);
2411 }
2412 }
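/* A short sketch of how the CALL_FUSAGE helpers above combine when a call
   is being emitted (FIRST_ARG_REGNO, NREGS and ARG_RTX are hypothetical):

     rtx call_fusage = NULL_RTX;

     use_regs (&call_fusage, first_arg_regno, nregs);
     if (GET_CODE (arg_rtx) == PARALLEL)
       use_group_regs (&call_fusage, arg_rtx);

   The accumulated list is later attached to the CALL_INSN (via
   CALL_INSN_FUNCTION_USAGE) so that data-flow passes see the argument
   registers as used by the call.  */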
2413
2414 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2415 assignment and the code of the expression on the RHS is CODE. Return
2416 NULL otherwise. */
2417
2418 static gimple
2419 get_def_for_expr (tree name, enum tree_code code)
2420 {
2421 gimple def_stmt;
2422
2423 if (TREE_CODE (name) != SSA_NAME)
2424 return NULL;
2425
2426 def_stmt = get_gimple_for_ssa_name (name);
2427 if (!def_stmt
2428 || gimple_assign_rhs_code (def_stmt) != code)
2429 return NULL;
2430
2431 return def_stmt;
2432 }
2433
2434 #ifdef HAVE_conditional_move
2435 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2436 assignment and the class of the expression on the RHS is CLASS. Return
2437 NULL otherwise. */
2438
2439 static gimple
2440 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2441 {
2442 gimple def_stmt;
2443
2444 if (TREE_CODE (name) != SSA_NAME)
2445 return NULL;
2446
2447 def_stmt = get_gimple_for_ssa_name (name);
2448 if (!def_stmt
2449 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2450 return NULL;
2451
2452 return def_stmt;
2453 }
2454 #endif
2455 \f
2456
2457 /* Determine whether the LEN bytes generated by CONSTFUN can be
2458 stored to memory using several move instructions. CONSTFUNDATA is
2459 a pointer which will be passed as an argument in every CONSTFUN call.
2460 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2461 a memset operation and false if it's a copy of a constant string.
2462 Return nonzero if a call to store_by_pieces should succeed. */
2463
2464 int
2465 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2466 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2467 void *constfundata, unsigned int align, bool memsetp)
2468 {
2469 unsigned HOST_WIDE_INT l;
2470 unsigned int max_size;
2471 HOST_WIDE_INT offset = 0;
2472 machine_mode mode;
2473 enum insn_code icode;
2474 int reverse;
2475 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2476 rtx cst ATTRIBUTE_UNUSED;
2477
2478 if (len == 0)
2479 return 1;
2480
2481 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2482 memsetp
2483 ? SET_BY_PIECES
2484 : STORE_BY_PIECES,
2485 optimize_insn_for_speed_p ()))
2486 return 0;
2487
2488 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2489
2490 /* We would first store what we can in the largest integer mode, then go to
2491 successively smaller modes. */
2492
2493 for (reverse = 0;
2494 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2495 reverse++)
2496 {
2497 l = len;
2498 max_size = STORE_MAX_PIECES + 1;
2499 while (max_size > 1 && l > 0)
2500 {
2501 mode = widest_int_mode_for_size (max_size);
2502
2503 if (mode == VOIDmode)
2504 break;
2505
2506 icode = optab_handler (mov_optab, mode);
2507 if (icode != CODE_FOR_nothing
2508 && align >= GET_MODE_ALIGNMENT (mode))
2509 {
2510 unsigned int size = GET_MODE_SIZE (mode);
2511
2512 while (l >= size)
2513 {
2514 if (reverse)
2515 offset -= size;
2516
2517 cst = (*constfun) (constfundata, offset, mode);
2518 if (!targetm.legitimate_constant_p (mode, cst))
2519 return 0;
2520
2521 if (!reverse)
2522 offset += size;
2523
2524 l -= size;
2525 }
2526 }
2527
2528 max_size = GET_MODE_SIZE (mode);
2529 }
2530
2531 /* The code above should have handled everything. */
2532 gcc_assert (!l);
2533 }
2534
2535 return 1;
2536 }
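/* A hedged sketch of the usual pairing of can_store_by_pieces with
   store_by_pieces below.  MY_READ_STR is a hypothetical callback of the
   required shape; it is assumed here to lean on c_readstr, the helper the
   string builtins use to load part of a C string into an rtx of MODE.

     static rtx
     my_read_str (void *data, HOST_WIDE_INT offset, machine_mode mode)
     {
       const char *str = (const char *) data;
       return c_readstr (str + offset, mode);
     }

     ...
     if (can_store_by_pieces (len, my_read_str, (void *) str, align, false))
       store_by_pieces (dest_mem, len, my_read_str, (void *) str,
                        align, false, 0);

   The query must come first: store_by_pieces asserts that the target has
   already agreed to the piecewise strategy.  */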
2537
2538 /* Generate several move instructions to store LEN bytes generated by
2539 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2540 pointer which will be passed as an argument in every CONSTFUN call.
2541 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2542 a memset operation and false if it's a copy of a constant string.
2543 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2544 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2545 stpcpy. */
2546
2547 rtx
2548 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2549 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2550 void *constfundata, unsigned int align, bool memsetp, int endp)
2551 {
2552 machine_mode to_addr_mode = get_address_mode (to);
2553 struct store_by_pieces_d data;
2554
2555 if (len == 0)
2556 {
2557 gcc_assert (endp != 2);
2558 return to;
2559 }
2560
2561 gcc_assert (targetm.use_by_pieces_infrastructure_p
2562 (len, align,
2563 memsetp
2564 ? SET_BY_PIECES
2565 : STORE_BY_PIECES,
2566 optimize_insn_for_speed_p ()));
2567
2568 data.constfun = constfun;
2569 data.constfundata = constfundata;
2570 data.len = len;
2571 data.to = to;
2572 store_by_pieces_1 (&data, align);
2573 if (endp)
2574 {
2575 rtx to1;
2576
2577 gcc_assert (!data.reverse);
2578 if (data.autinc_to)
2579 {
2580 if (endp == 2)
2581 {
2582 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2583 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2584 else
2585 data.to_addr = copy_to_mode_reg (to_addr_mode,
2586 plus_constant (to_addr_mode,
2587 data.to_addr,
2588 -1));
2589 }
2590 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2591 data.offset);
2592 }
2593 else
2594 {
2595 if (endp == 2)
2596 --data.offset;
2597 to1 = adjust_address (data.to, QImode, data.offset);
2598 }
2599 return to1;
2600 }
2601 else
2602 return data.to;
2603 }
2604
2605 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2606 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2607
2608 static void
2609 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2610 {
2611 struct store_by_pieces_d data;
2612
2613 if (len == 0)
2614 return;
2615
2616 data.constfun = clear_by_pieces_1;
2617 data.constfundata = NULL;
2618 data.len = len;
2619 data.to = to;
2620 store_by_pieces_1 (&data, align);
2621 }
2622
2623 /* Callback routine for clear_by_pieces.
2624 Return const0_rtx unconditionally. */
2625
2626 static rtx
2627 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2628 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2629 machine_mode mode ATTRIBUTE_UNUSED)
2630 {
2631 return const0_rtx;
2632 }
2633
2634 /* Subroutine of clear_by_pieces and store_by_pieces.
2635 Generate several move instructions to store LEN bytes of block TO. (A MEM
2636 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2637
2638 static void
2639 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2640 unsigned int align ATTRIBUTE_UNUSED)
2641 {
2642 machine_mode to_addr_mode = get_address_mode (data->to);
2643 rtx to_addr = XEXP (data->to, 0);
2644 unsigned int max_size = STORE_MAX_PIECES + 1;
2645 enum insn_code icode;
2646
2647 data->offset = 0;
2648 data->to_addr = to_addr;
2649 data->autinc_to
2650 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2651 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2652
2653 data->explicit_inc_to = 0;
2654 data->reverse
2655 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2656 if (data->reverse)
2657 data->offset = data->len;
2658
2659 /* If storing requires more than two move insns,
2660 copy addresses to registers (to make displacements shorter)
2661 and use post-increment if available. */
2662 if (!data->autinc_to
2663 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2664 {
2665 /* Determine the main mode we'll be using.
2666 MODE might not be used depending on the definitions of the
2667 USE_* macros below. */
2668 machine_mode mode ATTRIBUTE_UNUSED
2669 = widest_int_mode_for_size (max_size);
2670
2671 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2672 {
2673 data->to_addr = copy_to_mode_reg (to_addr_mode,
2674 plus_constant (to_addr_mode,
2675 to_addr,
2676 data->len));
2677 data->autinc_to = 1;
2678 data->explicit_inc_to = -1;
2679 }
2680
2681 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2682 && ! data->autinc_to)
2683 {
2684 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2685 data->autinc_to = 1;
2686 data->explicit_inc_to = 1;
2687 }
2688
2689 if (!data->autinc_to && CONSTANT_P (to_addr))
2690 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2691 }
2692
2693 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2694
2695 /* First store what we can in the largest integer mode, then go to
2696 successively smaller modes. */
2697
2698 while (max_size > 1 && data->len > 0)
2699 {
2700 machine_mode mode = widest_int_mode_for_size (max_size);
2701
2702 if (mode == VOIDmode)
2703 break;
2704
2705 icode = optab_handler (mov_optab, mode);
2706 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2707 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2708
2709 max_size = GET_MODE_SIZE (mode);
2710 }
2711
2712 /* The code above should have handled everything. */
2713 gcc_assert (!data->len);
2714 }
2715
2716 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2717 with move instructions for mode MODE. GENFUN is the gen_... function
2718 to make a move insn for that mode. DATA has all the other info. */
2719
2720 static void
2721 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2722 struct store_by_pieces_d *data)
2723 {
2724 unsigned int size = GET_MODE_SIZE (mode);
2725 rtx to1, cst;
2726
2727 while (data->len >= size)
2728 {
2729 if (data->reverse)
2730 data->offset -= size;
2731
2732 if (data->autinc_to)
2733 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2734 data->offset);
2735 else
2736 to1 = adjust_address (data->to, mode, data->offset);
2737
2738 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2739 emit_insn (gen_add2_insn (data->to_addr,
2740 gen_int_mode (-(HOST_WIDE_INT) size,
2741 GET_MODE (data->to_addr))));
2742
2743 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2744 emit_insn ((*genfun) (to1, cst));
2745
2746 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2747 emit_insn (gen_add2_insn (data->to_addr,
2748 gen_int_mode (size,
2749 GET_MODE (data->to_addr))));
2750
2751 if (! data->reverse)
2752 data->offset += size;
2753
2754 data->len -= size;
2755 }
2756 }
2757 \f
2758 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2759 its length in bytes. */
2760
2761 rtx
2762 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2763 unsigned int expected_align, HOST_WIDE_INT expected_size,
2764 unsigned HOST_WIDE_INT min_size,
2765 unsigned HOST_WIDE_INT max_size,
2766 unsigned HOST_WIDE_INT probable_max_size)
2767 {
2768 machine_mode mode = GET_MODE (object);
2769 unsigned int align;
2770
2771 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2772
2773 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2774 just move a zero. Otherwise, do this a piece at a time. */
2775 if (mode != BLKmode
2776 && CONST_INT_P (size)
2777 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2778 {
2779 rtx zero = CONST0_RTX (mode);
2780 if (zero != NULL)
2781 {
2782 emit_move_insn (object, zero);
2783 return NULL;
2784 }
2785
2786 if (COMPLEX_MODE_P (mode))
2787 {
2788 zero = CONST0_RTX (GET_MODE_INNER (mode));
2789 if (zero != NULL)
2790 {
2791 write_complex_part (object, zero, 0);
2792 write_complex_part (object, zero, 1);
2793 return NULL;
2794 }
2795 }
2796 }
2797
2798 if (size == const0_rtx)
2799 return NULL;
2800
2801 align = MEM_ALIGN (object);
2802
2803 if (CONST_INT_P (size)
2804 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2805 CLEAR_BY_PIECES,
2806 optimize_insn_for_speed_p ()))
2807 clear_by_pieces (object, INTVAL (size), align);
2808 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2809 expected_align, expected_size,
2810 min_size, max_size, probable_max_size))
2811 ;
2812 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2813 return set_storage_via_libcall (object, size, const0_rtx,
2814 method == BLOCK_OP_TAILCALL);
2815 else
2816 gcc_unreachable ();
2817
2818 return NULL;
2819 }
2820
2821 rtx
2822 clear_storage (rtx object, rtx size, enum block_op_methods method)
2823 {
2824 unsigned HOST_WIDE_INT max, min = 0;
2825 if (GET_CODE (size) == CONST_INT)
2826 min = max = UINTVAL (size);
2827 else
2828 max = GET_MODE_MASK (GET_MODE (size));
2829 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2830 }
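/* For instance (a sketch; DEST_MEM and NBYTES are hypothetical), zeroing a
   block of known size goes through the entry point above:

     clear_storage (dest_mem, GEN_INT (nbytes), BLOCK_OP_NORMAL);

   which then picks clear_by_pieces, a setmem pattern or a memset libcall
   as appropriate.  */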
2831
2832
2833 /* A subroutine of clear_storage. Expand a call to memset.
2834 Return the return value of memset, 0 otherwise. */
2835
2836 rtx
2837 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2838 {
2839 tree call_expr, fn, object_tree, size_tree, val_tree;
2840 machine_mode size_mode;
2841 rtx retval;
2842
2843 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2844 place those pseudos into a VAR_DECL and use them later. */
2845
2846 object = copy_addr_to_reg (XEXP (object, 0));
2847
2848 size_mode = TYPE_MODE (sizetype);
2849 size = convert_to_mode (size_mode, size, 1);
2850 size = copy_to_mode_reg (size_mode, size);
2851
2852 /* It is incorrect to use the libcall calling conventions to call
2853 memset in this context. This could be a user call to memset and
2854 the user may wish to examine the return value from memset. For
2855 targets where libcalls and normal calls have different conventions
2856 for returning pointers, we could end up generating incorrect code. */
2857
2858 object_tree = make_tree (ptr_type_node, object);
2859 if (!CONST_INT_P (val))
2860 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2861 size_tree = make_tree (sizetype, size);
2862 val_tree = make_tree (integer_type_node, val);
2863
2864 fn = clear_storage_libcall_fn (true);
2865 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2866 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2867
2868 retval = expand_normal (call_expr);
2869
2870 return retval;
2871 }
2872
2873 /* A subroutine of set_storage_via_libcall. Create the tree node
2874 for the function we use for block clears. */
2875
2876 tree block_clear_fn;
2877
2878 void
2879 init_block_clear_fn (const char *asmspec)
2880 {
2881 if (!block_clear_fn)
2882 {
2883 tree fn, args;
2884
2885 fn = get_identifier ("memset");
2886 args = build_function_type_list (ptr_type_node, ptr_type_node,
2887 integer_type_node, sizetype,
2888 NULL_TREE);
2889
2890 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2891 DECL_EXTERNAL (fn) = 1;
2892 TREE_PUBLIC (fn) = 1;
2893 DECL_ARTIFICIAL (fn) = 1;
2894 TREE_NOTHROW (fn) = 1;
2895 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2896 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2897
2898 block_clear_fn = fn;
2899 }
2900
2901 if (asmspec)
2902 set_user_assembler_name (block_clear_fn, asmspec);
2903 }
2904
2905 static tree
2906 clear_storage_libcall_fn (int for_call)
2907 {
2908 static bool emitted_extern;
2909
2910 if (!block_clear_fn)
2911 init_block_clear_fn (NULL);
2912
2913 if (for_call && !emitted_extern)
2914 {
2915 emitted_extern = true;
2916 make_decl_rtl (block_clear_fn);
2917 }
2918
2919 return block_clear_fn;
2920 }
2921 \f
2922 /* Expand a setmem pattern; return true if successful. */
2923
2924 bool
2925 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2926 unsigned int expected_align, HOST_WIDE_INT expected_size,
2927 unsigned HOST_WIDE_INT min_size,
2928 unsigned HOST_WIDE_INT max_size,
2929 unsigned HOST_WIDE_INT probable_max_size)
2930 {
2931 /* Try the most limited insn first, because there's no point
2932 including more than one in the machine description unless
2933 the more limited one has some advantage. */
2934
2935 machine_mode mode;
2936
2937 if (expected_align < align)
2938 expected_align = align;
2939 if (expected_size != -1)
2940 {
2941 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2942 expected_size = max_size;
2943 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2944 expected_size = min_size;
2945 }
2946
2947 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2948 mode = GET_MODE_WIDER_MODE (mode))
2949 {
2950 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2951
2952 if (code != CODE_FOR_nothing
2953 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2954 here because if SIZE is less than the mode mask, as it is
2955 returned by the macro, it will definitely be less than the
2956 actual mode mask. Since SIZE is within the Pmode address
2957 space, we limit MODE to Pmode. */
2958 && ((CONST_INT_P (size)
2959 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2960 <= (GET_MODE_MASK (mode) >> 1)))
2961 || max_size <= (GET_MODE_MASK (mode) >> 1)
2962 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2963 {
2964 struct expand_operand ops[9];
2965 unsigned int nops;
2966
2967 nops = insn_data[(int) code].n_generator_args;
2968 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2969
2970 create_fixed_operand (&ops[0], object);
2971 /* The check above guarantees that this size conversion is valid. */
2972 create_convert_operand_to (&ops[1], size, mode, true);
2973 create_convert_operand_from (&ops[2], val, byte_mode, true);
2974 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2975 if (nops >= 6)
2976 {
2977 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2978 create_integer_operand (&ops[5], expected_size);
2979 }
2980 if (nops >= 8)
2981 {
2982 create_integer_operand (&ops[6], min_size);
2983 /* If we cannot represent the maximal size,
2984 make the parameter NULL. */
2985 if ((HOST_WIDE_INT) max_size != -1)
2986 create_integer_operand (&ops[7], max_size);
2987 else
2988 create_fixed_operand (&ops[7], NULL);
2989 }
2990 if (nops == 9)
2991 {
2992 /* If we cannot represent the maximal size,
2993 make the parameter NULL. */
2994 if ((HOST_WIDE_INT) probable_max_size != -1)
2995 create_integer_operand (&ops[8], probable_max_size);
2996 else
2997 create_fixed_operand (&ops[8], NULL);
2998 }
2999 if (maybe_expand_insn (code, nops, ops))
3000 return true;
3001 }
3002 }
3003
3004 return false;
3005 }
3006
3007 \f
3008 /* Write to one of the components of the complex value CPLX. Write VAL to
3009 the real part if IMAG_P is false, and the imaginary part if it's true. */
3010
3011 void
3012 write_complex_part (rtx cplx, rtx val, bool imag_p)
3013 {
3014 machine_mode cmode;
3015 machine_mode imode;
3016 unsigned ibitsize;
3017
3018 if (GET_CODE (cplx) == CONCAT)
3019 {
3020 emit_move_insn (XEXP (cplx, imag_p), val);
3021 return;
3022 }
3023
3024 cmode = GET_MODE (cplx);
3025 imode = GET_MODE_INNER (cmode);
3026 ibitsize = GET_MODE_BITSIZE (imode);
3027
3028 /* For MEMs simplify_gen_subreg may generate an invalid new address
3029 because, e.g., the original address is considered mode-dependent
3030 by the target, which restricts simplify_subreg from invoking
3031 adjust_address_nv. Instead of preparing fallback support for an
3032 invalid address, we call adjust_address_nv directly. */
3033 if (MEM_P (cplx))
3034 {
3035 emit_move_insn (adjust_address_nv (cplx, imode,
3036 imag_p ? GET_MODE_SIZE (imode) : 0),
3037 val);
3038 return;
3039 }
3040
3041 /* If the sub-object is at least word sized, then we know that subregging
3042 will work. This special case is important, since store_bit_field
3043 wants to operate on integer modes, and there's rarely an OImode to
3044 correspond to TCmode. */
3045 if (ibitsize >= BITS_PER_WORD
3046 /* For hard regs we have exact predicates. Assume we can split
3047 the original object if it spans an even number of hard regs.
3048 This special case is important for SCmode on 64-bit platforms
3049 where the natural size of floating-point regs is 32-bit. */
3050 || (REG_P (cplx)
3051 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3052 && REG_NREGS (cplx) % 2 == 0))
3053 {
3054 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3055 imag_p ? GET_MODE_SIZE (imode) : 0);
3056 if (part)
3057 {
3058 emit_move_insn (part, val);
3059 return;
3060 }
3061 else
3062 /* simplify_gen_subreg may fail for sub-word MEMs. */
3063 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3064 }
3065
3066 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3067 }
3068
3069 /* Extract one of the components of the complex value CPLX. Extract the
3070 real part if IMAG_P is false, and the imaginary part if it's true. */
3071
3072 static rtx
3073 read_complex_part (rtx cplx, bool imag_p)
3074 {
3075 machine_mode cmode, imode;
3076 unsigned ibitsize;
3077
3078 if (GET_CODE (cplx) == CONCAT)
3079 return XEXP (cplx, imag_p);
3080
3081 cmode = GET_MODE (cplx);
3082 imode = GET_MODE_INNER (cmode);
3083 ibitsize = GET_MODE_BITSIZE (imode);
3084
3085 /* Special case reads from complex constants that got spilled to memory. */
3086 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3087 {
3088 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3089 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3090 {
3091 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3092 if (CONSTANT_CLASS_P (part))
3093 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3094 }
3095 }
3096
3097 /* For MEMs simplify_gen_subreg may generate an invalid new address
3098 because, e.g., the original address is considered mode-dependent
3099 by the target, which restricts simplify_subreg from invoking
3100 adjust_address_nv. Instead of preparing fallback support for an
3101 invalid address, we call adjust_address_nv directly. */
3102 if (MEM_P (cplx))
3103 return adjust_address_nv (cplx, imode,
3104 imag_p ? GET_MODE_SIZE (imode) : 0);
3105
3106 /* If the sub-object is at least word sized, then we know that subregging
3107 will work. This special case is important, since extract_bit_field
3108 wants to operate on integer modes, and there's rarely an OImode to
3109 correspond to TCmode. */
3110 if (ibitsize >= BITS_PER_WORD
3111 /* For hard regs we have exact predicates. Assume we can split
3112 the original object if it spans an even number of hard regs.
3113 This special case is important for SCmode on 64-bit platforms
3114 where the natural size of floating-point regs is 32-bit. */
3115 || (REG_P (cplx)
3116 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3117 && REG_NREGS (cplx) % 2 == 0))
3118 {
3119 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3120 imag_p ? GET_MODE_SIZE (imode) : 0);
3121 if (ret)
3122 return ret;
3123 else
3124 /* simplify_gen_subreg may fail for sub-word MEMs. */
3125 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3126 }
3127
3128 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3129 true, NULL_RTX, imode, imode);
3130 }
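/* As a small example of the two helpers above (X and TARGET hypothetical),
   swapping the parts of a complex value can be written as

     rtx re = read_complex_part (x, false);
     rtx im = read_complex_part (x, true);
     write_complex_part (target, im, false);
     write_complex_part (target, re, true);

   Both helpers cope with CONCATs, MEMs and (subregs of) registers, as
   their bodies above show.  */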
3131 \f
3132 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3133 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3134 represented in NEW_MODE. If FORCE is true, this will never happen, as
3135 we'll force-create a SUBREG if needed. */
3136
3137 static rtx
3138 emit_move_change_mode (machine_mode new_mode,
3139 machine_mode old_mode, rtx x, bool force)
3140 {
3141 rtx ret;
3142
3143 if (push_operand (x, GET_MODE (x)))
3144 {
3145 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3146 MEM_COPY_ATTRIBUTES (ret, x);
3147 }
3148 else if (MEM_P (x))
3149 {
3150 /* We don't have to worry about changing the address since the
3151 size in bytes is supposed to be the same. */
3152 if (reload_in_progress)
3153 {
3154 /* Copy the MEM to change the mode and move any
3155 substitutions from the old MEM to the new one. */
3156 ret = adjust_address_nv (x, new_mode, 0);
3157 copy_replacements (x, ret);
3158 }
3159 else
3160 ret = adjust_address (x, new_mode, 0);
3161 }
3162 else
3163 {
3164 /* Note that we do want simplify_subreg's behavior of validating
3165 that the new mode is ok for a hard register. If we were to use
3166 simplify_gen_subreg, we would create the subreg, but would
3167 probably run into the target not being able to implement it. */
3168 /* Except, of course, when FORCE is true, when this is exactly what
3169 we want. Which is needed for CCmodes on some targets. */
3170 if (force)
3171 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3172 else
3173 ret = simplify_subreg (new_mode, x, old_mode, 0);
3174 }
3175
3176 return ret;
3177 }
3178
3179 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3180 an integer mode of the same size as MODE. Returns the instruction
3181 emitted, or NULL if such a move could not be generated. */
3182
3183 static rtx_insn *
3184 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3185 {
3186 machine_mode imode;
3187 enum insn_code code;
3188
3189 /* There must exist a mode of the exact size we require. */
3190 imode = int_mode_for_mode (mode);
3191 if (imode == BLKmode)
3192 return NULL;
3193
3194 /* The target must support moves in this mode. */
3195 code = optab_handler (mov_optab, imode);
3196 if (code == CODE_FOR_nothing)
3197 return NULL;
3198
3199 x = emit_move_change_mode (imode, mode, x, force);
3200 if (x == NULL_RTX)
3201 return NULL;
3202 y = emit_move_change_mode (imode, mode, y, force);
3203 if (y == NULL_RTX)
3204 return NULL;
3205 return emit_insn (GEN_FCN (code) (x, y));
3206 }
3207
3208 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3209 Return an equivalent MEM that does not use an auto-increment. */
3210
3211 rtx
3212 emit_move_resolve_push (machine_mode mode, rtx x)
3213 {
3214 enum rtx_code code = GET_CODE (XEXP (x, 0));
3215 HOST_WIDE_INT adjust;
3216 rtx temp;
3217
3218 adjust = GET_MODE_SIZE (mode);
3219 #ifdef PUSH_ROUNDING
3220 adjust = PUSH_ROUNDING (adjust);
3221 #endif
3222 if (code == PRE_DEC || code == POST_DEC)
3223 adjust = -adjust;
3224 else if (code == PRE_MODIFY || code == POST_MODIFY)
3225 {
3226 rtx expr = XEXP (XEXP (x, 0), 1);
3227 HOST_WIDE_INT val;
3228
3229 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3230 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3231 val = INTVAL (XEXP (expr, 1));
3232 if (GET_CODE (expr) == MINUS)
3233 val = -val;
3234 gcc_assert (adjust == val || adjust == -val);
3235 adjust = val;
3236 }
3237
3238 /* Do not use anti_adjust_stack, since we don't want to update
3239 stack_pointer_delta. */
3240 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3241 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3242 0, OPTAB_LIB_WIDEN);
3243 if (temp != stack_pointer_rtx)
3244 emit_move_insn (stack_pointer_rtx, temp);
3245
3246 switch (code)
3247 {
3248 case PRE_INC:
3249 case PRE_DEC:
3250 case PRE_MODIFY:
3251 temp = stack_pointer_rtx;
3252 break;
3253 case POST_INC:
3254 case POST_DEC:
3255 case POST_MODIFY:
3256 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3257 break;
3258 default:
3259 gcc_unreachable ();
3260 }
3261
3262 return replace_equiv_address (x, temp);
3263 }
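/* The usual call pattern, as also used further down in this file, is

     if (push_operand (x, mode))
       x = emit_move_resolve_push (mode, x);

   after which X is an ordinary MEM based on the already-adjusted stack
   pointer.  */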
3264
3265 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3266 X is known to satisfy push_operand, and MODE is known to be complex.
3267 Returns the last instruction emitted. */
3268
3269 rtx_insn *
3270 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3271 {
3272 machine_mode submode = GET_MODE_INNER (mode);
3273 bool imag_first;
3274
3275 #ifdef PUSH_ROUNDING
3276 unsigned int submodesize = GET_MODE_SIZE (submode);
3277
3278 /* In case we output to the stack, but the size is smaller than what the
3279 machine can push exactly, we need to use move instructions. */
3280 if (PUSH_ROUNDING (submodesize) != submodesize)
3281 {
3282 x = emit_move_resolve_push (mode, x);
3283 return emit_move_insn (x, y);
3284 }
3285 #endif
3286
3287 /* Note that the real part always precedes the imag part in memory
3288 regardless of machine's endianness. */
3289 switch (GET_CODE (XEXP (x, 0)))
3290 {
3291 case PRE_DEC:
3292 case POST_DEC:
3293 imag_first = true;
3294 break;
3295 case PRE_INC:
3296 case POST_INC:
3297 imag_first = false;
3298 break;
3299 default:
3300 gcc_unreachable ();
3301 }
3302
3303 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3304 read_complex_part (y, imag_first));
3305 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3306 read_complex_part (y, !imag_first));
3307 }
3308
3309 /* A subroutine of emit_move_complex. Perform the move from Y to X
3310 via two moves of the parts. Returns the last instruction emitted. */
3311
3312 rtx_insn *
3313 emit_move_complex_parts (rtx x, rtx y)
3314 {
3315 /* Show the output dies here. This is necessary for SUBREGs
3316 of pseudos since we cannot track their lifetimes correctly;
3317 hard regs shouldn't appear here except as return values. */
3318 if (!reload_completed && !reload_in_progress
3319 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3320 emit_clobber (x);
3321
3322 write_complex_part (x, read_complex_part (y, false), false);
3323 write_complex_part (x, read_complex_part (y, true), true);
3324
3325 return get_last_insn ();
3326 }
3327
3328 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3329 MODE is known to be complex. Returns the last instruction emitted. */
3330
3331 static rtx_insn *
3332 emit_move_complex (machine_mode mode, rtx x, rtx y)
3333 {
3334 bool try_int;
3335
3336 /* Need to take special care for pushes, to maintain proper ordering
3337 of the data, and possibly extra padding. */
3338 if (push_operand (x, mode))
3339 return emit_move_complex_push (mode, x, y);
3340
3341 /* See if we can coerce the target into moving both values at once, except
3342 for floating point where we favor moving as parts if this is easy. */
3343 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3344 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3345 && !(REG_P (x)
3346 && HARD_REGISTER_P (x)
3347 && REG_NREGS (x) == 1)
3348 && !(REG_P (y)
3349 && HARD_REGISTER_P (y)
3350 && REG_NREGS (y) == 1))
3351 try_int = false;
3352 /* Not possible if the values are inherently not adjacent. */
3353 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3354 try_int = false;
3355 /* Is possible if both are registers (or subregs of registers). */
3356 else if (register_operand (x, mode) && register_operand (y, mode))
3357 try_int = true;
3358 /* If one of the operands is a memory, and alignment constraints
3359 are friendly enough, we may be able to do combined memory operations.
3360 We do not attempt this if Y is a constant because that combination is
3361 usually better with the by-parts thing below. */
3362 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3363 && (!STRICT_ALIGNMENT
3364 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3365 try_int = true;
3366 else
3367 try_int = false;
3368
3369 if (try_int)
3370 {
3371 rtx_insn *ret;
3372
3373 /* For memory to memory moves, optimal behavior can be had with the
3374 existing block move logic. */
3375 if (MEM_P (x) && MEM_P (y))
3376 {
3377 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3378 BLOCK_OP_NO_LIBCALL);
3379 return get_last_insn ();
3380 }
3381
3382 ret = emit_move_via_integer (mode, x, y, true);
3383 if (ret)
3384 return ret;
3385 }
3386
3387 return emit_move_complex_parts (x, y);
3388 }
3389
3390 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3391 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3392
3393 static rtx_insn *
3394 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3395 {
3396 rtx_insn *ret;
3397
3398 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3399 if (mode != CCmode)
3400 {
3401 enum insn_code code = optab_handler (mov_optab, CCmode);
3402 if (code != CODE_FOR_nothing)
3403 {
3404 x = emit_move_change_mode (CCmode, mode, x, true);
3405 y = emit_move_change_mode (CCmode, mode, y, true);
3406 return emit_insn (GEN_FCN (code) (x, y));
3407 }
3408 }
3409
3410 /* Otherwise, find the MODE_INT mode of the same width. */
3411 ret = emit_move_via_integer (mode, x, y, false);
3412 gcc_assert (ret != NULL);
3413 return ret;
3414 }
3415
3416 /* Return true if word I of OP lies entirely in the
3417 undefined bits of a paradoxical subreg. */
3418
3419 static bool
3420 undefined_operand_subword_p (const_rtx op, int i)
3421 {
3422 machine_mode innermode, innermostmode;
3423 int offset;
3424 if (GET_CODE (op) != SUBREG)
3425 return false;
3426 innermode = GET_MODE (op);
3427 innermostmode = GET_MODE (SUBREG_REG (op));
3428 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3429 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3430 memory, except for a paradoxical subreg where we define
3431 SUBREG_BYTE to be 0; undo this exception as in
3432 simplify_subreg. */
3433 if (SUBREG_BYTE (op) == 0
3434 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3435 {
3436 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3437 if (WORDS_BIG_ENDIAN)
3438 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3439 if (BYTES_BIG_ENDIAN)
3440 offset += difference % UNITS_PER_WORD;
3441 }
3442 if (offset >= GET_MODE_SIZE (innermostmode)
3443 || offset <= -GET_MODE_SIZE (word_mode))
3444 return true;
3445 return false;
3446 }
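/* Worked example, assuming a little-endian target with 64-bit words: for
   (subreg:TI (reg:DI Y) 0), word 0 starts at offset 0 and overlaps the
   DImode value, so it is defined; word 1 starts at offset 8, which is not
   below GET_MODE_SIZE (DImode) == 8, so it lies entirely in the undefined
   bits of the paradoxical subreg and the function returns true for I == 1.  */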
3447
3448 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3449 MODE is any multi-word or full-word mode that lacks a move_insn
3450 pattern. Note that you will get better code if you define such
3451 patterns, even if they must turn into multiple assembler instructions. */
3452
3453 static rtx_insn *
3454 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3455 {
3456 rtx_insn *last_insn = 0;
3457 rtx_insn *seq;
3458 rtx inner;
3459 bool need_clobber;
3460 int i;
3461
3462 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3463
3464 /* If X is a push on the stack, do the push now and replace
3465 X with a reference to the stack pointer. */
3466 if (push_operand (x, mode))
3467 x = emit_move_resolve_push (mode, x);
3468
3469 /* If we are in reload, see if either operand is a MEM whose address
3470 is scheduled for replacement. */
3471 if (reload_in_progress && MEM_P (x)
3472 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3473 x = replace_equiv_address_nv (x, inner);
3474 if (reload_in_progress && MEM_P (y)
3475 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3476 y = replace_equiv_address_nv (y, inner);
3477
3478 start_sequence ();
3479
3480 need_clobber = false;
3481 for (i = 0;
3482 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3483 i++)
3484 {
3485 rtx xpart = operand_subword (x, i, 1, mode);
3486 rtx ypart;
3487
3488 /* Do not generate code for a move if it would come entirely
3489 from the undefined bits of a paradoxical subreg. */
3490 if (undefined_operand_subword_p (y, i))
3491 continue;
3492
3493 ypart = operand_subword (y, i, 1, mode);
3494
3495 /* If we can't get a part of Y, put Y into memory if it is a
3496 constant. Otherwise, force it into a register. Then we must
3497 be able to get a part of Y. */
3498 if (ypart == 0 && CONSTANT_P (y))
3499 {
3500 y = use_anchored_address (force_const_mem (mode, y));
3501 ypart = operand_subword (y, i, 1, mode);
3502 }
3503 else if (ypart == 0)
3504 ypart = operand_subword_force (y, i, mode);
3505
3506 gcc_assert (xpart && ypart);
3507
3508 need_clobber |= (GET_CODE (xpart) == SUBREG);
3509
3510 last_insn = emit_move_insn (xpart, ypart);
3511 }
3512
3513 seq = get_insns ();
3514 end_sequence ();
3515
3516 /* Show the output dies here. This is necessary for SUBREGs
3517 of pseudos since we cannot track their lifetimes correctly;
3518 hard regs shouldn't appear here except as return values.
3519 We never want to emit such a clobber after reload. */
3520 if (x != y
3521 && ! (reload_in_progress || reload_completed)
3522 && need_clobber != 0)
3523 emit_clobber (x);
3524
3525 emit_insn (seq);
3526
3527 return last_insn;
3528 }
3529
3530 /* Low level part of emit_move_insn.
3531 Called just like emit_move_insn, but assumes X and Y
3532 are basically valid. */
3533
3534 rtx_insn *
3535 emit_move_insn_1 (rtx x, rtx y)
3536 {
3537 machine_mode mode = GET_MODE (x);
3538 enum insn_code code;
3539
3540 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3541
3542 code = optab_handler (mov_optab, mode);
3543 if (code != CODE_FOR_nothing)
3544 return emit_insn (GEN_FCN (code) (x, y));
3545
3546 /* Expand complex moves by moving real part and imag part. */
3547 if (COMPLEX_MODE_P (mode))
3548 return emit_move_complex (mode, x, y);
3549
3550 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3551 || ALL_FIXED_POINT_MODE_P (mode))
3552 {
3553 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3554
3555 /* If we can't find an integer mode, use multi words. */
3556 if (result)
3557 return result;
3558 else
3559 return emit_move_multi_word (mode, x, y);
3560 }
3561
3562 if (GET_MODE_CLASS (mode) == MODE_CC)
3563 return emit_move_ccmode (mode, x, y);
3564
3565 /* Try using a move pattern for the corresponding integer mode. This is
3566 only safe when simplify_subreg can convert MODE constants into integer
3567 constants. At present, it can only do this reliably if the value
3568 fits within a HOST_WIDE_INT. */
3569 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3570 {
3571 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3572
3573 if (ret)
3574 {
3575 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3576 return ret;
3577 }
3578 }
3579
3580 return emit_move_multi_word (mode, x, y);
3581 }
3582
3583 /* Generate code to copy Y into X.
3584 Both Y and X must have the same mode, except that
3585 Y can be a constant with VOIDmode.
3586 This mode cannot be BLKmode; use emit_block_move for that.
3587
3588 Return the last instruction emitted. */
3589
3590 rtx_insn *
3591 emit_move_insn (rtx x, rtx y)
3592 {
3593 machine_mode mode = GET_MODE (x);
3594 rtx y_cst = NULL_RTX;
3595 rtx_insn *last_insn;
3596 rtx set;
3597
3598 gcc_assert (mode != BLKmode
3599 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3600
3601 if (CONSTANT_P (y))
3602 {
3603 if (optimize
3604 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3605 && (last_insn = compress_float_constant (x, y)))
3606 return last_insn;
3607
3608 y_cst = y;
3609
3610 if (!targetm.legitimate_constant_p (mode, y))
3611 {
3612 y = force_const_mem (mode, y);
3613
3614 /* If the target's cannot_force_const_mem prevented the spill,
3615 assume that the target's move expanders will also take care
3616 of the non-legitimate constant. */
3617 if (!y)
3618 y = y_cst;
3619 else
3620 y = use_anchored_address (y);
3621 }
3622 }
3623
3624 /* If X or Y are memory references, verify that their addresses are valid
3625 for the machine. */
3626 if (MEM_P (x)
3627 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3628 MEM_ADDR_SPACE (x))
3629 && ! push_operand (x, GET_MODE (x))))
3630 x = validize_mem (x);
3631
3632 if (MEM_P (y)
3633 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3634 MEM_ADDR_SPACE (y)))
3635 y = validize_mem (y);
3636
3637 gcc_assert (mode != BLKmode);
3638
3639 last_insn = emit_move_insn_1 (x, y);
3640
3641 if (y_cst && REG_P (x)
3642 && (set = single_set (last_insn)) != NULL_RTX
3643 && SET_DEST (set) == x
3644 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3645 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3646
3647 return last_insn;
3648 }
3649
3650 /* Generate the body of an instruction to copy Y into X.
3651 It may be a list of insns, if one insn isn't enough. */
3652
3653 rtx_insn *
3654 gen_move_insn (rtx x, rtx y)
3655 {
3656 rtx_insn *seq;
3657
3658 start_sequence ();
3659 emit_move_insn_1 (x, y);
3660 seq = get_insns ();
3661 end_sequence ();
3662 return seq;
3663 }
3664
3665 /* Same as above, but return rtx (used as a callback, which must have
3666 prototype compatible with other functions returning rtx). */
3667
3668 rtx
3669 gen_move_insn_uncast (rtx x, rtx y)
3670 {
3671 return gen_move_insn (x, y);
3672 }
3673
3674 /* If Y is representable exactly in a narrower mode, and the target can
3675 perform the extension directly from constant or memory, then emit the
3676 move as an extension. */
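/* For example, if a DFmode constant is exactly representable in SFmode
   and the target has an extendsfdf2 pattern that accepts a constant or
   constant-pool operand, loading the SFmode value and extending it is
   often cheaper than loading the full DFmode constant; the loop below
   checks this with set_src_cost before committing.  */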
3677
3678 static rtx_insn *
3679 compress_float_constant (rtx x, rtx y)
3680 {
3681 machine_mode dstmode = GET_MODE (x);
3682 machine_mode orig_srcmode = GET_MODE (y);
3683 machine_mode srcmode;
3684 REAL_VALUE_TYPE r;
3685 int oldcost, newcost;
3686 bool speed = optimize_insn_for_speed_p ();
3687
3688 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3689
3690 if (targetm.legitimate_constant_p (dstmode, y))
3691 oldcost = set_src_cost (y, speed);
3692 else
3693 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3694
3695 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3696 srcmode != orig_srcmode;
3697 srcmode = GET_MODE_WIDER_MODE (srcmode))
3698 {
3699 enum insn_code ic;
3700 rtx trunc_y;
3701 rtx_insn *last_insn;
3702
3703 /* Skip if the target can't extend this way. */
3704 ic = can_extend_p (dstmode, srcmode, 0);
3705 if (ic == CODE_FOR_nothing)
3706 continue;
3707
3708 /* Skip if the narrowed value isn't exact. */
3709 if (! exact_real_truncate (srcmode, &r))
3710 continue;
3711
3712 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3713
3714 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3715 {
3716 /* Skip if the target needs extra instructions to perform
3717 the extension. */
3718 if (!insn_operand_matches (ic, 1, trunc_y))
3719 continue;
3720 /* This is valid, but may not be cheaper than the original. */
3721 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3722 speed);
3723 if (oldcost < newcost)
3724 continue;
3725 }
3726 else if (float_extend_from_mem[dstmode][srcmode])
3727 {
3728 trunc_y = force_const_mem (srcmode, trunc_y);
3729 /* This is valid, but may not be cheaper than the original. */
3730 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3731 speed);
3732 if (oldcost < newcost)
3733 continue;
3734 trunc_y = validize_mem (trunc_y);
3735 }
3736 else
3737 continue;
3738
3739 /* For CSE's benefit, force the compressed constant pool entry
3740 into a new pseudo. This constant may be used in different modes,
3741 and if not, combine will put things back together for us. */
3742 trunc_y = force_reg (srcmode, trunc_y);
3743
3744 /* If x is a hard register, perform the extension into a pseudo,
3745 so that e.g. stack realignment code is aware of it. */
3746 rtx target = x;
3747 if (REG_P (x) && HARD_REGISTER_P (x))
3748 target = gen_reg_rtx (dstmode);
3749
3750 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3751 last_insn = get_last_insn ();
3752
3753 if (REG_P (target))
3754 set_unique_reg_note (last_insn, REG_EQUAL, y);
3755
3756 if (target != x)
3757 return emit_move_insn (x, target);
3758 return last_insn;
3759 }
3760
3761 return NULL;
3762 }
3763 \f
3764 /* Pushing data onto the stack. */
3765
3766 /* Push a block of length SIZE (perhaps variable)
3767 and return an rtx to address the beginning of the block.
3768 The value may be virtual_outgoing_args_rtx.
3769
3770 EXTRA is the number of bytes of padding to push in addition to SIZE.
3771 BELOW nonzero means this padding comes at low addresses;
3772 otherwise, the padding comes at high addresses. */
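/* For example, on a STACK_GROWS_DOWNWARD target, push_block (GEN_INT (16),
   0, 0) simply allocates 16 bytes by adjusting the stack pointer and
   returns an address based on virtual_outgoing_args_rtx.  */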
3773
3774 rtx
3775 push_block (rtx size, int extra, int below)
3776 {
3777 rtx temp;
3778
3779 size = convert_modes (Pmode, ptr_mode, size, 1);
3780 if (CONSTANT_P (size))
3781 anti_adjust_stack (plus_constant (Pmode, size, extra));
3782 else if (REG_P (size) && extra == 0)
3783 anti_adjust_stack (size);
3784 else
3785 {
3786 temp = copy_to_mode_reg (Pmode, size);
3787 if (extra != 0)
3788 temp = expand_binop (Pmode, add_optab, temp,
3789 gen_int_mode (extra, Pmode),
3790 temp, 0, OPTAB_LIB_WIDEN);
3791 anti_adjust_stack (temp);
3792 }
3793
3794 if (STACK_GROWS_DOWNWARD)
3795 {
3796 temp = virtual_outgoing_args_rtx;
3797 if (extra != 0 && below)
3798 temp = plus_constant (Pmode, temp, extra);
3799 }
3800 else
3801 {
3802 if (CONST_INT_P (size))
3803 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3804 -INTVAL (size) - (below ? 0 : extra));
3805 else if (extra != 0 && !below)
3806 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3807 negate_rtx (Pmode, plus_constant (Pmode, size,
3808 extra)));
3809 else
3810 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3811 negate_rtx (Pmode, size));
3812 }
3813
3814 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3815 }
3816
3817 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
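/* For instance, given (mem:SI (pre_dec (reg sp))) this returns (reg sp);
   for a MEM whose address is not an auto-inc, or for a non-MEM, it
   returns NULL.  */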
3818
3819 static rtx
3820 mem_autoinc_base (rtx mem)
3821 {
3822 if (MEM_P (mem))
3823 {
3824 rtx addr = XEXP (mem, 0);
3825 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3826 return XEXP (addr, 0);
3827 }
3828 return NULL;
3829 }
3830
3831 /* A utility routine used here, in reload, and in try_split. The insns
3832 after PREV up to and including LAST are known to adjust the stack,
3833 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3834 placing notes as appropriate. PREV may be NULL, indicating the
3835 entire insn sequence prior to LAST should be scanned.
3836
3837 The set of allowed stack pointer modifications is small:
3838 (1) One or more auto-inc style memory references (aka pushes),
3839 (2) One or more addition/subtraction with the SP as destination,
3840 (3) A single move insn with the SP as destination,
3841 (4) A call_pop insn,
3842 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3843
3844 Insns in the sequence that do not modify the SP are ignored,
3845 except for noreturn calls.
3846
3847 The return value is the amount of adjustment that can be trivially
3848 verified, via immediate operand or auto-inc. If the adjustment
3849 cannot be trivially extracted, the return value is INT_MIN. */
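/* For example, assuming a 32-bit target whose stack grows downward, a
   push such as (set (mem:SI (pre_dec (reg sp))) (reg:SI 0)) is seen as
   an adjustment of -4, while (set (reg sp) (plus (reg sp) (const_int -16)))
   is seen as -16.  */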
3850
3851 HOST_WIDE_INT
3852 find_args_size_adjust (rtx_insn *insn)
3853 {
3854 rtx dest, set, pat;
3855 int i;
3856
3857 pat = PATTERN (insn);
3858 set = NULL;
3859
3860 /* Look for a call_pop pattern. */
3861 if (CALL_P (insn))
3862 {
3863 /* We have to allow non-call_pop patterns for the case
3864 of emit_single_push_insn of a TLS address. */
3865 if (GET_CODE (pat) != PARALLEL)
3866 return 0;
3867
3868 /* All call_pop have a stack pointer adjust in the parallel.
3869 The call itself is always first, and the stack adjust is
3870 usually last, so search from the end. */
3871 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3872 {
3873 set = XVECEXP (pat, 0, i);
3874 if (GET_CODE (set) != SET)
3875 continue;
3876 dest = SET_DEST (set);
3877 if (dest == stack_pointer_rtx)
3878 break;
3879 }
3880 /* We'd better have found the stack pointer adjust. */
3881 if (i == 0)
3882 return 0;
3883 /* Fall through to process the extracted SET and DEST
3884 as if it was a standalone insn. */
3885 }
3886 else if (GET_CODE (pat) == SET)
3887 set = pat;
3888 else if ((set = single_set (insn)) != NULL)
3889 ;
3890 else if (GET_CODE (pat) == PARALLEL)
3891 {
3892 /* ??? Some older ports use a parallel with a stack adjust
3893 and a store for a PUSH_ROUNDING pattern, rather than a
3894 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3895 /* ??? See h8300 and m68k, pushqi1. */
3896 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3897 {
3898 set = XVECEXP (pat, 0, i);
3899 if (GET_CODE (set) != SET)
3900 continue;
3901 dest = SET_DEST (set);
3902 if (dest == stack_pointer_rtx)
3903 break;
3904
3905 /* We do not expect an auto-inc of the sp in the parallel. */
3906 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3907 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3908 != stack_pointer_rtx);
3909 }
3910 if (i < 0)
3911 return 0;
3912 }
3913 else
3914 return 0;
3915
3916 dest = SET_DEST (set);
3917
3918 /* Look for direct modifications of the stack pointer. */
3919 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3920 {
3921 /* Look for a trivial adjustment, otherwise assume nothing. */
3922 /* Note that the SPU restore_stack_block pattern refers to
3923 the stack pointer in V4SImode. Consider that non-trivial. */
3924 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3925 && GET_CODE (SET_SRC (set)) == PLUS
3926 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3927 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3928 return INTVAL (XEXP (SET_SRC (set), 1));
3929 /* ??? Reload can generate no-op moves, which will be cleaned
3930 up later. Recognize it and continue searching. */
3931 else if (rtx_equal_p (dest, SET_SRC (set)))
3932 return 0;
3933 else
3934 return HOST_WIDE_INT_MIN;
3935 }
3936 else
3937 {
3938 rtx mem, addr;
3939
3940 /* Otherwise only think about autoinc patterns. */
3941 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3942 {
3943 mem = dest;
3944 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3945 != stack_pointer_rtx);
3946 }
3947 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3948 mem = SET_SRC (set);
3949 else
3950 return 0;
3951
3952 addr = XEXP (mem, 0);
3953 switch (GET_CODE (addr))
3954 {
3955 case PRE_INC:
3956 case POST_INC:
3957 return GET_MODE_SIZE (GET_MODE (mem));
3958 case PRE_DEC:
3959 case POST_DEC:
3960 return -GET_MODE_SIZE (GET_MODE (mem));
3961 case PRE_MODIFY:
3962 case POST_MODIFY:
3963 addr = XEXP (addr, 1);
3964 gcc_assert (GET_CODE (addr) == PLUS);
3965 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3966 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3967 return INTVAL (XEXP (addr, 1));
3968 default:
3969 gcc_unreachable ();
3970 }
3971 }
3972 }
3973
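/* Walk backward from LAST to PREV (exclusive), attaching REG_ARGS_SIZE
   notes as described in the comment before find_args_size_adjust, given
   that the args size after LAST is END_ARGS_SIZE.  Return the args size
   before the first scanned insn, or INT_MIN if some adjustment could not
   be determined.  */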
3974 int
3975 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3976 {
3977 int args_size = end_args_size;
3978 bool saw_unknown = false;
3979 rtx_insn *insn;
3980
3981 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3982 {
3983 HOST_WIDE_INT this_delta;
3984
3985 if (!NONDEBUG_INSN_P (insn))
3986 continue;
3987
3988 this_delta = find_args_size_adjust (insn);
3989 if (this_delta == 0)
3990 {
3991 if (!CALL_P (insn)
3992 || ACCUMULATE_OUTGOING_ARGS
3993 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3994 continue;
3995 }
3996
3997 gcc_assert (!saw_unknown);
3998 if (this_delta == HOST_WIDE_INT_MIN)
3999 saw_unknown = true;
4000
4001 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
4002 if (STACK_GROWS_DOWNWARD)
4003 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
4004
4005 args_size -= this_delta;
4006 }
4007
4008 return saw_unknown ? INT_MIN : args_size;
4009 }
4010
4011 #ifdef PUSH_ROUNDING
4012 /* Emit single push insn. */
4013
4014 static void
4015 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4016 {
4017 rtx dest_addr;
4018 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4019 rtx dest;
4020 enum insn_code icode;
4021
4022 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4023 /* If there is a push pattern, use it.  Otherwise try the old way of
4024 handing a MEM representing the push operation to the move expander. */
4025 icode = optab_handler (push_optab, mode);
4026 if (icode != CODE_FOR_nothing)
4027 {
4028 struct expand_operand ops[1];
4029
4030 create_input_operand (&ops[0], x, mode);
4031 if (maybe_expand_insn (icode, 1, ops))
4032 return;
4033 }
4034 if (GET_MODE_SIZE (mode) == rounded_size)
4035 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4036 /* If we are to pad downward, adjust the stack pointer first and
4037 then store X into the stack location using an offset. This is
4038 because emit_move_insn does not know how to pad; it does not have
4039 access to type. */
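/* For example, when a 2-byte value is rounded up to 4 bytes by
   PUSH_ROUNDING on a downward-growing, pre-decrement stack, the stack
   pointer is decremented by 4 and the value is then stored at sp + 2,
   leaving the two padding bytes at the lower addresses.  */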
4040 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4041 {
4042 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4043 HOST_WIDE_INT offset;
4044
4045 emit_move_insn (stack_pointer_rtx,
4046 expand_binop (Pmode,
4047 STACK_GROWS_DOWNWARD ? sub_optab
4048 : add_optab,
4049 stack_pointer_rtx,
4050 gen_int_mode (rounded_size, Pmode),
4051 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4052
4053 offset = (HOST_WIDE_INT) padding_size;
4054 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
4055 /* We have already decremented the stack pointer, so get the
4056 previous value. */
4057 offset += (HOST_WIDE_INT) rounded_size;
4058
4059 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
4060 /* We have already incremented the stack pointer, so get the
4061 previous value. */
4062 offset -= (HOST_WIDE_INT) rounded_size;
4063
4064 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4065 gen_int_mode (offset, Pmode));
4066 }
4067 else
4068 {
4069 if (STACK_GROWS_DOWNWARD)
4070 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4071 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4072 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4073 Pmode));
4074 else
4075 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4076 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4077 gen_int_mode (rounded_size, Pmode));
4078
4079 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4080 }
4081
4082 dest = gen_rtx_MEM (mode, dest_addr);
4083
4084 if (type != 0)
4085 {
4086 set_mem_attributes (dest, type, 1);
4087
4088 if (cfun->tail_call_marked)
4089 /* Function incoming arguments may overlap with sibling call
4090 outgoing arguments and we cannot allow reordering of reads
4091 from function arguments with stores to outgoing arguments
4092 of sibling calls. */
4093 set_mem_alias_set (dest, 0);
4094 }
4095 emit_move_insn (dest, x);
4096 }
4097
4098 /* Emit and annotate a single push insn. */
4099
4100 static void
4101 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4102 {
4103 int delta, old_delta = stack_pointer_delta;
4104 rtx_insn *prev = get_last_insn ();
4105 rtx_insn *last;
4106
4107 emit_single_push_insn_1 (mode, x, type);
4108
4109 last = get_last_insn ();
4110
4111 /* Notice the common case where we emitted exactly one insn. */
4112 if (PREV_INSN (last) == prev)
4113 {
4114 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4115 return;
4116 }
4117
4118 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4119 gcc_assert (delta == INT_MIN || delta == old_delta);
4120 }
4121 #endif
4122
4123 /* Generate code to push X onto the stack, assuming it has mode MODE and
4124 type TYPE.
4125 MODE is redundant except when X is a CONST_INT (since they don't
4126 carry mode info).
4127 SIZE is an rtx for the size of data to be copied (in bytes),
4128 needed only if X is BLKmode.
4129
4130 ALIGN (in bits) is maximum alignment we can assume.
4131
4132 If PARTIAL and REG are both nonzero, then copy that many of the first
4133 bytes of X into registers starting with REG, and push the rest of X.
4134 The amount of space pushed is decreased by PARTIAL bytes.
4135 REG must be a hard register in this case.
4136 If REG is zero but PARTIAL is not, take all other actions for an
4137 argument partially in registers, but do not actually load any
4138 registers.
4139
4140 EXTRA is the amount in bytes of extra space to leave next to this arg.
4141 This is ignored if an argument block has already been allocated.
4142
4143 On a machine that lacks real push insns, ARGS_ADDR is the address of
4144 the bottom of the argument block for this call. We use indexing off there
4145 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4146 argument block has not been preallocated.
4147
4148 ARGS_SO_FAR is the size of args previously pushed for this call.
4149
4150 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4151 for arguments passed in registers. If nonzero, it will be the number
4152 of bytes required. */
4153
4154 void
4155 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4156 unsigned int align, int partial, rtx reg, int extra,
4157 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4158 rtx alignment_pad)
4159 {
4160 rtx xinner;
4161 enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
4162
4163 /* Decide where to pad the argument: `downward' for below,
4164 `upward' for above, or `none' for don't pad it.
4165 Default is below for small data on big-endian machines; else above. */
4166 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4167
4168 /* Invert direction if stack is post-decrement.
4169 FIXME: why? */
4170 if (STACK_PUSH_CODE == POST_DEC)
4171 if (where_pad != none)
4172 where_pad = (where_pad == downward ? upward : downward);
4173
4174 xinner = x;
4175
4176 if (mode == BLKmode
4177 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4178 {
4179 /* Copy a block into the stack, entirely or partially. */
4180
4181 rtx temp;
4182 int used;
4183 int offset;
4184 int skip;
4185
4186 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4187 used = partial - offset;
4188
4189 if (mode != BLKmode)
4190 {
4191 /* A value is to be stored in an insufficiently aligned
4192 stack slot; copy via a suitably aligned slot if
4193 necessary. */
4194 size = GEN_INT (GET_MODE_SIZE (mode));
4195 if (!MEM_P (xinner))
4196 {
4197 temp = assign_temp (type, 1, 1);
4198 emit_move_insn (temp, xinner);
4199 xinner = temp;
4200 }
4201 }
4202
4203 gcc_assert (size);
4204
4205 /* USED is now the # of bytes we need not copy to the stack
4206 because registers will take care of them. */
4207
4208 if (partial != 0)
4209 xinner = adjust_address (xinner, BLKmode, used);
4210
4211 /* If the partial register-part of the arg counts in its stack size,
4212 skip the part of stack space corresponding to the registers.
4213 Otherwise, start copying to the beginning of the stack space,
4214 by setting SKIP to 0. */
4215 skip = (reg_parm_stack_space == 0) ? 0 : used;
4216
4217 #ifdef PUSH_ROUNDING
4218 /* Do it with several push insns if that doesn't take lots of insns
4219 and if there is no difficulty with push insns that skip bytes
4220 on the stack for alignment purposes. */
4221 if (args_addr == 0
4222 && PUSH_ARGS
4223 && CONST_INT_P (size)
4224 && skip == 0
4225 && MEM_ALIGN (xinner) >= align
4226 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4227 /* Here we avoid the case of a structure whose weak alignment
4228 forces many pushes of a small amount of data,
4229 and such small pushes do rounding that causes trouble. */
4230 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4231 || align >= BIGGEST_ALIGNMENT
4232 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4233 == (align / BITS_PER_UNIT)))
4234 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4235 {
4236 /* Push padding now if padding above and stack grows down,
4237 or if padding below and stack grows up.
4238 But if space already allocated, this has already been done. */
4239 if (extra && args_addr == 0
4240 && where_pad != none && where_pad != stack_direction)
4241 anti_adjust_stack (GEN_INT (extra));
4242
4243 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4244 }
4245 else
4246 #endif /* PUSH_ROUNDING */
4247 {
4248 rtx target;
4249
4250 /* Otherwise make space on the stack and copy the data
4251 to the address of that space. */
4252
4253 /* Deduct words put into registers from the size we must copy. */
4254 if (partial != 0)
4255 {
4256 if (CONST_INT_P (size))
4257 size = GEN_INT (INTVAL (size) - used);
4258 else
4259 size = expand_binop (GET_MODE (size), sub_optab, size,
4260 gen_int_mode (used, GET_MODE (size)),
4261 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4262 }
4263
4264 /* Get the address of the stack space.
4265 In this case, we do not deal with EXTRA separately.
4266 A single stack adjust will do. */
4267 if (! args_addr)
4268 {
4269 temp = push_block (size, extra, where_pad == downward);
4270 extra = 0;
4271 }
4272 else if (CONST_INT_P (args_so_far))
4273 temp = memory_address (BLKmode,
4274 plus_constant (Pmode, args_addr,
4275 skip + INTVAL (args_so_far)));
4276 else
4277 temp = memory_address (BLKmode,
4278 plus_constant (Pmode,
4279 gen_rtx_PLUS (Pmode,
4280 args_addr,
4281 args_so_far),
4282 skip));
4283
4284 if (!ACCUMULATE_OUTGOING_ARGS)
4285 {
4286 /* If the source is referenced relative to the stack pointer,
4287 copy it to another register to stabilize it. We do not need
4288 to do this if we know that we won't be changing sp. */
4289
4290 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4291 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4292 temp = copy_to_reg (temp);
4293 }
4294
4295 target = gen_rtx_MEM (BLKmode, temp);
4296
4297 /* We do *not* set_mem_attributes here, because incoming arguments
4298 may overlap with sibling call outgoing arguments and we cannot
4299 allow reordering of reads from function arguments with stores
4300 to outgoing arguments of sibling calls. We do, however, want
4301 to record the alignment of the stack slot. */
4302 /* ALIGN may well be better aligned than TYPE, e.g. due to
4303 PARM_BOUNDARY. Assume the caller isn't lying. */
4304 set_mem_align (target, align);
4305
4306 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4307 }
4308 }
4309 else if (partial > 0)
4310 {
4311 /* Scalar partly in registers. */
4312
4313 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4314 int i;
4315 int not_stack;
4316 /* # bytes of start of argument
4317 that we must make space for but need not store. */
4318 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4319 int args_offset = INTVAL (args_so_far);
4320 int skip;
4321
4322 /* Push padding now if padding above and stack grows down,
4323 or if padding below and stack grows up.
4324 But if space already allocated, this has already been done. */
4325 if (extra && args_addr == 0
4326 && where_pad != none && where_pad != stack_direction)
4327 anti_adjust_stack (GEN_INT (extra));
4328
4329 /* If we make space by pushing it, we might as well push
4330 the real data. Otherwise, we can leave OFFSET nonzero
4331 and leave the space uninitialized. */
4332 if (args_addr == 0)
4333 offset = 0;
4334
4335 /* Now NOT_STACK gets the number of words that we don't need to
4336 allocate on the stack. Convert OFFSET to words too. */
4337 not_stack = (partial - offset) / UNITS_PER_WORD;
4338 offset /= UNITS_PER_WORD;
4339
4340 /* If the partial register-part of the arg counts in its stack size,
4341 skip the part of stack space corresponding to the registers.
4342 Otherwise, start copying to the beginning of the stack space,
4343 by setting SKIP to 0. */
4344 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4345
4346 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4347 x = validize_mem (force_const_mem (mode, x));
4348
4349 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4350 SUBREGs of such registers are not allowed. */
4351 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4352 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4353 x = copy_to_reg (x);
4354
4355 /* Loop over all the words allocated on the stack for this arg. */
4356 /* We can do it by words, because any scalar bigger than a word
4357 has a size that is a multiple of a word. */
4358 for (i = size - 1; i >= not_stack; i--)
4359 if (i >= not_stack + offset)
4360 emit_push_insn (operand_subword_force (x, i, mode),
4361 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4362 0, args_addr,
4363 GEN_INT (args_offset + ((i - not_stack + skip)
4364 * UNITS_PER_WORD)),
4365 reg_parm_stack_space, alignment_pad);
4366 }
4367 else
4368 {
4369 rtx addr;
4370 rtx dest;
4371
4372 /* Push padding now if padding above and stack grows down,
4373 or if padding below and stack grows up.
4374 But if space already allocated, this has already been done. */
4375 if (extra && args_addr == 0
4376 && where_pad != none && where_pad != stack_direction)
4377 anti_adjust_stack (GEN_INT (extra));
4378
4379 #ifdef PUSH_ROUNDING
4380 if (args_addr == 0 && PUSH_ARGS)
4381 emit_single_push_insn (mode, x, type);
4382 else
4383 #endif
4384 {
4385 if (CONST_INT_P (args_so_far))
4386 addr
4387 = memory_address (mode,
4388 plus_constant (Pmode, args_addr,
4389 INTVAL (args_so_far)));
4390 else
4391 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4392 args_so_far));
4393 dest = gen_rtx_MEM (mode, addr);
4394
4395 /* We do *not* set_mem_attributes here, because incoming arguments
4396 may overlap with sibling call outgoing arguments and we cannot
4397 allow reordering of reads from function arguments with stores
4398 to outgoing arguments of sibling calls. We do, however, want
4399 to record the alignment of the stack slot. */
4400 /* ALIGN may well be better aligned than TYPE, e.g. due to
4401 PARM_BOUNDARY. Assume the caller isn't lying. */
4402 set_mem_align (dest, align);
4403
4404 emit_move_insn (dest, x);
4405 }
4406 }
4407
4408 /* If part should go in registers, copy that part
4409 into the appropriate registers. Do this now, at the end,
4410 since mem-to-mem copies above may do function calls. */
4411 if (partial > 0 && reg != 0)
4412 {
4413 /* Handle calls that pass values in multiple non-contiguous locations.
4414 The Irix 6 ABI has examples of this. */
4415 if (GET_CODE (reg) == PARALLEL)
4416 emit_group_load (reg, x, type, -1);
4417 else
4418 {
4419 gcc_assert (partial % UNITS_PER_WORD == 0);
4420 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4421 }
4422 }
4423
4424 if (extra && args_addr == 0 && where_pad == stack_direction)
4425 anti_adjust_stack (GEN_INT (extra));
4426
4427 if (alignment_pad && args_addr == 0)
4428 anti_adjust_stack (alignment_pad);
4429 }
4430 \f
4431 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4432 operations. */
4433
4434 static rtx
4435 get_subtarget (rtx x)
4436 {
4437 return (optimize
4438 || x == 0
4439 /* Only registers can be subtargets. */
4440 || !REG_P (x)
4441 /* Don't use hard regs to avoid extending their life. */
4442 || REGNO (x) < FIRST_PSEUDO_REGISTER
4443 ? 0 : x);
4444 }
4445
4446 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4447 FIELD is a bitfield. Returns true if the optimization was successful,
4448 and there's nothing else to do. */
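/* For instance, for a store like s.b |= 3 where b is a bitfield, the
   constant can be masked, shifted into place and IORed directly into
   the word containing the field, instead of extracting, modifying and
   reinserting the field.  */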
4449
4450 static bool
4451 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4452 unsigned HOST_WIDE_INT bitpos,
4453 unsigned HOST_WIDE_INT bitregion_start,
4454 unsigned HOST_WIDE_INT bitregion_end,
4455 machine_mode mode1, rtx str_rtx,
4456 tree to, tree src)
4457 {
4458 machine_mode str_mode = GET_MODE (str_rtx);
4459 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4460 tree op0, op1;
4461 rtx value, result;
4462 optab binop;
4463 gimple srcstmt;
4464 enum tree_code code;
4465
4466 if (mode1 != VOIDmode
4467 || bitsize >= BITS_PER_WORD
4468 || str_bitsize > BITS_PER_WORD
4469 || TREE_SIDE_EFFECTS (to)
4470 || TREE_THIS_VOLATILE (to))
4471 return false;
4472
4473 STRIP_NOPS (src);
4474 if (TREE_CODE (src) != SSA_NAME)
4475 return false;
4476 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4477 return false;
4478
4479 srcstmt = get_gimple_for_ssa_name (src);
4480 if (!srcstmt
4481 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4482 return false;
4483
4484 code = gimple_assign_rhs_code (srcstmt);
4485
4486 op0 = gimple_assign_rhs1 (srcstmt);
4487
4488 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4489 to find its initialization. Hopefully the initialization will
4490 be from a bitfield load. */
4491 if (TREE_CODE (op0) == SSA_NAME)
4492 {
4493 gimple op0stmt = get_gimple_for_ssa_name (op0);
4494
4495 /* We want to eventually have OP0 be the same as TO, which
4496 should be a bitfield. */
4497 if (!op0stmt
4498 || !is_gimple_assign (op0stmt)
4499 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4500 return false;
4501 op0 = gimple_assign_rhs1 (op0stmt);
4502 }
4503
4504 op1 = gimple_assign_rhs2 (srcstmt);
4505
4506 if (!operand_equal_p (to, op0, 0))
4507 return false;
4508
4509 if (MEM_P (str_rtx))
4510 {
4511 unsigned HOST_WIDE_INT offset1;
4512
4513 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4514 str_mode = word_mode;
4515 str_mode = get_best_mode (bitsize, bitpos,
4516 bitregion_start, bitregion_end,
4517 MEM_ALIGN (str_rtx), str_mode, 0);
4518 if (str_mode == VOIDmode)
4519 return false;
4520 str_bitsize = GET_MODE_BITSIZE (str_mode);
4521
4522 offset1 = bitpos;
4523 bitpos %= str_bitsize;
4524 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4525 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4526 }
4527 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4528 return false;
4529
4530 /* If the bit field covers the whole REG/MEM, store_field
4531 will likely generate better code. */
4532 if (bitsize >= str_bitsize)
4533 return false;
4534
4535 /* We can't handle fields split across multiple entities. */
4536 if (bitpos + bitsize > str_bitsize)
4537 return false;
4538
4539 if (BYTES_BIG_ENDIAN)
4540 bitpos = str_bitsize - bitpos - bitsize;
4541
4542 switch (code)
4543 {
4544 case PLUS_EXPR:
4545 case MINUS_EXPR:
4546 /* For now, just optimize the case of the topmost bitfield,
4547 where we don't need to do any masking, and also
4548 1-bit bitfields, where xor can be used.
4549 We might win by one instruction for the other bitfields
4550 too if insv/extv instructions aren't used, so that
4551 can be added later. */
4552 if (bitpos + bitsize != str_bitsize
4553 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4554 break;
4555
4556 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4557 value = convert_modes (str_mode,
4558 TYPE_MODE (TREE_TYPE (op1)), value,
4559 TYPE_UNSIGNED (TREE_TYPE (op1)));
4560
4561 /* We may be accessing data outside the field, which means
4562 we can alias adjacent data. */
4563 if (MEM_P (str_rtx))
4564 {
4565 str_rtx = shallow_copy_rtx (str_rtx);
4566 set_mem_alias_set (str_rtx, 0);
4567 set_mem_expr (str_rtx, 0);
4568 }
4569
4570 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4571 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4572 {
4573 value = expand_and (str_mode, value, const1_rtx, NULL);
4574 binop = xor_optab;
4575 }
4576 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4577 result = expand_binop (str_mode, binop, str_rtx,
4578 value, str_rtx, 1, OPTAB_WIDEN);
4579 if (result != str_rtx)
4580 emit_move_insn (str_rtx, result);
4581 return true;
4582
4583 case BIT_IOR_EXPR:
4584 case BIT_XOR_EXPR:
4585 if (TREE_CODE (op1) != INTEGER_CST)
4586 break;
4587 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4588 value = convert_modes (str_mode,
4589 TYPE_MODE (TREE_TYPE (op1)), value,
4590 TYPE_UNSIGNED (TREE_TYPE (op1)));
4591
4592 /* We may be accessing data outside the field, which means
4593 we can alias adjacent data. */
4594 if (MEM_P (str_rtx))
4595 {
4596 str_rtx = shallow_copy_rtx (str_rtx);
4597 set_mem_alias_set (str_rtx, 0);
4598 set_mem_expr (str_rtx, 0);
4599 }
4600
4601 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4602 if (bitpos + bitsize != str_bitsize)
4603 {
4604 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4605 str_mode);
4606 value = expand_and (str_mode, value, mask, NULL_RTX);
4607 }
4608 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4609 result = expand_binop (str_mode, binop, str_rtx,
4610 value, str_rtx, 1, OPTAB_WIDEN);
4611 if (result != str_rtx)
4612 emit_move_insn (str_rtx, result);
4613 return true;
4614
4615 default:
4616 break;
4617 }
4618
4619 return false;
4620 }
4621
4622 /* In the C++ memory model, consecutive bit fields in a structure are
4623 considered one memory location.
4624
4625 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4626 returns the bit range of consecutive bits in which this COMPONENT_REF
4627 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4628 and *OFFSET may be adjusted in the process.
4629
4630 If the access does not need to be restricted, 0 is returned in both
4631 *BITSTART and *BITEND. */
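/* For example, given struct S { int a : 4; int b : 4; char c; }, an
   assignment to a may touch the storage shared with b (they form one
   memory location) but must not touch c; the returned range is roughly
   the extent of the DECL_BIT_FIELD_REPRESENTATIVE covering a and b.  */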
4632
4633 static void
4634 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4635 unsigned HOST_WIDE_INT *bitend,
4636 tree exp,
4637 HOST_WIDE_INT *bitpos,
4638 tree *offset)
4639 {
4640 HOST_WIDE_INT bitoffset;
4641 tree field, repr;
4642
4643 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4644
4645 field = TREE_OPERAND (exp, 1);
4646 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4647 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4648 need to limit the range we can access. */
4649 if (!repr)
4650 {
4651 *bitstart = *bitend = 0;
4652 return;
4653 }
4654
4655 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4656 part of a larger bit field, then the representative does not serve any
4657 useful purpose. This can occur in Ada. */
4658 if (handled_component_p (TREE_OPERAND (exp, 0)))
4659 {
4660 machine_mode rmode;
4661 HOST_WIDE_INT rbitsize, rbitpos;
4662 tree roffset;
4663 int unsignedp;
4664 int volatilep = 0;
4665 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4666 &roffset, &rmode, &unsignedp, &volatilep, false);
4667 if ((rbitpos % BITS_PER_UNIT) != 0)
4668 {
4669 *bitstart = *bitend = 0;
4670 return;
4671 }
4672 }
4673
4674 /* Compute the adjustment to bitpos from the offset of the field
4675 relative to the representative. DECL_FIELD_OFFSET of field and
4676 repr are the same by construction if they are not constants,
4677 see finish_bitfield_layout. */
4678 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4679 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4680 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4681 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4682 else
4683 bitoffset = 0;
4684 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4685 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4686
4687 /* If the adjustment is larger than bitpos, we would have a negative bit
4688 position for the lower bound and this may wreak havoc later. Adjust
4689 offset and bitpos to make the lower bound non-negative in that case. */
4690 if (bitoffset > *bitpos)
4691 {
4692 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4693 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4694
4695 *bitpos += adjust;
4696 if (*offset == NULL_TREE)
4697 *offset = size_int (-adjust / BITS_PER_UNIT);
4698 else
4699 *offset
4700 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4701 *bitstart = 0;
4702 }
4703 else
4704 *bitstart = *bitpos - bitoffset;
4705
4706 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4707 }
4708
4709 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4710 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4711 DECL_RTL was not set yet, return NORTL. */
4712
4713 static inline bool
4714 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4715 {
4716 if (TREE_CODE (addr) != ADDR_EXPR)
4717 return false;
4718
4719 tree base = TREE_OPERAND (addr, 0);
4720
4721 if (!DECL_P (base)
4722 || TREE_ADDRESSABLE (base)
4723 || DECL_MODE (base) == BLKmode)
4724 return false;
4725
4726 if (!DECL_RTL_SET_P (base))
4727 return nortl;
4728
4729 return (!MEM_P (DECL_RTL (base)));
4730 }
4731
4732 /* Returns true if the MEM_REF REF refers to an object that does not
4733 reside in memory and has non-BLKmode. */
4734
4735 static inline bool
4736 mem_ref_refers_to_non_mem_p (tree ref)
4737 {
4738 tree base = TREE_OPERAND (ref, 0);
4739 return addr_expr_of_non_mem_decl_p_1 (base, false);
4740 }
4741
4742 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4743 is true, try generating a nontemporal store. */
4744
4745 void
4746 expand_assignment (tree to, tree from, bool nontemporal)
4747 {
4748 rtx to_rtx = 0;
4749 rtx result;
4750 machine_mode mode;
4751 unsigned int align;
4752 enum insn_code icode;
4753
4754 /* Don't crash if the lhs of the assignment was erroneous. */
4755 if (TREE_CODE (to) == ERROR_MARK)
4756 {
4757 expand_normal (from);
4758 return;
4759 }
4760
4761 /* Optimize away no-op moves without side-effects. */
4762 if (operand_equal_p (to, from, 0))
4763 return;
4764
4765 /* Handle misaligned stores. */
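/* This arises, for example, when storing through a pointer to a member
   of a packed structure on a strict-alignment target; the store is then
   done with the movmisalign pattern if available, or by store_bit_field
   otherwise.  */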
4766 mode = TYPE_MODE (TREE_TYPE (to));
4767 if ((TREE_CODE (to) == MEM_REF
4768 || TREE_CODE (to) == TARGET_MEM_REF)
4769 && mode != BLKmode
4770 && !mem_ref_refers_to_non_mem_p (to)
4771 && ((align = get_object_alignment (to))
4772 < GET_MODE_ALIGNMENT (mode))
4773 && (((icode = optab_handler (movmisalign_optab, mode))
4774 != CODE_FOR_nothing)
4775 || SLOW_UNALIGNED_ACCESS (mode, align)))
4776 {
4777 rtx reg, mem;
4778
4779 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4780 reg = force_not_mem (reg);
4781 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4782
4783 if (icode != CODE_FOR_nothing)
4784 {
4785 struct expand_operand ops[2];
4786
4787 create_fixed_operand (&ops[0], mem);
4788 create_input_operand (&ops[1], reg, mode);
4789 /* The movmisalign<mode> pattern must not fail; otherwise the
4790 assignment would be silently omitted. */
4791 expand_insn (icode, 2, ops);
4792 }
4793 else
4794 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4795 return;
4796 }
4797
4798 /* Assignment of a structure component needs special treatment
4799 if the structure component's rtx is not simply a MEM.
4800 Assignment of an array element at a constant index, and assignment of
4801 an array element in an unaligned packed structure field, has the same
4802 problem. Same for (partially) storing into a non-memory object. */
4803 if (handled_component_p (to)
4804 || (TREE_CODE (to) == MEM_REF
4805 && mem_ref_refers_to_non_mem_p (to))
4806 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4807 {
4808 machine_mode mode1;
4809 HOST_WIDE_INT bitsize, bitpos;
4810 unsigned HOST_WIDE_INT bitregion_start = 0;
4811 unsigned HOST_WIDE_INT bitregion_end = 0;
4812 tree offset;
4813 int unsignedp;
4814 int volatilep = 0;
4815 tree tem;
4816
4817 push_temp_slots ();
4818 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4819 &unsignedp, &volatilep, true);
4820
4821 /* Make sure bitpos is not negative, it can wreak havoc later. */
4822 if (bitpos < 0)
4823 {
4824 gcc_assert (offset == NULL_TREE);
4825 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4826 ? 3 : exact_log2 (BITS_PER_UNIT)));
4827 bitpos &= BITS_PER_UNIT - 1;
4828 }
4829
4830 if (TREE_CODE (to) == COMPONENT_REF
4831 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4832 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4833 /* The C++ memory model naturally applies to byte-aligned fields.
4834 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4835 BITSIZE are not byte-aligned, there is no need to limit the range
4836 we can access. This can occur with packed structures in Ada. */
4837 else if (bitsize > 0
4838 && bitsize % BITS_PER_UNIT == 0
4839 && bitpos % BITS_PER_UNIT == 0)
4840 {
4841 bitregion_start = bitpos;
4842 bitregion_end = bitpos + bitsize - 1;
4843 }
4844
4845 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4846
4847 /* If the field has a mode, we want to access it in the
4848 field's mode, not the computed mode.
4849 If a MEM has VOIDmode (external with incomplete type),
4850 use BLKmode for it instead. */
4851 if (MEM_P (to_rtx))
4852 {
4853 if (mode1 != VOIDmode)
4854 to_rtx = adjust_address (to_rtx, mode1, 0);
4855 else if (GET_MODE (to_rtx) == VOIDmode)
4856 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4857 }
4858
4859 if (offset != 0)
4860 {
4861 machine_mode address_mode;
4862 rtx offset_rtx;
4863
4864 if (!MEM_P (to_rtx))
4865 {
4866 /* We can get constant negative offsets into arrays with broken
4867 user code. Translate this to a trap instead of ICEing. */
4868 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4869 expand_builtin_trap ();
4870 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4871 }
4872
4873 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4874 address_mode = get_address_mode (to_rtx);
4875 if (GET_MODE (offset_rtx) != address_mode)
4876 {
4877 /* We cannot be sure that the RTL in offset_rtx is valid outside
4878 of a memory address context, so force it into a register
4879 before attempting to convert it to the desired mode. */
4880 offset_rtx = force_operand (offset_rtx, NULL_RTX);
4881 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4882 }
4883
4884 /* If we have an expression in OFFSET_RTX and a non-zero
4885 byte offset in BITPOS, adding the byte offset before the
4886 OFFSET_RTX results in better intermediate code, which makes
4887 later rtl optimization passes perform better.
4888
4889 We prefer intermediate code like this:
4890
4891 r124:DI=r123:DI+0x18
4892 [r124:DI]=r121:DI
4893
4894 ... instead of ...
4895
4896 r124:DI=r123:DI+0x10
4897 [r124:DI+0x8]=r121:DI
4898
4899 This is only done for aligned data values, as these can
4900 be expected to result in single move instructions. */
4901 if (mode1 != VOIDmode
4902 && bitpos != 0
4903 && bitsize > 0
4904 && (bitpos % bitsize) == 0
4905 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4906 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4907 {
4908 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4909 bitregion_start = 0;
4910 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4911 bitregion_end -= bitpos;
4912 bitpos = 0;
4913 }
4914
4915 to_rtx = offset_address (to_rtx, offset_rtx,
4916 highest_pow2_factor_for_target (to,
4917 offset));
4918 }
4919
4920 /* No action is needed if the target is not a memory and the field
4921 lies completely outside that target. This can occur if the source
4922 code contains an out-of-bounds access to a small array. */
4923 if (!MEM_P (to_rtx)
4924 && GET_MODE (to_rtx) != BLKmode
4925 && (unsigned HOST_WIDE_INT) bitpos
4926 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4927 {
4928 expand_normal (from);
4929 result = NULL;
4930 }
4931 /* Handle expand_expr of a complex value returning a CONCAT. */
4932 else if (GET_CODE (to_rtx) == CONCAT)
4933 {
4934 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4935 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4936 && bitpos == 0
4937 && bitsize == mode_bitsize)
4938 result = store_expr (from, to_rtx, false, nontemporal);
4939 else if (bitsize == mode_bitsize / 2
4940 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4941 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4942 nontemporal);
4943 else if (bitpos + bitsize <= mode_bitsize / 2)
4944 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4945 bitregion_start, bitregion_end,
4946 mode1, from,
4947 get_alias_set (to), nontemporal);
4948 else if (bitpos >= mode_bitsize / 2)
4949 result = store_field (XEXP (to_rtx, 1), bitsize,
4950 bitpos - mode_bitsize / 2,
4951 bitregion_start, bitregion_end,
4952 mode1, from,
4953 get_alias_set (to), nontemporal);
4954 else if (bitpos == 0 && bitsize == mode_bitsize)
4955 {
4956 rtx from_rtx;
4957 result = expand_normal (from);
4958 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4959 TYPE_MODE (TREE_TYPE (from)), 0);
4960 emit_move_insn (XEXP (to_rtx, 0),
4961 read_complex_part (from_rtx, false));
4962 emit_move_insn (XEXP (to_rtx, 1),
4963 read_complex_part (from_rtx, true));
4964 }
4965 else
4966 {
4967 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4968 GET_MODE_SIZE (GET_MODE (to_rtx)));
4969 write_complex_part (temp, XEXP (to_rtx, 0), false);
4970 write_complex_part (temp, XEXP (to_rtx, 1), true);
4971 result = store_field (temp, bitsize, bitpos,
4972 bitregion_start, bitregion_end,
4973 mode1, from,
4974 get_alias_set (to), nontemporal);
4975 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4976 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4977 }
4978 }
4979 else
4980 {
4981 if (MEM_P (to_rtx))
4982 {
4983 /* If the field is at offset zero, we could have been given the
4984 DECL_RTX of the parent struct. Don't munge it. */
4985 to_rtx = shallow_copy_rtx (to_rtx);
4986 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4987 if (volatilep)
4988 MEM_VOLATILE_P (to_rtx) = 1;
4989 }
4990
4991 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4992 bitregion_start, bitregion_end,
4993 mode1,
4994 to_rtx, to, from))
4995 result = NULL;
4996 else
4997 result = store_field (to_rtx, bitsize, bitpos,
4998 bitregion_start, bitregion_end,
4999 mode1, from,
5000 get_alias_set (to), nontemporal);
5001 }
5002
5003 if (result)
5004 preserve_temp_slots (result);
5005 pop_temp_slots ();
5006 return;
5007 }
5008
5009 /* If the rhs is a function call and its value is not an aggregate,
5010 call the function before we start to compute the lhs.
5011 This is needed for correct code for cases such as
5012 val = setjmp (buf) on machines where reference to val
5013 requires loading up part of an address in a separate insn.
5014
5015 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5016 since it might be a promoted variable where the zero- or sign- extension
5017 needs to be done. Handling this in the normal way is safe because no
5018 computation is done before the call. The same is true for SSA names. */
5019 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5020 && COMPLETE_TYPE_P (TREE_TYPE (from))
5021 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5022 && ! (((TREE_CODE (to) == VAR_DECL
5023 || TREE_CODE (to) == PARM_DECL
5024 || TREE_CODE (to) == RESULT_DECL)
5025 && REG_P (DECL_RTL (to)))
5026 || TREE_CODE (to) == SSA_NAME))
5027 {
5028 rtx value;
5029 rtx bounds;
5030
5031 push_temp_slots ();
5032 value = expand_normal (from);
5033
5034 /* Split value and bounds to store them separately. */
5035 chkp_split_slot (value, &value, &bounds);
5036
5037 if (to_rtx == 0)
5038 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5039
5040 /* Handle calls that return values in multiple non-contiguous locations.
5041 The Irix 6 ABI has examples of this. */
5042 if (GET_CODE (to_rtx) == PARALLEL)
5043 {
5044 if (GET_CODE (value) == PARALLEL)
5045 emit_group_move (to_rtx, value);
5046 else
5047 emit_group_load (to_rtx, value, TREE_TYPE (from),
5048 int_size_in_bytes (TREE_TYPE (from)));
5049 }
5050 else if (GET_CODE (value) == PARALLEL)
5051 emit_group_store (to_rtx, value, TREE_TYPE (from),
5052 int_size_in_bytes (TREE_TYPE (from)));
5053 else if (GET_MODE (to_rtx) == BLKmode)
5054 {
5055 /* Handle calls that return BLKmode values in registers. */
5056 if (REG_P (value))
5057 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5058 else
5059 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5060 }
5061 else
5062 {
5063 if (POINTER_TYPE_P (TREE_TYPE (to)))
5064 value = convert_memory_address_addr_space
5065 (GET_MODE (to_rtx), value,
5066 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5067
5068 emit_move_insn (to_rtx, value);
5069 }
5070
5071 /* Store bounds if required. */
5072 if (bounds
5073 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5074 {
5075 gcc_assert (MEM_P (to_rtx));
5076 chkp_emit_bounds_store (bounds, value, to_rtx);
5077 }
5078
5079 preserve_temp_slots (to_rtx);
5080 pop_temp_slots ();
5081 return;
5082 }
5083
5084 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5085 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5086
5087 /* Don't move directly into a return register. */
5088 if (TREE_CODE (to) == RESULT_DECL
5089 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5090 {
5091 rtx temp;
5092
5093 push_temp_slots ();
5094
5095 /* If the source is itself a return value, it still is in a pseudo at
5096 this point so we can move it back to the return register directly. */
5097 if (REG_P (to_rtx)
5098 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5099 && TREE_CODE (from) != CALL_EXPR)
5100 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5101 else
5102 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5103
5104 /* Handle calls that return values in multiple non-contiguous locations.
5105 The Irix 6 ABI has examples of this. */
5106 if (GET_CODE (to_rtx) == PARALLEL)
5107 {
5108 if (GET_CODE (temp) == PARALLEL)
5109 emit_group_move (to_rtx, temp);
5110 else
5111 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5112 int_size_in_bytes (TREE_TYPE (from)));
5113 }
5114 else if (temp)
5115 emit_move_insn (to_rtx, temp);
5116
5117 preserve_temp_slots (to_rtx);
5118 pop_temp_slots ();
5119 return;
5120 }
5121
5122 /* In case we are returning the contents of an object which overlaps
5123 the place the value is being stored, use a safe function when copying
5124 a value through a pointer into a structure value return block. */
5125 if (TREE_CODE (to) == RESULT_DECL
5126 && TREE_CODE (from) == INDIRECT_REF
5127 && ADDR_SPACE_GENERIC_P
5128 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5129 && refs_may_alias_p (to, from)
5130 && cfun->returns_struct
5131 && !cfun->returns_pcc_struct)
5132 {
5133 rtx from_rtx, size;
5134
5135 push_temp_slots ();
5136 size = expr_size (from);
5137 from_rtx = expand_normal (from);
5138
5139 emit_library_call (memmove_libfunc, LCT_NORMAL,
5140 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5141 XEXP (from_rtx, 0), Pmode,
5142 convert_to_mode (TYPE_MODE (sizetype),
5143 size, TYPE_UNSIGNED (sizetype)),
5144 TYPE_MODE (sizetype));
5145
5146 preserve_temp_slots (to_rtx);
5147 pop_temp_slots ();
5148 return;
5149 }
5150
5151 /* Compute FROM and store the value in the rtx we got. */
5152
5153 push_temp_slots ();
5154 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, to);
5155 preserve_temp_slots (result);
5156 pop_temp_slots ();
5157 return;
5158 }
5159
5160 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5161 succeeded, false otherwise. */
5162
5163 bool
5164 emit_storent_insn (rtx to, rtx from)
5165 {
5166 struct expand_operand ops[2];
5167 machine_mode mode = GET_MODE (to);
5168 enum insn_code code = optab_handler (storent_optab, mode);
5169
5170 if (code == CODE_FOR_nothing)
5171 return false;
5172
5173 create_fixed_operand (&ops[0], to);
5174 create_input_operand (&ops[1], from, mode);
5175 return maybe_expand_insn (code, 2, ops);
5176 }
5177
5178 /* Generate code for computing expression EXP,
5179 and storing the value into TARGET.
5180
5181 If the mode is BLKmode then we may return TARGET itself.
5182 It turns out that in BLKmode it doesn't cause a problem,
5183 because C has no operators that could combine two different
5184 assignments into the same BLKmode object with different values
5185 with no sequence point. Will other languages need this to
5186 be more thorough?
5187
5188 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5189 stack, and block moves may need to be treated specially.
5190
5191 If NONTEMPORAL is true, try using a nontemporal store instruction.
5192
5193 If BTARGET is not NULL then computed bounds of EXP are
5194 associated with BTARGET. */
5195
5196 rtx
5197 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5198 bool nontemporal, tree btarget)
5199 {
5200 rtx temp;
5201 rtx alt_rtl = NULL_RTX;
5202 location_t loc = curr_insn_location ();
5203
5204 if (VOID_TYPE_P (TREE_TYPE (exp)))
5205 {
5206 /* C++ can generate ?: expressions with a throw expression in one
5207 branch and an rvalue in the other. Here, we resolve attempts to
5208 store the throw expression's nonexistent result. */
5209 gcc_assert (!call_param_p);
5210 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5211 return NULL_RTX;
5212 }
5213 if (TREE_CODE (exp) == COMPOUND_EXPR)
5214 {
5215 /* Perform first part of compound expression, then assign from second
5216 part. */
5217 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5218 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5219 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5220 call_param_p, nontemporal, btarget);
5221 }
5222 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5223 {
5224 /* For a conditional expression, get a safe form of the target.  Then
5225 test the condition, doing the appropriate assignment on either
5226 side. This avoids the creation of unnecessary temporaries.
5227 For non-BLKmode, it is more efficient not to do this. */
5228
5229 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5230
5231 do_pending_stack_adjust ();
5232 NO_DEFER_POP;
5233 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5234 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5235 nontemporal, btarget);
5236 emit_jump_insn (gen_jump (lab2));
5237 emit_barrier ();
5238 emit_label (lab1);
5239 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5240 nontemporal, btarget);
5241 emit_label (lab2);
5242 OK_DEFER_POP;
5243
5244 return NULL_RTX;
5245 }
5246 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5247 /* If this is a scalar in a register that is stored in a wider mode
5248 than the declared mode, compute the result into its declared mode
5249 and then convert to the wider mode. Our value is the computed
5250 expression. */
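/* For example, on a 64-bit target that promotes SImode variables to
   DImode registers, the SImode computation is performed first and the
   result is then sign- or zero-extended into the DImode register
   according to SUBREG_PROMOTED_SIGN.  */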
5251 {
5252 rtx inner_target = 0;
5253
5254 /* We can do the conversion inside EXP, which will often result
5255 in some optimizations. Do the conversion in two steps: first
5256 change the signedness, if needed, then the extend. But don't
5257 do this if the type of EXP is a subtype of something else
5258 since then the conversion might involve more than just
5259 converting modes. */
5260 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5261 && TREE_TYPE (TREE_TYPE (exp)) == 0
5262 && GET_MODE_PRECISION (GET_MODE (target))
5263 == TYPE_PRECISION (TREE_TYPE (exp)))
5264 {
5265 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5266 TYPE_UNSIGNED (TREE_TYPE (exp))))
5267 {
5268 /* Some types, e.g. Fortran's logical*4, won't have a signed
5269 version, so use the mode instead. */
5270 tree ntype
5271 = (signed_or_unsigned_type_for
5272 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5273 if (ntype == NULL)
5274 ntype = lang_hooks.types.type_for_mode
5275 (TYPE_MODE (TREE_TYPE (exp)),
5276 SUBREG_PROMOTED_SIGN (target));
5277
5278 exp = fold_convert_loc (loc, ntype, exp);
5279 }
5280
5281 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5282 (GET_MODE (SUBREG_REG (target)),
5283 SUBREG_PROMOTED_SIGN (target)),
5284 exp);
5285
5286 inner_target = SUBREG_REG (target);
5287 }
5288
5289 temp = expand_expr (exp, inner_target, VOIDmode,
5290 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5291
5292 /* Handle bounds returned by call. */
5293 if (TREE_CODE (exp) == CALL_EXPR)
5294 {
5295 rtx bounds;
5296 chkp_split_slot (temp, &temp, &bounds);
5297 if (bounds && btarget)
5298 {
5299 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5300 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5301 chkp_set_rtl_bounds (btarget, tmp);
5302 }
5303 }
5304
5305 /* If TEMP is a VOIDmode constant, use convert_modes to make
5306 sure that we properly convert it. */
5307 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5308 {
5309 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5310 temp, SUBREG_PROMOTED_SIGN (target));
5311 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5312 GET_MODE (target), temp,
5313 SUBREG_PROMOTED_SIGN (target));
5314 }
5315
5316 convert_move (SUBREG_REG (target), temp,
5317 SUBREG_PROMOTED_SIGN (target));
5318
5319 return NULL_RTX;
5320 }
5321 else if ((TREE_CODE (exp) == STRING_CST
5322 || (TREE_CODE (exp) == MEM_REF
5323 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5324 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5325 == STRING_CST
5326 && integer_zerop (TREE_OPERAND (exp, 1))))
5327 && !nontemporal && !call_param_p
5328 && MEM_P (target))
5329 {
5330 /* Optimize initialization of an array with a STRING_CST. */
5331 HOST_WIDE_INT exp_len, str_copy_len;
5332 rtx dest_mem;
5333 tree str = TREE_CODE (exp) == STRING_CST
5334 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5335
5336 exp_len = int_expr_size (exp);
5337 if (exp_len <= 0)
5338 goto normal_expr;
5339
5340 if (TREE_STRING_LENGTH (str) <= 0)
5341 goto normal_expr;
5342
5343 str_copy_len = strlen (TREE_STRING_POINTER (str));
5344 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5345 goto normal_expr;
5346
5347 str_copy_len = TREE_STRING_LENGTH (str);
5348 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5349 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5350 {
5351 str_copy_len += STORE_MAX_PIECES - 1;
5352 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5353 }
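      /* A worked example, assuming STORE_MAX_PIECES is 8 (the value is
	 target-dependent): for
	   char buf[16] = "abc";
	 EXP_LEN is 16 and TREE_STRING_LENGTH is 4 (including the NUL), so
	 STR_COPY_LEN is rounded up to 8 here, capped at EXP_LEN below and,
	 provided can_store_by_pieces agrees, those 8 bytes are stored by
	 pieces while the remaining 8 bytes are cleared afterwards. */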
5354 str_copy_len = MIN (str_copy_len, exp_len);
5355 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5356 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5357 MEM_ALIGN (target), false))
5358 goto normal_expr;
5359
5360 dest_mem = target;
5361
5362 dest_mem = store_by_pieces (dest_mem,
5363 str_copy_len, builtin_strncpy_read_str,
5364 CONST_CAST (char *,
5365 TREE_STRING_POINTER (str)),
5366 MEM_ALIGN (target), false,
5367 exp_len > str_copy_len ? 1 : 0);
5368 if (exp_len > str_copy_len)
5369 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5370 GEN_INT (exp_len - str_copy_len),
5371 BLOCK_OP_NORMAL);
5372 return NULL_RTX;
5373 }
5374 else
5375 {
5376 rtx tmp_target;
5377
5378 normal_expr:
5379 /* If we want to use a nontemporal store, force the value to
5380 register first. */
5381 tmp_target = nontemporal ? NULL_RTX : target;
5382 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5383 (call_param_p
5384 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5385 &alt_rtl, false);
5386
5387 /* Handle bounds returned by call. */
5388 if (TREE_CODE (exp) == CALL_EXPR)
5389 {
5390 rtx bounds;
5391 chkp_split_slot (temp, &temp, &bounds);
5392 if (bounds && btarget)
5393 {
5394 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5395 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5396 chkp_set_rtl_bounds (btarget, tmp);
5397 }
5398 }
5399 }
5400
5401 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5402 the same as that of TARGET, adjust the constant. This is needed, for
5403 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5404 only a word-sized value. */
5405 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5406 && TREE_CODE (exp) != ERROR_MARK
5407 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5408 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5409 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5410
5411 /* If value was not generated in the target, store it there.
5412 Convert the value to TARGET's type first if necessary and emit the
5413 pending incrementations that have been queued when expanding EXP.
5414 Note that we cannot emit the whole queue blindly because this will
5415 effectively disable the POST_INC optimization later.
5416
5417 If TEMP and TARGET compare equal according to rtx_equal_p, but
5418 one or both of them are volatile memory refs, we have to distinguish
5419 two cases:
5420 - expand_expr has used TARGET. In this case, we must not generate
5421 another copy. This can be detected by TEMP comparing equal to
5422 TARGET according to ==.
5423 - expand_expr has not used TARGET; the source just happens to have
5424 the same RTX form. Since TEMP will have been created by expand_expr,
5425 it will compare unequal to TARGET according to ==.
5426 We must generate a copy in this case, to reach the correct number
5427 of volatile memory references. */
5428
5429 if ((! rtx_equal_p (temp, target)
5430 || (temp != target && (side_effects_p (temp)
5431 || side_effects_p (target))))
5432 && TREE_CODE (exp) != ERROR_MARK
5433 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5434 but TARGET is not a valid memory reference, TEMP will differ
5435 from TARGET although it is really the same location. */
5436 && !(alt_rtl
5437 && rtx_equal_p (alt_rtl, target)
5438 && !side_effects_p (alt_rtl)
5439 && !side_effects_p (target))
5440 /* If there's nothing to copy, don't bother. Don't call
5441 expr_size unless necessary, because some front ends' (e.g. C++)
5442 expr_size hook must not be given objects that are not
5443 supposed to be bit-copied or bit-initialized. */
5444 && expr_size (exp) != const0_rtx)
5445 {
5446 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5447 {
5448 if (GET_MODE (target) == BLKmode)
5449 {
5450 /* Handle calls that return BLKmode values in registers. */
5451 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5452 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5453 else
5454 store_bit_field (target,
5455 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5456 0, 0, 0, GET_MODE (temp), temp);
5457 }
5458 else
5459 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5460 }
5461
5462 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5463 {
5464 /* Handle copying a string constant into an array. The string
5465 constant may be shorter than the array. So copy just the string's
5466 actual length, and clear the rest. First get the size of the data
5467 type of the string, which is actually the size of the target. */
5468 rtx size = expr_size (exp);
5469
5470 if (CONST_INT_P (size)
5471 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5472 emit_block_move (target, temp, size,
5473 (call_param_p
5474 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5475 else
5476 {
5477 machine_mode pointer_mode
5478 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5479 machine_mode address_mode = get_address_mode (target);
5480
5481 /* Compute the size of the data to copy from the string. */
5482 tree copy_size
5483 = size_binop_loc (loc, MIN_EXPR,
5484 make_tree (sizetype, size),
5485 size_int (TREE_STRING_LENGTH (exp)));
5486 rtx copy_size_rtx
5487 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5488 (call_param_p
5489 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5490 rtx_code_label *label = 0;
5491
5492 /* Copy that much. */
5493 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5494 TYPE_UNSIGNED (sizetype));
5495 emit_block_move (target, temp, copy_size_rtx,
5496 (call_param_p
5497 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5498
5499 /* Figure out how much is left in TARGET that we have to clear.
5500 Do all calculations in pointer_mode. */
5501 if (CONST_INT_P (copy_size_rtx))
5502 {
5503 size = plus_constant (address_mode, size,
5504 -INTVAL (copy_size_rtx));
5505 target = adjust_address (target, BLKmode,
5506 INTVAL (copy_size_rtx));
5507 }
5508 else
5509 {
5510 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5511 copy_size_rtx, NULL_RTX, 0,
5512 OPTAB_LIB_WIDEN);
5513
5514 if (GET_MODE (copy_size_rtx) != address_mode)
5515 copy_size_rtx = convert_to_mode (address_mode,
5516 copy_size_rtx,
5517 TYPE_UNSIGNED (sizetype));
5518
5519 target = offset_address (target, copy_size_rtx,
5520 highest_pow2_factor (copy_size));
5521 label = gen_label_rtx ();
5522 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5523 GET_MODE (size), 0, label);
5524 }
5525
5526 if (size != const0_rtx)
5527 clear_storage (target, size, BLOCK_OP_NORMAL);
5528
5529 if (label)
5530 emit_label (label);
5531 }
5532 }
5533 /* Handle calls that return values in multiple non-contiguous locations.
5534 The Irix 6 ABI has examples of this. */
5535 else if (GET_CODE (target) == PARALLEL)
5536 {
5537 if (GET_CODE (temp) == PARALLEL)
5538 emit_group_move (target, temp);
5539 else
5540 emit_group_load (target, temp, TREE_TYPE (exp),
5541 int_size_in_bytes (TREE_TYPE (exp)));
5542 }
5543 else if (GET_CODE (temp) == PARALLEL)
5544 emit_group_store (target, temp, TREE_TYPE (exp),
5545 int_size_in_bytes (TREE_TYPE (exp)));
5546 else if (GET_MODE (temp) == BLKmode)
5547 emit_block_move (target, temp, expr_size (exp),
5548 (call_param_p
5549 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5550 /* If we emit a nontemporal store, there is nothing else to do. */
5551 else if (nontemporal && emit_storent_insn (target, temp))
5552 ;
5553 else
5554 {
5555 temp = force_operand (temp, target);
5556 if (temp != target)
5557 emit_move_insn (target, temp);
5558 }
5559 }
5560
5561 return NULL_RTX;
5562 }
5563
5564 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5565 rtx
5566 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5567 {
5568 return store_expr_with_bounds (exp, target, call_param_p, nontemporal, NULL);
5569 }
5570 \f
5571 /* Return true if field F of structure TYPE is a flexible array. */
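/* For example (a hypothetical declaration):
     struct s { int len; char data[]; };
   DATA is a flexible array member: it is the last field, its array type
   has a zero lower bound and no upper bound, and S itself still has a
   constant size. */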
5572
5573 static bool
5574 flexible_array_member_p (const_tree f, const_tree type)
5575 {
5576 const_tree tf;
5577
5578 tf = TREE_TYPE (f);
5579 return (DECL_CHAIN (f) == NULL
5580 && TREE_CODE (tf) == ARRAY_TYPE
5581 && TYPE_DOMAIN (tf)
5582 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5583 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5584 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5585 && int_size_in_bytes (type) >= 0);
5586 }
5587
5588 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5589 must have in order for it to completely initialize a value of type TYPE.
5590 Return -1 if the number isn't known.
5591
5592 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
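/* For example, for the type int[4][2] the FOR_CTOR_P answer is 4 (a
   complete constructor needs four top-level sub-constructors), while the
   !FOR_CTOR_P estimate is 4 * 2 = 8 scalars. */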
5593
5594 static HOST_WIDE_INT
5595 count_type_elements (const_tree type, bool for_ctor_p)
5596 {
5597 switch (TREE_CODE (type))
5598 {
5599 case ARRAY_TYPE:
5600 {
5601 tree nelts;
5602
5603 nelts = array_type_nelts (type);
5604 if (nelts && tree_fits_uhwi_p (nelts))
5605 {
5606 unsigned HOST_WIDE_INT n;
5607
5608 n = tree_to_uhwi (nelts) + 1;
5609 if (n == 0 || for_ctor_p)
5610 return n;
5611 else
5612 return n * count_type_elements (TREE_TYPE (type), false);
5613 }
5614 return for_ctor_p ? -1 : 1;
5615 }
5616
5617 case RECORD_TYPE:
5618 {
5619 unsigned HOST_WIDE_INT n;
5620 tree f;
5621
5622 n = 0;
5623 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5624 if (TREE_CODE (f) == FIELD_DECL)
5625 {
5626 if (!for_ctor_p)
5627 n += count_type_elements (TREE_TYPE (f), false);
5628 else if (!flexible_array_member_p (f, type))
5629 /* Don't count flexible arrays, which are not supposed
5630 to be initialized. */
5631 n += 1;
5632 }
5633
5634 return n;
5635 }
5636
5637 case UNION_TYPE:
5638 case QUAL_UNION_TYPE:
5639 {
5640 tree f;
5641 HOST_WIDE_INT n, m;
5642
5643 gcc_assert (!for_ctor_p);
5644 /* Estimate the number of scalars in each field and pick the
5645 maximum. Other estimates would do instead; the idea is simply
5646 to make sure that the estimate is not sensitive to the ordering
5647 of the fields. */
5648 n = 1;
5649 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5650 if (TREE_CODE (f) == FIELD_DECL)
5651 {
5652 m = count_type_elements (TREE_TYPE (f), false);
5653 /* If the field doesn't span the whole union, add an extra
5654 scalar for the rest. */
5655 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5656 TYPE_SIZE (type)) != 1)
5657 m++;
5658 if (n < m)
5659 n = m;
5660 }
5661 return n;
5662 }
5663
5664 case COMPLEX_TYPE:
5665 return 2;
5666
5667 case VECTOR_TYPE:
5668 return TYPE_VECTOR_SUBPARTS (type);
5669
5670 case INTEGER_TYPE:
5671 case REAL_TYPE:
5672 case FIXED_POINT_TYPE:
5673 case ENUMERAL_TYPE:
5674 case BOOLEAN_TYPE:
5675 case POINTER_TYPE:
5676 case OFFSET_TYPE:
5677 case REFERENCE_TYPE:
5678 case NULLPTR_TYPE:
5679 return 1;
5680
5681 case ERROR_MARK:
5682 return 0;
5683
5684 case VOID_TYPE:
5685 case METHOD_TYPE:
5686 case FUNCTION_TYPE:
5687 case LANG_TYPE:
5688 default:
5689 gcc_unreachable ();
5690 }
5691 }
5692
5693 /* Helper for categorize_ctor_elements. Identical interface. */
5694
5695 static bool
5696 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5697 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5698 {
5699 unsigned HOST_WIDE_INT idx;
5700 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5701 tree value, purpose, elt_type;
5702
5703 /* Whether CTOR is a valid constant initializer, in accordance with what
5704 initializer_constant_valid_p does. If inferred from the constructor
5705 elements, true until proven otherwise. */
5706 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5707 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5708
5709 nz_elts = 0;
5710 init_elts = 0;
5711 num_fields = 0;
5712 elt_type = NULL_TREE;
5713
5714 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5715 {
5716 HOST_WIDE_INT mult = 1;
5717
5718 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5719 {
5720 tree lo_index = TREE_OPERAND (purpose, 0);
5721 tree hi_index = TREE_OPERAND (purpose, 1);
5722
5723 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5724 mult = (tree_to_uhwi (hi_index)
5725 - tree_to_uhwi (lo_index) + 1);
5726 }
5727 num_fields += mult;
5728 elt_type = TREE_TYPE (value);
5729
5730 switch (TREE_CODE (value))
5731 {
5732 case CONSTRUCTOR:
5733 {
5734 HOST_WIDE_INT nz = 0, ic = 0;
5735
5736 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5737 p_complete);
5738
5739 nz_elts += mult * nz;
5740 init_elts += mult * ic;
5741
5742 if (const_from_elts_p && const_p)
5743 const_p = const_elt_p;
5744 }
5745 break;
5746
5747 case INTEGER_CST:
5748 case REAL_CST:
5749 case FIXED_CST:
5750 if (!initializer_zerop (value))
5751 nz_elts += mult;
5752 init_elts += mult;
5753 break;
5754
5755 case STRING_CST:
5756 nz_elts += mult * TREE_STRING_LENGTH (value);
5757 init_elts += mult * TREE_STRING_LENGTH (value);
5758 break;
5759
5760 case COMPLEX_CST:
5761 if (!initializer_zerop (TREE_REALPART (value)))
5762 nz_elts += mult;
5763 if (!initializer_zerop (TREE_IMAGPART (value)))
5764 nz_elts += mult;
5765 init_elts += mult;
5766 break;
5767
5768 case VECTOR_CST:
5769 {
5770 unsigned i;
5771 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5772 {
5773 tree v = VECTOR_CST_ELT (value, i);
5774 if (!initializer_zerop (v))
5775 nz_elts += mult;
5776 init_elts += mult;
5777 }
5778 }
5779 break;
5780
5781 default:
5782 {
5783 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5784 nz_elts += mult * tc;
5785 init_elts += mult * tc;
5786
5787 if (const_from_elts_p && const_p)
5788 const_p = initializer_constant_valid_p (value, elt_type)
5789 != NULL_TREE;
5790 }
5791 break;
5792 }
5793 }
5794
5795 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5796 num_fields, elt_type))
5797 *p_complete = false;
5798
5799 *p_nz_elts += nz_elts;
5800 *p_init_elts += init_elts;
5801
5802 return const_p;
5803 }
5804
5805 /* Examine CTOR to discover:
5806 * how many scalar fields are set to nonzero values,
5807 and place it in *P_NZ_ELTS;
5808 * how many scalar fields in total are in CTOR,
5809 and place it in *P_INIT_ELTS;
5810 * whether the constructor is complete -- in the sense that every
5811 meaningful byte is explicitly given a value --
5812 and place it in *P_COMPLETE.
5813
5814 Return whether or not CTOR is a valid static constant initializer, the same
5815 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5816
5817 bool
5818 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5819 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5820 {
5821 *p_nz_elts = 0;
5822 *p_init_elts = 0;
5823 *p_complete = true;
5824
5825 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5826 }
5827
5828 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5829 of which had type LAST_TYPE. Each element was itself a complete
5830 initializer, in the sense that every meaningful byte was explicitly
5831 given a value. Return true if the same is true for the constructor
5832 as a whole. */
5833
5834 bool
5835 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5836 const_tree last_type)
5837 {
5838 if (TREE_CODE (type) == UNION_TYPE
5839 || TREE_CODE (type) == QUAL_UNION_TYPE)
5840 {
5841 if (num_elts == 0)
5842 return false;
5843
5844 gcc_assert (num_elts == 1 && last_type);
5845
5846 /* ??? We could look at each element of the union, and find the
5847 largest element, which would avoid comparing the size of the
5848 initialized element against any tail padding in the union.
5849 Doesn't seem worth the effort... */
5850 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5851 }
5852
5853 return count_type_elements (type, true) == num_elts;
5854 }
5855
5856 /* Return 1 if EXP contains mostly (3/4) zeros. */
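/* For example, a CONSTRUCTOR that completely initializes eight scalars,
   exactly one of which is nonzero, is mostly zeros (1 < 8/4), whereas one
   with two nonzero scalars is not (2 < 8/4 fails). */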
5857
5858 static int
5859 mostly_zeros_p (const_tree exp)
5860 {
5861 if (TREE_CODE (exp) == CONSTRUCTOR)
5862 {
5863 HOST_WIDE_INT nz_elts, init_elts;
5864 bool complete_p;
5865
5866 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5867 return !complete_p || nz_elts < init_elts / 4;
5868 }
5869
5870 return initializer_zerop (exp);
5871 }
5872
5873 /* Return 1 if EXP contains all zeros. */
5874
5875 static int
5876 all_zeros_p (const_tree exp)
5877 {
5878 if (TREE_CODE (exp) == CONSTRUCTOR)
5879 {
5880 HOST_WIDE_INT nz_elts, init_elts;
5881 bool complete_p;
5882
5883 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5884 return nz_elts == 0;
5885 }
5886
5887 return initializer_zerop (exp);
5888 }
5889 \f
5890 /* Helper function for store_constructor.
5891 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5892 CLEARED is as for store_constructor.
5893 ALIAS_SET is the alias set to use for any stores.
5894
5895 This provides a recursive shortcut back to store_constructor when it isn't
5896 necessary to go through store_field. This is so that we can pass through
5897 the cleared field to let store_constructor know that we may not have to
5898 clear a substructure if the outer structure has already been cleared. */
5899
5900 static void
5901 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5902 HOST_WIDE_INT bitpos, machine_mode mode,
5903 tree exp, int cleared, alias_set_type alias_set)
5904 {
5905 if (TREE_CODE (exp) == CONSTRUCTOR
5906 /* We can only call store_constructor recursively if the size and
5907 bit position are on a byte boundary. */
5908 && bitpos % BITS_PER_UNIT == 0
5909 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5910 /* If we have a nonzero bitpos for a register target, then we just
5911 let store_field do the bitfield handling. This is unlikely to
5912 generate unnecessary clear instructions anyway. */
5913 && (bitpos == 0 || MEM_P (target)))
5914 {
5915 if (MEM_P (target))
5916 target
5917 = adjust_address (target,
5918 GET_MODE (target) == BLKmode
5919 || 0 != (bitpos
5920 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5921 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5922
5923
5924 /* Update the alias set, if required. */
5925 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5926 && MEM_ALIAS_SET (target) != 0)
5927 {
5928 target = copy_rtx (target);
5929 set_mem_alias_set (target, alias_set);
5930 }
5931
5932 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5933 }
5934 else
5935 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5936 }
5937
5938
5939 /* Returns the number of FIELD_DECLs in TYPE. */
5940
5941 static int
5942 fields_length (const_tree type)
5943 {
5944 tree t = TYPE_FIELDS (type);
5945 int count = 0;
5946
5947 for (; t; t = DECL_CHAIN (t))
5948 if (TREE_CODE (t) == FIELD_DECL)
5949 ++count;
5950
5951 return count;
5952 }
5953
5954
5955 /* Store the value of constructor EXP into the rtx TARGET.
5956 TARGET is either a REG or a MEM; we know it cannot conflict, since
5957 safe_from_p has been called.
5958 CLEARED is true if TARGET is known to have been zero'd.
5959 SIZE is the number of bytes of TARGET we are allowed to modify: this
5960 may not be the same as the size of EXP if we are assigning to a field
5961 which has been packed to exclude padding bits. */
5962
5963 static void
5964 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5965 {
5966 tree type = TREE_TYPE (exp);
5967 #ifdef WORD_REGISTER_OPERATIONS
5968 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5969 #endif
5970
5971 switch (TREE_CODE (type))
5972 {
5973 case RECORD_TYPE:
5974 case UNION_TYPE:
5975 case QUAL_UNION_TYPE:
5976 {
5977 unsigned HOST_WIDE_INT idx;
5978 tree field, value;
5979
5980 /* If size is zero or the target is already cleared, do nothing. */
5981 if (size == 0 || cleared)
5982 cleared = 1;
5983 /* We either clear the aggregate or indicate the value is dead. */
5984 else if ((TREE_CODE (type) == UNION_TYPE
5985 || TREE_CODE (type) == QUAL_UNION_TYPE)
5986 && ! CONSTRUCTOR_ELTS (exp))
5987 /* If the constructor is empty, clear the union. */
5988 {
5989 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5990 cleared = 1;
5991 }
5992
5993 /* If we are building a static constructor into a register,
5994 set the initial value as zero so we can fold the value into
5995 a constant. But if more than one register is involved,
5996 this probably loses. */
5997 else if (REG_P (target) && TREE_STATIC (exp)
5998 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5999 {
6000 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6001 cleared = 1;
6002 }
6003
6004 /* If the constructor has fewer fields than the structure or
6005 if we are initializing the structure to mostly zeros, clear
6006 the whole structure first. Don't do this if TARGET is a
6007 register whose mode size isn't equal to SIZE since
6008 clear_storage can't handle this case. */
6009 else if (size > 0
6010 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
6011 != fields_length (type))
6012 || mostly_zeros_p (exp))
6013 && (!REG_P (target)
6014 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6015 == size)))
6016 {
6017 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6018 cleared = 1;
6019 }
6020
6021 if (REG_P (target) && !cleared)
6022 emit_clobber (target);
6023
6024 /* Store each element of the constructor into the
6025 corresponding field of TARGET. */
6026 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6027 {
6028 machine_mode mode;
6029 HOST_WIDE_INT bitsize;
6030 HOST_WIDE_INT bitpos = 0;
6031 tree offset;
6032 rtx to_rtx = target;
6033
6034 /* Just ignore missing fields. We cleared the whole
6035 structure, above, if any fields are missing. */
6036 if (field == 0)
6037 continue;
6038
6039 if (cleared && initializer_zerop (value))
6040 continue;
6041
6042 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6043 bitsize = tree_to_uhwi (DECL_SIZE (field));
6044 else
6045 bitsize = -1;
6046
6047 mode = DECL_MODE (field);
6048 if (DECL_BIT_FIELD (field))
6049 mode = VOIDmode;
6050
6051 offset = DECL_FIELD_OFFSET (field);
6052 if (tree_fits_shwi_p (offset)
6053 && tree_fits_shwi_p (bit_position (field)))
6054 {
6055 bitpos = int_bit_position (field);
6056 offset = 0;
6057 }
6058 else
6059 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6060
6061 if (offset)
6062 {
6063 machine_mode address_mode;
6064 rtx offset_rtx;
6065
6066 offset
6067 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6068 make_tree (TREE_TYPE (exp),
6069 target));
6070
6071 offset_rtx = expand_normal (offset);
6072 gcc_assert (MEM_P (to_rtx));
6073
6074 address_mode = get_address_mode (to_rtx);
6075 if (GET_MODE (offset_rtx) != address_mode)
6076 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6077
6078 to_rtx = offset_address (to_rtx, offset_rtx,
6079 highest_pow2_factor (offset));
6080 }
6081
6082 #ifdef WORD_REGISTER_OPERATIONS
6083 /* If this initializes a field that is smaller than a
6084 word, at the start of a word, try to widen it to a full
6085 word. This special case allows us to output C++ member
6086 function initializations in a form that the optimizers
6087 can understand. */
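	      /* E.g. a 16-bit INTEGER_CST at a word-aligned position in a
		 register target is converted to word_mode and stored as a
		 full word; on a big-endian target it is first shifted left
		 by BITS_PER_WORD - 16 so that it lands in the high bits. */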
6088 if (REG_P (target)
6089 && bitsize < BITS_PER_WORD
6090 && bitpos % BITS_PER_WORD == 0
6091 && GET_MODE_CLASS (mode) == MODE_INT
6092 && TREE_CODE (value) == INTEGER_CST
6093 && exp_size >= 0
6094 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6095 {
6096 tree type = TREE_TYPE (value);
6097
6098 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6099 {
6100 type = lang_hooks.types.type_for_mode
6101 (word_mode, TYPE_UNSIGNED (type));
6102 value = fold_convert (type, value);
6103 }
6104
6105 if (BYTES_BIG_ENDIAN)
6106 value
6107 = fold_build2 (LSHIFT_EXPR, type, value,
6108 build_int_cst (type,
6109 BITS_PER_WORD - bitsize));
6110 bitsize = BITS_PER_WORD;
6111 mode = word_mode;
6112 }
6113 #endif
6114
6115 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6116 && DECL_NONADDRESSABLE_P (field))
6117 {
6118 to_rtx = copy_rtx (to_rtx);
6119 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6120 }
6121
6122 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6123 value, cleared,
6124 get_alias_set (TREE_TYPE (field)));
6125 }
6126 break;
6127 }
6128 case ARRAY_TYPE:
6129 {
6130 tree value, index;
6131 unsigned HOST_WIDE_INT i;
6132 int need_to_clear;
6133 tree domain;
6134 tree elttype = TREE_TYPE (type);
6135 int const_bounds_p;
6136 HOST_WIDE_INT minelt = 0;
6137 HOST_WIDE_INT maxelt = 0;
6138
6139 domain = TYPE_DOMAIN (type);
6140 const_bounds_p = (TYPE_MIN_VALUE (domain)
6141 && TYPE_MAX_VALUE (domain)
6142 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6143 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6144
6145 /* If we have constant bounds for the range of the type, get them. */
6146 if (const_bounds_p)
6147 {
6148 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6149 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6150 }
6151
6152 /* If the constructor has fewer elements than the array, clear
6153 the whole array first. Similarly if this is a static
6154 constructor of a non-BLKmode object. */
6155 if (cleared)
6156 need_to_clear = 0;
6157 else if (REG_P (target) && TREE_STATIC (exp))
6158 need_to_clear = 1;
6159 else
6160 {
6161 unsigned HOST_WIDE_INT idx;
6162 tree index, value;
6163 HOST_WIDE_INT count = 0, zero_count = 0;
6164 need_to_clear = ! const_bounds_p;
6165
6166 /* This loop is a more accurate version of the loop in
6167 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6168 is also needed to check for missing elements. */
6169 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6170 {
6171 HOST_WIDE_INT this_node_count;
6172
6173 if (need_to_clear)
6174 break;
6175
6176 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6177 {
6178 tree lo_index = TREE_OPERAND (index, 0);
6179 tree hi_index = TREE_OPERAND (index, 1);
6180
6181 if (! tree_fits_uhwi_p (lo_index)
6182 || ! tree_fits_uhwi_p (hi_index))
6183 {
6184 need_to_clear = 1;
6185 break;
6186 }
6187
6188 this_node_count = (tree_to_uhwi (hi_index)
6189 - tree_to_uhwi (lo_index) + 1);
6190 }
6191 else
6192 this_node_count = 1;
6193
6194 count += this_node_count;
6195 if (mostly_zeros_p (value))
6196 zero_count += this_node_count;
6197 }
6198
6199 /* Clear the entire array first if there are any missing
6200 elements, or if the incidence of zero elements is >=
6201 75%. */
6202 if (! need_to_clear
6203 && (count < maxelt - minelt + 1
6204 || 4 * zero_count >= 3 * count))
6205 need_to_clear = 1;
6206 }
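	  /* For instance, an initializer that covers only 6 of 10 elements,
	     or one in which at least 12 of the 16 counted elements are
	     themselves mostly zero (4 * 12 >= 3 * 16), makes us clear the
	     whole array first; the all-zero elements are then skipped
	     below. */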
6207
6208 if (need_to_clear && size > 0)
6209 {
6210 if (REG_P (target))
6211 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6212 else
6213 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6214 cleared = 1;
6215 }
6216
6217 if (!cleared && REG_P (target))
6218 /* Inform later passes that the old value is dead. */
6219 emit_clobber (target);
6220
6221 /* Store each element of the constructor into the
6222 corresponding element of TARGET, determined by counting the
6223 elements. */
6224 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6225 {
6226 machine_mode mode;
6227 HOST_WIDE_INT bitsize;
6228 HOST_WIDE_INT bitpos;
6229 rtx xtarget = target;
6230
6231 if (cleared && initializer_zerop (value))
6232 continue;
6233
6234 mode = TYPE_MODE (elttype);
6235 if (mode == BLKmode)
6236 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6237 ? tree_to_uhwi (TYPE_SIZE (elttype))
6238 : -1);
6239 else
6240 bitsize = GET_MODE_BITSIZE (mode);
6241
6242 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6243 {
6244 tree lo_index = TREE_OPERAND (index, 0);
6245 tree hi_index = TREE_OPERAND (index, 1);
6246 rtx index_r, pos_rtx;
6247 HOST_WIDE_INT lo, hi, count;
6248 tree position;
6249
6250 /* If the range is constant and "small", unroll the loop. */
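		  /* Here "small" means the target is not in memory, the
		     range spans at most two elements, or the data to be
		     stored totals at most 40 bytes; e.g. with 32-bit int,
		       int a[100] = { [0 ... 9] = -1 };
		     stores 10 * 4 = 40 bytes and is unrolled. */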
6251 if (const_bounds_p
6252 && tree_fits_shwi_p (lo_index)
6253 && tree_fits_shwi_p (hi_index)
6254 && (lo = tree_to_shwi (lo_index),
6255 hi = tree_to_shwi (hi_index),
6256 count = hi - lo + 1,
6257 (!MEM_P (target)
6258 || count <= 2
6259 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6260 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6261 <= 40 * 8)))))
6262 {
6263 lo -= minelt; hi -= minelt;
6264 for (; lo <= hi; lo++)
6265 {
6266 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6267
6268 if (MEM_P (target)
6269 && !MEM_KEEP_ALIAS_SET_P (target)
6270 && TREE_CODE (type) == ARRAY_TYPE
6271 && TYPE_NONALIASED_COMPONENT (type))
6272 {
6273 target = copy_rtx (target);
6274 MEM_KEEP_ALIAS_SET_P (target) = 1;
6275 }
6276
6277 store_constructor_field
6278 (target, bitsize, bitpos, mode, value, cleared,
6279 get_alias_set (elttype));
6280 }
6281 }
6282 else
6283 {
6284 rtx_code_label *loop_start = gen_label_rtx ();
6285 rtx_code_label *loop_end = gen_label_rtx ();
6286 tree exit_cond;
6287
6288 expand_normal (hi_index);
6289
6290 index = build_decl (EXPR_LOCATION (exp),
6291 VAR_DECL, NULL_TREE, domain);
6292 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6293 SET_DECL_RTL (index, index_r);
6294 store_expr (lo_index, index_r, 0, false);
6295
6296 /* Build the head of the loop. */
6297 do_pending_stack_adjust ();
6298 emit_label (loop_start);
6299
6300 /* Assign value to element index. */
6301 position =
6302 fold_convert (ssizetype,
6303 fold_build2 (MINUS_EXPR,
6304 TREE_TYPE (index),
6305 index,
6306 TYPE_MIN_VALUE (domain)));
6307
6308 position =
6309 size_binop (MULT_EXPR, position,
6310 fold_convert (ssizetype,
6311 TYPE_SIZE_UNIT (elttype)));
6312
6313 pos_rtx = expand_normal (position);
6314 xtarget = offset_address (target, pos_rtx,
6315 highest_pow2_factor (position));
6316 xtarget = adjust_address (xtarget, mode, 0);
6317 if (TREE_CODE (value) == CONSTRUCTOR)
6318 store_constructor (value, xtarget, cleared,
6319 bitsize / BITS_PER_UNIT);
6320 else
6321 store_expr (value, xtarget, 0, false);
6322
6323 /* Generate a conditional jump to exit the loop. */
6324 exit_cond = build2 (LT_EXPR, integer_type_node,
6325 index, hi_index);
6326 jumpif (exit_cond, loop_end, -1);
6327
6328 /* Update the loop counter, and jump to the head of
6329 the loop. */
6330 expand_assignment (index,
6331 build2 (PLUS_EXPR, TREE_TYPE (index),
6332 index, integer_one_node),
6333 false);
6334
6335 emit_jump (loop_start);
6336
6337 /* Build the end of the loop. */
6338 emit_label (loop_end);
6339 }
6340 }
6341 else if ((index != 0 && ! tree_fits_shwi_p (index))
6342 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6343 {
6344 tree position;
6345
6346 if (index == 0)
6347 index = ssize_int (1);
6348
6349 if (minelt)
6350 index = fold_convert (ssizetype,
6351 fold_build2 (MINUS_EXPR,
6352 TREE_TYPE (index),
6353 index,
6354 TYPE_MIN_VALUE (domain)));
6355
6356 position =
6357 size_binop (MULT_EXPR, index,
6358 fold_convert (ssizetype,
6359 TYPE_SIZE_UNIT (elttype)));
6360 xtarget = offset_address (target,
6361 expand_normal (position),
6362 highest_pow2_factor (position));
6363 xtarget = adjust_address (xtarget, mode, 0);
6364 store_expr (value, xtarget, 0, false);
6365 }
6366 else
6367 {
6368 if (index != 0)
6369 bitpos = ((tree_to_shwi (index) - minelt)
6370 * tree_to_uhwi (TYPE_SIZE (elttype)));
6371 else
6372 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6373
6374 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6375 && TREE_CODE (type) == ARRAY_TYPE
6376 && TYPE_NONALIASED_COMPONENT (type))
6377 {
6378 target = copy_rtx (target);
6379 MEM_KEEP_ALIAS_SET_P (target) = 1;
6380 }
6381 store_constructor_field (target, bitsize, bitpos, mode, value,
6382 cleared, get_alias_set (elttype));
6383 }
6384 }
6385 break;
6386 }
6387
6388 case VECTOR_TYPE:
6389 {
6390 unsigned HOST_WIDE_INT idx;
6391 constructor_elt *ce;
6392 int i;
6393 int need_to_clear;
6394 int icode = CODE_FOR_nothing;
6395 tree elttype = TREE_TYPE (type);
6396 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6397 machine_mode eltmode = TYPE_MODE (elttype);
6398 HOST_WIDE_INT bitsize;
6399 HOST_WIDE_INT bitpos;
6400 rtvec vector = NULL;
6401 unsigned n_elts;
6402 alias_set_type alias;
6403
6404 gcc_assert (eltmode != BLKmode);
6405
6406 n_elts = TYPE_VECTOR_SUBPARTS (type);
6407 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6408 {
6409 machine_mode mode = GET_MODE (target);
6410
6411 icode = (int) optab_handler (vec_init_optab, mode);
6412 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6413 if (icode != CODE_FOR_nothing)
6414 {
6415 tree value;
6416
6417 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6418 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6419 {
6420 icode = CODE_FOR_nothing;
6421 break;
6422 }
6423 }
6424 if (icode != CODE_FOR_nothing)
6425 {
6426 unsigned int i;
6427
6428 vector = rtvec_alloc (n_elts);
6429 for (i = 0; i < n_elts; i++)
6430 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6431 }
6432 }
6433
6434 /* If the constructor has fewer elements than the vector,
6435 clear the whole vector first. Similarly if this is a static
6436 constructor of a non-BLKmode object. */
6437 if (cleared)
6438 need_to_clear = 0;
6439 else if (REG_P (target) && TREE_STATIC (exp))
6440 need_to_clear = 1;
6441 else
6442 {
6443 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6444 tree value;
6445
6446 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6447 {
6448 int n_elts_here = tree_to_uhwi
6449 (int_const_binop (TRUNC_DIV_EXPR,
6450 TYPE_SIZE (TREE_TYPE (value)),
6451 TYPE_SIZE (elttype)));
6452
6453 count += n_elts_here;
6454 if (mostly_zeros_p (value))
6455 zero_count += n_elts_here;
6456 }
6457
6458 /* Clear the entire vector first if there are any missing elements,
6459 or if the incidence of zero elements is >= 75%. */
6460 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6461 }
6462
6463 if (need_to_clear && size > 0 && !vector)
6464 {
6465 if (REG_P (target))
6466 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6467 else
6468 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6469 cleared = 1;
6470 }
6471
6472 /* Inform later passes that the old value is dead. */
6473 if (!cleared && !vector && REG_P (target))
6474 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6475
6476 if (MEM_P (target))
6477 alias = MEM_ALIAS_SET (target);
6478 else
6479 alias = get_alias_set (elttype);
6480
6481 /* Store each element of the constructor into the corresponding
6482 element of TARGET, determined by counting the elements. */
6483 for (idx = 0, i = 0;
6484 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6485 idx++, i += bitsize / elt_size)
6486 {
6487 HOST_WIDE_INT eltpos;
6488 tree value = ce->value;
6489
6490 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6491 if (cleared && initializer_zerop (value))
6492 continue;
6493
6494 if (ce->index)
6495 eltpos = tree_to_uhwi (ce->index);
6496 else
6497 eltpos = i;
6498
6499 if (vector)
6500 {
6501 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6502 elements. */
6503 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6504 RTVEC_ELT (vector, eltpos)
6505 = expand_normal (value);
6506 }
6507 else
6508 {
6509 machine_mode value_mode =
6510 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6511 ? TYPE_MODE (TREE_TYPE (value))
6512 : eltmode;
6513 bitpos = eltpos * elt_size;
6514 store_constructor_field (target, bitsize, bitpos, value_mode,
6515 value, cleared, alias);
6516 }
6517 }
6518
6519 if (vector)
6520 emit_insn (GEN_FCN (icode)
6521 (target,
6522 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6523 break;
6524 }
6525
6526 default:
6527 gcc_unreachable ();
6528 }
6529 }
6530
6531 /* Store the value of EXP (an expression tree)
6532 into a subfield of TARGET which has mode MODE and occupies
6533 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6534 If MODE is VOIDmode, it means that we are storing into a bit-field.
6535
6536 BITREGION_START is bitpos of the first bitfield in this region.
6537 BITREGION_END is the bitpos of the ending bitfield in this region.
6538 These two fields are 0, if the C++ memory model does not apply,
6539 or we are not interested in keeping track of bitfield regions.
6540
6541 Always return const0_rtx unless we have something particular to
6542 return.
6543
6544 ALIAS_SET is the alias set for the destination. This value will
6545 (in general) be different from that for TARGET, since TARGET is a
6546 reference to the containing structure.
6547
6548 If NONTEMPORAL is true, try generating a nontemporal store. */
6549
6550 static rtx
6551 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6552 unsigned HOST_WIDE_INT bitregion_start,
6553 unsigned HOST_WIDE_INT bitregion_end,
6554 machine_mode mode, tree exp,
6555 alias_set_type alias_set, bool nontemporal)
6556 {
6557 if (TREE_CODE (exp) == ERROR_MARK)
6558 return const0_rtx;
6559
6560 /* If we have nothing to store, do nothing unless the expression has
6561 side-effects. */
6562 if (bitsize == 0)
6563 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6564
6565 if (GET_CODE (target) == CONCAT)
6566 {
6567 /* We're storing into a struct containing a single __complex. */
6568
6569 gcc_assert (!bitpos);
6570 return store_expr (exp, target, 0, nontemporal);
6571 }
6572
6573 /* If the structure is in a register or if the component
6574 is a bit field, we cannot use addressing to access it.
6575 Use bit-field techniques or SUBREG to store in it. */
6576
6577 if (mode == VOIDmode
6578 || (mode != BLKmode && ! direct_store[(int) mode]
6579 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6580 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6581 || REG_P (target)
6582 || GET_CODE (target) == SUBREG
6583 /* If the field isn't aligned enough to store as an ordinary memref,
6584 store it as a bit field. */
6585 || (mode != BLKmode
6586 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6587 || bitpos % GET_MODE_ALIGNMENT (mode))
6588 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6589 || (bitpos % BITS_PER_UNIT != 0)))
6590 || (bitsize >= 0 && mode != BLKmode
6591 && GET_MODE_BITSIZE (mode) > bitsize)
6592 /* If the RHS and field are a constant size and the size of the
6593 RHS isn't the same size as the bitfield, we must use bitfield
6594 operations. */
6595 || (bitsize >= 0
6596 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6597 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6598 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6599 decl we must use bitfield operations. */
6600 || (bitsize >= 0
6601 && TREE_CODE (exp) == MEM_REF
6602 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6603 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6604 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6605 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6606 {
6607 rtx temp;
6608 gimple nop_def;
6609
6610 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6611 implies a mask operation. If the precision is the same size as
6612 the field we're storing into, that mask is redundant. This is
6613 particularly common with bit field assignments generated by the
6614 C front end. */
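      /* E.g. (typical front-end output, shown only for illustration) when
	 storing into a 3-bit bit-field, EXP is a conversion of a wider
	 value to a 3-bit type; since the store below writes exactly 3 bits,
	 the masking implied by that conversion is redundant and the wider
	 operand can be stored directly. */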
6615 nop_def = get_def_for_expr (exp, NOP_EXPR);
6616 if (nop_def)
6617 {
6618 tree type = TREE_TYPE (exp);
6619 if (INTEGRAL_TYPE_P (type)
6620 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6621 && bitsize == TYPE_PRECISION (type))
6622 {
6623 tree op = gimple_assign_rhs1 (nop_def);
6624 type = TREE_TYPE (op);
6625 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6626 exp = op;
6627 }
6628 }
6629
6630 temp = expand_normal (exp);
6631
6632 /* If BITSIZE is narrower than the size of the type of EXP
6633 we will be narrowing TEMP. Normally, what's wanted are the
6634 low-order bits. However, if EXP's type is a record and this is
6635 a big-endian machine, we want the upper BITSIZE bits. */
6636 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6637 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6638 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6639 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6640 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6641 NULL_RTX, 1);
6642
6643 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6644 if (mode != VOIDmode && mode != BLKmode
6645 && mode != TYPE_MODE (TREE_TYPE (exp)))
6646 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6647
6648 /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6649 are both BLKmode, both must be in memory and BITPOS must be aligned
6650 on a byte boundary. If so, we simply do a block copy. Likewise for
6651 a BLKmode-like TARGET. */
6652 if (GET_CODE (temp) != PARALLEL
6653 && GET_MODE (temp) == BLKmode
6654 && (GET_MODE (target) == BLKmode
6655 || (MEM_P (target)
6656 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6657 && (bitpos % BITS_PER_UNIT) == 0
6658 && (bitsize % BITS_PER_UNIT) == 0)))
6659 {
6660 gcc_assert (MEM_P (target) && MEM_P (temp)
6661 && (bitpos % BITS_PER_UNIT) == 0);
6662
6663 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6664 emit_block_move (target, temp,
6665 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6666 / BITS_PER_UNIT),
6667 BLOCK_OP_NORMAL);
6668
6669 return const0_rtx;
6670 }
6671
6672 /* Handle calls that return values in multiple non-contiguous locations.
6673 The Irix 6 ABI has examples of this. */
6674 if (GET_CODE (temp) == PARALLEL)
6675 {
6676 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6677 rtx temp_target;
6678 if (mode == BLKmode || mode == VOIDmode)
6679 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6680 temp_target = gen_reg_rtx (mode);
6681 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6682 temp = temp_target;
6683 }
6684 else if (mode == BLKmode)
6685 {
6686 /* Handle calls that return BLKmode values in registers. */
6687 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6688 {
6689 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6690 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6691 temp = temp_target;
6692 }
6693 else
6694 {
6695 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6696 rtx temp_target;
6697 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6698 temp_target = gen_reg_rtx (mode);
6699 temp_target
6700 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6701 temp_target, mode, mode);
6702 temp = temp_target;
6703 }
6704 }
6705
6706 /* Store the value in the bitfield. */
6707 store_bit_field (target, bitsize, bitpos,
6708 bitregion_start, bitregion_end,
6709 mode, temp);
6710
6711 return const0_rtx;
6712 }
6713 else
6714 {
6715 /* Now build a reference to just the desired component. */
6716 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6717
6718 if (to_rtx == target)
6719 to_rtx = copy_rtx (to_rtx);
6720
6721 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6722 set_mem_alias_set (to_rtx, alias_set);
6723
6724 return store_expr (exp, to_rtx, 0, nontemporal);
6725 }
6726 }
6727 \f
6728 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6729 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6730 codes and find the ultimate containing object, which we return.
6731
6732 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6733 bit position, and *PUNSIGNEDP to the signedness of the field.
6734 If the position of the field is variable, we store a tree
6735 giving the variable offset (in units) in *POFFSET.
6736 This offset is in addition to the bit position.
6737 If the position is not variable, we store 0 in *POFFSET.
6738
6739 If any of the extraction expressions is volatile,
6740 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6741
6742 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6743 Otherwise, it is a mode that can be used to access the field.
6744
6745 If the field describes a variable-sized object, *PMODE is set to
6746 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6747 this case, but the address of the object can be found.
6748
6749 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6750 look through nodes that serve as markers of a greater alignment than
6751 the one that can be deduced from the expression. These nodes make it
6752 possible for front-ends to prevent temporaries from being created by
6753 the middle-end on alignment considerations. For that purpose, the
6754 normal operating mode at high-level is to always pass FALSE so that
6755 the ultimate containing object is really returned; moreover, the
6756 associated predicate handled_component_p will always return TRUE
6757 on these nodes, thus indicating that they are essentially handled
6758 by get_inner_reference. TRUE should only be passed when the caller
6759 is scanning the expression in order to build another representation
6760 and specifically knows how to handle these nodes; as such, this is
6761 the normal operating mode in the RTL expanders. */
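/* For example, for the C lvalue S.X.B, where B is a 3-bit bit-field that
   starts 36 bits into S, this returns the decl for S with *PBITSIZE == 3,
   *PBITPOS == 36, *POFFSET == 0 and *PMODE == VOIDmode (a non-BLKmode
   bit-field). */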
6762
6763 tree
6764 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6765 HOST_WIDE_INT *pbitpos, tree *poffset,
6766 machine_mode *pmode, int *punsignedp,
6767 int *pvolatilep, bool keep_aligning)
6768 {
6769 tree size_tree = 0;
6770 machine_mode mode = VOIDmode;
6771 bool blkmode_bitfield = false;
6772 tree offset = size_zero_node;
6773 offset_int bit_offset = 0;
6774
6775 /* First get the mode, signedness, and size. We do this from just the
6776 outermost expression. */
6777 *pbitsize = -1;
6778 if (TREE_CODE (exp) == COMPONENT_REF)
6779 {
6780 tree field = TREE_OPERAND (exp, 1);
6781 size_tree = DECL_SIZE (field);
6782 if (flag_strict_volatile_bitfields > 0
6783 && TREE_THIS_VOLATILE (exp)
6784 && DECL_BIT_FIELD_TYPE (field)
6785 && DECL_MODE (field) != BLKmode)
6786 /* Volatile bitfields should be accessed in the mode of the
6787 field's type, not the mode computed based on the bit
6788 size. */
6789 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6790 else if (!DECL_BIT_FIELD (field))
6791 mode = DECL_MODE (field);
6792 else if (DECL_MODE (field) == BLKmode)
6793 blkmode_bitfield = true;
6794
6795 *punsignedp = DECL_UNSIGNED (field);
6796 }
6797 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6798 {
6799 size_tree = TREE_OPERAND (exp, 1);
6800 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6801 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6802
6803 /* For vector types, with the correct size of access, use the mode of
6804 the inner type. */
6805 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6806 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6807 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6808 mode = TYPE_MODE (TREE_TYPE (exp));
6809 }
6810 else
6811 {
6812 mode = TYPE_MODE (TREE_TYPE (exp));
6813 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6814
6815 if (mode == BLKmode)
6816 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6817 else
6818 *pbitsize = GET_MODE_BITSIZE (mode);
6819 }
6820
6821 if (size_tree != 0)
6822 {
6823 if (! tree_fits_uhwi_p (size_tree))
6824 mode = BLKmode, *pbitsize = -1;
6825 else
6826 *pbitsize = tree_to_uhwi (size_tree);
6827 }
6828
6829 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6830 and find the ultimate containing object. */
6831 while (1)
6832 {
6833 switch (TREE_CODE (exp))
6834 {
6835 case BIT_FIELD_REF:
6836 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6837 break;
6838
6839 case COMPONENT_REF:
6840 {
6841 tree field = TREE_OPERAND (exp, 1);
6842 tree this_offset = component_ref_field_offset (exp);
6843
6844 /* If this field hasn't been filled in yet, don't go past it.
6845 This should only happen when folding expressions made during
6846 type construction. */
6847 if (this_offset == 0)
6848 break;
6849
6850 offset = size_binop (PLUS_EXPR, offset, this_offset);
6851 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6852
6853 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6854 }
6855 break;
6856
6857 case ARRAY_REF:
6858 case ARRAY_RANGE_REF:
6859 {
6860 tree index = TREE_OPERAND (exp, 1);
6861 tree low_bound = array_ref_low_bound (exp);
6862 tree unit_size = array_ref_element_size (exp);
6863
6864 /* We assume all arrays have sizes that are a multiple of a byte.
6865 First subtract the lower bound, if any, in the type of the
6866 index, then convert to sizetype and multiply by the size of
6867 the array element. */
6868 if (! integer_zerop (low_bound))
6869 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6870 index, low_bound);
6871
6872 offset = size_binop (PLUS_EXPR, offset,
6873 size_binop (MULT_EXPR,
6874 fold_convert (sizetype, index),
6875 unit_size));
6876 }
6877 break;
6878
6879 case REALPART_EXPR:
6880 break;
6881
6882 case IMAGPART_EXPR:
6883 bit_offset += *pbitsize;
6884 break;
6885
6886 case VIEW_CONVERT_EXPR:
6887 if (keep_aligning && STRICT_ALIGNMENT
6888 && (TYPE_ALIGN (TREE_TYPE (exp))
6889 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6890 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6891 < BIGGEST_ALIGNMENT)
6892 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6893 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6894 goto done;
6895 break;
6896
6897 case MEM_REF:
6898 /* Hand back the decl for MEM[&decl, off]. */
6899 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6900 {
6901 tree off = TREE_OPERAND (exp, 1);
6902 if (!integer_zerop (off))
6903 {
6904 offset_int boff, coff = mem_ref_offset (exp);
6905 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6906 bit_offset += boff;
6907 }
6908 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6909 }
6910 goto done;
6911
6912 default:
6913 goto done;
6914 }
6915
6916 /* If any reference in the chain is volatile, the effect is volatile. */
6917 if (TREE_THIS_VOLATILE (exp))
6918 *pvolatilep = 1;
6919
6920 exp = TREE_OPERAND (exp, 0);
6921 }
6922 done:
6923
6924 /* If OFFSET is constant, see if we can return the whole thing as a
6925 constant bit position. Make sure to handle overflow during
6926 this conversion. */
6927 if (TREE_CODE (offset) == INTEGER_CST)
6928 {
6929 offset_int tem = wi::sext (wi::to_offset (offset),
6930 TYPE_PRECISION (sizetype));
6931 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6932 tem += bit_offset;
6933 if (wi::fits_shwi_p (tem))
6934 {
6935 *pbitpos = tem.to_shwi ();
6936 *poffset = offset = NULL_TREE;
6937 }
6938 }
6939
6940 /* Otherwise, split it up. */
6941 if (offset)
6942 {
6943 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6944 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
6945 {
6946 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6947 offset_int tem = bit_offset.and_not (mask);
6948 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6949 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6950 bit_offset -= tem;
6951 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6952 offset = size_binop (PLUS_EXPR, offset,
6953 wide_int_to_tree (sizetype, tem));
6954 }
6955
6956 *pbitpos = bit_offset.to_shwi ();
6957 *poffset = offset;
6958 }
6959
6960 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6961 if (mode == VOIDmode
6962 && blkmode_bitfield
6963 && (*pbitpos % BITS_PER_UNIT) == 0
6964 && (*pbitsize % BITS_PER_UNIT) == 0)
6965 *pmode = BLKmode;
6966 else
6967 *pmode = mode;
6968
6969 return exp;
6970 }
6971
6972 /* Return a tree of sizetype representing the size, in bytes, of the element
6973 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6974
6975 tree
6976 array_ref_element_size (tree exp)
6977 {
6978 tree aligned_size = TREE_OPERAND (exp, 3);
6979 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6980 location_t loc = EXPR_LOCATION (exp);
6981
6982 /* If a size was specified in the ARRAY_REF, it's the size measured
6983 in alignment units of the element type. So multiply by that value. */
6984 if (aligned_size)
6985 {
6986 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6987 sizetype from another type of the same width and signedness. */
6988 if (TREE_TYPE (aligned_size) != sizetype)
6989 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6990 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6991 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6992 }
6993
6994 /* Otherwise, take the size from that of the element type. Substitute
6995 any PLACEHOLDER_EXPR that we have. */
6996 else
6997 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6998 }
6999
7000 /* Return a tree representing the lower bound of the array mentioned in
7001 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
7002
7003 tree
7004 array_ref_low_bound (tree exp)
7005 {
7006 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7007
7008 /* If a lower bound is specified in EXP, use it. */
7009 if (TREE_OPERAND (exp, 2))
7010 return TREE_OPERAND (exp, 2);
7011
7012 /* Otherwise, if there is a domain type and it has a lower bound, use it,
7013 substituting for a PLACEHOLDER_EXPR as needed. */
7014 if (domain_type && TYPE_MIN_VALUE (domain_type))
7015 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
7016
7017 /* Otherwise, return a zero of the appropriate type. */
7018 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
7019 }
7020
7021 /* Returns true if REF is an array reference to an array at the end of
7022 a structure. If this is the case, the array may be allocated larger
7023 than its upper bound implies. */
7024
7025 bool
7026 array_at_struct_end_p (tree ref)
7027 {
7028 if (TREE_CODE (ref) != ARRAY_REF
7029 && TREE_CODE (ref) != ARRAY_RANGE_REF)
7030 return false;
7031
7032 while (handled_component_p (ref))
7033 {
7034 /* If the reference chain contains a component reference to a
7035 non-union type and there follows another field, the reference
7036 is not at the end of a structure. */
7037 if (TREE_CODE (ref) == COMPONENT_REF
7038 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
7039 {
7040 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
7041 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
7042 nextf = DECL_CHAIN (nextf);
7043 if (nextf)
7044 return false;
7045 }
7046
7047 ref = TREE_OPERAND (ref, 0);
7048 }
7049
7050 /* If the reference is based on a declared entity, the size of the array
7051 is constrained by its given domain. */
7052 if (DECL_P (ref))
7053 return false;
7054
7055 return true;
7056 }
7057
7058 /* Return a tree representing the upper bound of the array mentioned in
7059 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
7060
7061 tree
7062 array_ref_up_bound (tree exp)
7063 {
7064 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7065
7066 /* If there is a domain type and it has an upper bound, use it, substituting
7067 for a PLACEHOLDER_EXPR as needed. */
7068 if (domain_type && TYPE_MAX_VALUE (domain_type))
7069 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
7070
7071 /* Otherwise fail. */
7072 return NULL_TREE;
7073 }
7074
7075 /* Return a tree representing the offset, in bytes, of the field referenced
7076 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
7077
7078 tree
7079 component_ref_field_offset (tree exp)
7080 {
7081 tree aligned_offset = TREE_OPERAND (exp, 2);
7082 tree field = TREE_OPERAND (exp, 1);
7083 location_t loc = EXPR_LOCATION (exp);
7084
7085 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
7086 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
7087 value. */
7088 if (aligned_offset)
7089 {
7090 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
7091 sizetype from another type of the same width and signedness. */
7092 if (TREE_TYPE (aligned_offset) != sizetype)
7093 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
7094 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7095 size_int (DECL_OFFSET_ALIGN (field)
7096 / BITS_PER_UNIT));
7097 }
7098
7099 /* Otherwise, take the offset from that of the field. Substitute
7100 any PLACEHOLDER_EXPR that we have. */
7101 else
7102 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7103 }
7104
7105 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7106
7107 static unsigned HOST_WIDE_INT
7108 target_align (const_tree target)
7109 {
7110 /* We might have a chain of nested references with intermediate misaligning
7111 bitfield components, so we need to recurse to find out. */
7112
7113 unsigned HOST_WIDE_INT this_align, outer_align;
7114
7115 switch (TREE_CODE (target))
7116 {
7117 case BIT_FIELD_REF:
7118 return 1;
7119
7120 case COMPONENT_REF:
7121 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7122 outer_align = target_align (TREE_OPERAND (target, 0));
7123 return MIN (this_align, outer_align);
7124
7125 case ARRAY_REF:
7126 case ARRAY_RANGE_REF:
7127 this_align = TYPE_ALIGN (TREE_TYPE (target));
7128 outer_align = target_align (TREE_OPERAND (target, 0));
7129 return MIN (this_align, outer_align);
7130
7131 CASE_CONVERT:
7132 case NON_LVALUE_EXPR:
7133 case VIEW_CONVERT_EXPR:
7134 this_align = TYPE_ALIGN (TREE_TYPE (target));
7135 outer_align = target_align (TREE_OPERAND (target, 0));
7136 return MAX (this_align, outer_align);
7137
7138 default:
7139 return TYPE_ALIGN (TREE_TYPE (target));
7140 }
7141 }
7142
7143 \f
7144 /* Given an rtx VALUE that may contain additions and multiplications, return
7145 an equivalent value that just refers to a register, memory, or constant.
7146 This is done by generating instructions to perform the arithmetic and
7147 returning a pseudo-register containing the value.
7148
7149 The returned value may be a REG, SUBREG, MEM or constant. */
7150
7151 rtx
7152 force_operand (rtx value, rtx target)
7153 {
7154 rtx op1, op2;
7155 /* Use subtarget as the target for operand 0 of a binary operation. */
7156 rtx subtarget = get_subtarget (target);
7157 enum rtx_code code = GET_CODE (value);
7158
7159 /* Check for subreg applied to an expression produced by loop optimizer. */
7160 if (code == SUBREG
7161 && !REG_P (SUBREG_REG (value))
7162 && !MEM_P (SUBREG_REG (value)))
7163 {
7164 value
7165 = simplify_gen_subreg (GET_MODE (value),
7166 force_reg (GET_MODE (SUBREG_REG (value)),
7167 force_operand (SUBREG_REG (value),
7168 NULL_RTX)),
7169 GET_MODE (SUBREG_REG (value)),
7170 SUBREG_BYTE (value));
7171 code = GET_CODE (value);
7172 }
7173
7174 /* Check for a PIC address load. */
7175 if ((code == PLUS || code == MINUS)
7176 && XEXP (value, 0) == pic_offset_table_rtx
7177 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7178 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7179 || GET_CODE (XEXP (value, 1)) == CONST))
7180 {
7181 if (!subtarget)
7182 subtarget = gen_reg_rtx (GET_MODE (value));
7183 emit_move_insn (subtarget, value);
7184 return subtarget;
7185 }
7186
7187 if (ARITHMETIC_P (value))
7188 {
7189 op2 = XEXP (value, 1);
7190 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7191 subtarget = 0;
7192 if (code == MINUS && CONST_INT_P (op2))
7193 {
7194 code = PLUS;
7195 op2 = negate_rtx (GET_MODE (value), op2);
7196 }
7197
7198 /* Check for an addition with OP2 a constant integer and our first
7199 operand a PLUS of a virtual register and something else. In that
7200 case, we want to emit the sum of the virtual register and the
7201 constant first and then add the other value. This allows virtual
7202 register instantiation to simply modify the constant rather than
7203 creating another one around this addition. */
7204 if (code == PLUS && CONST_INT_P (op2)
7205 && GET_CODE (XEXP (value, 0)) == PLUS
7206 && REG_P (XEXP (XEXP (value, 0), 0))
7207 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7208 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7209 {
7210 rtx temp = expand_simple_binop (GET_MODE (value), code,
7211 XEXP (XEXP (value, 0), 0), op2,
7212 subtarget, 0, OPTAB_LIB_WIDEN);
7213 return expand_simple_binop (GET_MODE (value), code, temp,
7214 force_operand (XEXP (XEXP (value,
7215 0), 1), 0),
7216 target, 0, OPTAB_LIB_WIDEN);
7217 }
7218
7219 op1 = force_operand (XEXP (value, 0), subtarget);
7220 op2 = force_operand (op2, NULL_RTX);
7221 switch (code)
7222 {
7223 case MULT:
7224 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7225 case DIV:
7226 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7227 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7228 target, 1, OPTAB_LIB_WIDEN);
7229 else
7230 return expand_divmod (0,
7231 FLOAT_MODE_P (GET_MODE (value))
7232 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7233 GET_MODE (value), op1, op2, target, 0);
7234 case MOD:
7235 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7236 target, 0);
7237 case UDIV:
7238 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7239 target, 1);
7240 case UMOD:
7241 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7242 target, 1);
7243 case ASHIFTRT:
7244 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7245 target, 0, OPTAB_LIB_WIDEN);
7246 default:
7247 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7248 target, 1, OPTAB_LIB_WIDEN);
7249 }
7250 }
7251 if (UNARY_P (value))
7252 {
7253 if (!target)
7254 target = gen_reg_rtx (GET_MODE (value));
7255 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7256 switch (code)
7257 {
7258 case ZERO_EXTEND:
7259 case SIGN_EXTEND:
7260 case TRUNCATE:
7261 case FLOAT_EXTEND:
7262 case FLOAT_TRUNCATE:
7263 convert_move (target, op1, code == ZERO_EXTEND);
7264 return target;
7265
7266 case FIX:
7267 case UNSIGNED_FIX:
7268 expand_fix (target, op1, code == UNSIGNED_FIX);
7269 return target;
7270
7271 case FLOAT:
7272 case UNSIGNED_FLOAT:
7273 expand_float (target, op1, code == UNSIGNED_FLOAT);
7274 return target;
7275
7276 default:
7277 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7278 }
7279 }
7280
7281 #ifdef INSN_SCHEDULING
7282 /* On machines that have insn scheduling, we want all memory references to be
7283 explicit, so we need to deal with such paradoxical SUBREGs. */
7284 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7285 value
7286 = simplify_gen_subreg (GET_MODE (value),
7287 force_reg (GET_MODE (SUBREG_REG (value)),
7288 force_operand (SUBREG_REG (value),
7289 NULL_RTX)),
7290 GET_MODE (SUBREG_REG (value)),
7291 SUBREG_BYTE (value));
7292 #endif
7293
7294 return value;
7295 }
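/* Illustrative sketch, not used by the compiler: a caller that has built
   an address as (plus BASE (const_int 4)) can use force_operand to emit
   the addition and obtain something directly usable (a REG, SUBREG, MEM
   or constant).  The helper name is hypothetical.  */

static rtx ATTRIBUTE_UNUSED
example_force_address (rtx base)
{
  rtx sum = gen_rtx_PLUS (Pmode, base, GEN_INT (4));
  /* Emits any needed add insn and returns the result, typically a
     pseudo register.  */
  return force_operand (sum, NULL_RTX);
}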
7296 \f
7297 /* Subroutine of expand_expr: return nonzero iff there is no way that
7298 EXP can reference X, which is being modified. TOP_P is nonzero if this
7299 call is going to be used to determine whether we need a temporary
7300 for EXP, as opposed to a recursive call to this function.
7301
7302 It is always safe for this routine to return zero since it merely
7303 searches for optimization opportunities. */
7304
7305 int
7306 safe_from_p (const_rtx x, tree exp, int top_p)
7307 {
7308 rtx exp_rtl = 0;
7309 int i, nops;
7310
7311 if (x == 0
7312 /* If EXP has varying size, we MUST use a target since we currently
7313 have no way of allocating temporaries of variable size
7314 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7315 So we assume here that something at a higher level has prevented a
7316 clash. This is somewhat bogus, but the best we can do. Only
7317 do this when X is BLKmode and when we are at the top level. */
7318 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7319 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7320 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7321 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7322 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7323 != INTEGER_CST)
7324 && GET_MODE (x) == BLKmode)
7325 /* If X is in the outgoing argument area, it is always safe. */
7326 || (MEM_P (x)
7327 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7328 || (GET_CODE (XEXP (x, 0)) == PLUS
7329 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7330 return 1;
7331
7332 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7333 find the underlying pseudo. */
7334 if (GET_CODE (x) == SUBREG)
7335 {
7336 x = SUBREG_REG (x);
7337 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7338 return 0;
7339 }
7340
7341 /* Now look at our tree code and possibly recurse. */
7342 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7343 {
7344 case tcc_declaration:
7345 exp_rtl = DECL_RTL_IF_SET (exp);
7346 break;
7347
7348 case tcc_constant:
7349 return 1;
7350
7351 case tcc_exceptional:
7352 if (TREE_CODE (exp) == TREE_LIST)
7353 {
7354 while (1)
7355 {
7356 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7357 return 0;
7358 exp = TREE_CHAIN (exp);
7359 if (!exp)
7360 return 1;
7361 if (TREE_CODE (exp) != TREE_LIST)
7362 return safe_from_p (x, exp, 0);
7363 }
7364 }
7365 else if (TREE_CODE (exp) == CONSTRUCTOR)
7366 {
7367 constructor_elt *ce;
7368 unsigned HOST_WIDE_INT idx;
7369
7370 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7371 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7372 || !safe_from_p (x, ce->value, 0))
7373 return 0;
7374 return 1;
7375 }
7376 else if (TREE_CODE (exp) == ERROR_MARK)
7377 return 1; /* An already-visited SAVE_EXPR? */
7378 else
7379 return 0;
7380
7381 case tcc_statement:
7382 /* The only case we look at here is the DECL_INITIAL inside a
7383 DECL_EXPR. */
7384 return (TREE_CODE (exp) != DECL_EXPR
7385 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7386 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7387 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7388
7389 case tcc_binary:
7390 case tcc_comparison:
7391 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7392 return 0;
7393 /* Fall through. */
7394
7395 case tcc_unary:
7396 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7397
7398 case tcc_expression:
7399 case tcc_reference:
7400 case tcc_vl_exp:
7401 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7402 the expression. If it is set, we conflict iff we are that rtx or
7403 both are in memory. Otherwise, we check all operands of the
7404 expression recursively. */
7405
7406 switch (TREE_CODE (exp))
7407 {
7408 case ADDR_EXPR:
7409 /* If the operand is static or we are static, we can't conflict.
7410 Likewise if we don't conflict with the operand at all. */
7411 if (staticp (TREE_OPERAND (exp, 0))
7412 || TREE_STATIC (exp)
7413 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7414 return 1;
7415
7416 /* Otherwise, the only way this can conflict is if we are taking
7417 the address of a DECL and that address is part of X, which is
7418 very rare. */
7419 exp = TREE_OPERAND (exp, 0);
7420 if (DECL_P (exp))
7421 {
7422 if (!DECL_RTL_SET_P (exp)
7423 || !MEM_P (DECL_RTL (exp)))
7424 return 0;
7425 else
7426 exp_rtl = XEXP (DECL_RTL (exp), 0);
7427 }
7428 break;
7429
7430 case MEM_REF:
7431 if (MEM_P (x)
7432 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7433 get_alias_set (exp)))
7434 return 0;
7435 break;
7436
7437 case CALL_EXPR:
7438 /* Assume that the call will clobber all hard registers and
7439 all of memory. */
7440 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7441 || MEM_P (x))
7442 return 0;
7443 break;
7444
7445 case WITH_CLEANUP_EXPR:
7446 case CLEANUP_POINT_EXPR:
7447 /* Lowered by gimplify.c. */
7448 gcc_unreachable ();
7449
7450 case SAVE_EXPR:
7451 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7452
7453 default:
7454 break;
7455 }
7456
7457 /* If we have an rtx, we do not need to scan our operands. */
7458 if (exp_rtl)
7459 break;
7460
7461 nops = TREE_OPERAND_LENGTH (exp);
7462 for (i = 0; i < nops; i++)
7463 if (TREE_OPERAND (exp, i) != 0
7464 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7465 return 0;
7466
7467 break;
7468
7469 case tcc_type:
7470 /* Should never get a type here. */
7471 gcc_unreachable ();
7472 }
7473
7474 /* If we have an rtl, find any enclosed object. Then see if we conflict
7475 with it. */
7476 if (exp_rtl)
7477 {
7478 if (GET_CODE (exp_rtl) == SUBREG)
7479 {
7480 exp_rtl = SUBREG_REG (exp_rtl);
7481 if (REG_P (exp_rtl)
7482 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7483 return 0;
7484 }
7485
7486 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7487 are memory and they conflict. */
7488 return ! (rtx_equal_p (x, exp_rtl)
7489 || (MEM_P (x) && MEM_P (exp_rtl)
7490 && true_dependence (exp_rtl, VOIDmode, x)));
7491 }
7492
7493 /* If we reach here, it is safe. */
7494 return 1;
7495 }
7496
7497 \f
7498 /* Return the highest power of two that EXP is known to be a multiple of.
7499 This is used in updating alignment of MEMs in array references. */
7500
7501 unsigned HOST_WIDE_INT
7502 highest_pow2_factor (const_tree exp)
7503 {
7504 unsigned HOST_WIDE_INT ret;
7505 int trailing_zeros = tree_ctz (exp);
7506 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7507 return BIGGEST_ALIGNMENT;
7508 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7509 if (ret > BIGGEST_ALIGNMENT)
7510 return BIGGEST_ALIGNMENT;
7511 return ret;
7512 }
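/* Illustrative sketch, not used by the compiler: for the constant 24
   (binary 11000), tree_ctz reports 3 trailing zero bits, so the result
   is 1 << 3 = 8; anything larger than BIGGEST_ALIGNMENT is capped.
   The helper name is hypothetical.  */

static unsigned HOST_WIDE_INT ATTRIBUTE_UNUSED
example_highest_pow2_factor (void)
{
  tree cst = build_int_cst (sizetype, 24);
  return highest_pow2_factor (cst);	/* Yields 8.  */
}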
7513
7514 /* Similar, except that the alignment requirements of TARGET are
7515 taken into account. Assume it is at least as aligned as its
7516 type, unless it is a COMPONENT_REF in which case the layout of
7517 the structure gives the alignment. */
7518
7519 static unsigned HOST_WIDE_INT
7520 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7521 {
7522 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7523 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7524
7525 return MAX (factor, talign);
7526 }
7527 \f
7528 #ifdef HAVE_conditional_move
7529 /* Convert the tree comparison code TCODE to the rtl one where the
7530 signedness is UNSIGNEDP. */
7531
7532 static enum rtx_code
7533 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7534 {
7535 enum rtx_code code;
7536 switch (tcode)
7537 {
7538 case EQ_EXPR:
7539 code = EQ;
7540 break;
7541 case NE_EXPR:
7542 code = NE;
7543 break;
7544 case LT_EXPR:
7545 code = unsignedp ? LTU : LT;
7546 break;
7547 case LE_EXPR:
7548 code = unsignedp ? LEU : LE;
7549 break;
7550 case GT_EXPR:
7551 code = unsignedp ? GTU : GT;
7552 break;
7553 case GE_EXPR:
7554 code = unsignedp ? GEU : GE;
7555 break;
7556 case UNORDERED_EXPR:
7557 code = UNORDERED;
7558 break;
7559 case ORDERED_EXPR:
7560 code = ORDERED;
7561 break;
7562 case UNLT_EXPR:
7563 code = UNLT;
7564 break;
7565 case UNLE_EXPR:
7566 code = UNLE;
7567 break;
7568 case UNGT_EXPR:
7569 code = UNGT;
7570 break;
7571 case UNGE_EXPR:
7572 code = UNGE;
7573 break;
7574 case UNEQ_EXPR:
7575 code = UNEQ;
7576 break;
7577 case LTGT_EXPR:
7578 code = LTGT;
7579 break;
7580
7581 default:
7582 gcc_unreachable ();
7583 }
7584 return code;
7585 }
7586 #endif
7587
7588 /* Subroutine of expand_expr. Expand the two operands of a binary
7589 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7590 The value may be stored in TARGET if TARGET is nonzero. The
7591 MODIFIER argument is as documented by expand_expr. */
7592
7593 void
7594 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7595 enum expand_modifier modifier)
7596 {
7597 if (! safe_from_p (target, exp1, 1))
7598 target = 0;
7599 if (operand_equal_p (exp0, exp1, 0))
7600 {
7601 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7602 *op1 = copy_rtx (*op0);
7603 }
7604 else
7605 {
7606 /* If we need to preserve evaluation order, copy exp0 into its own
7607 temporary variable so that it can't be clobbered by exp1. */
7608 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7609 exp0 = save_expr (exp0);
7610 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7611 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7612 }
7613 }
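/* Illustrative sketch, not used by the compiler: a typical caller expands
   both operands with expand_operands and then combines them, e.g. with
   expand_simple_binop; the safe_from_p check above drops TARGET when
   evaluating the second operand could clobber it.  The helper name is
   hypothetical.  */

static rtx ATTRIBUTE_UNUSED
example_expand_binary_sum (tree exp0, tree exp1, rtx target,
			   machine_mode mode)
{
  rtx op0, op1;
  expand_operands (exp0, exp1, target, &op0, &op1, EXPAND_NORMAL);
  return expand_simple_binop (mode, PLUS, op0, op1, target,
			      /*unsignedp=*/0, OPTAB_LIB_WIDEN);
}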
7614
7615 \f
7616 /* Return a MEM that contains constant EXP. DEFER is as for
7617 output_constant_def and MODIFIER is as for expand_expr. */
7618
7619 static rtx
7620 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7621 {
7622 rtx mem;
7623
7624 mem = output_constant_def (exp, defer);
7625 if (modifier != EXPAND_INITIALIZER)
7626 mem = use_anchored_address (mem);
7627 return mem;
7628 }
7629
7630 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7631 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7632
7633 static rtx
7634 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7635 enum expand_modifier modifier, addr_space_t as)
7636 {
7637 rtx result, subtarget;
7638 tree inner, offset;
7639 HOST_WIDE_INT bitsize, bitpos;
7640 int volatilep, unsignedp;
7641 machine_mode mode1;
7642
7643 /* If we are taking the address of a constant and are at the top level,
7644 we have to use output_constant_def since we can't call force_const_mem
7645 at top level. */
7646 /* ??? This should be considered a front-end bug. We should not be
7647 generating ADDR_EXPR of something that isn't an LVALUE. The only
7648 exception here is STRING_CST. */
7649 if (CONSTANT_CLASS_P (exp))
7650 {
7651 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7652 if (modifier < EXPAND_SUM)
7653 result = force_operand (result, target);
7654 return result;
7655 }
7656
7657 /* Everything must be something allowed by is_gimple_addressable. */
7658 switch (TREE_CODE (exp))
7659 {
7660 case INDIRECT_REF:
7661 /* This case will happen via recursion for &a->b. */
7662 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7663
7664 case MEM_REF:
7665 {
7666 tree tem = TREE_OPERAND (exp, 0);
7667 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7668 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7669 return expand_expr (tem, target, tmode, modifier);
7670 }
7671
7672 case CONST_DECL:
7673 /* Expand the initializer like constants above. */
7674 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7675 0, modifier), 0);
7676 if (modifier < EXPAND_SUM)
7677 result = force_operand (result, target);
7678 return result;
7679
7680 case REALPART_EXPR:
7681 /* The real part of the complex number is always first, therefore
7682 the address is the same as the address of the parent object. */
7683 offset = 0;
7684 bitpos = 0;
7685 inner = TREE_OPERAND (exp, 0);
7686 break;
7687
7688 case IMAGPART_EXPR:
7689 /* The imaginary part of the complex number is always second.
7690 The expression is therefore always offset by the size of the
7691 scalar type. */
7692 offset = 0;
7693 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7694 inner = TREE_OPERAND (exp, 0);
7695 break;
7696
7697 case COMPOUND_LITERAL_EXPR:
7698 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7699 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7700 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7701 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7702 the initializers aren't gimplified. */
7703 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7704 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7705 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7706 target, tmode, modifier, as);
7707 /* FALLTHRU */
7708 default:
7709 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7710 expand_expr, as that can have various side effects; LABEL_DECLs for
7711 example, may not have their DECL_RTL set yet. Expand the rtl of
7712 CONSTRUCTORs too, which should yield a memory reference for the
7713 constructor's contents. Assume language specific tree nodes can
7714 be expanded in some interesting way. */
7715 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7716 if (DECL_P (exp)
7717 || TREE_CODE (exp) == CONSTRUCTOR
7718 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7719 {
7720 result = expand_expr (exp, target, tmode,
7721 modifier == EXPAND_INITIALIZER
7722 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7723
7724 /* If the DECL isn't in memory, then the DECL wasn't properly
7725 marked TREE_ADDRESSABLE, which will be either a front-end
7726 or a tree optimizer bug. */
7727
7728 if (TREE_ADDRESSABLE (exp)
7729 && ! MEM_P (result)
7730 && ! targetm.calls.allocate_stack_slots_for_args ())
7731 {
7732 error ("local frame unavailable (naked function?)");
7733 return result;
7734 }
7735 else
7736 gcc_assert (MEM_P (result));
7737 result = XEXP (result, 0);
7738
7739 /* ??? Is this needed anymore? */
7740 if (DECL_P (exp))
7741 TREE_USED (exp) = 1;
7742
7743 if (modifier != EXPAND_INITIALIZER
7744 && modifier != EXPAND_CONST_ADDRESS
7745 && modifier != EXPAND_SUM)
7746 result = force_operand (result, target);
7747 return result;
7748 }
7749
7750 /* Pass FALSE as the last argument to get_inner_reference although
7751 we are expanding to RTL. The rationale is that we know how to
7752 handle "aligning nodes" here: we can just bypass them because
7753 they won't change the final object whose address will be returned
7754 (they actually exist only for that purpose). */
7755 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7756 &mode1, &unsignedp, &volatilep, false);
7757 break;
7758 }
7759
7760 /* We must have made progress. */
7761 gcc_assert (inner != exp);
7762
7763 subtarget = offset || bitpos ? NULL_RTX : target;
7764 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7765 inner alignment, force the inner to be sufficiently aligned. */
7766 if (CONSTANT_CLASS_P (inner)
7767 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7768 {
7769 inner = copy_node (inner);
7770 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7771 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7772 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7773 }
7774 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7775
7776 if (offset)
7777 {
7778 rtx tmp;
7779
7780 if (modifier != EXPAND_NORMAL)
7781 result = force_operand (result, NULL);
7782 tmp = expand_expr (offset, NULL_RTX, tmode,
7783 modifier == EXPAND_INITIALIZER
7784 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7785
7786 /* expand_expr is allowed to return an object in a mode other
7787 than TMODE. If it did, we need to convert. */
7788 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7789 tmp = convert_modes (tmode, GET_MODE (tmp),
7790 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7791 result = convert_memory_address_addr_space (tmode, result, as);
7792 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7793
7794 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7795 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7796 else
7797 {
7798 subtarget = bitpos ? NULL_RTX : target;
7799 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7800 1, OPTAB_LIB_WIDEN);
7801 }
7802 }
7803
7804 if (bitpos)
7805 {
7806 /* Someone beforehand should have rejected taking the address
7807 of such an object. */
7808 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7809
7810 result = convert_memory_address_addr_space (tmode, result, as);
7811 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7812 if (modifier < EXPAND_SUM)
7813 result = force_operand (result, target);
7814 }
7815
7816 return result;
7817 }
7818
7819 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7820 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7821
7822 static rtx
7823 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7824 enum expand_modifier modifier)
7825 {
7826 addr_space_t as = ADDR_SPACE_GENERIC;
7827 machine_mode address_mode = Pmode;
7828 machine_mode pointer_mode = ptr_mode;
7829 machine_mode rmode;
7830 rtx result;
7831
7832 /* Target mode of VOIDmode says "whatever's natural". */
7833 if (tmode == VOIDmode)
7834 tmode = TYPE_MODE (TREE_TYPE (exp));
7835
7836 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7837 {
7838 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7839 address_mode = targetm.addr_space.address_mode (as);
7840 pointer_mode = targetm.addr_space.pointer_mode (as);
7841 }
7842
7843 /* We can get called with some Weird Things if the user does silliness
7844 like "(short) &a". In that case, convert_memory_address won't do
7845 the right thing, so ignore the given target mode. */
7846 if (tmode != address_mode && tmode != pointer_mode)
7847 tmode = address_mode;
7848
7849 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7850 tmode, modifier, as);
7851
7852 /* Despite expand_expr's claims about ignoring TMODE when not
7853 strictly convenient, stuff breaks if we don't honor it. Note
7854 that combined with the above, we only do this for pointer modes. */
7855 rmode = GET_MODE (result);
7856 if (rmode == VOIDmode)
7857 rmode = tmode;
7858 if (rmode != tmode)
7859 result = convert_memory_address_addr_space (tmode, result, as);
7860
7861 return result;
7862 }
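/* Illustrative sketch, not used by the compiler: taking the address of an
   addressable DECL goes through the ADDR_EXPR path above and yields an
   rtx in the pointer mode of the DECL's address space (normally
   ptr_mode).  The helper name is hypothetical.  */

static rtx ATTRIBUTE_UNUSED
example_expand_address_of (tree decl)
{
  tree addr = build_fold_addr_expr (decl);
  return expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_NORMAL);
}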
7863
7864 /* Generate code for computing CONSTRUCTOR EXP.
7865 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7866 is TRUE, NULL is returned instead of creating a temporary variable
7867 in memory, and the caller needs to handle it differently. */
7868
7869 static rtx
7870 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7871 bool avoid_temp_mem)
7872 {
7873 tree type = TREE_TYPE (exp);
7874 machine_mode mode = TYPE_MODE (type);
7875
7876 /* Try to avoid creating a temporary at all. This is possible
7877 if all of the initializer is zero.
7878 FIXME: try to handle all [0..255] initializers we can handle
7879 with memset. */
7880 if (TREE_STATIC (exp)
7881 && !TREE_ADDRESSABLE (exp)
7882 && target != 0 && mode == BLKmode
7883 && all_zeros_p (exp))
7884 {
7885 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7886 return target;
7887 }
7888
7889 /* All elts simple constants => refer to a constant in memory. But
7890 if this is a non-BLKmode mode, let it store a field at a time
7891 since that should make a CONST_INT, CONST_WIDE_INT or
7892 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7893 use, it is best to store directly into the target unless the type
7894 is large enough that memcpy will be used. If we are making an
7895 initializer and all operands are constant, put it in memory as
7896 well.
7897
7898 FIXME: Avoid trying to fill vector constructors piece-meal.
7899 Output them with output_constant_def below unless we're sure
7900 they're zeros. This should go away when vector initializers
7901 are treated like VECTOR_CST instead of arrays. */
7902 if ((TREE_STATIC (exp)
7903 && ((mode == BLKmode
7904 && ! (target != 0 && safe_from_p (target, exp, 1)))
7905 || TREE_ADDRESSABLE (exp)
7906 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7907 && (! can_move_by_pieces
7908 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7909 TYPE_ALIGN (type)))
7910 && ! mostly_zeros_p (exp))))
7911 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7912 && TREE_CONSTANT (exp)))
7913 {
7914 rtx constructor;
7915
7916 if (avoid_temp_mem)
7917 return NULL_RTX;
7918
7919 constructor = expand_expr_constant (exp, 1, modifier);
7920
7921 if (modifier != EXPAND_CONST_ADDRESS
7922 && modifier != EXPAND_INITIALIZER
7923 && modifier != EXPAND_SUM)
7924 constructor = validize_mem (constructor);
7925
7926 return constructor;
7927 }
7928
7929 /* Handle calls that pass values in multiple non-contiguous
7930 locations. The Irix 6 ABI has examples of this. */
7931 if (target == 0 || ! safe_from_p (target, exp, 1)
7932 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7933 {
7934 if (avoid_temp_mem)
7935 return NULL_RTX;
7936
7937 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7938 }
7939
7940 store_constructor (exp, target, 0, int_expr_size (exp));
7941 return target;
7942 }
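/* Example (informal): for a TREE_STATIC CONSTRUCTOR whose elements are
   all zero, expanded into a BLKmode memory target, the all_zeros_p
   shortcut above emits a single clear_storage call; large constant
   initializers are instead referenced from the constant pool via
   expand_expr_constant, and everything else falls through to
   field-by-field stores in store_constructor.  */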
7943
7944
7945 /* expand_expr: generate code for computing expression EXP.
7946 An rtx for the computed value is returned. The value is never null.
7947 In the case of a void EXP, const0_rtx is returned.
7948
7949 The value may be stored in TARGET if TARGET is nonzero.
7950 TARGET is just a suggestion; callers must assume that
7951 the rtx returned may not be the same as TARGET.
7952
7953 If TARGET is CONST0_RTX, it means that the value will be ignored.
7954
7955 If TMODE is not VOIDmode, it suggests generating the
7956 result in mode TMODE. But this is done only when convenient.
7957 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7958 TMODE is just a suggestion; callers must assume that
7959 the rtx returned may not have mode TMODE.
7960
7961 Note that TARGET may have neither TMODE nor MODE. In that case, it
7962 probably will not be used.
7963
7964 If MODIFIER is EXPAND_SUM then when EXP is an addition
7965 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7966 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7967 products as above, or REG or MEM, or constant.
7968 Ordinarily in such cases we would output mul or add instructions
7969 and then return a pseudo reg containing the sum.
7970
7971 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7972 it also marks a label as absolutely required (it can't be dead).
7973 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7974 This is used for outputting expressions used in initializers.
7975
7976 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7977 with a constant address even if that address is not normally legitimate.
7978 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7979
7980 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7981 a call parameter. Such targets require special care as we haven't yet
7982 marked TARGET so that it's safe from being trashed by libcalls. We
7983 don't want to use TARGET for anything but the final result;
7984 intermediate values must go elsewhere. Additionally, calls to
7985 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7986
7987 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7988 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7989 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7990 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7991 recursively.
7992
7993 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7994 In this case, we don't adjust a returned MEM rtx that wouldn't be
7995 sufficiently aligned for its mode; instead, it's up to the caller
7996 to deal with it afterwards. This is used to make sure that unaligned
7997 base objects for which out-of-bounds accesses are supported, for
7998 example record types with trailing arrays, aren't realigned behind
7999 the back of the caller.
8000 The normal operating mode is to pass FALSE for this parameter. */
8001
8002 rtx
8003 expand_expr_real (tree exp, rtx target, machine_mode tmode,
8004 enum expand_modifier modifier, rtx *alt_rtl,
8005 bool inner_reference_p)
8006 {
8007 rtx ret;
8008
8009 /* Handle ERROR_MARK before anybody tries to access its type. */
8010 if (TREE_CODE (exp) == ERROR_MARK
8011 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
8012 {
8013 ret = CONST0_RTX (tmode);
8014 return ret ? ret : const0_rtx;
8015 }
8016
8017 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
8018 inner_reference_p);
8019 return ret;
8020 }
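/* Illustrative sketch, not used by the compiler: the most common way to
   call this in practice is through expand_normal from expr.h, which
   simply passes no suggested target, VOIDmode and EXPAND_NORMAL, i.e. it
   is equivalent to the hypothetical helper below.  */

static rtx ATTRIBUTE_UNUSED
example_expand_normal (tree exp)
{
  return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
}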
8021
8022 /* Try to expand the conditional expression which is represented by
8023 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
8024 return the rtl reg which represents the result. Otherwise return
8025 NULL_RTX. */
8026
8027 static rtx
8028 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8029 tree treeop1 ATTRIBUTE_UNUSED,
8030 tree treeop2 ATTRIBUTE_UNUSED)
8031 {
8032 #ifdef HAVE_conditional_move
8033 rtx insn;
8034 rtx op00, op01, op1, op2;
8035 enum rtx_code comparison_code;
8036 machine_mode comparison_mode;
8037 gimple srcstmt;
8038 rtx temp;
8039 tree type = TREE_TYPE (treeop1);
8040 int unsignedp = TYPE_UNSIGNED (type);
8041 machine_mode mode = TYPE_MODE (type);
8042 machine_mode orig_mode = mode;
8043
8044 /* If we cannot do a conditional move on the mode, try doing it
8045 with the promoted mode. */
8046 if (!can_conditionally_move_p (mode))
8047 {
8048 mode = promote_mode (type, mode, &unsignedp);
8049 if (!can_conditionally_move_p (mode))
8050 return NULL_RTX;
8051 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
8052 }
8053 else
8054 temp = assign_temp (type, 0, 1);
8055
8056 start_sequence ();
8057 expand_operands (treeop1, treeop2,
8058 temp, &op1, &op2, EXPAND_NORMAL);
8059
8060 if (TREE_CODE (treeop0) == SSA_NAME
8061 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8062 {
8063 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8064 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8065 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8066 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8067 comparison_mode = TYPE_MODE (type);
8068 unsignedp = TYPE_UNSIGNED (type);
8069 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8070 }
8071 else if (COMPARISON_CLASS_P (treeop0))
8072 {
8073 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8074 enum tree_code cmpcode = TREE_CODE (treeop0);
8075 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8076 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8077 unsignedp = TYPE_UNSIGNED (type);
8078 comparison_mode = TYPE_MODE (type);
8079 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8080 }
8081 else
8082 {
8083 op00 = expand_normal (treeop0);
8084 op01 = const0_rtx;
8085 comparison_code = NE;
8086 comparison_mode = GET_MODE (op00);
8087 if (comparison_mode == VOIDmode)
8088 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8089 }
8090
8091 if (GET_MODE (op1) != mode)
8092 op1 = gen_lowpart (mode, op1);
8093
8094 if (GET_MODE (op2) != mode)
8095 op2 = gen_lowpart (mode, op2);
8096
8097 /* Try to emit the conditional move. */
8098 insn = emit_conditional_move (temp, comparison_code,
8099 op00, op01, comparison_mode,
8100 op1, op2, mode,
8101 unsignedp);
8102
8103 /* If we could do the conditional move, emit the sequence,
8104 and return. */
8105 if (insn)
8106 {
8107 rtx_insn *seq = get_insns ();
8108 end_sequence ();
8109 emit_insn (seq);
8110 return convert_modes (orig_mode, mode, temp, 0);
8111 }
8112
8113 /* Otherwise discard the sequence and fall back to code with
8114 branches. */
8115 end_sequence ();
8116 #endif
8117 return NULL_RTX;
8118 }
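/* Example (informal): for "x = a < b ? c : d" on a target with a
   conditional-move instruction, the code above compares A and B once
   and emits roughly

     (set X (if_then_else (lt A B) C D))

   with no branches; if emit_conditional_move cannot handle the mode or
   the comparison, the emitted sequence is discarded and the caller
   falls back to the jump-based expansion.  */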
8119
8120 rtx
8121 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8122 enum expand_modifier modifier)
8123 {
8124 rtx op0, op1, op2, temp;
8125 rtx_code_label *lab;
8126 tree type;
8127 int unsignedp;
8128 machine_mode mode;
8129 enum tree_code code = ops->code;
8130 optab this_optab;
8131 rtx subtarget, original_target;
8132 int ignore;
8133 bool reduce_bit_field;
8134 location_t loc = ops->location;
8135 tree treeop0, treeop1, treeop2;
8136 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8137 ? reduce_to_bit_field_precision ((expr), \
8138 target, \
8139 type) \
8140 : (expr))
8141
8142 type = ops->type;
8143 mode = TYPE_MODE (type);
8144 unsignedp = TYPE_UNSIGNED (type);
8145
8146 treeop0 = ops->op0;
8147 treeop1 = ops->op1;
8148 treeop2 = ops->op2;
8149
8150 /* We should be called only on simple (binary or unary) expressions,
8151 exactly those that are valid in gimple expressions that aren't
8152 GIMPLE_SINGLE_RHS (or invalid). */
8153 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8154 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8155 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8156
8157 ignore = (target == const0_rtx
8158 || ((CONVERT_EXPR_CODE_P (code)
8159 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8160 && TREE_CODE (type) == VOID_TYPE));
8161
8162 /* We should be called only if we need the result. */
8163 gcc_assert (!ignore);
8164
8165 /* An operation in what may be a bit-field type needs the
8166 result to be reduced to the precision of the bit-field type,
8167 which is narrower than that of the type's mode. */
8168 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8169 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
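  /* Example (informal): arithmetic on a type with, say, TYPE_PRECISION
     of 3 is carried out in a wider machine mode (QImode or wider), so
     REDUCE_BIT_FIELD below masks unsigned results (or sign-extends
     signed ones) back down to 3 bits.  */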
8170
8171 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8172 target = 0;
8173
8174 /* Use subtarget as the target for operand 0 of a binary operation. */
8175 subtarget = get_subtarget (target);
8176 original_target = target;
8177
8178 switch (code)
8179 {
8180 case NON_LVALUE_EXPR:
8181 case PAREN_EXPR:
8182 CASE_CONVERT:
8183 if (treeop0 == error_mark_node)
8184 return const0_rtx;
8185
8186 if (TREE_CODE (type) == UNION_TYPE)
8187 {
8188 tree valtype = TREE_TYPE (treeop0);
8189
8190 /* If both input and output are BLKmode, this conversion isn't doing
8191 anything except possibly changing memory attribute. */
8192 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8193 {
8194 rtx result = expand_expr (treeop0, target, tmode,
8195 modifier);
8196
8197 result = copy_rtx (result);
8198 set_mem_attributes (result, type, 0);
8199 return result;
8200 }
8201
8202 if (target == 0)
8203 {
8204 if (TYPE_MODE (type) != BLKmode)
8205 target = gen_reg_rtx (TYPE_MODE (type));
8206 else
8207 target = assign_temp (type, 1, 1);
8208 }
8209
8210 if (MEM_P (target))
8211 /* Store data into beginning of memory target. */
8212 store_expr (treeop0,
8213 adjust_address (target, TYPE_MODE (valtype), 0),
8214 modifier == EXPAND_STACK_PARM,
8215 false);
8216
8217 else
8218 {
8219 gcc_assert (REG_P (target));
8220
8221 /* Store this field into a union of the proper type. */
8222 store_field (target,
8223 MIN ((int_size_in_bytes (TREE_TYPE
8224 (treeop0))
8225 * BITS_PER_UNIT),
8226 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8227 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8228 }
8229
8230 /* Return the entire union. */
8231 return target;
8232 }
8233
8234 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8235 {
8236 op0 = expand_expr (treeop0, target, VOIDmode,
8237 modifier);
8238
8239 /* If the signedness of the conversion differs and OP0 is
8240 a promoted SUBREG, clear that indication since we now
8241 have to do the proper extension. */
8242 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8243 && GET_CODE (op0) == SUBREG)
8244 SUBREG_PROMOTED_VAR_P (op0) = 0;
8245
8246 return REDUCE_BIT_FIELD (op0);
8247 }
8248
8249 op0 = expand_expr (treeop0, NULL_RTX, mode,
8250 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8251 if (GET_MODE (op0) == mode)
8252 ;
8253
8254 /* If OP0 is a constant, just convert it into the proper mode. */
8255 else if (CONSTANT_P (op0))
8256 {
8257 tree inner_type = TREE_TYPE (treeop0);
8258 machine_mode inner_mode = GET_MODE (op0);
8259
8260 if (inner_mode == VOIDmode)
8261 inner_mode = TYPE_MODE (inner_type);
8262
8263 if (modifier == EXPAND_INITIALIZER)
8264 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8265 subreg_lowpart_offset (mode,
8266 inner_mode));
8267 else
8268 op0 = convert_modes (mode, inner_mode, op0,
8269 TYPE_UNSIGNED (inner_type));
8270 }
8271
8272 else if (modifier == EXPAND_INITIALIZER)
8273 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8274
8275 else if (target == 0)
8276 op0 = convert_to_mode (mode, op0,
8277 TYPE_UNSIGNED (TREE_TYPE
8278 (treeop0)));
8279 else
8280 {
8281 convert_move (target, op0,
8282 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8283 op0 = target;
8284 }
8285
8286 return REDUCE_BIT_FIELD (op0);
8287
8288 case ADDR_SPACE_CONVERT_EXPR:
8289 {
8290 tree treeop0_type = TREE_TYPE (treeop0);
8291 addr_space_t as_to;
8292 addr_space_t as_from;
8293
8294 gcc_assert (POINTER_TYPE_P (type));
8295 gcc_assert (POINTER_TYPE_P (treeop0_type));
8296
8297 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8298 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8299
8300 /* Conversions between pointers to the same address space should
8301 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8302 gcc_assert (as_to != as_from);
8303
8304 /* Ask target code to handle conversion between pointers
8305 to overlapping address spaces. */
8306 if (targetm.addr_space.subset_p (as_to, as_from)
8307 || targetm.addr_space.subset_p (as_from, as_to))
8308 {
8309 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8310 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8311 gcc_assert (op0);
8312 return op0;
8313 }
8314
8315 /* For disjoint address spaces, converting anything but
8316 a null pointer invokes undefined behaviour. We simply
8317 always return a null pointer here. */
8318 return CONST0_RTX (mode);
8319 }
8320
8321 case POINTER_PLUS_EXPR:
8322 /* Even though the sizetype mode and the pointer's mode can be different,
8323 expand is able to handle this correctly and get the correct result out
8324 of the PLUS_EXPR code. */
8325 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8326 if sizetype precision is smaller than pointer precision. */
8327 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8328 treeop1 = fold_convert_loc (loc, type,
8329 fold_convert_loc (loc, ssizetype,
8330 treeop1));
8331 /* If sizetype precision is larger than pointer precision, truncate the
8332 offset to have matching modes. */
8333 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8334 treeop1 = fold_convert_loc (loc, type, treeop1);
8335
8336 case PLUS_EXPR:
8337 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8338 something else, make sure we add the register to the constant and
8339 then to the other thing. This case can occur during strength
8340 reduction and doing it this way will produce better code if the
8341 frame pointer or argument pointer is eliminated.
8342
8343 fold-const.c will ensure that the constant is always in the inner
8344 PLUS_EXPR, so the only case we need to do anything about is if
8345 sp, ap, or fp is our second argument, in which case we must swap
8346 the innermost first argument and our second argument. */
8347
8348 if (TREE_CODE (treeop0) == PLUS_EXPR
8349 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8350 && TREE_CODE (treeop1) == VAR_DECL
8351 && (DECL_RTL (treeop1) == frame_pointer_rtx
8352 || DECL_RTL (treeop1) == stack_pointer_rtx
8353 || DECL_RTL (treeop1) == arg_pointer_rtx))
8354 {
8355 gcc_unreachable ();
8356 }
8357
8358 /* If the result is to be ptr_mode and we are adding an integer to
8359 something, we might be forming a constant. So try to use
8360 plus_constant. If it produces a sum and we can't accept it,
8361 use force_operand. This allows P = &ARR[const] to generate
8362 efficient code on machines where a SYMBOL_REF is not a valid
8363 address.
8364
8365 If this is an EXPAND_SUM call, always return the sum. */
8366 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8367 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8368 {
8369 if (modifier == EXPAND_STACK_PARM)
8370 target = 0;
8371 if (TREE_CODE (treeop0) == INTEGER_CST
8372 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8373 && TREE_CONSTANT (treeop1))
8374 {
8375 rtx constant_part;
8376 HOST_WIDE_INT wc;
8377 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8378
8379 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8380 EXPAND_SUM);
8381 /* Use wi::shwi to ensure that the constant is
8382 truncated according to the mode of OP1, then sign extended
8383 to a HOST_WIDE_INT. Using the constant directly can result
8384 in non-canonical RTL in a 64x32 cross compile. */
8385 wc = TREE_INT_CST_LOW (treeop0);
8386 constant_part =
8387 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8388 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8389 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8390 op1 = force_operand (op1, target);
8391 return REDUCE_BIT_FIELD (op1);
8392 }
8393
8394 else if (TREE_CODE (treeop1) == INTEGER_CST
8395 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8396 && TREE_CONSTANT (treeop0))
8397 {
8398 rtx constant_part;
8399 HOST_WIDE_INT wc;
8400 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8401
8402 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8403 (modifier == EXPAND_INITIALIZER
8404 ? EXPAND_INITIALIZER : EXPAND_SUM));
8405 if (! CONSTANT_P (op0))
8406 {
8407 op1 = expand_expr (treeop1, NULL_RTX,
8408 VOIDmode, modifier);
8409 /* Return a PLUS if modifier says it's OK. */
8410 if (modifier == EXPAND_SUM
8411 || modifier == EXPAND_INITIALIZER)
8412 return simplify_gen_binary (PLUS, mode, op0, op1);
8413 goto binop2;
8414 }
8415 /* Use wi::shwi to ensure that the constant is
8416 truncated according to the mode of OP0, then sign extended
8417 to a HOST_WIDE_INT. Using the constant directly can result
8418 in non-canonical RTL in a 64x32 cross compile. */
8419 wc = TREE_INT_CST_LOW (treeop1);
8420 constant_part
8421 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8422 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8423 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8424 op0 = force_operand (op0, target);
8425 return REDUCE_BIT_FIELD (op0);
8426 }
8427 }
8428
8429 /* Use TER to expand pointer addition of a negated value
8430 as pointer subtraction. */
8431 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8432 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8433 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8434 && TREE_CODE (treeop1) == SSA_NAME
8435 && TYPE_MODE (TREE_TYPE (treeop0))
8436 == TYPE_MODE (TREE_TYPE (treeop1)))
8437 {
8438 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8439 if (def)
8440 {
8441 treeop1 = gimple_assign_rhs1 (def);
8442 code = MINUS_EXPR;
8443 goto do_minus;
8444 }
8445 }
8446
8447 /* No sense saving up arithmetic to be done
8448 if it's all in the wrong mode to form part of an address.
8449 And force_operand won't know whether to sign-extend or
8450 zero-extend. */
8451 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8452 || mode != ptr_mode)
8453 {
8454 expand_operands (treeop0, treeop1,
8455 subtarget, &op0, &op1, EXPAND_NORMAL);
8456 if (op0 == const0_rtx)
8457 return op1;
8458 if (op1 == const0_rtx)
8459 return op0;
8460 goto binop2;
8461 }
8462
8463 expand_operands (treeop0, treeop1,
8464 subtarget, &op0, &op1, modifier);
8465 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8466
8467 case MINUS_EXPR:
8468 do_minus:
8469 /* For initializers, we are allowed to return a MINUS of two
8470 symbolic constants. Here we handle all cases when both operands
8471 are constant. */
8472 /* Handle difference of two symbolic constants,
8473 for the sake of an initializer. */
8474 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8475 && really_constant_p (treeop0)
8476 && really_constant_p (treeop1))
8477 {
8478 expand_operands (treeop0, treeop1,
8479 NULL_RTX, &op0, &op1, modifier);
8480
8481 /* If the last operand is a CONST_INT, use plus_constant of
8482 the negated constant. Else make the MINUS. */
8483 if (CONST_INT_P (op1))
8484 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8485 -INTVAL (op1)));
8486 else
8487 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8488 }
8489
8490 /* No sense saving up arithmetic to be done
8491 if it's all in the wrong mode to form part of an address.
8492 And force_operand won't know whether to sign-extend or
8493 zero-extend. */
8494 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8495 || mode != ptr_mode)
8496 goto binop;
8497
8498 expand_operands (treeop0, treeop1,
8499 subtarget, &op0, &op1, modifier);
8500
8501 /* Convert A - const to A + (-const). */
8502 if (CONST_INT_P (op1))
8503 {
8504 op1 = negate_rtx (mode, op1);
8505 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8506 }
8507
8508 goto binop2;
8509
8510 case WIDEN_MULT_PLUS_EXPR:
8511 case WIDEN_MULT_MINUS_EXPR:
8512 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8513 op2 = expand_normal (treeop2);
8514 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8515 target, unsignedp);
8516 return target;
8517
8518 case WIDEN_MULT_EXPR:
8519 /* If first operand is constant, swap them.
8520 Thus the following special case checks need only
8521 check the second operand. */
8522 if (TREE_CODE (treeop0) == INTEGER_CST)
8523 {
8524 tree t1 = treeop0;
8525 treeop0 = treeop1;
8526 treeop1 = t1;
8527 }
8528
8529 /* First, check if we have a multiplication of one signed and one
8530 unsigned operand. */
8531 if (TREE_CODE (treeop1) != INTEGER_CST
8532 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8533 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8534 {
8535 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8536 this_optab = usmul_widen_optab;
8537 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8538 != CODE_FOR_nothing)
8539 {
8540 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8541 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8542 EXPAND_NORMAL);
8543 else
8544 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8545 EXPAND_NORMAL);
8546 /* op0 and op1 might still be constant, despite the above
8547 != INTEGER_CST check. Handle it. */
8548 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8549 {
8550 op0 = convert_modes (innermode, mode, op0, true);
8551 op1 = convert_modes (innermode, mode, op1, false);
8552 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8553 target, unsignedp));
8554 }
8555 goto binop3;
8556 }
8557 }
8558 /* Check for a multiplication with matching signedness. */
8559 else if ((TREE_CODE (treeop1) == INTEGER_CST
8560 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8561 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8562 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8563 {
8564 tree op0type = TREE_TYPE (treeop0);
8565 machine_mode innermode = TYPE_MODE (op0type);
8566 bool zextend_p = TYPE_UNSIGNED (op0type);
8567 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8568 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8569
8570 if (TREE_CODE (treeop0) != INTEGER_CST)
8571 {
8572 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8573 != CODE_FOR_nothing)
8574 {
8575 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8576 EXPAND_NORMAL);
8577 /* op0 and op1 might still be constant, despite the above
8578 != INTEGER_CST check. Handle it. */
8579 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8580 {
8581 widen_mult_const:
8582 op0 = convert_modes (innermode, mode, op0, zextend_p);
8583 op1
8584 = convert_modes (innermode, mode, op1,
8585 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8586 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8587 target,
8588 unsignedp));
8589 }
8590 temp = expand_widening_mult (mode, op0, op1, target,
8591 unsignedp, this_optab);
8592 return REDUCE_BIT_FIELD (temp);
8593 }
8594 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8595 != CODE_FOR_nothing
8596 && innermode == word_mode)
8597 {
8598 rtx htem, hipart;
8599 op0 = expand_normal (treeop0);
8600 if (TREE_CODE (treeop1) == INTEGER_CST)
8601 op1 = convert_modes (innermode, mode,
8602 expand_normal (treeop1),
8603 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8604 else
8605 op1 = expand_normal (treeop1);
8606 /* op0 and op1 might still be constant, despite the above
8607 != INTEGER_CST check. Handle it. */
8608 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8609 goto widen_mult_const;
8610 temp = expand_binop (mode, other_optab, op0, op1, target,
8611 unsignedp, OPTAB_LIB_WIDEN);
8612 hipart = gen_highpart (innermode, temp);
8613 htem = expand_mult_highpart_adjust (innermode, hipart,
8614 op0, op1, hipart,
8615 zextend_p);
8616 if (htem != hipart)
8617 emit_move_insn (hipart, htem);
8618 return REDUCE_BIT_FIELD (temp);
8619 }
8620 }
8621 }
8622 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8623 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8624 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8625 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8626
8627 case FMA_EXPR:
8628 {
8629 optab opt = fma_optab;
8630 gimple def0, def2;
8631
8632 /* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
8633 call. */
8634 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8635 {
8636 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8637 tree call_expr;
8638
8639 gcc_assert (fn != NULL_TREE);
8640 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8641 return expand_builtin (call_expr, target, subtarget, mode, false);
8642 }
8643
8644 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8645 /* The multiplication is commutative - look at its 2nd operand
8646 if the first isn't fed by a negate. */
8647 if (!def0)
8648 {
8649 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8650 /* Swap operands if the 2nd operand is fed by a negate. */
8651 if (def0)
8652 {
8653 tree tem = treeop0;
8654 treeop0 = treeop1;
8655 treeop1 = tem;
8656 }
8657 }
8658 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8659
8660 op0 = op2 = NULL;
8661
8662 if (def0 && def2
8663 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8664 {
8665 opt = fnms_optab;
8666 op0 = expand_normal (gimple_assign_rhs1 (def0));
8667 op2 = expand_normal (gimple_assign_rhs1 (def2));
8668 }
8669 else if (def0
8670 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8671 {
8672 opt = fnma_optab;
8673 op0 = expand_normal (gimple_assign_rhs1 (def0));
8674 }
8675 else if (def2
8676 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8677 {
8678 opt = fms_optab;
8679 op2 = expand_normal (gimple_assign_rhs1 (def2));
8680 }
8681
8682 if (op0 == NULL)
8683 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8684 if (op2 == NULL)
8685 op2 = expand_normal (treeop2);
8686 op1 = expand_normal (treeop1);
8687
8688 return expand_ternary_op (TYPE_MODE (type), opt,
8689 op0, op1, op2, target, 0);
8690 }
8691
8692 case MULT_EXPR:
8693 /* If this is a fixed-point operation, then we cannot use the code
8694 below because "expand_mult" doesn't support sat/no-sat fixed-point
8695 multiplications. */
8696 if (ALL_FIXED_POINT_MODE_P (mode))
8697 goto binop;
8698
8699 /* If first operand is constant, swap them.
8700 Thus the following special case checks need only
8701 check the second operand. */
8702 if (TREE_CODE (treeop0) == INTEGER_CST)
8703 {
8704 tree t1 = treeop0;
8705 treeop0 = treeop1;
8706 treeop1 = t1;
8707 }
8708
8709 /* Attempt to return something suitable for generating an
8710 indexed address, for machines that support that. */
8711
8712 if (modifier == EXPAND_SUM && mode == ptr_mode
8713 && tree_fits_shwi_p (treeop1))
8714 {
8715 tree exp1 = treeop1;
8716
8717 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8718 EXPAND_SUM);
8719
8720 if (!REG_P (op0))
8721 op0 = force_operand (op0, NULL_RTX);
8722 if (!REG_P (op0))
8723 op0 = copy_to_mode_reg (mode, op0);
8724
8725 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8726 gen_int_mode (tree_to_shwi (exp1),
8727 TYPE_MODE (TREE_TYPE (exp1)))));
8728 }
8729
8730 if (modifier == EXPAND_STACK_PARM)
8731 target = 0;
8732
8733 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8734 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8735
8736 case TRUNC_DIV_EXPR:
8737 case FLOOR_DIV_EXPR:
8738 case CEIL_DIV_EXPR:
8739 case ROUND_DIV_EXPR:
8740 case EXACT_DIV_EXPR:
8741 /* If this is a fixed-point operation, then we cannot use the code
8742 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8743 divisions. */
8744 if (ALL_FIXED_POINT_MODE_P (mode))
8745 goto binop;
8746
8747 if (modifier == EXPAND_STACK_PARM)
8748 target = 0;
8749 /* Possible optimization: compute the dividend with EXPAND_SUM
8750 then if the divisor is constant we can optimize the case
8751 where some terms of the dividend have coeffs divisible by it. */
8752 expand_operands (treeop0, treeop1,
8753 subtarget, &op0, &op1, EXPAND_NORMAL);
8754 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8755
8756 case RDIV_EXPR:
8757 goto binop;
8758
8759 case MULT_HIGHPART_EXPR:
8760 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8761 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8762 gcc_assert (temp);
8763 return temp;
8764
8765 case TRUNC_MOD_EXPR:
8766 case FLOOR_MOD_EXPR:
8767 case CEIL_MOD_EXPR:
8768 case ROUND_MOD_EXPR:
8769 if (modifier == EXPAND_STACK_PARM)
8770 target = 0;
8771 expand_operands (treeop0, treeop1,
8772 subtarget, &op0, &op1, EXPAND_NORMAL);
8773 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8774
8775 case FIXED_CONVERT_EXPR:
8776 op0 = expand_normal (treeop0);
8777 if (target == 0 || modifier == EXPAND_STACK_PARM)
8778 target = gen_reg_rtx (mode);
8779
8780 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8781 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8782 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8783 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8784 else
8785 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8786 return target;
8787
8788 case FIX_TRUNC_EXPR:
8789 op0 = expand_normal (treeop0);
8790 if (target == 0 || modifier == EXPAND_STACK_PARM)
8791 target = gen_reg_rtx (mode);
8792 expand_fix (target, op0, unsignedp);
8793 return target;
8794
8795 case FLOAT_EXPR:
8796 op0 = expand_normal (treeop0);
8797 if (target == 0 || modifier == EXPAND_STACK_PARM)
8798 target = gen_reg_rtx (mode);
8799 /* expand_float can't figure out what to do if FROM has VOIDmode.
8800 So give it the correct mode. With -O, cse will optimize this. */
8801 if (GET_MODE (op0) == VOIDmode)
8802 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8803 op0);
8804 expand_float (target, op0,
8805 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8806 return target;
8807
8808 case NEGATE_EXPR:
8809 op0 = expand_expr (treeop0, subtarget,
8810 VOIDmode, EXPAND_NORMAL);
8811 if (modifier == EXPAND_STACK_PARM)
8812 target = 0;
8813 temp = expand_unop (mode,
8814 optab_for_tree_code (NEGATE_EXPR, type,
8815 optab_default),
8816 op0, target, 0);
8817 gcc_assert (temp);
8818 return REDUCE_BIT_FIELD (temp);
8819
8820 case ABS_EXPR:
8821 op0 = expand_expr (treeop0, subtarget,
8822 VOIDmode, EXPAND_NORMAL);
8823 if (modifier == EXPAND_STACK_PARM)
8824 target = 0;
8825
8826 /* ABS_EXPR is not valid for complex arguments. */
8827 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8828 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8829
8830 /* Unsigned abs is simply the operand. Testing here means we don't
8831 risk generating incorrect code below. */
8832 if (TYPE_UNSIGNED (type))
8833 return op0;
8834
8835 return expand_abs (mode, op0, target, unsignedp,
8836 safe_from_p (target, treeop0, 1));
8837
8838 case MAX_EXPR:
8839 case MIN_EXPR:
8840 target = original_target;
8841 if (target == 0
8842 || modifier == EXPAND_STACK_PARM
8843 || (MEM_P (target) && MEM_VOLATILE_P (target))
8844 || GET_MODE (target) != mode
8845 || (REG_P (target)
8846 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8847 target = gen_reg_rtx (mode);
8848 expand_operands (treeop0, treeop1,
8849 target, &op0, &op1, EXPAND_NORMAL);
8850
8851 /* First try to do it with a special MIN or MAX instruction.
8852 If that does not win, use a conditional jump to select the proper
8853 value. */
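/* As a sketch (assuming the conditional-move path below is not taken),
   the fallback for MAX_EXPR ends up roughly as:
     target = op0;
     if (target >= op1) goto lab;   (GEU when unsigned, possibly after
                                     canonicalizing the comparison to 0)
     target = op1;
   lab:
   MIN_EXPR is analogous with the comparison direction reversed.  */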
8854 this_optab = optab_for_tree_code (code, type, optab_default);
8855 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8856 OPTAB_WIDEN);
8857 if (temp != 0)
8858 return temp;
8859
8860 /* At this point, a MEM target is no longer useful; we will get better
8861 code without it. */
8862
8863 if (! REG_P (target))
8864 target = gen_reg_rtx (mode);
8865
8866 /* If op1 was placed in target, swap op0 and op1. */
8867 if (target != op0 && target == op1)
8868 std::swap (op0, op1);
8869
8870 /* We generate better code and avoid problems with op1 mentioning
8871 target by forcing op1 into a pseudo if it isn't a constant. */
8872 if (! CONSTANT_P (op1))
8873 op1 = force_reg (mode, op1);
8874
8875 {
8876 enum rtx_code comparison_code;
8877 rtx cmpop1 = op1;
8878
8879 if (code == MAX_EXPR)
8880 comparison_code = unsignedp ? GEU : GE;
8881 else
8882 comparison_code = unsignedp ? LEU : LE;
8883
8884 /* Canonicalize to comparisons against 0. */
8885 if (op1 == const1_rtx)
8886 {
8887 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8888 or (a != 0 ? a : 1) for unsigned.
8889 For MIN we are safe converting (a <= 1 ? a : 1)
8890 into (a <= 0 ? a : 1) */
8891 cmpop1 = const0_rtx;
8892 if (code == MAX_EXPR)
8893 comparison_code = unsignedp ? NE : GT;
8894 }
8895 if (op1 == constm1_rtx && !unsignedp)
8896 {
8897 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8898 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8899 cmpop1 = const0_rtx;
8900 if (code == MIN_EXPR)
8901 comparison_code = LT;
8902 }
8903 #ifdef HAVE_conditional_move
8904 /* Use a conditional move if possible. */
8905 if (can_conditionally_move_p (mode))
8906 {
8907 rtx insn;
8908
8909 start_sequence ();
8910
8911 /* Try to emit the conditional move. */
8912 insn = emit_conditional_move (target, comparison_code,
8913 op0, cmpop1, mode,
8914 op0, op1, mode,
8915 unsignedp);
8916
8917 /* If we could do the conditional move, emit the sequence,
8918 and return. */
8919 if (insn)
8920 {
8921 rtx_insn *seq = get_insns ();
8922 end_sequence ();
8923 emit_insn (seq);
8924 return target;
8925 }
8926
8927 /* Otherwise discard the sequence and fall back to code with
8928 branches. */
8929 end_sequence ();
8930 }
8931 #endif
8932 if (target != op0)
8933 emit_move_insn (target, op0);
8934
8935 lab = gen_label_rtx ();
8936 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8937 unsignedp, mode, NULL_RTX, NULL, lab,
8938 -1);
8939 }
8940 emit_move_insn (target, op1);
8941 emit_label (lab);
8942 return target;
8943
8944 case BIT_NOT_EXPR:
8945 op0 = expand_expr (treeop0, subtarget,
8946 VOIDmode, EXPAND_NORMAL);
8947 if (modifier == EXPAND_STACK_PARM)
8948 target = 0;
8949 /* In case we have to reduce the result to bitfield precision
8950 for an unsigned bitfield, expand this as XOR with a proper
8951 constant instead. */
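/* For example (purely illustrative): for a 3-bit unsigned bit-field
   type held in QImode, ~x is computed as x ^ 0x7, which keeps the
   result within the 3-bit precision.  */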
8952 if (reduce_bit_field && TYPE_UNSIGNED (type))
8953 {
8954 wide_int mask = wi::mask (TYPE_PRECISION (type),
8955 false, GET_MODE_PRECISION (mode));
8956
8957 temp = expand_binop (mode, xor_optab, op0,
8958 immed_wide_int_const (mask, mode),
8959 target, 1, OPTAB_LIB_WIDEN);
8960 }
8961 else
8962 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8963 gcc_assert (temp);
8964 return temp;
8965
8966 /* ??? Can optimize bitwise operations with one arg constant.
8967 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8968 and (a bitwise1 b) bitwise2 b (etc)
8969 but that is probably not worthwhile. */
8970
8971 case BIT_AND_EXPR:
8972 case BIT_IOR_EXPR:
8973 case BIT_XOR_EXPR:
8974 goto binop;
8975
8976 case LROTATE_EXPR:
8977 case RROTATE_EXPR:
8978 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8979 || (GET_MODE_PRECISION (TYPE_MODE (type))
8980 == TYPE_PRECISION (type)));
8981 /* fall through */
8982
8983 case LSHIFT_EXPR:
8984 case RSHIFT_EXPR:
8985 /* If this is a fixed-point operation, then we cannot use the code
8986 below because "expand_shift" doesn't support sat/no-sat fixed-point
8987 shifts. */
8988 if (ALL_FIXED_POINT_MODE_P (mode))
8989 goto binop;
8990
8991 if (! safe_from_p (subtarget, treeop1, 1))
8992 subtarget = 0;
8993 if (modifier == EXPAND_STACK_PARM)
8994 target = 0;
8995 op0 = expand_expr (treeop0, subtarget,
8996 VOIDmode, EXPAND_NORMAL);
8997 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8998 unsignedp);
8999 if (code == LSHIFT_EXPR)
9000 temp = REDUCE_BIT_FIELD (temp);
9001 return temp;
9002
9003 /* Could determine the answer when only additive constants differ. Also,
9004 the addition of one can be handled by changing the condition. */
9005 case LT_EXPR:
9006 case LE_EXPR:
9007 case GT_EXPR:
9008 case GE_EXPR:
9009 case EQ_EXPR:
9010 case NE_EXPR:
9011 case UNORDERED_EXPR:
9012 case ORDERED_EXPR:
9013 case UNLT_EXPR:
9014 case UNLE_EXPR:
9015 case UNGT_EXPR:
9016 case UNGE_EXPR:
9017 case UNEQ_EXPR:
9018 case LTGT_EXPR:
9019 {
9020 temp = do_store_flag (ops,
9021 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9022 tmode != VOIDmode ? tmode : mode);
9023 if (temp)
9024 return temp;
9025
9026 /* Use a compare and a jump for BLKmode comparisons, or for function
9027 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
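/* Sketch of the fallback emitted below:
     target = 0;
     if (!(op0 <cond> op1)) goto lab1;
     target = 1;                     (or -1 for a 1-bit signed type)
   lab1:                                                              */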
9028
9029 if ((target == 0
9030 || modifier == EXPAND_STACK_PARM
9031 || ! safe_from_p (target, treeop0, 1)
9032 || ! safe_from_p (target, treeop1, 1)
9033 /* Make sure we don't have a hard reg (such as function's return
9034 value) live across basic blocks, if not optimizing. */
9035 || (!optimize && REG_P (target)
9036 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9037 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9038
9039 emit_move_insn (target, const0_rtx);
9040
9041 rtx_code_label *lab1 = gen_label_rtx ();
9042 jumpifnot_1 (code, treeop0, treeop1, lab1, -1);
9043
9044 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9045 emit_move_insn (target, constm1_rtx);
9046 else
9047 emit_move_insn (target, const1_rtx);
9048
9049 emit_label (lab1);
9050 return target;
9051 }
9052 case COMPLEX_EXPR:
9053 /* Get the rtx code of the operands. */
9054 op0 = expand_normal (treeop0);
9055 op1 = expand_normal (treeop1);
9056
9057 if (!target)
9058 target = gen_reg_rtx (TYPE_MODE (type));
9059 else
9060 /* If target overlaps with op1, then either we need to force
9061 op1 into a pseudo (if target also overlaps with op0),
9062 or write the complex parts in reverse order. */
9063 switch (GET_CODE (target))
9064 {
9065 case CONCAT:
9066 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9067 {
9068 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9069 {
9070 complex_expr_force_op1:
9071 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9072 emit_move_insn (temp, op1);
9073 op1 = temp;
9074 break;
9075 }
9076 complex_expr_swap_order:
9077 /* Move the imaginary (op1) and real (op0) parts to their
9078 location. */
9079 write_complex_part (target, op1, true);
9080 write_complex_part (target, op0, false);
9081
9082 return target;
9083 }
9084 break;
9085 case MEM:
9086 temp = adjust_address_nv (target,
9087 GET_MODE_INNER (GET_MODE (target)), 0);
9088 if (reg_overlap_mentioned_p (temp, op1))
9089 {
9090 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9091 temp = adjust_address_nv (target, imode,
9092 GET_MODE_SIZE (imode));
9093 if (reg_overlap_mentioned_p (temp, op0))
9094 goto complex_expr_force_op1;
9095 goto complex_expr_swap_order;
9096 }
9097 break;
9098 default:
9099 if (reg_overlap_mentioned_p (target, op1))
9100 {
9101 if (reg_overlap_mentioned_p (target, op0))
9102 goto complex_expr_force_op1;
9103 goto complex_expr_swap_order;
9104 }
9105 break;
9106 }
9107
9108 /* Move the real (op0) and imaginary (op1) parts to their location. */
9109 write_complex_part (target, op0, false);
9110 write_complex_part (target, op1, true);
9111
9112 return target;
9113
9114 case WIDEN_SUM_EXPR:
9115 {
9116 tree oprnd0 = treeop0;
9117 tree oprnd1 = treeop1;
9118
9119 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9120 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9121 target, unsignedp);
9122 return target;
9123 }
9124
9125 case REDUC_MAX_EXPR:
9126 case REDUC_MIN_EXPR:
9127 case REDUC_PLUS_EXPR:
9128 {
9129 op0 = expand_normal (treeop0);
9130 this_optab = optab_for_tree_code (code, type, optab_default);
9131 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9132
9133 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9134 {
9135 struct expand_operand ops[2];
9136 enum insn_code icode = optab_handler (this_optab, vec_mode);
9137
9138 create_output_operand (&ops[0], target, mode);
9139 create_input_operand (&ops[1], op0, vec_mode);
9140 if (maybe_expand_insn (icode, 2, ops))
9141 {
9142 target = ops[0].value;
9143 if (GET_MODE (target) != mode)
9144 return gen_lowpart (tmode, target);
9145 return target;
9146 }
9147 }
9148 /* Fall back to optab with vector result, and then extract scalar. */
9149 this_optab = scalar_reduc_to_vector (this_optab, type);
9150 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9151 gcc_assert (temp);
9152 /* The tree code produces a scalar result, but (somewhat by convention)
9153 the optab produces a vector with the result in element 0 if
9154 little-endian, or element N-1 if big-endian. So pull the scalar
9155 result out of that element. */
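/* For instance (illustrative, target-dependent): a V4SImode reduc_plus
   leaves the sum in element 0 on a little-endian target, so we extract
   the 32 bits at bit offset 0; on big-endian it is element 3, i.e. bit
   offset 96.  */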
9156 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9157 int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9158 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9159 target, mode, mode);
9160 gcc_assert (temp);
9161 return temp;
9162 }
9163
9164 case VEC_UNPACK_HI_EXPR:
9165 case VEC_UNPACK_LO_EXPR:
9166 {
9167 op0 = expand_normal (treeop0);
9168 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9169 target, unsignedp);
9170 gcc_assert (temp);
9171 return temp;
9172 }
9173
9174 case VEC_UNPACK_FLOAT_HI_EXPR:
9175 case VEC_UNPACK_FLOAT_LO_EXPR:
9176 {
9177 op0 = expand_normal (treeop0);
9178 /* The signedness is determined from the input operand. */
9179 temp = expand_widen_pattern_expr
9180 (ops, op0, NULL_RTX, NULL_RTX,
9181 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9182
9183 gcc_assert (temp);
9184 return temp;
9185 }
9186
9187 case VEC_WIDEN_MULT_HI_EXPR:
9188 case VEC_WIDEN_MULT_LO_EXPR:
9189 case VEC_WIDEN_MULT_EVEN_EXPR:
9190 case VEC_WIDEN_MULT_ODD_EXPR:
9191 case VEC_WIDEN_LSHIFT_HI_EXPR:
9192 case VEC_WIDEN_LSHIFT_LO_EXPR:
9193 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9194 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9195 target, unsignedp);
9196 gcc_assert (target);
9197 return target;
9198
9199 case VEC_PACK_TRUNC_EXPR:
9200 case VEC_PACK_SAT_EXPR:
9201 case VEC_PACK_FIX_TRUNC_EXPR:
9202 mode = TYPE_MODE (TREE_TYPE (treeop0));
9203 goto binop;
9204
9205 case VEC_PERM_EXPR:
9206 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9207 op2 = expand_normal (treeop2);
9208
9209 /* Careful here: if the target doesn't support integral vector modes,
9210 a constant selection vector could wind up smooshed into a normal
9211 integral constant. */
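/* E.g. (hypothetical target without integer vector modes): a V4QImode
   selector {0, 1, 2, 3} may have been folded into a plain integral
   constant; below it is converted back via simplify_subreg into a
   CONST_VECTOR of an integer vector mode of the same size.  */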
9212 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9213 {
9214 tree sel_type = TREE_TYPE (treeop2);
9215 machine_mode vmode
9216 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9217 TYPE_VECTOR_SUBPARTS (sel_type));
9218 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9219 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9220 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9221 }
9222 else
9223 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9224
9225 temp = expand_vec_perm (mode, op0, op1, op2, target);
9226 gcc_assert (temp);
9227 return temp;
9228
9229 case DOT_PROD_EXPR:
9230 {
9231 tree oprnd0 = treeop0;
9232 tree oprnd1 = treeop1;
9233 tree oprnd2 = treeop2;
9234 rtx op2;
9235
9236 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9237 op2 = expand_normal (oprnd2);
9238 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9239 target, unsignedp);
9240 return target;
9241 }
9242
9243 case SAD_EXPR:
9244 {
9245 tree oprnd0 = treeop0;
9246 tree oprnd1 = treeop1;
9247 tree oprnd2 = treeop2;
9248 rtx op2;
9249
9250 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9251 op2 = expand_normal (oprnd2);
9252 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9253 target, unsignedp);
9254 return target;
9255 }
9256
9257 case REALIGN_LOAD_EXPR:
9258 {
9259 tree oprnd0 = treeop0;
9260 tree oprnd1 = treeop1;
9261 tree oprnd2 = treeop2;
9262 rtx op2;
9263
9264 this_optab = optab_for_tree_code (code, type, optab_default);
9265 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9266 op2 = expand_normal (oprnd2);
9267 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9268 target, unsignedp);
9269 gcc_assert (temp);
9270 return temp;
9271 }
9272
9273 case COND_EXPR:
9274 {
9275 /* A COND_EXPR with its type being VOID_TYPE represents a
9276 conditional jump and is handled in
9277 expand_gimple_cond_expr. */
9278 gcc_assert (!VOID_TYPE_P (type));
9279
9280 /* Note that COND_EXPRs whose type is a structure or union
9281 are required to be constructed to contain assignments of
9282 a temporary variable, so that we can evaluate them here
9283 for side effect only. If type is void, we must do likewise. */
9284
9285 gcc_assert (!TREE_ADDRESSABLE (type)
9286 && !ignore
9287 && TREE_TYPE (treeop1) != void_type_node
9288 && TREE_TYPE (treeop2) != void_type_node);
9289
9290 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9291 if (temp)
9292 return temp;
9293
9294 /* If we are not to produce a result, we have no target. Otherwise,
9295 if a target was specified use it; it will not be used as an
9296 intermediate target unless it is safe. If no target, use a
9297 temporary. */
9298
9299 if (modifier != EXPAND_STACK_PARM
9300 && original_target
9301 && safe_from_p (original_target, treeop0, 1)
9302 && GET_MODE (original_target) == mode
9303 && !MEM_P (original_target))
9304 temp = original_target;
9305 else
9306 temp = assign_temp (type, 0, 1);
9307
9308 do_pending_stack_adjust ();
9309 NO_DEFER_POP;
9310 rtx_code_label *lab0 = gen_label_rtx ();
9311 rtx_code_label *lab1 = gen_label_rtx ();
9312 jumpifnot (treeop0, lab0, -1);
9313 store_expr (treeop1, temp,
9314 modifier == EXPAND_STACK_PARM,
9315 false);
9316
9317 emit_jump_insn (gen_jump (lab1));
9318 emit_barrier ();
9319 emit_label (lab0);
9320 store_expr (treeop2, temp,
9321 modifier == EXPAND_STACK_PARM,
9322 false);
9323
9324 emit_label (lab1);
9325 OK_DEFER_POP;
9326 return temp;
9327 }
9328
9329 case VEC_COND_EXPR:
9330 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9331 return target;
9332
9333 default:
9334 gcc_unreachable ();
9335 }
9336
9337 /* Here to do an ordinary binary operator. */
9338 binop:
9339 expand_operands (treeop0, treeop1,
9340 subtarget, &op0, &op1, EXPAND_NORMAL);
9341 binop2:
9342 this_optab = optab_for_tree_code (code, type, optab_default);
9343 binop3:
9344 if (modifier == EXPAND_STACK_PARM)
9345 target = 0;
9346 temp = expand_binop (mode, this_optab, op0, op1, target,
9347 unsignedp, OPTAB_LIB_WIDEN);
9348 gcc_assert (temp);
9349 /* Bitwise operations do not need bitfield reduction, as we expect
9350 their operands to be properly truncated. */
9351 if (code == BIT_XOR_EXPR
9352 || code == BIT_AND_EXPR
9353 || code == BIT_IOR_EXPR)
9354 return temp;
9355 return REDUCE_BIT_FIELD (temp);
9356 }
9357 #undef REDUCE_BIT_FIELD
9358
9359
9360 /* Return TRUE if statement STMT is suitable for replacement.
9361 Never consider memory loads as replaceable, because those don't ever lead
9362 into constant expressions. */
9363
9364 static bool
9365 stmt_is_replaceable_p (gimple stmt)
9366 {
9367 if (ssa_is_replaceable_p (stmt))
9368 {
9369 /* Don't move around loads. */
9370 if (!gimple_assign_single_p (stmt)
9371 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9372 return true;
9373 }
9374 return false;
9375 }
9376
9377 rtx
9378 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9379 enum expand_modifier modifier, rtx *alt_rtl,
9380 bool inner_reference_p)
9381 {
9382 rtx op0, op1, temp, decl_rtl;
9383 tree type;
9384 int unsignedp;
9385 machine_mode mode;
9386 enum tree_code code = TREE_CODE (exp);
9387 rtx subtarget, original_target;
9388 int ignore;
9389 tree context;
9390 bool reduce_bit_field;
9391 location_t loc = EXPR_LOCATION (exp);
9392 struct separate_ops ops;
9393 tree treeop0, treeop1, treeop2;
9394 tree ssa_name = NULL_TREE;
9395 gimple g;
9396
9397 type = TREE_TYPE (exp);
9398 mode = TYPE_MODE (type);
9399 unsignedp = TYPE_UNSIGNED (type);
9400
9401 treeop0 = treeop1 = treeop2 = NULL_TREE;
9402 if (!VL_EXP_CLASS_P (exp))
9403 switch (TREE_CODE_LENGTH (code))
9404 {
9405 default:
9406 case 3: treeop2 = TREE_OPERAND (exp, 2);
9407 case 2: treeop1 = TREE_OPERAND (exp, 1);
9408 case 1: treeop0 = TREE_OPERAND (exp, 0);
9409 case 0: break;
9410 }
9411 ops.code = code;
9412 ops.type = type;
9413 ops.op0 = treeop0;
9414 ops.op1 = treeop1;
9415 ops.op2 = treeop2;
9416 ops.location = loc;
9417
9418 ignore = (target == const0_rtx
9419 || ((CONVERT_EXPR_CODE_P (code)
9420 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9421 && TREE_CODE (type) == VOID_TYPE));
9422
9423 /* An operation in what may be a bit-field type needs the
9424 result to be reduced to the precision of the bit-field type,
9425 which is narrower than that of the type's mode. */
9426 reduce_bit_field = (!ignore
9427 && INTEGRAL_TYPE_P (type)
9428 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
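/* Illustrative example: a C bit-field of type unsigned:3 has
   TYPE_PRECISION 3 but a full narrow integer mode (e.g. QImode) as
   TYPE_MODE, so results of arithmetic on it are truncated back to
   3 bits by REDUCE_BIT_FIELD (masked when unsigned, sign-extended
   when signed).  */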
9429
9430 /* If we are going to ignore this result, we need only do something
9431 if there is a side-effect somewhere in the expression. If there
9432 is, short-circuit the most common cases here. Note that we must
9433 not call expand_expr with anything but const0_rtx in case this
9434 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9435
9436 if (ignore)
9437 {
9438 if (! TREE_SIDE_EFFECTS (exp))
9439 return const0_rtx;
9440
9441 /* Ensure we reference a volatile object even if value is ignored, but
9442 don't do this if all we are doing is taking its address. */
9443 if (TREE_THIS_VOLATILE (exp)
9444 && TREE_CODE (exp) != FUNCTION_DECL
9445 && mode != VOIDmode && mode != BLKmode
9446 && modifier != EXPAND_CONST_ADDRESS)
9447 {
9448 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9449 if (MEM_P (temp))
9450 copy_to_reg (temp);
9451 return const0_rtx;
9452 }
9453
9454 if (TREE_CODE_CLASS (code) == tcc_unary
9455 || code == BIT_FIELD_REF
9456 || code == COMPONENT_REF
9457 || code == INDIRECT_REF)
9458 return expand_expr (treeop0, const0_rtx, VOIDmode,
9459 modifier);
9460
9461 else if (TREE_CODE_CLASS (code) == tcc_binary
9462 || TREE_CODE_CLASS (code) == tcc_comparison
9463 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9464 {
9465 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9466 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9467 return const0_rtx;
9468 }
9469
9470 target = 0;
9471 }
9472
9473 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9474 target = 0;
9475
9476 /* Use subtarget as the target for operand 0 of a binary operation. */
9477 subtarget = get_subtarget (target);
9478 original_target = target;
9479
9480 switch (code)
9481 {
9482 case LABEL_DECL:
9483 {
9484 tree function = decl_function_context (exp);
9485
9486 temp = label_rtx (exp);
9487 temp = gen_rtx_LABEL_REF (Pmode, temp);
9488
9489 if (function != current_function_decl
9490 && function != 0)
9491 LABEL_REF_NONLOCAL_P (temp) = 1;
9492
9493 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9494 return temp;
9495 }
9496
9497 case SSA_NAME:
9498 /* ??? ivopts calls the expander without any preparation from
9499 out-of-ssa. So fake instructions as if this were an access to the
9500 base variable. This unnecessarily allocates a pseudo; see how we can
9501 reuse it if partition base vars have it set already. */
9502 if (!currently_expanding_to_rtl)
9503 {
9504 tree var = SSA_NAME_VAR (exp);
9505 if (var && DECL_RTL_SET_P (var))
9506 return DECL_RTL (var);
9507 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9508 LAST_VIRTUAL_REGISTER + 1);
9509 }
9510
9511 g = get_gimple_for_ssa_name (exp);
9512 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9513 if (g == NULL
9514 && modifier == EXPAND_INITIALIZER
9515 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9516 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9517 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9518 g = SSA_NAME_DEF_STMT (exp);
9519 if (g)
9520 {
9521 rtx r;
9522 ops.code = gimple_assign_rhs_code (g);
9523 switch (get_gimple_rhs_class (ops.code))
9524 {
9525 case GIMPLE_TERNARY_RHS:
9526 ops.op2 = gimple_assign_rhs3 (g);
9527 /* Fallthru */
9528 case GIMPLE_BINARY_RHS:
9529 ops.op1 = gimple_assign_rhs2 (g);
9530
9531 /* Try to expand a conditional compare. */
9532 if (targetm.gen_ccmp_first)
9533 {
9534 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9535 r = expand_ccmp_expr (g);
9536 if (r)
9537 break;
9538 }
9539 /* Fallthru */
9540 case GIMPLE_UNARY_RHS:
9541 ops.op0 = gimple_assign_rhs1 (g);
9542 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9543 ops.location = gimple_location (g);
9544 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9545 break;
9546 case GIMPLE_SINGLE_RHS:
9547 {
9548 location_t saved_loc = curr_insn_location ();
9549 set_curr_insn_location (gimple_location (g));
9550 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9551 tmode, modifier, NULL, inner_reference_p);
9552 set_curr_insn_location (saved_loc);
9553 break;
9554 }
9555 default:
9556 gcc_unreachable ();
9557 }
9558 if (REG_P (r) && !REG_EXPR (r))
9559 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9560 return r;
9561 }
9562
9563 ssa_name = exp;
9564 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9565 exp = SSA_NAME_VAR (ssa_name);
9566 goto expand_decl_rtl;
9567
9568 case PARM_DECL:
9569 case VAR_DECL:
9570 /* If a static var's type was incomplete when the decl was written,
9571 but the type is complete now, lay out the decl now. */
9572 if (DECL_SIZE (exp) == 0
9573 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9574 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9575 layout_decl (exp, 0);
9576
9577 /* ... fall through ... */
9578
9579 case FUNCTION_DECL:
9580 case RESULT_DECL:
9581 decl_rtl = DECL_RTL (exp);
9582 expand_decl_rtl:
9583 gcc_assert (decl_rtl);
9584 decl_rtl = copy_rtx (decl_rtl);
9585 /* Record writes to register variables. */
9586 if (modifier == EXPAND_WRITE
9587 && REG_P (decl_rtl)
9588 && HARD_REGISTER_P (decl_rtl))
9589 add_to_hard_reg_set (&crtl->asm_clobbers,
9590 GET_MODE (decl_rtl), REGNO (decl_rtl));
9591
9592 /* Ensure the variable is marked as used even if it doesn't go
9593 through a parser. If it hasn't been used yet, write out an
9594 external definition. */
9595 TREE_USED (exp) = 1;
9596
9597 /* Show we haven't gotten RTL for this yet. */
9598 temp = 0;
9599
9600 /* Variables inherited from containing functions should have
9601 been lowered by this point. */
9602 context = decl_function_context (exp);
9603 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9604 || context == current_function_decl
9605 || TREE_STATIC (exp)
9606 || DECL_EXTERNAL (exp)
9607 /* ??? C++ creates functions that are not TREE_STATIC. */
9608 || TREE_CODE (exp) == FUNCTION_DECL);
9609
9610 /* This is the case of an array whose size is to be determined
9611 from its initializer, while the initializer is still being parsed.
9612 ??? We aren't parsing while expanding anymore. */
9613
9614 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9615 temp = validize_mem (decl_rtl);
9616
9617 /* If DECL_RTL is memory, we are in the normal case; if the
9618 address is not valid, get the address into a register. */
9619
9620 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9621 {
9622 if (alt_rtl)
9623 *alt_rtl = decl_rtl;
9624 decl_rtl = use_anchored_address (decl_rtl);
9625 if (modifier != EXPAND_CONST_ADDRESS
9626 && modifier != EXPAND_SUM
9627 && !memory_address_addr_space_p (DECL_MODE (exp),
9628 XEXP (decl_rtl, 0),
9629 MEM_ADDR_SPACE (decl_rtl)))
9630 temp = replace_equiv_address (decl_rtl,
9631 copy_rtx (XEXP (decl_rtl, 0)));
9632 }
9633
9634 /* If we got something, return it. But first, set the alignment
9635 if the address is a register. */
9636 if (temp != 0)
9637 {
9638 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9639 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9640
9641 return temp;
9642 }
9643
9644 /* If the mode of DECL_RTL does not match that of the decl,
9645 there are two cases: we are dealing with a BLKmode value
9646 that is returned in a register, or we are dealing with
9647 a promoted value. In the latter case, return a SUBREG
9648 of the wanted mode, but mark it so that we know that it
9649 was already extended. */
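/* Illustration (assuming a target that promotes narrow values to
   SImode, little-endian): a signed char variable promoted into an
   SImode register comes back here as (subreg:QI (reg:SI ...) 0) with
   SUBREG_PROMOTED_VAR_P set, so later code knows the upper bits are
   already sign-extended.  */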
9650 if (REG_P (decl_rtl)
9651 && DECL_MODE (exp) != BLKmode
9652 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9653 {
9654 machine_mode pmode;
9655
9656 /* Get the signedness to be used for this variable. Ensure we get
9657 the same mode we got when the variable was declared. */
9658 if (code == SSA_NAME
9659 && (g = SSA_NAME_DEF_STMT (ssa_name))
9660 && gimple_code (g) == GIMPLE_CALL
9661 && !gimple_call_internal_p (g))
9662 pmode = promote_function_mode (type, mode, &unsignedp,
9663 gimple_call_fntype (g),
9664 2);
9665 else
9666 pmode = promote_decl_mode (exp, &unsignedp);
9667 gcc_assert (GET_MODE (decl_rtl) == pmode);
9668
9669 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9670 SUBREG_PROMOTED_VAR_P (temp) = 1;
9671 SUBREG_PROMOTED_SET (temp, unsignedp);
9672 return temp;
9673 }
9674
9675 return decl_rtl;
9676
9677 case INTEGER_CST:
9678 /* Given that TYPE_PRECISION (type) is not always equal to
9679 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9680 the former to the latter according to the signedness of the
9681 type. */
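/* For example: a constant -1 of a 3-bit signed type carries precision 3
   in the tree; wide_int::from sign-extends it here to the full
   precision of TYPE_MODE before immed_wide_int_const builds the RTL
   constant.  */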
9682 temp = immed_wide_int_const (wide_int::from
9683 (exp,
9684 GET_MODE_PRECISION (TYPE_MODE (type)),
9685 TYPE_SIGN (type)),
9686 TYPE_MODE (type));
9687 return temp;
9688
9689 case VECTOR_CST:
9690 {
9691 tree tmp = NULL_TREE;
9692 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9693 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9694 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9695 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9696 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9697 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9698 return const_vector_from_tree (exp);
9699 if (GET_MODE_CLASS (mode) == MODE_INT)
9700 {
9701 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9702 if (type_for_mode)
9703 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9704 }
9705 if (!tmp)
9706 {
9707 vec<constructor_elt, va_gc> *v;
9708 unsigned i;
9709 vec_alloc (v, VECTOR_CST_NELTS (exp));
9710 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9711 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9712 tmp = build_constructor (type, v);
9713 }
9714 return expand_expr (tmp, ignore ? const0_rtx : target,
9715 tmode, modifier);
9716 }
9717
9718 case CONST_DECL:
9719 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9720
9721 case REAL_CST:
9722 /* If optimized, generate immediate CONST_DOUBLE
9723 which will be turned into memory by reload if necessary.
9724
9725 We used to force a register so that loop.c could see it. But
9726 this does not allow gen_* patterns to perform optimizations with
9727 the constants. It also produces two insns in cases like "x = 1.0;".
9728 On most machines, floating-point constants are not permitted in
9729 many insns, so we'd end up copying it to a register in any case.
9730
9731 Now, we do the copying in expand_binop, if appropriate. */
9732 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9733 TYPE_MODE (TREE_TYPE (exp)));
9734
9735 case FIXED_CST:
9736 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9737 TYPE_MODE (TREE_TYPE (exp)));
9738
9739 case COMPLEX_CST:
9740 /* Handle evaluating a complex constant in a CONCAT target. */
9741 if (original_target && GET_CODE (original_target) == CONCAT)
9742 {
9743 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9744 rtx rtarg, itarg;
9745
9746 rtarg = XEXP (original_target, 0);
9747 itarg = XEXP (original_target, 1);
9748
9749 /* Move the real and imaginary parts separately. */
9750 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9751 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9752
9753 if (op0 != rtarg)
9754 emit_move_insn (rtarg, op0);
9755 if (op1 != itarg)
9756 emit_move_insn (itarg, op1);
9757
9758 return original_target;
9759 }
9760
9761 /* ... fall through ... */
9762
9763 case STRING_CST:
9764 temp = expand_expr_constant (exp, 1, modifier);
9765
9766 /* temp contains a constant address.
9767 On RISC machines where a constant address isn't valid,
9768 make some insns to get that address into a register. */
9769 if (modifier != EXPAND_CONST_ADDRESS
9770 && modifier != EXPAND_INITIALIZER
9771 && modifier != EXPAND_SUM
9772 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9773 MEM_ADDR_SPACE (temp)))
9774 return replace_equiv_address (temp,
9775 copy_rtx (XEXP (temp, 0)));
9776 return temp;
9777
9778 case SAVE_EXPR:
9779 {
9780 tree val = treeop0;
9781 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9782 inner_reference_p);
9783
9784 if (!SAVE_EXPR_RESOLVED_P (exp))
9785 {
9786 /* We can indeed still hit this case, typically via builtin
9787 expanders calling save_expr immediately before expanding
9788 something. Assume this means that we only have to deal
9789 with non-BLKmode values. */
9790 gcc_assert (GET_MODE (ret) != BLKmode);
9791
9792 val = build_decl (curr_insn_location (),
9793 VAR_DECL, NULL, TREE_TYPE (exp));
9794 DECL_ARTIFICIAL (val) = 1;
9795 DECL_IGNORED_P (val) = 1;
9796 treeop0 = val;
9797 TREE_OPERAND (exp, 0) = treeop0;
9798 SAVE_EXPR_RESOLVED_P (exp) = 1;
9799
9800 if (!CONSTANT_P (ret))
9801 ret = copy_to_reg (ret);
9802 SET_DECL_RTL (val, ret);
9803 }
9804
9805 return ret;
9806 }
9807
9808
9809 case CONSTRUCTOR:
9810 /* If we don't need the result, just ensure we evaluate any
9811 subexpressions. */
9812 if (ignore)
9813 {
9814 unsigned HOST_WIDE_INT idx;
9815 tree value;
9816
9817 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9818 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9819
9820 return const0_rtx;
9821 }
9822
9823 return expand_constructor (exp, target, modifier, false);
9824
9825 case TARGET_MEM_REF:
9826 {
9827 addr_space_t as
9828 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9829 enum insn_code icode;
9830 unsigned int align;
9831
9832 op0 = addr_for_mem_ref (exp, as, true);
9833 op0 = memory_address_addr_space (mode, op0, as);
9834 temp = gen_rtx_MEM (mode, op0);
9835 set_mem_attributes (temp, exp, 0);
9836 set_mem_addr_space (temp, as);
9837 align = get_object_alignment (exp);
9838 if (modifier != EXPAND_WRITE
9839 && modifier != EXPAND_MEMORY
9840 && mode != BLKmode
9841 && align < GET_MODE_ALIGNMENT (mode)
9842 /* If the target does not have special handling for unaligned
9843 loads of MODE, it can use regular moves for them. */
9844 && ((icode = optab_handler (movmisalign_optab, mode))
9845 != CODE_FOR_nothing))
9846 {
9847 struct expand_operand ops[2];
9848
9849 /* We've already validated the memory, and we're creating a
9850 new pseudo destination. The predicates really can't fail,
9851 nor can the generator. */
9852 create_output_operand (&ops[0], NULL_RTX, mode);
9853 create_fixed_operand (&ops[1], temp);
9854 expand_insn (icode, 2, ops);
9855 temp = ops[0].value;
9856 }
9857 return temp;
9858 }
9859
9860 case MEM_REF:
9861 {
9862 addr_space_t as
9863 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9864 machine_mode address_mode;
9865 tree base = TREE_OPERAND (exp, 0);
9866 gimple def_stmt;
9867 enum insn_code icode;
9868 unsigned align;
9869 /* Handle expansion of non-aliased memory with non-BLKmode. That
9870 might end up in a register. */
9871 if (mem_ref_refers_to_non_mem_p (exp))
9872 {
9873 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9874 base = TREE_OPERAND (base, 0);
9875 if (offset == 0
9876 && tree_fits_uhwi_p (TYPE_SIZE (type))
9877 && (GET_MODE_BITSIZE (DECL_MODE (base))
9878 == tree_to_uhwi (TYPE_SIZE (type))))
9879 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9880 target, tmode, modifier);
9881 if (TYPE_MODE (type) == BLKmode)
9882 {
9883 temp = assign_stack_temp (DECL_MODE (base),
9884 GET_MODE_SIZE (DECL_MODE (base)));
9885 store_expr (base, temp, 0, false);
9886 temp = adjust_address (temp, BLKmode, offset);
9887 set_mem_size (temp, int_size_in_bytes (type));
9888 return temp;
9889 }
9890 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9891 bitsize_int (offset * BITS_PER_UNIT));
9892 return expand_expr (exp, target, tmode, modifier);
9893 }
9894 address_mode = targetm.addr_space.address_mode (as);
9895 base = TREE_OPERAND (exp, 0);
9896 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9897 {
9898 tree mask = gimple_assign_rhs2 (def_stmt);
9899 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9900 gimple_assign_rhs1 (def_stmt), mask);
9901 TREE_OPERAND (exp, 0) = base;
9902 }
9903 align = get_object_alignment (exp);
9904 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9905 op0 = memory_address_addr_space (mode, op0, as);
9906 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9907 {
9908 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9909 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9910 op0 = memory_address_addr_space (mode, op0, as);
9911 }
9912 temp = gen_rtx_MEM (mode, op0);
9913 set_mem_attributes (temp, exp, 0);
9914 set_mem_addr_space (temp, as);
9915 if (TREE_THIS_VOLATILE (exp))
9916 MEM_VOLATILE_P (temp) = 1;
9917 if (modifier != EXPAND_WRITE
9918 && modifier != EXPAND_MEMORY
9919 && !inner_reference_p
9920 && mode != BLKmode
9921 && align < GET_MODE_ALIGNMENT (mode))
9922 {
9923 if ((icode = optab_handler (movmisalign_optab, mode))
9924 != CODE_FOR_nothing)
9925 {
9926 struct expand_operand ops[2];
9927
9928 /* We've already validated the memory, and we're creating a
9929 new pseudo destination. The predicates really can't fail,
9930 nor can the generator. */
9931 create_output_operand (&ops[0], NULL_RTX, mode);
9932 create_fixed_operand (&ops[1], temp);
9933 expand_insn (icode, 2, ops);
9934 temp = ops[0].value;
9935 }
9936 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9937 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9938 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9939 (modifier == EXPAND_STACK_PARM
9940 ? NULL_RTX : target),
9941 mode, mode);
9942 }
9943 return temp;
9944 }
9945
9946 case ARRAY_REF:
9947
9948 {
9949 tree array = treeop0;
9950 tree index = treeop1;
9951 tree init;
9952
9953 /* Fold an expression like: "foo"[2].
9954 This is not done in fold so it won't happen inside &.
9955 Don't fold if this is for wide characters since it's too
9956 difficult to do correctly and this is a very rare case. */
9957
9958 if (modifier != EXPAND_CONST_ADDRESS
9959 && modifier != EXPAND_INITIALIZER
9960 && modifier != EXPAND_MEMORY)
9961 {
9962 tree t = fold_read_from_constant_string (exp);
9963
9964 if (t)
9965 return expand_expr (t, target, tmode, modifier);
9966 }
9967
9968 /* If this is a constant index into a constant array,
9969 just get the value from the array. Handle both the cases when
9970 we have an explicit constructor and when our operand is a variable
9971 that was declared const. */
9972
9973 if (modifier != EXPAND_CONST_ADDRESS
9974 && modifier != EXPAND_INITIALIZER
9975 && modifier != EXPAND_MEMORY
9976 && TREE_CODE (array) == CONSTRUCTOR
9977 && ! TREE_SIDE_EFFECTS (array)
9978 && TREE_CODE (index) == INTEGER_CST)
9979 {
9980 unsigned HOST_WIDE_INT ix;
9981 tree field, value;
9982
9983 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9984 field, value)
9985 if (tree_int_cst_equal (field, index))
9986 {
9987 if (!TREE_SIDE_EFFECTS (value))
9988 return expand_expr (fold (value), target, tmode, modifier);
9989 break;
9990 }
9991 }
9992
9993 else if (optimize >= 1
9994 && modifier != EXPAND_CONST_ADDRESS
9995 && modifier != EXPAND_INITIALIZER
9996 && modifier != EXPAND_MEMORY
9997 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9998 && TREE_CODE (index) == INTEGER_CST
9999 && (TREE_CODE (array) == VAR_DECL
10000 || TREE_CODE (array) == CONST_DECL)
10001 && (init = ctor_for_folding (array)) != error_mark_node)
10002 {
10003 if (init == NULL_TREE)
10004 {
10005 tree value = build_zero_cst (type);
10006 if (TREE_CODE (value) == CONSTRUCTOR)
10007 {
10008 /* If VALUE is a CONSTRUCTOR, this optimization is only
10009 useful if this doesn't store the CONSTRUCTOR into
10010 memory. If it does, it is more efficient to just
10011 load the data from the array directly. */
10012 rtx ret = expand_constructor (value, target,
10013 modifier, true);
10014 if (ret == NULL_RTX)
10015 value = NULL_TREE;
10016 }
10017
10018 if (value)
10019 return expand_expr (value, target, tmode, modifier);
10020 }
10021 else if (TREE_CODE (init) == CONSTRUCTOR)
10022 {
10023 unsigned HOST_WIDE_INT ix;
10024 tree field, value;
10025
10026 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10027 field, value)
10028 if (tree_int_cst_equal (field, index))
10029 {
10030 if (TREE_SIDE_EFFECTS (value))
10031 break;
10032
10033 if (TREE_CODE (value) == CONSTRUCTOR)
10034 {
10035 /* If VALUE is a CONSTRUCTOR, this
10036 optimization is only useful if
10037 this doesn't store the CONSTRUCTOR
10038 into memory. If it does, it is more
10039 efficient to just load the data from
10040 the array directly. */
10041 rtx ret = expand_constructor (value, target,
10042 modifier, true);
10043 if (ret == NULL_RTX)
10044 break;
10045 }
10046
10047 return
10048 expand_expr (fold (value), target, tmode, modifier);
10049 }
10050 }
10051 else if (TREE_CODE (init) == STRING_CST)
10052 {
10053 tree low_bound = array_ref_low_bound (exp);
10054 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10055
10056 /* Optimize the special case of a zero lower bound.
10057
10058 We convert the lower bound to sizetype to avoid problems
10059 with constant folding. E.g. suppose the lower bound is
10060 1 and its mode is QI. Without the conversion
10061 (ARRAY + (INDEX - (unsigned char)1))
10062 becomes
10063 (ARRAY + (-(unsigned char)1) + INDEX)
10064 which becomes
10065 (ARRAY + 255 + INDEX). Oops! */
10066 if (!integer_zerop (low_bound))
10067 index1 = size_diffop_loc (loc, index1,
10068 fold_convert_loc (loc, sizetype,
10069 low_bound));
10070
10071 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10072 {
10073 tree type = TREE_TYPE (TREE_TYPE (init));
10074 machine_mode mode = TYPE_MODE (type);
10075
10076 if (GET_MODE_CLASS (mode) == MODE_INT
10077 && GET_MODE_SIZE (mode) == 1)
10078 return gen_int_mode (TREE_STRING_POINTER (init)
10079 [TREE_INT_CST_LOW (index1)],
10080 mode);
10081 }
10082 }
10083 }
10084 }
10085 goto normal_inner_ref;
10086
10087 case COMPONENT_REF:
10088 /* If the operand is a CONSTRUCTOR, we can just extract the
10089 appropriate field if it is present. */
10090 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10091 {
10092 unsigned HOST_WIDE_INT idx;
10093 tree field, value;
10094
10095 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10096 idx, field, value)
10097 if (field == treeop1
10098 /* We can normally use the value of the field in the
10099 CONSTRUCTOR. However, if this is a bitfield in
10100 an integral mode that we can fit in a HOST_WIDE_INT,
10101 we must mask only the number of bits in the bitfield,
10102 since this is done implicitly by the constructor. If
10103 the bitfield does not meet either of those conditions,
10104 we can't do this optimization. */
10105 && (! DECL_BIT_FIELD (field)
10106 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10107 && (GET_MODE_PRECISION (DECL_MODE (field))
10108 <= HOST_BITS_PER_WIDE_INT))))
10109 {
10110 if (DECL_BIT_FIELD (field)
10111 && modifier == EXPAND_STACK_PARM)
10112 target = 0;
10113 op0 = expand_expr (value, target, tmode, modifier);
10114 if (DECL_BIT_FIELD (field))
10115 {
10116 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10117 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10118
10119 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10120 {
10121 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10122 imode);
10123 op0 = expand_and (imode, op0, op1, target);
10124 }
10125 else
10126 {
10127 int count = GET_MODE_PRECISION (imode) - bitsize;
10128
10129 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10130 target, 0);
10131 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10132 target, 0);
10133 }
10134 }
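/* (Sketch: for a signed 5-bit field whose declared type has SImode,
   count is 27, so the value is shifted left by 27 and then
   arithmetically right by 27 to sign-extend it; the unsigned branch
   instead masks with (1 << 5) - 1.)  */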
10135
10136 return op0;
10137 }
10138 }
10139 goto normal_inner_ref;
10140
10141 case BIT_FIELD_REF:
10142 case ARRAY_RANGE_REF:
10143 normal_inner_ref:
10144 {
10145 machine_mode mode1, mode2;
10146 HOST_WIDE_INT bitsize, bitpos;
10147 tree offset;
10148 int volatilep = 0, must_force_mem;
10149 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10150 &mode1, &unsignedp, &volatilep, true);
10151 rtx orig_op0, memloc;
10152 bool clear_mem_expr = false;
10153
10154 /* If we got back the original object, something is wrong. Perhaps
10155 we are evaluating an expression too early. In any event, don't
10156 infinitely recurse. */
10157 gcc_assert (tem != exp);
10158
10159 /* If TEM's type is a union of variable size, pass TARGET to the inner
10160 computation, since it will need a temporary and TARGET is known
10161 to suffice. This occurs in unchecked conversion in Ada. */
10162 orig_op0 = op0
10163 = expand_expr_real (tem,
10164 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10165 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10166 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10167 != INTEGER_CST)
10168 && modifier != EXPAND_STACK_PARM
10169 ? target : NULL_RTX),
10170 VOIDmode,
10171 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10172 NULL, true);
10173
10174 /* If the field has a mode, we want to access it in the
10175 field's mode, not the computed mode.
10176 If a MEM has VOIDmode (external with incomplete type),
10177 use BLKmode for it instead. */
10178 if (MEM_P (op0))
10179 {
10180 if (mode1 != VOIDmode)
10181 op0 = adjust_address (op0, mode1, 0);
10182 else if (GET_MODE (op0) == VOIDmode)
10183 op0 = adjust_address (op0, BLKmode, 0);
10184 }
10185
10186 mode2
10187 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10188
10189 /* If we have either an offset, a BLKmode result, or a reference
10190 outside the underlying object, we must force it to memory.
10191 Such a case can occur in Ada if we have unchecked conversion
10192 of an expression from a scalar type to an aggregate type or
10193 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10194 passed a partially uninitialized object or a view-conversion
10195 to a larger size. */
10196 must_force_mem = (offset
10197 || mode1 == BLKmode
10198 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10199
10200 /* Handle CONCAT first. */
10201 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10202 {
10203 if (bitpos == 0
10204 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10205 return op0;
10206 if (bitpos == 0
10207 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10208 && bitsize)
10209 {
10210 op0 = XEXP (op0, 0);
10211 mode2 = GET_MODE (op0);
10212 }
10213 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10214 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10215 && bitpos
10216 && bitsize)
10217 {
10218 op0 = XEXP (op0, 1);
10219 bitpos = 0;
10220 mode2 = GET_MODE (op0);
10221 }
10222 else
10223 /* Otherwise force into memory. */
10224 must_force_mem = 1;
10225 }
10226
10227 /* If this is a constant, put it in a register if it is a legitimate
10228 constant and we don't need a memory reference. */
10229 if (CONSTANT_P (op0)
10230 && mode2 != BLKmode
10231 && targetm.legitimate_constant_p (mode2, op0)
10232 && !must_force_mem)
10233 op0 = force_reg (mode2, op0);
10234
10235 /* Otherwise, if this is a constant, try to force it to the constant
10236 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10237 is a legitimate constant. */
10238 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10239 op0 = validize_mem (memloc);
10240
10241 /* Otherwise, if this is a constant or the object is not in memory
10242 and need be, put it there. */
10243 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10244 {
10245 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10246 emit_move_insn (memloc, op0);
10247 op0 = memloc;
10248 clear_mem_expr = true;
10249 }
10250
10251 if (offset)
10252 {
10253 machine_mode address_mode;
10254 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10255 EXPAND_SUM);
10256
10257 gcc_assert (MEM_P (op0));
10258
10259 address_mode = get_address_mode (op0);
10260 if (GET_MODE (offset_rtx) != address_mode)
10261 {
10262 /* We cannot be sure that the RTL in offset_rtx is valid outside
10263 of a memory address context, so force it into a register
10264 before attempting to convert it to the desired mode. */
10265 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10266 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10267 }
10268
10269 /* See the comment in expand_assignment for the rationale. */
10270 if (mode1 != VOIDmode
10271 && bitpos != 0
10272 && bitsize > 0
10273 && (bitpos % bitsize) == 0
10274 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10275 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10276 {
10277 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10278 bitpos = 0;
10279 }
10280
10281 op0 = offset_address (op0, offset_rtx,
10282 highest_pow2_factor (offset));
10283 }
10284
10285 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10286 record its alignment as BIGGEST_ALIGNMENT. */
10287 if (MEM_P (op0) && bitpos == 0 && offset != 0
10288 && is_aligning_offset (offset, tem))
10289 set_mem_align (op0, BIGGEST_ALIGNMENT);
10290
10291 /* Don't forget about volatility even if this is a bitfield. */
10292 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10293 {
10294 if (op0 == orig_op0)
10295 op0 = copy_rtx (op0);
10296
10297 MEM_VOLATILE_P (op0) = 1;
10298 }
10299
10300 /* In cases where an aligned union has an unaligned object
10301 as a field, we might be extracting a BLKmode value from
10302 an integer-mode (e.g., SImode) object. Handle this case
10303 by doing the extract into an object as wide as the field
10304 (which we know to be the width of a basic mode), then
10305 storing into memory, and changing the mode to BLKmode. */
10306 if (mode1 == VOIDmode
10307 || REG_P (op0) || GET_CODE (op0) == SUBREG
10308 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10309 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10310 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10311 && modifier != EXPAND_CONST_ADDRESS
10312 && modifier != EXPAND_INITIALIZER
10313 && modifier != EXPAND_MEMORY)
10314 /* If the bitfield is volatile and the bitsize
10315 is narrower than the access size of the bitfield,
10316 we need to extract bitfields from the access. */
10317 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10318 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10319 && mode1 != BLKmode
10320 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10321 /* If the field isn't aligned enough to fetch as a memref,
10322 fetch it as a bit field. */
10323 || (mode1 != BLKmode
10324 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10325 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10326 || (MEM_P (op0)
10327 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10328 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10329 && modifier != EXPAND_MEMORY
10330 && ((modifier == EXPAND_CONST_ADDRESS
10331 || modifier == EXPAND_INITIALIZER)
10332 ? STRICT_ALIGNMENT
10333 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10334 || (bitpos % BITS_PER_UNIT != 0)))
10335 /* If the type and the field are a constant size and the
10336 size of the type isn't the same size as the bitfield,
10337 we must use bitfield operations. */
10338 || (bitsize >= 0
10339 && TYPE_SIZE (TREE_TYPE (exp))
10340 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10341 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10342 bitsize)))
10343 {
10344 machine_mode ext_mode = mode;
10345
10346 if (ext_mode == BLKmode
10347 && ! (target != 0 && MEM_P (op0)
10348 && MEM_P (target)
10349 && bitpos % BITS_PER_UNIT == 0))
10350 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10351
10352 if (ext_mode == BLKmode)
10353 {
10354 if (target == 0)
10355 target = assign_temp (type, 1, 1);
10356
10357 /* ??? Unlike the similar test a few lines below, this one is
10358 very likely obsolete. */
10359 if (bitsize == 0)
10360 return target;
10361
10362 /* In this case, BITPOS must start at a byte boundary and
10363 TARGET, if specified, must be a MEM. */
10364 gcc_assert (MEM_P (op0)
10365 && (!target || MEM_P (target))
10366 && !(bitpos % BITS_PER_UNIT));
10367
10368 emit_block_move (target,
10369 adjust_address (op0, VOIDmode,
10370 bitpos / BITS_PER_UNIT),
10371 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10372 / BITS_PER_UNIT),
10373 (modifier == EXPAND_STACK_PARM
10374 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10375
10376 return target;
10377 }
10378
10379 /* If we have nothing to extract, the result will be 0 for targets
10380 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10381 return 0 for the sake of consistency, as reading a zero-sized
10382 bitfield is valid in Ada and the value is fully specified. */
10383 if (bitsize == 0)
10384 return const0_rtx;
10385
10386 op0 = validize_mem (op0);
10387
10388 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10389 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10390
10391 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10392 (modifier == EXPAND_STACK_PARM
10393 ? NULL_RTX : target),
10394 ext_mode, ext_mode);
10395
10396 /* If the result is a record type and BITSIZE is narrower than
10397 the mode of OP0, an integral mode, and this is a big endian
10398 machine, we must put the field into the high-order bits. */
10399 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10400 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10401 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10402 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10403 GET_MODE_BITSIZE (GET_MODE (op0))
10404 - bitsize, op0, 1);
10405
10406 /* If the result type is BLKmode, store the data into a temporary
10407 of the appropriate type, but with the mode corresponding to the
10408 mode for the data we have (op0's mode). */
10409 if (mode == BLKmode)
10410 {
10411 rtx new_rtx
10412 = assign_stack_temp_for_type (ext_mode,
10413 GET_MODE_BITSIZE (ext_mode),
10414 type);
10415 emit_move_insn (new_rtx, op0);
10416 op0 = copy_rtx (new_rtx);
10417 PUT_MODE (op0, BLKmode);
10418 }
10419
10420 return op0;
10421 }
10422
10423 /* If the result is BLKmode, use that to access the object
10424 now as well. */
10425 if (mode == BLKmode)
10426 mode1 = BLKmode;
10427
10428 /* Get a reference to just this component. */
10429 if (modifier == EXPAND_CONST_ADDRESS
10430 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10431 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10432 else
10433 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10434
10435 if (op0 == orig_op0)
10436 op0 = copy_rtx (op0);
10437
10438 set_mem_attributes (op0, exp, 0);
10439
10440 if (REG_P (XEXP (op0, 0)))
10441 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10442
10443 /* If op0 is a temporary because the original expression was forced
10444 to memory, clear MEM_EXPR so that the original expression cannot
10445 be marked as addressable through MEM_EXPR of the temporary. */
10446 if (clear_mem_expr)
10447 set_mem_expr (op0, NULL_TREE);
10448
10449 MEM_VOLATILE_P (op0) |= volatilep;
10450 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10451 || modifier == EXPAND_CONST_ADDRESS
10452 || modifier == EXPAND_INITIALIZER)
10453 return op0;
10454
10455 if (target == 0)
10456 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10457
10458 convert_move (target, op0, unsignedp);
10459 return target;
10460 }
10461
10462 case OBJ_TYPE_REF:
10463 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10464
10465 case CALL_EXPR:
10466 /* All valid uses of __builtin_va_arg_pack () are removed during
10467 inlining. */
10468 if (CALL_EXPR_VA_ARG_PACK (exp))
10469 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10470 {
10471 tree fndecl = get_callee_fndecl (exp), attr;
10472
10473 if (fndecl
10474 && (attr = lookup_attribute ("error",
10475 DECL_ATTRIBUTES (fndecl))) != NULL)
10476 error ("%Kcall to %qs declared with attribute error: %s",
10477 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10478 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10479 if (fndecl
10480 && (attr = lookup_attribute ("warning",
10481 DECL_ATTRIBUTES (fndecl))) != NULL)
10482 warning_at (tree_nonartificial_location (exp),
10483 0, "%Kcall to %qs declared with attribute warning: %s",
10484 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10485 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10486
10487 /* Check for a built-in function. */
10488 if (fndecl && DECL_BUILT_IN (fndecl))
10489 {
10490 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10491 if (CALL_WITH_BOUNDS_P (exp))
10492 return expand_builtin_with_bounds (exp, target, subtarget,
10493 tmode, ignore);
10494 else
10495 return expand_builtin (exp, target, subtarget, tmode, ignore);
10496 }
10497 }
10498 return expand_call (exp, target, ignore);
10499
10500 case VIEW_CONVERT_EXPR:
10501 op0 = NULL_RTX;
10502
10503 /* If we are converting to BLKmode, try to avoid an intermediate
10504 temporary by fetching an inner memory reference. */
10505 if (mode == BLKmode
10506 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10507 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10508 && handled_component_p (treeop0))
10509 {
10510 machine_mode mode1;
10511 HOST_WIDE_INT bitsize, bitpos;
10512 tree offset;
10513 int unsignedp;
10514 int volatilep = 0;
10515 tree tem
10516 = get_inner_reference (treeop0, &bitsize, &bitpos,
10517 &offset, &mode1, &unsignedp, &volatilep,
10518 true);
10519 rtx orig_op0;
10520
10521 /* ??? We should work harder and deal with non-zero offsets. */
10522 if (!offset
10523 && (bitpos % BITS_PER_UNIT) == 0
10524 && bitsize >= 0
10525 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10526 {
10527 /* See the normal_inner_ref case for the rationale. */
10528 orig_op0
10529 = expand_expr_real (tem,
10530 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10531 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10532 != INTEGER_CST)
10533 && modifier != EXPAND_STACK_PARM
10534 ? target : NULL_RTX),
10535 VOIDmode,
10536 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10537 NULL, true);
10538
10539 if (MEM_P (orig_op0))
10540 {
10541 op0 = orig_op0;
10542
10543 /* Get a reference to just this component. */
10544 if (modifier == EXPAND_CONST_ADDRESS
10545 || modifier == EXPAND_SUM
10546 || modifier == EXPAND_INITIALIZER)
10547 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10548 else
10549 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10550
10551 if (op0 == orig_op0)
10552 op0 = copy_rtx (op0);
10553
10554 set_mem_attributes (op0, treeop0, 0);
10555 if (REG_P (XEXP (op0, 0)))
10556 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10557
10558 MEM_VOLATILE_P (op0) |= volatilep;
10559 }
10560 }
10561 }
10562
10563 if (!op0)
10564 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10565 NULL, inner_reference_p);
10566
10567 /* If the input and output modes are both the same, we are done. */
10568 if (mode == GET_MODE (op0))
10569 ;
10570 /* If neither mode is BLKmode, and both modes are the same size
10571 then we can use gen_lowpart. */
10572 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10573 && (GET_MODE_PRECISION (mode)
10574 == GET_MODE_PRECISION (GET_MODE (op0)))
10575 && !COMPLEX_MODE_P (GET_MODE (op0)))
10576 {
10577 if (GET_CODE (op0) == SUBREG)
10578 op0 = force_reg (GET_MODE (op0), op0);
10579 temp = gen_lowpart_common (mode, op0);
10580 if (temp)
10581 op0 = temp;
10582 else
10583 {
10584 if (!REG_P (op0) && !MEM_P (op0))
10585 op0 = force_reg (GET_MODE (op0), op0);
10586 op0 = gen_lowpart (mode, op0);
10587 }
10588 }
10589 /* If both types are integral, convert from one mode to the other. */
10590 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10591 op0 = convert_modes (mode, GET_MODE (op0), op0,
10592 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10593 /* If the output type is a bit-field type, do an extraction. */
10594 else if (reduce_bit_field)
10595 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10596 TYPE_UNSIGNED (type), NULL_RTX,
10597 mode, mode);
10598 /* As a last resort, spill op0 to memory, and reload it in a
10599 different mode. */
10600 else if (!MEM_P (op0))
10601 {
10602 /* If the operand is not a MEM, force it into memory. Since we
10603 are going to be changing the mode of the MEM, don't call
10604 force_const_mem for constants because we don't allow pool
10605 constants to change mode. */
10606 tree inner_type = TREE_TYPE (treeop0);
10607
10608 gcc_assert (!TREE_ADDRESSABLE (exp));
10609
10610 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10611 target
10612 = assign_stack_temp_for_type
10613 (TYPE_MODE (inner_type),
10614 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10615
10616 emit_move_insn (target, op0);
10617 op0 = target;
10618 }
10619
10620 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10621 output type is such that the operand is known to be aligned, indicate
10622 that it is. Otherwise, we need only be concerned about alignment for
10623 non-BLKmode results. */
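/* Editorial sketch of when this matters (an assumption-based example):
   a VIEW_CONVERT_EXPR that reinterprets a 4-byte character buffer as an
   int can leave OP0 under-aligned for SImode.  The code below then
   either uses the target's movmisalign pattern, if it has one, or on
   STRICT_ALIGNMENT targets copies the bytes into an aligned stack
   temporary before loading.  */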
10624 if (MEM_P (op0))
10625 {
10626 enum insn_code icode;
10627
10628 if (TYPE_ALIGN_OK (type))
10629 {
10630 /* ??? Copying the MEM without substantially changing it might
10631 run afoul of the code handling volatile memory references in
10632 store_expr, which assumes that TARGET is returned unmodified
10633 if it has been used. */
10634 op0 = copy_rtx (op0);
10635 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10636 }
10637 else if (modifier != EXPAND_WRITE
10638 && modifier != EXPAND_MEMORY
10639 && !inner_reference_p
10640 && mode != BLKmode
10641 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10642 {
10643 /* If the target has special handling for unaligned loads of this
10644 mode, use it. */
10645 if ((icode = optab_handler (movmisalign_optab, mode))
10646 != CODE_FOR_nothing)
10647 {
10648 rtx reg, insn;
10649
10650 op0 = adjust_address (op0, mode, 0);
10651 /* We've already validated the memory, and we're creating a
10652 new pseudo destination. The predicates really can't
10653 fail. */
10654 reg = gen_reg_rtx (mode);
10655
10656 /* Nor can the insn generator. */
10657 insn = GEN_FCN (icode) (reg, op0);
10658 emit_insn (insn);
10659 return reg;
10660 }
10661 else if (STRICT_ALIGNMENT)
10662 {
10663 tree inner_type = TREE_TYPE (treeop0);
10664 HOST_WIDE_INT temp_size
10665 = MAX (int_size_in_bytes (inner_type),
10666 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10667 rtx new_rtx
10668 = assign_stack_temp_for_type (mode, temp_size, type);
10669 rtx new_with_op0_mode
10670 = adjust_address (new_rtx, GET_MODE (op0), 0);
10671
10672 gcc_assert (!TREE_ADDRESSABLE (exp));
10673
10674 if (GET_MODE (op0) == BLKmode)
10675 emit_block_move (new_with_op0_mode, op0,
10676 GEN_INT (GET_MODE_SIZE (mode)),
10677 (modifier == EXPAND_STACK_PARM
10678 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10679 else
10680 emit_move_insn (new_with_op0_mode, op0);
10681
10682 op0 = new_rtx;
10683 }
10684 }
10685
10686 op0 = adjust_address (op0, mode, 0);
10687 }
10688
10689 return op0;
10690
10691 case MODIFY_EXPR:
10692 {
10693 tree lhs = treeop0;
10694 tree rhs = treeop1;
10695 gcc_assert (ignore);
10696
10697 /* Check for |= or &= of a bitfield of size 1 into another bitfield
10698 of size 1. In this case (unless we need the result of the
10699 assignment), we can do this more efficiently with a
10700 test followed by an assignment, if necessary.
10701
10702 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10703 things change so we do, this code should be enhanced to
10704 support it. */
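/* Editorial example (the struct and field names are invented):

       struct s { unsigned a : 1, b : 1; } x;
       x.a |= x.b;

   is expanded below as "if (x.b) x.a = 1;", and "x.a &= x.b" as
   "if (!x.b) x.a = 0;".  */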
10705 if (TREE_CODE (lhs) == COMPONENT_REF
10706 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10707 || TREE_CODE (rhs) == BIT_AND_EXPR)
10708 && TREE_OPERAND (rhs, 0) == lhs
10709 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10710 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10711 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10712 {
10713 rtx_code_label *label = gen_label_rtx ();
10714 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10715 do_jump (TREE_OPERAND (rhs, 1),
10716 value ? label : 0,
10717 value ? 0 : label, -1);
10718 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10719 false);
10720 do_pending_stack_adjust ();
10721 emit_label (label);
10722 return const0_rtx;
10723 }
10724
10725 expand_assignment (lhs, rhs, false);
10726 return const0_rtx;
10727 }
10728
10729 case ADDR_EXPR:
10730 return expand_expr_addr_expr (exp, target, tmode, modifier);
10731
10732 case REALPART_EXPR:
10733 op0 = expand_normal (treeop0);
10734 return read_complex_part (op0, false);
10735
10736 case IMAGPART_EXPR:
10737 op0 = expand_normal (treeop0);
10738 return read_complex_part (op0, true);
10739
10740 case RETURN_EXPR:
10741 case LABEL_EXPR:
10742 case GOTO_EXPR:
10743 case SWITCH_EXPR:
10744 case ASM_EXPR:
10745 /* Expanded in cfgexpand.c. */
10746 gcc_unreachable ();
10747
10748 case TRY_CATCH_EXPR:
10749 case CATCH_EXPR:
10750 case EH_FILTER_EXPR:
10751 case TRY_FINALLY_EXPR:
10752 /* Lowered by tree-eh.c. */
10753 gcc_unreachable ();
10754
10755 case WITH_CLEANUP_EXPR:
10756 case CLEANUP_POINT_EXPR:
10757 case TARGET_EXPR:
10758 case CASE_LABEL_EXPR:
10759 case VA_ARG_EXPR:
10760 case BIND_EXPR:
10761 case INIT_EXPR:
10762 case CONJ_EXPR:
10763 case COMPOUND_EXPR:
10764 case PREINCREMENT_EXPR:
10765 case PREDECREMENT_EXPR:
10766 case POSTINCREMENT_EXPR:
10767 case POSTDECREMENT_EXPR:
10768 case LOOP_EXPR:
10769 case EXIT_EXPR:
10770 case COMPOUND_LITERAL_EXPR:
10771 /* Lowered by gimplify.c. */
10772 gcc_unreachable ();
10773
10774 case FDESC_EXPR:
10775 /* Function descriptors are not valid except for as
10776 initialization constants, and should not be expanded. */
10777 gcc_unreachable ();
10778
10779 case WITH_SIZE_EXPR:
10780 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10781 have pulled out the size to use in whatever context it needed. */
10782 return expand_expr_real (treeop0, original_target, tmode,
10783 modifier, alt_rtl, inner_reference_p);
10784
10785 default:
10786 return expand_expr_real_2 (&ops, target, tmode, modifier);
10787 }
10788 }
10789 \f
10790 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10791 signedness of TYPE), possibly returning the result in TARGET. */
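/* Editorial sketch of the effect (numbers chosen for illustration):
   reducing an SImode value to a 3-bit unsigned type masks it with
   (1 << 3) - 1, so 45 (0b101101) becomes 5; reducing it to a 3-bit
   signed type shifts left by 32 - 3 = 29 and arithmetically right by
   29, so 5 (0b101) becomes -3.  */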
10792 static rtx
10793 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10794 {
10795 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10796 if (target && GET_MODE (target) != GET_MODE (exp))
10797 target = 0;
10798 /* For constant values, reduce using build_int_cst_type. */
10799 if (CONST_INT_P (exp))
10800 {
10801 HOST_WIDE_INT value = INTVAL (exp);
10802 tree t = build_int_cst_type (type, value);
10803 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10804 }
10805 else if (TYPE_UNSIGNED (type))
10806 {
10807 machine_mode mode = GET_MODE (exp);
10808 rtx mask = immed_wide_int_const
10809 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10810 return expand_and (mode, exp, mask, target);
10811 }
10812 else
10813 {
10814 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10815 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10816 exp, count, target, 0);
10817 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10818 exp, count, target, 0);
10819 }
10820 }
10821 \f
10822 /* Subroutine of the above: return 1 if OFFSET corresponds to an offset
10823 that, when applied to the address of EXP, produces an address known
10824 to be aligned to more than BIGGEST_ALIGNMENT. */
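/* Editorial note on the shape recognized (a sketch, with ALIGN standing
   for the alignment implied by the mask): the offset must look like

       (-(sizetype) &EXP) & (ALIGN - 1)

   possibly wrapped in conversions, where ALIGN is a power of 2 larger
   than BIGGEST_ALIGNMENT / BITS_PER_UNIT; adding such an offset to &EXP
   rounds the address up to an ALIGN-byte boundary.  */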
10825
10826 static int
10827 is_aligning_offset (const_tree offset, const_tree exp)
10828 {
10829 /* Strip off any conversions. */
10830 while (CONVERT_EXPR_P (offset))
10831 offset = TREE_OPERAND (offset, 0);
10832
10833 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10834 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10835 if (TREE_CODE (offset) != BIT_AND_EXPR
10836 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10837 || compare_tree_int (TREE_OPERAND (offset, 1),
10838 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10839 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10840 return 0;
10841
10842 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10843 It must be NEGATE_EXPR. Then strip any more conversions. */
10844 offset = TREE_OPERAND (offset, 0);
10845 while (CONVERT_EXPR_P (offset))
10846 offset = TREE_OPERAND (offset, 0);
10847
10848 if (TREE_CODE (offset) != NEGATE_EXPR)
10849 return 0;
10850
10851 offset = TREE_OPERAND (offset, 0);
10852 while (CONVERT_EXPR_P (offset))
10853 offset = TREE_OPERAND (offset, 0);
10854
10855 /* This must now be the address of EXP. */
10856 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10857 }
10858 \f
10859 /* Return the tree node if ARG corresponds to a string constant, or zero
10860 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10861 in bytes within the string that ARG is accessing. The type of the
10862 offset will be `sizetype'. */
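/* Editorial examples (hedged; the declarations are invented): for an
   argument of the form &"hello"[2] this returns the STRING_CST "hello"
   with *PTR_OFFSET set to 2, and for

       static const char msg[] = "hi";
       ... msg + 1 ...

   it returns the initializer STRING_CST "hi" with *PTR_OFFSET set to 1.  */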
10863
10864 tree
10865 string_constant (tree arg, tree *ptr_offset)
10866 {
10867 tree array, offset, lower_bound;
10868 STRIP_NOPS (arg);
10869
10870 if (TREE_CODE (arg) == ADDR_EXPR)
10871 {
10872 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10873 {
10874 *ptr_offset = size_zero_node;
10875 return TREE_OPERAND (arg, 0);
10876 }
10877 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10878 {
10879 array = TREE_OPERAND (arg, 0);
10880 offset = size_zero_node;
10881 }
10882 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10883 {
10884 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10885 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10886 if (TREE_CODE (array) != STRING_CST
10887 && TREE_CODE (array) != VAR_DECL)
10888 return 0;
10889
10890 /* Check if the array has a nonzero lower bound. */
10891 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10892 if (!integer_zerop (lower_bound))
10893 {
10894 /* If the offset and base aren't both constants, return 0. */
10895 if (TREE_CODE (lower_bound) != INTEGER_CST)
10896 return 0;
10897 if (TREE_CODE (offset) != INTEGER_CST)
10898 return 0;
10899 /* Adjust offset by the lower bound. */
10900 offset = size_diffop (fold_convert (sizetype, offset),
10901 fold_convert (sizetype, lower_bound));
10902 }
10903 }
10904 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10905 {
10906 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10907 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10908 if (TREE_CODE (array) != ADDR_EXPR)
10909 return 0;
10910 array = TREE_OPERAND (array, 0);
10911 if (TREE_CODE (array) != STRING_CST
10912 && TREE_CODE (array) != VAR_DECL)
10913 return 0;
10914 }
10915 else
10916 return 0;
10917 }
10918 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10919 {
10920 tree arg0 = TREE_OPERAND (arg, 0);
10921 tree arg1 = TREE_OPERAND (arg, 1);
10922
10923 STRIP_NOPS (arg0);
10924 STRIP_NOPS (arg1);
10925
10926 if (TREE_CODE (arg0) == ADDR_EXPR
10927 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10928 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10929 {
10930 array = TREE_OPERAND (arg0, 0);
10931 offset = arg1;
10932 }
10933 else if (TREE_CODE (arg1) == ADDR_EXPR
10934 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10935 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10936 {
10937 array = TREE_OPERAND (arg1, 0);
10938 offset = arg0;
10939 }
10940 else
10941 return 0;
10942 }
10943 else
10944 return 0;
10945
10946 if (TREE_CODE (array) == STRING_CST)
10947 {
10948 *ptr_offset = fold_convert (sizetype, offset);
10949 return array;
10950 }
10951 else if (TREE_CODE (array) == VAR_DECL
10952 || TREE_CODE (array) == CONST_DECL)
10953 {
10954 int length;
10955 tree init = ctor_for_folding (array);
10956
10957 /* Variables initialized to string literals can be handled too. */
10958 if (init == error_mark_node
10959 || !init
10960 || TREE_CODE (init) != STRING_CST)
10961 return 0;
10962
10963 /* Avoid const char foo[4] = "abcde"; */
10964 if (DECL_SIZE_UNIT (array) == NULL_TREE
10965 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10966 || (length = TREE_STRING_LENGTH (init)) <= 0
10967 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10968 return 0;
10969
10970 /* If the variable is bigger than the string literal, OFFSET must be
10971 constant and within the bounds of the string literal. */
10972 offset = fold_convert (sizetype, offset);
10973 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10974 && (! tree_fits_uhwi_p (offset)
10975 || compare_tree_int (offset, length) >= 0))
10976 return 0;
10977
10978 *ptr_offset = offset;
10979 return init;
10980 }
10981
10982 return 0;
10983 }
10984 \f
10985 /* Generate code to calculate the exploded expression OPS
10986 using a store-flag instruction, and return an rtx for the result.
10987 OPS reflects a comparison.
10988
10989 If TARGET is nonzero, store the result there if convenient.
10990
10991 Return zero if there is no suitable set-flag instruction
10992 available on this machine.
10993
10994 Once expand_expr has been called on the arguments of the comparison,
10995 we are committed to doing the store flag, since it is not safe to
10996 re-evaluate the expression. We emit the store-flag insn by calling
10997 emit_store_flag, but only expand the arguments if we have a reason
10998 to believe that emit_store_flag will be successful. If we think that
10999 it will, but it isn't, we have to simulate the store-flag with a
11000 set/jump/set sequence. */
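/* Editorial sketch (an illustration, not a contract): for a comparison
   such as "a < b" this tries to emit one cstore-style insn that leaves
   0 or 1 in the result register; emit_store_flag_force supplies the
   compare/branch/move fallback mentioned above when no such insn
   exists.  */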
11001
11002 static rtx
11003 do_store_flag (sepops ops, rtx target, machine_mode mode)
11004 {
11005 enum rtx_code code;
11006 tree arg0, arg1, type;
11007 tree tem;
11008 machine_mode operand_mode;
11009 int unsignedp;
11010 rtx op0, op1;
11011 rtx subtarget = target;
11012 location_t loc = ops->location;
11013
11014 arg0 = ops->op0;
11015 arg1 = ops->op1;
11016
11017 /* Don't crash if the comparison was erroneous. */
11018 if (arg0 == error_mark_node || arg1 == error_mark_node)
11019 return const0_rtx;
11020
11021 type = TREE_TYPE (arg0);
11022 operand_mode = TYPE_MODE (type);
11023 unsignedp = TYPE_UNSIGNED (type);
11024
11025 /* We won't bother with BLKmode store-flag operations because it would mean
11026 passing a lot of information to emit_store_flag. */
11027 if (operand_mode == BLKmode)
11028 return 0;
11029
11030 /* We won't bother with store-flag operations involving function pointers
11031 when function pointers must be canonicalized before comparisons. */
11032 #ifdef HAVE_canonicalize_funcptr_for_compare
11033 if (HAVE_canonicalize_funcptr_for_compare
11034 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
11035 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
11036 == FUNCTION_TYPE))
11037 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
11038 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
11039 == FUNCTION_TYPE))))
11040 return 0;
11041 #endif
11042
11043 STRIP_NOPS (arg0);
11044 STRIP_NOPS (arg1);
11045
11046 /* For vector typed comparisons emit code to generate the desired
11047 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
11048 expander for this. */
11049 if (TREE_CODE (ops->type) == VECTOR_TYPE)
11050 {
11051 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11052 tree if_true = constant_boolean_node (true, ops->type);
11053 tree if_false = constant_boolean_node (false, ops->type);
11054 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
11055 }
11056
11057 /* Get the rtx comparison code to use. We know that EXP is a comparison
11058 operation of some type. Some comparisons against 1 and -1 can be
11059 converted to comparisons with zero. Do so here so that the tests
11060 below will be aware that we have a comparison with zero. These
11061 tests will not catch constants in the first operand, but constants
11062 are rarely passed as the first operand. */
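/* Editorial examples of the rewrites below:
       x < 1             becomes  x <= 0
       x <= -1 (signed)  becomes  x < 0
       x > -1  (signed)  becomes  x >= 0
       x >= 1            becomes  x > 0  */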
11063
11064 switch (ops->code)
11065 {
11066 case EQ_EXPR:
11067 code = EQ;
11068 break;
11069 case NE_EXPR:
11070 code = NE;
11071 break;
11072 case LT_EXPR:
11073 if (integer_onep (arg1))
11074 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11075 else
11076 code = unsignedp ? LTU : LT;
11077 break;
11078 case LE_EXPR:
11079 if (! unsignedp && integer_all_onesp (arg1))
11080 arg1 = integer_zero_node, code = LT;
11081 else
11082 code = unsignedp ? LEU : LE;
11083 break;
11084 case GT_EXPR:
11085 if (! unsignedp && integer_all_onesp (arg1))
11086 arg1 = integer_zero_node, code = GE;
11087 else
11088 code = unsignedp ? GTU : GT;
11089 break;
11090 case GE_EXPR:
11091 if (integer_onep (arg1))
11092 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11093 else
11094 code = unsignedp ? GEU : GE;
11095 break;
11096
11097 case UNORDERED_EXPR:
11098 code = UNORDERED;
11099 break;
11100 case ORDERED_EXPR:
11101 code = ORDERED;
11102 break;
11103 case UNLT_EXPR:
11104 code = UNLT;
11105 break;
11106 case UNLE_EXPR:
11107 code = UNLE;
11108 break;
11109 case UNGT_EXPR:
11110 code = UNGT;
11111 break;
11112 case UNGE_EXPR:
11113 code = UNGE;
11114 break;
11115 case UNEQ_EXPR:
11116 code = UNEQ;
11117 break;
11118 case LTGT_EXPR:
11119 code = LTGT;
11120 break;
11121
11122 default:
11123 gcc_unreachable ();
11124 }
11125
11126 /* Put a constant second. */
11127 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11128 || TREE_CODE (arg0) == FIXED_CST)
11129 {
11130 tem = arg0; arg0 = arg1; arg1 = tem;
11131 code = swap_condition (code);
11132 }
11133
11134 /* If this is an equality or inequality test of a single bit, we can
11135 do this by shifting the bit being tested to the low-order bit and
11136 masking the result with the constant 1. If the condition was EQ,
11137 we xor it with 1. This does not require an scc insn and is faster
11138 than an scc insn even if we have it.
11139
11140 The code to make this transformation was moved into fold_single_bit_test,
11141 so we just call into the folder and expand its result. */
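/* Editorial example of that folding: "(x & 4) != 0" becomes
   "(x >> 2) & 1", and the EQ form "(x & 4) == 0" additionally xors the
   result with 1.  */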
11142
11143 if ((code == NE || code == EQ)
11144 && integer_zerop (arg1)
11145 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11146 {
11147 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11148 if (srcstmt
11149 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11150 {
11151 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11152 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11153 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11154 gimple_assign_rhs1 (srcstmt),
11155 gimple_assign_rhs2 (srcstmt));
11156 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11157 if (temp)
11158 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11159 }
11160 }
11161
11162 if (! get_subtarget (target)
11163 || GET_MODE (subtarget) != operand_mode)
11164 subtarget = 0;
11165
11166 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11167
11168 if (target == 0)
11169 target = gen_reg_rtx (mode);
11170
11171 /* Try a cstore if possible. */
11172 return emit_store_flag_force (target, code, op0, op1,
11173 operand_mode, unsignedp,
11174 (TYPE_PRECISION (ops->type) == 1
11175 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11176 }
11177 \f
11178
11179 /* Stubs in case we haven't got a casesi insn. */
11180 #ifndef HAVE_casesi
11181 # define HAVE_casesi 0
11182 # define gen_casesi(a, b, c, d, e) (0)
11183 # define CODE_FOR_casesi CODE_FOR_nothing
11184 #endif
11185
11186 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11187 0 otherwise (i.e. if there is no casesi instruction).
11188
11189 DEFAULT_PROBABILITY is the probability of jumping to the default
11190 label. */
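/* Editorial note (paraphrasing the documented casesi named-pattern
   contract): the generated insn subtracts MINVAL from the index,
   compares the result against RANGE, jumps through TABLE_LABEL when in
   range, and to the default (or fallback) label otherwise.  */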
11191 int
11192 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11193 rtx table_label, rtx default_label, rtx fallback_label,
11194 int default_probability)
11195 {
11196 struct expand_operand ops[5];
11197 machine_mode index_mode = SImode;
11198 rtx op1, op2, index;
11199
11200 if (! HAVE_casesi)
11201 return 0;
11202
11203 /* Convert the index to SImode. */
11204 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11205 {
11206 machine_mode omode = TYPE_MODE (index_type);
11207 rtx rangertx = expand_normal (range);
11208
11209 /* We must handle the endpoints in the original mode. */
11210 index_expr = build2 (MINUS_EXPR, index_type,
11211 index_expr, minval);
11212 minval = integer_zero_node;
11213 index = expand_normal (index_expr);
11214 if (default_label)
11215 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11216 omode, 1, default_label,
11217 default_probability);
11218 /* Now we can safely truncate. */
11219 index = convert_to_mode (index_mode, index, 0);
11220 }
11221 else
11222 {
11223 if (TYPE_MODE (index_type) != index_mode)
11224 {
11225 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11226 index_expr = fold_convert (index_type, index_expr);
11227 }
11228
11229 index = expand_normal (index_expr);
11230 }
11231
11232 do_pending_stack_adjust ();
11233
11234 op1 = expand_normal (minval);
11235 op2 = expand_normal (range);
11236
11237 create_input_operand (&ops[0], index, index_mode);
11238 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11239 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11240 create_fixed_operand (&ops[3], table_label);
11241 create_fixed_operand (&ops[4], (default_label
11242 ? default_label
11243 : fallback_label));
11244 expand_jump_insn (CODE_FOR_casesi, 5, ops);
11245 return 1;
11246 }
11247
11248 /* Attempt to generate a tablejump instruction; same concept as casesi above. */
11249 #ifndef HAVE_tablejump
11250 #define HAVE_tablejump 0
11251 #define gen_tablejump(x, y) (0)
11252 #endif
11253
11254 /* Subroutine of the next function.
11255
11256 INDEX is the value being switched on, with the lowest value
11257 in the table already subtracted.
11258 MODE is its expected mode (needed if INDEX is constant).
11259 RANGE is the length of the jump table.
11260 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11261
11262 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11263 index value is out of range.
11264 DEFAULT_PROBABILITY is the probability of jumping to
11265 the default label. */
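/* Editorial sketch of the non-PIC dispatch emitted below (assuming an
   absolute case vector):

       pc = *(TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE))

   after INDEX has passed the bounds check against RANGE.  */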
11266
11267 static void
11268 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11269 rtx default_label, int default_probability)
11270 {
11271 rtx temp, vector;
11272
11273 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11274 cfun->cfg->max_jumptable_ents = INTVAL (range);
11275
11276 /* Do an unsigned comparison (in the proper mode) between the index
11277 expression and the value which represents the length of the range.
11278 Since we just finished subtracting the lower bound of the range
11279 from the index expression, this comparison allows us to simultaneously
11280 check that the original index expression value is both greater than
11281 or equal to the minimum value of the range and less than or equal to
11282 the maximum value of the range. */
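/* Editorial example (made-up case values 5..12, so RANGE is 7): an
   original index of 13 arrives here as 8, which is GTU 7; an original
   index of 3 wraps to a huge unsigned value, so the single comparison
   rejects both under- and overflow.  */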
11283
11284 if (default_label)
11285 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11286 default_label, default_probability);
11287
11288
11289 /* If index is in range, it must fit in Pmode.
11290 Convert to Pmode so we can index with it. */
11291 if (mode != Pmode)
11292 index = convert_to_mode (Pmode, index, 1);
11293
11294 /* Don't let a MEM slip through, because then INDEX that comes
11295 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11296 and break_out_memory_refs will go to work on it and mess it up. */
11297 #ifdef PIC_CASE_VECTOR_ADDRESS
11298 if (flag_pic && !REG_P (index))
11299 index = copy_to_mode_reg (Pmode, index);
11300 #endif
11301
11302 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11303 GET_MODE_SIZE, because this indicates how large insns are. The other
11304 uses should all be Pmode, because they are addresses. This code
11305 could fail if addresses and insns are not the same size. */
11306 index = simplify_gen_binary (MULT, Pmode, index,
11307 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11308 Pmode));
11309 index = simplify_gen_binary (PLUS, Pmode, index,
11310 gen_rtx_LABEL_REF (Pmode, table_label));
11311
11312 #ifdef PIC_CASE_VECTOR_ADDRESS
11313 if (flag_pic)
11314 index = PIC_CASE_VECTOR_ADDRESS (index);
11315 else
11316 #endif
11317 index = memory_address (CASE_VECTOR_MODE, index);
11318 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11319 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11320 convert_move (temp, vector, 0);
11321
11322 emit_jump_insn (gen_tablejump (temp, table_label));
11323
11324 /* If we are generating PIC code or if the table is PC-relative, the
11325 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11326 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11327 emit_barrier ();
11328 }
11329
11330 int
11331 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11332 rtx table_label, rtx default_label, int default_probability)
11333 {
11334 rtx index;
11335
11336 if (! HAVE_tablejump)
11337 return 0;
11338
11339 index_expr = fold_build2 (MINUS_EXPR, index_type,
11340 fold_convert (index_type, index_expr),
11341 fold_convert (index_type, minval));
11342 index = expand_normal (index_expr);
11343 do_pending_stack_adjust ();
11344
11345 do_tablejump (index, TYPE_MODE (index_type),
11346 convert_modes (TYPE_MODE (index_type),
11347 TYPE_MODE (TREE_TYPE (range)),
11348 expand_normal (range),
11349 TYPE_UNSIGNED (TREE_TYPE (range))),
11350 table_label, default_label, default_probability);
11351 return 1;
11352 }
11353
11354 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
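/* Editorial example: a V4SImode VECTOR_CST { 0, 1, 2, 3 } becomes
   (const_vector:V4SI [0 1 2 3]), while an all-zero constructor
   short-circuits to CONST0_RTX (V4SImode).  */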
11355 static rtx
11356 const_vector_from_tree (tree exp)
11357 {
11358 rtvec v;
11359 unsigned i;
11360 int units;
11361 tree elt;
11362 machine_mode inner, mode;
11363
11364 mode = TYPE_MODE (TREE_TYPE (exp));
11365
11366 if (initializer_zerop (exp))
11367 return CONST0_RTX (mode);
11368
11369 units = GET_MODE_NUNITS (mode);
11370 inner = GET_MODE_INNER (mode);
11371
11372 v = rtvec_alloc (units);
11373
11374 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11375 {
11376 elt = VECTOR_CST_ELT (exp, i);
11377
11378 if (TREE_CODE (elt) == REAL_CST)
11379 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11380 inner);
11381 else if (TREE_CODE (elt) == FIXED_CST)
11382 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11383 inner);
11384 else
11385 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11386 }
11387
11388 return gen_rtx_CONST_VECTOR (mode, v);
11389 }
11390
11391 /* Build a decl for a personality function given a language prefix. */
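/* Editorial example: with LANG "gxx" and DWARF2 unwind info this
   declares "__gxx_personality_v0"; with SJLJ unwinding the suffix is
   "_sj0", giving "__gxx_personality_sj0".  */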
11392
11393 tree
11394 build_personality_function (const char *lang)
11395 {
11396 const char *unwind_and_version;
11397 tree decl, type;
11398 char *name;
11399
11400 switch (targetm_common.except_unwind_info (&global_options))
11401 {
11402 case UI_NONE:
11403 return NULL;
11404 case UI_SJLJ:
11405 unwind_and_version = "_sj0";
11406 break;
11407 case UI_DWARF2:
11408 case UI_TARGET:
11409 unwind_and_version = "_v0";
11410 break;
11411 case UI_SEH:
11412 unwind_and_version = "_seh0";
11413 break;
11414 default:
11415 gcc_unreachable ();
11416 }
11417
11418 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11419
11420 type = build_function_type_list (integer_type_node, integer_type_node,
11421 long_long_unsigned_type_node,
11422 ptr_type_node, ptr_type_node, NULL_TREE);
11423 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11424 get_identifier (name), type);
11425 DECL_ARTIFICIAL (decl) = 1;
11426 DECL_EXTERNAL (decl) = 1;
11427 TREE_PUBLIC (decl) = 1;
11428
11429 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11430 are the flags assigned by targetm.encode_section_info. */
11431 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11432
11433 return decl;
11434 }
11435
11436 /* Extracts the personality function of DECL and returns the corresponding
11437 libfunc. */
11438
11439 rtx
11440 get_personality_function (tree decl)
11441 {
11442 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11443 enum eh_personality_kind pk;
11444
11445 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11446 if (pk == eh_personality_none)
11447 return NULL;
11448
11449 if (!personality
11450 && pk == eh_personality_any)
11451 personality = lang_hooks.eh_personality ();
11452
11453 if (pk == eh_personality_lang)
11454 gcc_assert (personality != NULL_TREE);
11455
11456 return XEXP (DECL_RTL (personality), 0);
11457 }
11458
11459 /* Returns a tree for the size of EXP in bytes. */
11460
11461 static tree
11462 tree_expr_size (const_tree exp)
11463 {
11464 if (DECL_P (exp)
11465 && DECL_SIZE_UNIT (exp) != 0)
11466 return DECL_SIZE_UNIT (exp);
11467 else
11468 return size_in_bytes (TREE_TYPE (exp));
11469 }
11470
11471 /* Return an rtx for the size in bytes of the value of EXP. */
11472
11473 rtx
11474 expr_size (tree exp)
11475 {
11476 tree size;
11477
11478 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11479 size = TREE_OPERAND (exp, 1);
11480 else
11481 {
11482 size = tree_expr_size (exp);
11483 gcc_assert (size);
11484 gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11485 }
11486
11487 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11488 }
11489
11490 /* Return a wide integer for the size in bytes of the value of EXP, or -1
11491 if the size can vary or is larger than an integer. */
11492
11493 static HOST_WIDE_INT
11494 int_expr_size (tree exp)
11495 {
11496 tree size;
11497
11498 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11499 size = TREE_OPERAND (exp, 1);
11500 else
11501 {
11502 size = tree_expr_size (exp);
11503 gcc_assert (size);
11504 }
11505
11506 if (size == 0 || !tree_fits_shwi_p (size))
11507 return -1;
11508
11509 return tree_to_shwi (size);
11510 }
11511
11512 #include "gt-expr.h"