1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "hash-set.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stringpool.h"
37 #include "stor-layout.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "flags.h"
41 #include "regs.h"
42 #include "hard-reg-set.h"
43 #include "except.h"
44 #include "function.h"
45 #include "insn-config.h"
46 #include "insn-attr.h"
47 #include "hashtab.h"
48 #include "statistics.h"
49 #include "real.h"
50 #include "fixed-value.h"
51 #include "expmed.h"
52 #include "dojump.h"
53 #include "explow.h"
54 #include "calls.h"
55 #include "emit-rtl.h"
56 #include "stmt.h"
57 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
58 #include "expr.h"
59 #include "insn-codes.h"
60 #include "optabs.h"
61 #include "libfuncs.h"
62 #include "recog.h"
63 #include "reload.h"
64 #include "typeclass.h"
65 #include "toplev.h"
66 #include "langhooks.h"
67 #include "intl.h"
68 #include "tm_p.h"
69 #include "tree-iterator.h"
70 #include "predict.h"
71 #include "dominance.h"
72 #include "cfg.h"
73 #include "basic-block.h"
74 #include "tree-ssa-alias.h"
75 #include "internal-fn.h"
76 #include "gimple-expr.h"
77 #include "is-a.h"
78 #include "gimple.h"
79 #include "gimple-ssa.h"
80 #include "hash-map.h"
81 #include "plugin-api.h"
82 #include "ipa-ref.h"
83 #include "cgraph.h"
84 #include "tree-ssanames.h"
85 #include "target.h"
86 #include "common/common-target.h"
87 #include "timevar.h"
88 #include "df.h"
89 #include "diagnostic.h"
90 #include "tree-ssa-live.h"
91 #include "tree-outof-ssa.h"
92 #include "target-globals.h"
93 #include "params.h"
94 #include "tree-ssa-address.h"
95 #include "cfgexpand.h"
96 #include "builtins.h"
97 #include "tree-chkp.h"
98 #include "rtl-chkp.h"
99 #include "ccmp.h"
100
101
102 /* If this is nonzero, we do not bother generating VOLATILE
103 around volatile memory references, and we are willing to
104 output indirect addresses. If cse is to follow, we reject
105 indirect addresses so a useful potential cse is generated;
106 if it is used only once, instruction combination will produce
107 the same indirect address eventually. */
108 int cse_not_expected;
109
110 /* This structure is used by move_by_pieces to describe the move to
111 be performed. */
112 struct move_by_pieces_d
113 {
114 rtx to;
115 rtx to_addr;
116 int autinc_to;
117 int explicit_inc_to;
118 rtx from;
119 rtx from_addr;
120 int autinc_from;
121 int explicit_inc_from;
122 unsigned HOST_WIDE_INT len;
123 HOST_WIDE_INT offset;
124 int reverse;
125 };
126
127 /* This structure is used by store_by_pieces to describe the clear to
128 be performed. */
129
130 struct store_by_pieces_d
131 {
132 rtx to;
133 rtx to_addr;
134 int autinc_to;
135 int explicit_inc_to;
136 unsigned HOST_WIDE_INT len;
137 HOST_WIDE_INT offset;
138 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
139 void *constfundata;
140 int reverse;
141 };
142
143 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
144 struct move_by_pieces_d *);
145 static bool block_move_libcall_safe_for_call_parm (void);
146 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
147 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
148 unsigned HOST_WIDE_INT);
149 static tree emit_block_move_libcall_fn (int);
150 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
151 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
152 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
153 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
154 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
155 struct store_by_pieces_d *);
156 static tree clear_storage_libcall_fn (int);
157 static rtx_insn *compress_float_constant (rtx, rtx);
158 static rtx get_subtarget (rtx);
159 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
160 HOST_WIDE_INT, machine_mode,
161 tree, int, alias_set_type);
162 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
163 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
164 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
165 machine_mode, tree, alias_set_type, bool);
166
167 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
168
169 static int is_aligning_offset (const_tree, const_tree);
170 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
171 static rtx do_store_flag (sepops, rtx, machine_mode);
172 #ifdef PUSH_ROUNDING
173 static void emit_single_push_insn (machine_mode, rtx, tree);
174 #endif
175 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
176 static rtx const_vector_from_tree (tree);
177 static tree tree_expr_size (const_tree);
178 static HOST_WIDE_INT int_expr_size (tree);
179
180 \f
181 /* This is run to set up which modes can be used
182 directly in memory and to initialize the block move optab. It is run
183 at the beginning of compilation and when the target is reinitialized. */
184
185 void
186 init_expr_target (void)
187 {
188 rtx insn, pat;
189 machine_mode mode;
190 int num_clobbers;
191 rtx mem, mem1;
192 rtx reg;
193
194 /* Try indexing by frame ptr and try by stack ptr.
195 It is known that on the Convex the stack ptr isn't a valid index.
196 With luck, one or the other is valid on any machine. */
197 mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
198 mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
199
200 /* A scratch register we can modify in-place below to avoid
201 useless RTL allocations. */
202 reg = gen_rtx_REG (word_mode, FIRST_PSEUDO_REGISTER);
203
204 insn = rtx_alloc (INSN);
205 pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
206 PATTERN (insn) = pat;
207
208 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
209 mode = (machine_mode) ((int) mode + 1))
210 {
211 int regno;
212
213 direct_load[(int) mode] = direct_store[(int) mode] = 0;
214 PUT_MODE (mem, mode);
215 PUT_MODE (mem1, mode);
216
217 /* See if there is some register that can be used in this mode and
218 directly loaded or stored from memory. */
219
220 if (mode != VOIDmode && mode != BLKmode)
221 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
222 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
223 regno++)
224 {
225 if (! HARD_REGNO_MODE_OK (regno, mode))
226 continue;
227
228 set_mode_and_regno (reg, mode, regno);
229
230 SET_SRC (pat) = mem;
231 SET_DEST (pat) = reg;
232 if (recog (pat, insn, &num_clobbers) >= 0)
233 direct_load[(int) mode] = 1;
234
235 SET_SRC (pat) = mem1;
236 SET_DEST (pat) = reg;
237 if (recog (pat, insn, &num_clobbers) >= 0)
238 direct_load[(int) mode] = 1;
239
240 SET_SRC (pat) = reg;
241 SET_DEST (pat) = mem;
242 if (recog (pat, insn, &num_clobbers) >= 0)
243 direct_store[(int) mode] = 1;
244
245 SET_SRC (pat) = reg;
246 SET_DEST (pat) = mem1;
247 if (recog (pat, insn, &num_clobbers) >= 0)
248 direct_store[(int) mode] = 1;
249 }
250 }
251
252 mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, FIRST_PSEUDO_REGISTER));
253
254 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
255 mode = GET_MODE_WIDER_MODE (mode))
256 {
257 machine_mode srcmode;
258 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
259 srcmode = GET_MODE_WIDER_MODE (srcmode))
260 {
261 enum insn_code ic;
262
263 ic = can_extend_p (mode, srcmode, 0);
264 if (ic == CODE_FOR_nothing)
265 continue;
266
267 PUT_MODE (mem, srcmode);
268
269 if (insn_operand_matches (ic, 1, mem))
270 float_extend_from_mem[mode][srcmode] = true;
271 }
272 }
273 }
274
275 /* This is run at the start of compiling a function. */
276
277 void
278 init_expr (void)
279 {
280 memset (&crtl->expr, 0, sizeof (crtl->expr));
281 }
282 \f
283 /* Copy data from FROM to TO, where the machine modes are not the same.
284 Both modes may be integer, or both may be floating, or both may be
285 fixed-point.
286 UNSIGNEDP should be nonzero if FROM is an unsigned type.
287 This causes zero-extension instead of sign-extension. */
288
289 void
290 convert_move (rtx to, rtx from, int unsignedp)
291 {
292 machine_mode to_mode = GET_MODE (to);
293 machine_mode from_mode = GET_MODE (from);
294 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
295 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
296 enum insn_code code;
297 rtx libcall;
298
299 /* rtx code for making an equivalent value. */
300 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
301 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
302
303
304 gcc_assert (to_real == from_real);
305 gcc_assert (to_mode != BLKmode);
306 gcc_assert (from_mode != BLKmode);
307
308 /* If the source and destination are already the same, then there's
309 nothing to do. */
310 if (to == from)
311 return;
312
313 /* If FROM is a SUBREG that indicates that we have already done at least
314 the required extension, strip it. We don't handle such SUBREGs as
315 TO here. */
316
317 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
318 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
319 >= GET_MODE_PRECISION (to_mode))
320 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
321 from = gen_lowpart (to_mode, from), from_mode = to_mode;
322
323 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
324
325 if (to_mode == from_mode
326 || (from_mode == VOIDmode && CONSTANT_P (from)))
327 {
328 emit_move_insn (to, from);
329 return;
330 }
331
332 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
333 {
334 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
335
336 if (VECTOR_MODE_P (to_mode))
337 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
338 else
339 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
340
341 emit_move_insn (to, from);
342 return;
343 }
344
345 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
346 {
347 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
348 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
349 return;
350 }
351
352 if (to_real)
353 {
354 rtx value;
355 rtx_insn *insns;
356 convert_optab tab;
357
358 gcc_assert ((GET_MODE_PRECISION (from_mode)
359 != GET_MODE_PRECISION (to_mode))
360 || (DECIMAL_FLOAT_MODE_P (from_mode)
361 != DECIMAL_FLOAT_MODE_P (to_mode)));
362
363 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
364 /* Conversion between decimal float and binary float, same size. */
365 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
366 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
367 tab = sext_optab;
368 else
369 tab = trunc_optab;
370
371 /* Try converting directly if the insn is supported. */
372
373 code = convert_optab_handler (tab, to_mode, from_mode);
374 if (code != CODE_FOR_nothing)
375 {
376 emit_unop_insn (code, to, from,
377 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
378 return;
379 }
380
381 /* Otherwise use a libcall. */
382 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
383
384 /* Is this conversion implemented yet? */
385 gcc_assert (libcall);
386
387 start_sequence ();
388 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
389 1, from, from_mode);
390 insns = get_insns ();
391 end_sequence ();
392 emit_libcall_block (insns, to, value,
393 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
394 from)
395 : gen_rtx_FLOAT_EXTEND (to_mode, from));
396 return;
397 }
398
399 /* Handle pointer conversion. */ /* SPEE 900220. */
400 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
401 {
402 convert_optab ctab;
403
404 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
405 ctab = trunc_optab;
406 else if (unsignedp)
407 ctab = zext_optab;
408 else
409 ctab = sext_optab;
410
411 if (convert_optab_handler (ctab, to_mode, from_mode)
412 != CODE_FOR_nothing)
413 {
414 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
415 to, from, UNKNOWN);
416 return;
417 }
418 }
419
420 /* Targets are expected to provide conversion insns between PxImode and
421 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
422 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
423 {
424 machine_mode full_mode
425 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
426
427 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
428 != CODE_FOR_nothing);
429
430 if (full_mode != from_mode)
431 from = convert_to_mode (full_mode, from, unsignedp);
432 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
433 to, from, UNKNOWN);
434 return;
435 }
436 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
437 {
438 rtx new_from;
439 machine_mode full_mode
440 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
441 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
442 enum insn_code icode;
443
444 icode = convert_optab_handler (ctab, full_mode, from_mode);
445 gcc_assert (icode != CODE_FOR_nothing);
446
447 if (to_mode == full_mode)
448 {
449 emit_unop_insn (icode, to, from, UNKNOWN);
450 return;
451 }
452
453 new_from = gen_reg_rtx (full_mode);
454 emit_unop_insn (icode, new_from, from, UNKNOWN);
455
456 /* else proceed to integer conversions below. */
457 from_mode = full_mode;
458 from = new_from;
459 }
460
461 /* Make sure both are fixed-point modes or both are not. */
462 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
463 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
464 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
465 {
466 /* If we widen from_mode to to_mode and they are in the same class,
467 we won't saturate the result.
468 Otherwise, always saturate the result to play safe. */
469 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
470 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
471 expand_fixed_convert (to, from, 0, 0);
472 else
473 expand_fixed_convert (to, from, 0, 1);
474 return;
475 }
476
477 /* Now both modes are integers. */
478
479 /* Handle expanding beyond a word. */
480 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
481 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
482 {
483 rtx_insn *insns;
484 rtx lowpart;
485 rtx fill_value;
486 rtx lowfrom;
487 int i;
488 machine_mode lowpart_mode;
489 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
490
491 /* Try converting directly if the insn is supported. */
492 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
493 != CODE_FOR_nothing)
494 {
495 /* If FROM is a SUBREG, put it into a register. Do this
496 so that we always generate the same set of insns for
497 better cse'ing; if an intermediate assignment occurred,
498 we won't be doing the operation directly on the SUBREG. */
499 if (optimize > 0 && GET_CODE (from) == SUBREG)
500 from = force_reg (from_mode, from);
501 emit_unop_insn (code, to, from, equiv_code);
502 return;
503 }
504 /* Next, try converting via full word. */
505 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
506 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
507 != CODE_FOR_nothing))
508 {
509 rtx word_to = gen_reg_rtx (word_mode);
510 if (REG_P (to))
511 {
512 if (reg_overlap_mentioned_p (to, from))
513 from = force_reg (from_mode, from);
514 emit_clobber (to);
515 }
516 convert_move (word_to, from, unsignedp);
517 emit_unop_insn (code, to, word_to, equiv_code);
518 return;
519 }
520
521 /* No special multiword conversion insn; do it by hand. */
522 start_sequence ();
523
 524 	 /* Since we will turn this into a no conflict block, we must ensure that
 525 	    the source does not overlap the target, so force it into an isolated
 526 	    register when it might.  Likewise for any MEM input, since the
527 conversion sequence might require several references to it and we
528 must ensure we're getting the same value every time. */
529
530 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
531 from = force_reg (from_mode, from);
532
533 /* Get a copy of FROM widened to a word, if necessary. */
534 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
535 lowpart_mode = word_mode;
536 else
537 lowpart_mode = from_mode;
538
539 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
540
541 lowpart = gen_lowpart (lowpart_mode, to);
542 emit_move_insn (lowpart, lowfrom);
543
544 /* Compute the value to put in each remaining word. */
545 if (unsignedp)
546 fill_value = const0_rtx;
547 else
548 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
549 LT, lowfrom, const0_rtx,
550 lowpart_mode, 0, -1);
551
552 /* Fill the remaining words. */
553 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
554 {
555 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
556 rtx subword = operand_subword (to, index, 1, to_mode);
557
558 gcc_assert (subword);
559
560 if (fill_value != subword)
561 emit_move_insn (subword, fill_value);
562 }
563
564 insns = get_insns ();
565 end_sequence ();
566
567 emit_insn (insns);
568 return;
569 }
570
571 /* Truncating multi-word to a word or less. */
572 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
573 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
574 {
575 if (!((MEM_P (from)
576 && ! MEM_VOLATILE_P (from)
577 && direct_load[(int) to_mode]
578 && ! mode_dependent_address_p (XEXP (from, 0),
579 MEM_ADDR_SPACE (from)))
580 || REG_P (from)
581 || GET_CODE (from) == SUBREG))
582 from = force_reg (from_mode, from);
583 convert_move (to, gen_lowpart (word_mode, from), 0);
584 return;
585 }
586
587 /* Now follow all the conversions between integers
588 no more than a word long. */
589
590 /* For truncation, usually we can just refer to FROM in a narrower mode. */
591 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
592 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
593 {
594 if (!((MEM_P (from)
595 && ! MEM_VOLATILE_P (from)
596 && direct_load[(int) to_mode]
597 && ! mode_dependent_address_p (XEXP (from, 0),
598 MEM_ADDR_SPACE (from)))
599 || REG_P (from)
600 || GET_CODE (from) == SUBREG))
601 from = force_reg (from_mode, from);
602 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
603 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
604 from = copy_to_reg (from);
605 emit_move_insn (to, gen_lowpart (to_mode, from));
606 return;
607 }
608
609 /* Handle extension. */
610 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
611 {
612 /* Convert directly if that works. */
613 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
614 != CODE_FOR_nothing)
615 {
616 emit_unop_insn (code, to, from, equiv_code);
617 return;
618 }
619 else
620 {
621 machine_mode intermediate;
622 rtx tmp;
623 int shift_amount;
624
625 /* Search for a mode to convert via. */
626 for (intermediate = from_mode; intermediate != VOIDmode;
627 intermediate = GET_MODE_WIDER_MODE (intermediate))
628 if (((can_extend_p (to_mode, intermediate, unsignedp)
629 != CODE_FOR_nothing)
630 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
631 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
632 && (can_extend_p (intermediate, from_mode, unsignedp)
633 != CODE_FOR_nothing))
634 {
635 convert_move (to, convert_to_mode (intermediate, from,
636 unsignedp), unsignedp);
637 return;
638 }
639
640 /* No suitable intermediate mode.
641 Generate what we need with shifts. */
642 shift_amount = (GET_MODE_PRECISION (to_mode)
643 - GET_MODE_PRECISION (from_mode));
644 from = gen_lowpart (to_mode, force_reg (from_mode, from));
645 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
646 to, unsignedp);
647 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
648 to, unsignedp);
649 if (tmp != to)
650 emit_move_insn (to, tmp);
651 return;
652 }
653 }
654
655 /* Support special truncate insns for certain modes. */
656 if (convert_optab_handler (trunc_optab, to_mode,
657 from_mode) != CODE_FOR_nothing)
658 {
659 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
660 to, from, UNKNOWN);
661 return;
662 }
663
664 /* Handle truncation of volatile memrefs, and so on;
665 the things that couldn't be truncated directly,
666 and for which there was no special instruction.
667
668 ??? Code above formerly short-circuited this, for most integer
669 mode pairs, with a force_reg in from_mode followed by a recursive
670 call to this routine. Appears always to have been wrong. */
671 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
672 {
673 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
674 emit_move_insn (to, temp);
675 return;
676 }
677
678 /* Mode combination is not recognized. */
679 gcc_unreachable ();
680 }
681
682 /* Return an rtx for a value that would result
683 from converting X to mode MODE.
684 Both X and MODE may be floating, or both integer.
685 UNSIGNEDP is nonzero if X is an unsigned value.
686 This can be done by referring to a part of X in place
687 or by copying to a new temporary with conversion. */
688
689 rtx
690 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
691 {
692 return convert_modes (mode, VOIDmode, x, unsignedp);
693 }
694
695 /* Return an rtx for a value that would result
696 from converting X from mode OLDMODE to mode MODE.
697 Both modes may be floating, or both integer.
698 UNSIGNEDP is nonzero if X is an unsigned value.
699
700 This can be done by referring to a part of X in place
701 or by copying to a new temporary with conversion.
702
703 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
704
705 rtx
706 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
707 {
708 rtx temp;
709
 710 	  /* If X is a SUBREG that indicates that we have already done at least
711 the required extension, strip it. */
712
713 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
714 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
715 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
716 x = gen_lowpart (mode, SUBREG_REG (x));
717
718 if (GET_MODE (x) != VOIDmode)
719 oldmode = GET_MODE (x);
720
721 if (mode == oldmode)
722 return x;
723
724 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
725 {
726 /* If the caller did not tell us the old mode, then there is not
727 much to do with respect to canonicalization. We have to
728 assume that all the bits are significant. */
729 if (GET_MODE_CLASS (oldmode) != MODE_INT)
730 oldmode = MAX_MODE_INT;
731 wide_int w = wide_int::from (std::make_pair (x, oldmode),
732 GET_MODE_PRECISION (mode),
733 unsignedp ? UNSIGNED : SIGNED);
734 return immed_wide_int_const (w, mode);
735 }
736
737 /* We can do this with a gen_lowpart if both desired and current modes
738 are integer, and this is either a constant integer, a register, or a
739 non-volatile MEM. */
740 if (GET_MODE_CLASS (mode) == MODE_INT
741 && GET_MODE_CLASS (oldmode) == MODE_INT
742 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
743 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
744 || (REG_P (x)
745 && (!HARD_REGISTER_P (x)
746 || HARD_REGNO_MODE_OK (REGNO (x), mode))
747 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
748
749 return gen_lowpart (mode, x);
750
 751 	  /* Converting an integer constant into MODE is always equivalent to a
 752 	     subreg operation.  */
753 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
754 {
755 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
756 return simplify_gen_subreg (mode, x, oldmode, 0);
757 }
758
759 temp = gen_reg_rtx (mode);
760 convert_move (temp, x, unsignedp);
761 return temp;
762 }
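
/* A minimal usage sketch of the two conversion entry points above
   (illustrative only, not part of the original file): widen an unsigned
   QImode value into an SImode pseudo.  The function name and the specific
   modes are made-up example choices.  */

static rtx
example_zero_extend_qi_to_si (rtx qi_val)
{
  rtx si_reg = gen_reg_rtx (SImode);

  /* convert_move picks the best strategy itself: a direct extension insn,
     an extension via word_mode, or a shift pair.  */
  convert_move (si_reg, qi_val, /*unsignedp=*/1);

  /* Equivalently, convert_to_mode (SImode, qi_val, 1) would return a value
     already in SImode, possibly reusing QI_VAL in place.  */
  return si_reg;
}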
763 \f
764 /* Return the largest alignment we can use for doing a move (or store)
765 of MAX_PIECES. ALIGN is the largest alignment we could use. */
766
767 static unsigned int
768 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
769 {
770 machine_mode tmode;
771
772 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
773 if (align >= GET_MODE_ALIGNMENT (tmode))
774 align = GET_MODE_ALIGNMENT (tmode);
775 else
776 {
777 machine_mode tmode, xmode;
778
779 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
780 tmode != VOIDmode;
781 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
782 if (GET_MODE_SIZE (tmode) > max_pieces
783 || SLOW_UNALIGNED_ACCESS (tmode, align))
784 break;
785
786 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
787 }
788
789 return align;
790 }
791
 792 /* Return the widest integer mode narrower than SIZE bytes.  If no such
 793    mode can be found, return VOIDmode.  */
794
795 static machine_mode
796 widest_int_mode_for_size (unsigned int size)
797 {
798 machine_mode tmode, mode = VOIDmode;
799
800 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
801 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
802 if (GET_MODE_SIZE (tmode) < size)
803 mode = tmode;
804
805 return mode;
806 }
807
808 /* Determine whether the LEN bytes can be moved by using several move
809 instructions. Return nonzero if a call to move_by_pieces should
810 succeed. */
811
812 int
813 can_move_by_pieces (unsigned HOST_WIDE_INT len,
814 unsigned int align)
815 {
816 return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
817 optimize_insn_for_speed_p ());
818 }
819
820 /* Generate several move instructions to copy LEN bytes from block FROM to
821 block TO. (These are MEM rtx's with BLKmode).
822
823 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
824 used to push FROM to the stack.
825
826 ALIGN is maximum stack alignment we can assume.
827
828 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
 829    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
830 stpcpy. */
831
832 rtx
833 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
834 unsigned int align, int endp)
835 {
836 struct move_by_pieces_d data;
837 machine_mode to_addr_mode;
838 machine_mode from_addr_mode = get_address_mode (from);
839 rtx to_addr, from_addr = XEXP (from, 0);
840 unsigned int max_size = MOVE_MAX_PIECES + 1;
841 enum insn_code icode;
842
843 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
844
845 data.offset = 0;
846 data.from_addr = from_addr;
847 if (to)
848 {
849 to_addr_mode = get_address_mode (to);
850 to_addr = XEXP (to, 0);
851 data.to = to;
852 data.autinc_to
853 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
854 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
855 data.reverse
856 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
857 }
858 else
859 {
860 to_addr_mode = VOIDmode;
861 to_addr = NULL_RTX;
862 data.to = NULL_RTX;
863 data.autinc_to = 1;
864 if (STACK_GROWS_DOWNWARD)
865 data.reverse = 1;
866 else
867 data.reverse = 0;
868 }
869 data.to_addr = to_addr;
870 data.from = from;
871 data.autinc_from
872 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
873 || GET_CODE (from_addr) == POST_INC
874 || GET_CODE (from_addr) == POST_DEC);
875
876 data.explicit_inc_from = 0;
877 data.explicit_inc_to = 0;
878 if (data.reverse) data.offset = len;
879 data.len = len;
880
881 /* If copying requires more than two move insns,
882 copy addresses to registers (to make displacements shorter)
883 and use post-increment if available. */
884 if (!(data.autinc_from && data.autinc_to)
885 && move_by_pieces_ninsns (len, align, max_size) > 2)
886 {
887 /* Find the mode of the largest move...
888 MODE might not be used depending on the definitions of the
889 USE_* macros below. */
890 machine_mode mode ATTRIBUTE_UNUSED
891 = widest_int_mode_for_size (max_size);
892
893 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
894 {
895 data.from_addr = copy_to_mode_reg (from_addr_mode,
896 plus_constant (from_addr_mode,
897 from_addr, len));
898 data.autinc_from = 1;
899 data.explicit_inc_from = -1;
900 }
901 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
902 {
903 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
904 data.autinc_from = 1;
905 data.explicit_inc_from = 1;
906 }
907 if (!data.autinc_from && CONSTANT_P (from_addr))
908 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
909 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
910 {
911 data.to_addr = copy_to_mode_reg (to_addr_mode,
912 plus_constant (to_addr_mode,
913 to_addr, len));
914 data.autinc_to = 1;
915 data.explicit_inc_to = -1;
916 }
917 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
918 {
919 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
920 data.autinc_to = 1;
921 data.explicit_inc_to = 1;
922 }
923 if (!data.autinc_to && CONSTANT_P (to_addr))
924 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
925 }
926
927 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
928
929 /* First move what we can in the largest integer mode, then go to
930 successively smaller modes. */
931
932 while (max_size > 1 && data.len > 0)
933 {
934 machine_mode mode = widest_int_mode_for_size (max_size);
935
936 if (mode == VOIDmode)
937 break;
938
939 icode = optab_handler (mov_optab, mode);
940 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
941 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
942
943 max_size = GET_MODE_SIZE (mode);
944 }
945
946 /* The code above should have handled everything. */
947 gcc_assert (!data.len);
948
949 if (endp)
950 {
951 rtx to1;
952
953 gcc_assert (!data.reverse);
954 if (data.autinc_to)
955 {
956 if (endp == 2)
957 {
958 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
959 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
960 else
961 data.to_addr = copy_to_mode_reg (to_addr_mode,
962 plus_constant (to_addr_mode,
963 data.to_addr,
964 -1));
965 }
966 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
967 data.offset);
968 }
969 else
970 {
971 if (endp == 2)
972 --data.offset;
973 to1 = adjust_address (data.to, QImode, data.offset);
974 }
975 return to1;
976 }
977 else
978 return data.to;
979 }
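
/* A usage sketch for the piecewise-move path above (illustrative only):
   copy a small fixed-size block inline when the target's heuristics allow
   it, otherwise fall back to the generic block-move entry point.  The
   16-byte size and the helper name are made-up example values.  */

static void
example_copy_16_bytes (rtx dst, rtx src)
{
  unsigned int align = MIN (MEM_ALIGN (dst), MEM_ALIGN (src));

  if (can_move_by_pieces (16, align))
    /* ENDP == 0: we do not need the end-of-block address back.  */
    move_by_pieces (dst, src, 16, align, 0);
  else
    emit_block_move (dst, src, GEN_INT (16), BLOCK_OP_NORMAL);
}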
980
981 /* Return number of insns required to move L bytes by pieces.
982 ALIGN (in bits) is maximum alignment we can assume. */
983
984 unsigned HOST_WIDE_INT
985 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
986 unsigned int max_size)
987 {
988 unsigned HOST_WIDE_INT n_insns = 0;
989
990 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
991
992 while (max_size > 1 && l > 0)
993 {
994 machine_mode mode;
995 enum insn_code icode;
996
997 mode = widest_int_mode_for_size (max_size);
998
999 if (mode == VOIDmode)
1000 break;
1001
1002 icode = optab_handler (mov_optab, mode);
1003 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1004 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1005
1006 max_size = GET_MODE_SIZE (mode);
1007 }
1008
1009 gcc_assert (!l);
1010 return n_insns;
1011 }
1012
1013 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1014 with move instructions for mode MODE. GENFUN is the gen_... function
1015 to make a move insn for that mode. DATA has all the other info. */
1016
1017 static void
1018 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1019 struct move_by_pieces_d *data)
1020 {
1021 unsigned int size = GET_MODE_SIZE (mode);
1022 rtx to1 = NULL_RTX, from1;
1023
1024 while (data->len >= size)
1025 {
1026 if (data->reverse)
1027 data->offset -= size;
1028
1029 if (data->to)
1030 {
1031 if (data->autinc_to)
1032 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1033 data->offset);
1034 else
1035 to1 = adjust_address (data->to, mode, data->offset);
1036 }
1037
1038 if (data->autinc_from)
1039 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1040 data->offset);
1041 else
1042 from1 = adjust_address (data->from, mode, data->offset);
1043
1044 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1045 emit_insn (gen_add2_insn (data->to_addr,
1046 gen_int_mode (-(HOST_WIDE_INT) size,
1047 GET_MODE (data->to_addr))));
1048 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1049 emit_insn (gen_add2_insn (data->from_addr,
1050 gen_int_mode (-(HOST_WIDE_INT) size,
1051 GET_MODE (data->from_addr))));
1052
1053 if (data->to)
1054 emit_insn ((*genfun) (to1, from1));
1055 else
1056 {
1057 #ifdef PUSH_ROUNDING
1058 emit_single_push_insn (mode, from1, NULL);
1059 #else
1060 gcc_unreachable ();
1061 #endif
1062 }
1063
1064 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1065 emit_insn (gen_add2_insn (data->to_addr,
1066 gen_int_mode (size,
1067 GET_MODE (data->to_addr))));
1068 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1069 emit_insn (gen_add2_insn (data->from_addr,
1070 gen_int_mode (size,
1071 GET_MODE (data->from_addr))));
1072
1073 if (! data->reverse)
1074 data->offset += size;
1075
1076 data->len -= size;
1077 }
1078 }
1079 \f
1080 /* Emit code to move a block Y to a block X. This may be done with
1081 string-move instructions, with multiple scalar move instructions,
1082 or with a library call.
1083
1084 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1085 SIZE is an rtx that says how long they are.
1086 ALIGN is the maximum alignment we can assume they have.
1087 METHOD describes what kind of copy this is, and what mechanisms may be used.
1088 MIN_SIZE is the minimal size of block to move
 1089    MAX_SIZE is the maximal size of block to move; if it cannot be represented
 1090    in unsigned HOST_WIDE_INT, then it is a mask of all ones.
1091
1092 Return the address of the new block, if memcpy is called and returns it,
1093 0 otherwise. */
1094
1095 rtx
1096 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1097 unsigned int expected_align, HOST_WIDE_INT expected_size,
1098 unsigned HOST_WIDE_INT min_size,
1099 unsigned HOST_WIDE_INT max_size,
1100 unsigned HOST_WIDE_INT probable_max_size)
1101 {
1102 bool may_use_call;
1103 rtx retval = 0;
1104 unsigned int align;
1105
1106 gcc_assert (size);
1107 if (CONST_INT_P (size)
1108 && INTVAL (size) == 0)
1109 return 0;
1110
1111 switch (method)
1112 {
1113 case BLOCK_OP_NORMAL:
1114 case BLOCK_OP_TAILCALL:
1115 may_use_call = true;
1116 break;
1117
1118 case BLOCK_OP_CALL_PARM:
1119 may_use_call = block_move_libcall_safe_for_call_parm ();
1120
1121 /* Make inhibit_defer_pop nonzero around the library call
1122 to force it to pop the arguments right away. */
1123 NO_DEFER_POP;
1124 break;
1125
1126 case BLOCK_OP_NO_LIBCALL:
1127 may_use_call = false;
1128 break;
1129
1130 default:
1131 gcc_unreachable ();
1132 }
1133
1134 gcc_assert (MEM_P (x) && MEM_P (y));
1135 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1136 gcc_assert (align >= BITS_PER_UNIT);
1137
1138 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1139 block copy is more efficient for other large modes, e.g. DCmode. */
1140 x = adjust_address (x, BLKmode, 0);
1141 y = adjust_address (y, BLKmode, 0);
1142
1143 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1144 can be incorrect is coming from __builtin_memcpy. */
1145 if (CONST_INT_P (size))
1146 {
1147 x = shallow_copy_rtx (x);
1148 y = shallow_copy_rtx (y);
1149 set_mem_size (x, INTVAL (size));
1150 set_mem_size (y, INTVAL (size));
1151 }
1152
1153 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1154 move_by_pieces (x, y, INTVAL (size), align, 0);
1155 else if (emit_block_move_via_movmem (x, y, size, align,
1156 expected_align, expected_size,
1157 min_size, max_size, probable_max_size))
1158 ;
1159 else if (may_use_call
1160 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1161 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1162 {
1163 /* Since x and y are passed to a libcall, mark the corresponding
1164 tree EXPR as addressable. */
1165 tree y_expr = MEM_EXPR (y);
1166 tree x_expr = MEM_EXPR (x);
1167 if (y_expr)
1168 mark_addressable (y_expr);
1169 if (x_expr)
1170 mark_addressable (x_expr);
1171 retval = emit_block_move_via_libcall (x, y, size,
1172 method == BLOCK_OP_TAILCALL);
1173 }
1174
1175 else
1176 emit_block_move_via_loop (x, y, size, align);
1177
1178 if (method == BLOCK_OP_CALL_PARM)
1179 OK_DEFER_POP;
1180
1181 return retval;
1182 }
1183
1184 rtx
1185 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1186 {
1187 unsigned HOST_WIDE_INT max, min = 0;
1188 if (GET_CODE (size) == CONST_INT)
1189 min = max = UINTVAL (size);
1190 else
1191 max = GET_MODE_MASK (GET_MODE (size));
1192 return emit_block_move_hints (x, y, size, method, 0, -1,
1193 min, max, max);
1194 }
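
/* A usage sketch for the hinted interface above (illustrative only): when
   only a range for SIZE is known at expansion time, the bounds can be
   passed straight through so the movmem expander can pick a strategy.
   The 1..64 byte range and the helper name are made-up example values.  */

static rtx
example_copy_bounded (rtx dst, rtx src, rtx size)
{
  return emit_block_move_hints (dst, src, size, BLOCK_OP_NORMAL,
				/*expected_align=*/0, /*expected_size=*/-1,
				/*min_size=*/1, /*max_size=*/64,
				/*probable_max_size=*/64);
}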
1195
1196 /* A subroutine of emit_block_move. Returns true if calling the
1197 block move libcall will not clobber any parameters which may have
1198 already been placed on the stack. */
1199
1200 static bool
1201 block_move_libcall_safe_for_call_parm (void)
1202 {
1203 #if defined (REG_PARM_STACK_SPACE)
1204 tree fn;
1205 #endif
1206
1207 /* If arguments are pushed on the stack, then they're safe. */
1208 if (PUSH_ARGS)
1209 return true;
1210
1211 /* If registers go on the stack anyway, any argument is sure to clobber
1212 an outgoing argument. */
1213 #if defined (REG_PARM_STACK_SPACE)
1214 fn = emit_block_move_libcall_fn (false);
1215 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1216 depend on its argument. */
1217 (void) fn;
1218 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1219 && REG_PARM_STACK_SPACE (fn) != 0)
1220 return false;
1221 #endif
1222
1223 /* If any argument goes in memory, then it might clobber an outgoing
1224 argument. */
1225 {
1226 CUMULATIVE_ARGS args_so_far_v;
1227 cumulative_args_t args_so_far;
1228 tree fn, arg;
1229
1230 fn = emit_block_move_libcall_fn (false);
1231 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1232 args_so_far = pack_cumulative_args (&args_so_far_v);
1233
1234 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1235 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1236 {
1237 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1238 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1239 NULL_TREE, true);
1240 if (!tmp || !REG_P (tmp))
1241 return false;
1242 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1243 return false;
1244 targetm.calls.function_arg_advance (args_so_far, mode,
1245 NULL_TREE, true);
1246 }
1247 }
1248 return true;
1249 }
1250
1251 /* A subroutine of emit_block_move. Expand a movmem pattern;
1252 return true if successful. */
1253
1254 static bool
1255 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1256 unsigned int expected_align, HOST_WIDE_INT expected_size,
1257 unsigned HOST_WIDE_INT min_size,
1258 unsigned HOST_WIDE_INT max_size,
1259 unsigned HOST_WIDE_INT probable_max_size)
1260 {
1261 int save_volatile_ok = volatile_ok;
1262 machine_mode mode;
1263
1264 if (expected_align < align)
1265 expected_align = align;
1266 if (expected_size != -1)
1267 {
1268 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1269 expected_size = probable_max_size;
1270 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1271 expected_size = min_size;
1272 }
1273
1274 /* Since this is a move insn, we don't care about volatility. */
1275 volatile_ok = 1;
1276
1277 /* Try the most limited insn first, because there's no point
1278 including more than one in the machine description unless
1279 the more limited one has some advantage. */
1280
1281 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1282 mode = GET_MODE_WIDER_MODE (mode))
1283 {
1284 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1285
1286 if (code != CODE_FOR_nothing
1287 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1288 here because if SIZE is less than the mode mask, as it is
1289 returned by the macro, it will definitely be less than the
1290 actual mode mask. Since SIZE is within the Pmode address
1291 space, we limit MODE to Pmode. */
1292 && ((CONST_INT_P (size)
1293 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1294 <= (GET_MODE_MASK (mode) >> 1)))
1295 || max_size <= (GET_MODE_MASK (mode) >> 1)
1296 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1297 {
1298 struct expand_operand ops[9];
1299 unsigned int nops;
1300
1301 /* ??? When called via emit_block_move_for_call, it'd be
1302 nice if there were some way to inform the backend, so
1303 that it doesn't fail the expansion because it thinks
1304 emitting the libcall would be more efficient. */
1305 nops = insn_data[(int) code].n_generator_args;
1306 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1307
1308 create_fixed_operand (&ops[0], x);
1309 create_fixed_operand (&ops[1], y);
1310 /* The check above guarantees that this size conversion is valid. */
1311 create_convert_operand_to (&ops[2], size, mode, true);
1312 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1313 if (nops >= 6)
1314 {
1315 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1316 create_integer_operand (&ops[5], expected_size);
1317 }
1318 if (nops >= 8)
1319 {
1320 create_integer_operand (&ops[6], min_size);
 1321 	      /* If we cannot represent the maximal size,
 1322 	         make the parameter NULL.  */
1323 if ((HOST_WIDE_INT) max_size != -1)
1324 create_integer_operand (&ops[7], max_size);
1325 else
1326 create_fixed_operand (&ops[7], NULL);
1327 }
1328 if (nops == 9)
1329 {
 1330 	      /* If we cannot represent the maximal size,
 1331 	         make the parameter NULL.  */
1332 if ((HOST_WIDE_INT) probable_max_size != -1)
1333 create_integer_operand (&ops[8], probable_max_size);
1334 else
1335 create_fixed_operand (&ops[8], NULL);
1336 }
1337 if (maybe_expand_insn (code, nops, ops))
1338 {
1339 volatile_ok = save_volatile_ok;
1340 return true;
1341 }
1342 }
1343 }
1344
1345 volatile_ok = save_volatile_ok;
1346 return false;
1347 }
1348
1349 /* A subroutine of emit_block_move. Expand a call to memcpy.
1350 Return the return value from memcpy, 0 otherwise. */
1351
1352 rtx
1353 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1354 {
1355 rtx dst_addr, src_addr;
1356 tree call_expr, fn, src_tree, dst_tree, size_tree;
1357 machine_mode size_mode;
1358 rtx retval;
1359
1360 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1361 pseudos. We can then place those new pseudos into a VAR_DECL and
1362 use them later. */
1363
1364 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1365 src_addr = copy_addr_to_reg (XEXP (src, 0));
1366
1367 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1368 src_addr = convert_memory_address (ptr_mode, src_addr);
1369
1370 dst_tree = make_tree (ptr_type_node, dst_addr);
1371 src_tree = make_tree (ptr_type_node, src_addr);
1372
1373 size_mode = TYPE_MODE (sizetype);
1374
1375 size = convert_to_mode (size_mode, size, 1);
1376 size = copy_to_mode_reg (size_mode, size);
1377
1378 /* It is incorrect to use the libcall calling conventions to call
1379 memcpy in this context. This could be a user call to memcpy and
1380 the user may wish to examine the return value from memcpy. For
1381 targets where libcalls and normal calls have different conventions
1382 for returning pointers, we could end up generating incorrect code. */
1383
1384 size_tree = make_tree (sizetype, size);
1385
1386 fn = emit_block_move_libcall_fn (true);
1387 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1388 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1389
1390 retval = expand_normal (call_expr);
1391
1392 return retval;
1393 }
1394
1395 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1396 for the function we use for block copies. */
1397
1398 static GTY(()) tree block_move_fn;
1399
1400 void
1401 init_block_move_fn (const char *asmspec)
1402 {
1403 if (!block_move_fn)
1404 {
1405 tree args, fn, attrs, attr_args;
1406
1407 fn = get_identifier ("memcpy");
1408 args = build_function_type_list (ptr_type_node, ptr_type_node,
1409 const_ptr_type_node, sizetype,
1410 NULL_TREE);
1411
1412 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1413 DECL_EXTERNAL (fn) = 1;
1414 TREE_PUBLIC (fn) = 1;
1415 DECL_ARTIFICIAL (fn) = 1;
1416 TREE_NOTHROW (fn) = 1;
1417 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1418 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1419
1420 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1421 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1422
1423 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1424
1425 block_move_fn = fn;
1426 }
1427
1428 if (asmspec)
1429 set_user_assembler_name (block_move_fn, asmspec);
1430 }
1431
1432 static tree
1433 emit_block_move_libcall_fn (int for_call)
1434 {
1435 static bool emitted_extern;
1436
1437 if (!block_move_fn)
1438 init_block_move_fn (NULL);
1439
1440 if (for_call && !emitted_extern)
1441 {
1442 emitted_extern = true;
1443 make_decl_rtl (block_move_fn);
1444 }
1445
1446 return block_move_fn;
1447 }
1448
1449 /* A subroutine of emit_block_move. Copy the data via an explicit
1450 loop. This is used only when libcalls are forbidden. */
1451 /* ??? It'd be nice to copy in hunks larger than QImode. */
1452
1453 static void
1454 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1455 unsigned int align ATTRIBUTE_UNUSED)
1456 {
1457 rtx_code_label *cmp_label, *top_label;
1458 rtx iter, x_addr, y_addr, tmp;
1459 machine_mode x_addr_mode = get_address_mode (x);
1460 machine_mode y_addr_mode = get_address_mode (y);
1461 machine_mode iter_mode;
1462
1463 iter_mode = GET_MODE (size);
1464 if (iter_mode == VOIDmode)
1465 iter_mode = word_mode;
1466
1467 top_label = gen_label_rtx ();
1468 cmp_label = gen_label_rtx ();
1469 iter = gen_reg_rtx (iter_mode);
1470
1471 emit_move_insn (iter, const0_rtx);
1472
1473 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1474 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1475 do_pending_stack_adjust ();
1476
1477 emit_jump (cmp_label);
1478 emit_label (top_label);
1479
1480 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1481 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1482
1483 if (x_addr_mode != y_addr_mode)
1484 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1485 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1486
1487 x = change_address (x, QImode, x_addr);
1488 y = change_address (y, QImode, y_addr);
1489
1490 emit_move_insn (x, y);
1491
1492 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1493 true, OPTAB_LIB_WIDEN);
1494 if (tmp != iter)
1495 emit_move_insn (iter, tmp);
1496
1497 emit_label (cmp_label);
1498
1499 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1500 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1501 }
1502 \f
1503 /* Copy all or part of a value X into registers starting at REGNO.
1504 The number of registers to be filled is NREGS. */
1505
1506 void
1507 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1508 {
1509 int i;
1510 rtx pat;
1511 rtx_insn *last;
1512
1513 if (nregs == 0)
1514 return;
1515
1516 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1517 x = validize_mem (force_const_mem (mode, x));
1518
1519 /* See if the machine can do this with a load multiple insn. */
1520 if (HAVE_load_multiple)
1521 {
1522 last = get_last_insn ();
1523 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1524 GEN_INT (nregs));
1525 if (pat)
1526 {
1527 emit_insn (pat);
1528 return;
1529 }
1530 else
1531 delete_insns_since (last);
1532 }
1533
1534 for (i = 0; i < nregs; i++)
1535 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1536 operand_subword_force (x, i, mode));
1537 }
1538
1539 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1540 The number of registers to be filled is NREGS. */
1541
1542 void
1543 move_block_from_reg (int regno, rtx x, int nregs)
1544 {
1545 int i;
1546
1547 if (nregs == 0)
1548 return;
1549
1550 /* See if the machine can do this with a store multiple insn. */
1551 #ifdef HAVE_store_multiple
1552 if (HAVE_store_multiple)
1553 {
1554 rtx_insn *last = get_last_insn ();
1555 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1556 GEN_INT (nregs));
1557 if (pat)
1558 {
1559 emit_insn (pat);
1560 return;
1561 }
1562 else
1563 delete_insns_since (last);
1564 }
1565 #endif
1566
1567 for (i = 0; i < nregs; i++)
1568 {
1569 rtx tem = operand_subword (x, i, 1, BLKmode);
1570
1571 gcc_assert (tem);
1572
1573 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1574 }
1575 }
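
/* A usage sketch for the two helpers above (illustrative only): shuttle a
   two-word BLKmode value between memory and consecutive hard registers.
   REGNO and the two-word count are made-up example values; real callers
   take them from the target's calling conventions.  */

static void
example_shuttle_two_words (int regno, rtx blk_mem)
{
  /* Memory -> registers, e.g. when an argument is passed in registers.  */
  move_block_to_reg (regno, blk_mem, 2, BLKmode);

  /* Registers -> memory, e.g. when building a stack copy of the value.  */
  move_block_from_reg (regno, blk_mem, 2);
}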
1576
1577 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1578 ORIG, where ORIG is a non-consecutive group of registers represented by
1579 a PARALLEL. The clone is identical to the original except in that the
1580 original set of registers is replaced by a new set of pseudo registers.
1581 The new set has the same modes as the original set. */
1582
1583 rtx
1584 gen_group_rtx (rtx orig)
1585 {
1586 int i, length;
1587 rtx *tmps;
1588
1589 gcc_assert (GET_CODE (orig) == PARALLEL);
1590
1591 length = XVECLEN (orig, 0);
1592 tmps = XALLOCAVEC (rtx, length);
1593
1594 /* Skip a NULL entry in first slot. */
1595 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1596
1597 if (i)
1598 tmps[0] = 0;
1599
1600 for (; i < length; i++)
1601 {
1602 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1603 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1604
1605 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1606 }
1607
1608 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1609 }
1610
1611 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1612 except that values are placed in TMPS[i], and must later be moved
1613 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1614
1615 static void
1616 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1617 {
1618 rtx src;
1619 int start, i;
1620 machine_mode m = GET_MODE (orig_src);
1621
1622 gcc_assert (GET_CODE (dst) == PARALLEL);
1623
1624 if (m != VOIDmode
1625 && !SCALAR_INT_MODE_P (m)
1626 && !MEM_P (orig_src)
1627 && GET_CODE (orig_src) != CONCAT)
1628 {
1629 machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1630 if (imode == BLKmode)
1631 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1632 else
1633 src = gen_reg_rtx (imode);
1634 if (imode != BLKmode)
1635 src = gen_lowpart (GET_MODE (orig_src), src);
1636 emit_move_insn (src, orig_src);
1637 /* ...and back again. */
1638 if (imode != BLKmode)
1639 src = gen_lowpart (imode, src);
1640 emit_group_load_1 (tmps, dst, src, type, ssize);
1641 return;
1642 }
1643
1644 /* Check for a NULL entry, used to indicate that the parameter goes
1645 both on the stack and in registers. */
1646 if (XEXP (XVECEXP (dst, 0, 0), 0))
1647 start = 0;
1648 else
1649 start = 1;
1650
1651 /* Process the pieces. */
1652 for (i = start; i < XVECLEN (dst, 0); i++)
1653 {
1654 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1655 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1656 unsigned int bytelen = GET_MODE_SIZE (mode);
1657 int shift = 0;
1658
1659 /* Handle trailing fragments that run over the size of the struct. */
1660 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1661 {
1662 /* Arrange to shift the fragment to where it belongs.
1663 extract_bit_field loads to the lsb of the reg. */
1664 if (
1665 #ifdef BLOCK_REG_PADDING
1666 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1667 == (BYTES_BIG_ENDIAN ? upward : downward)
1668 #else
1669 BYTES_BIG_ENDIAN
1670 #endif
1671 )
1672 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1673 bytelen = ssize - bytepos;
1674 gcc_assert (bytelen > 0);
1675 }
1676
1677 /* If we won't be loading directly from memory, protect the real source
1678 from strange tricks we might play; but make sure that the source can
1679 be loaded directly into the destination. */
1680 src = orig_src;
1681 if (!MEM_P (orig_src)
1682 && (!CONSTANT_P (orig_src)
1683 || (GET_MODE (orig_src) != mode
1684 && GET_MODE (orig_src) != VOIDmode)))
1685 {
1686 if (GET_MODE (orig_src) == VOIDmode)
1687 src = gen_reg_rtx (mode);
1688 else
1689 src = gen_reg_rtx (GET_MODE (orig_src));
1690
1691 emit_move_insn (src, orig_src);
1692 }
1693
1694 /* Optimize the access just a bit. */
1695 if (MEM_P (src)
1696 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1697 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1698 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1699 && bytelen == GET_MODE_SIZE (mode))
1700 {
1701 tmps[i] = gen_reg_rtx (mode);
1702 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1703 }
1704 else if (COMPLEX_MODE_P (mode)
1705 && GET_MODE (src) == mode
1706 && bytelen == GET_MODE_SIZE (mode))
1707 /* Let emit_move_complex do the bulk of the work. */
1708 tmps[i] = src;
1709 else if (GET_CODE (src) == CONCAT)
1710 {
1711 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1712 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1713
1714 if ((bytepos == 0 && bytelen == slen0)
1715 || (bytepos != 0 && bytepos + bytelen <= slen))
1716 {
1717 /* The following assumes that the concatenated objects all
1718 have the same size. In this case, a simple calculation
1719 can be used to determine the object and the bit field
1720 to be extracted. */
1721 tmps[i] = XEXP (src, bytepos / slen0);
1722 if (! CONSTANT_P (tmps[i])
1723 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1724 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1725 (bytepos % slen0) * BITS_PER_UNIT,
1726 1, NULL_RTX, mode, mode);
1727 }
1728 else
1729 {
1730 rtx mem;
1731
1732 gcc_assert (!bytepos);
1733 mem = assign_stack_temp (GET_MODE (src), slen);
1734 emit_move_insn (mem, src);
1735 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1736 0, 1, NULL_RTX, mode, mode);
1737 }
1738 }
1739 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
 1740 	 SIMD register, which is currently broken.  Until we get GCC
1741 to emit proper RTL for these cases, let's dump to memory. */
1742 else if (VECTOR_MODE_P (GET_MODE (dst))
1743 && REG_P (src))
1744 {
1745 int slen = GET_MODE_SIZE (GET_MODE (src));
1746 rtx mem;
1747
1748 mem = assign_stack_temp (GET_MODE (src), slen);
1749 emit_move_insn (mem, src);
1750 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1751 }
1752 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1753 && XVECLEN (dst, 0) > 1)
1754 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1755 else if (CONSTANT_P (src))
1756 {
1757 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1758
1759 if (len == ssize)
1760 tmps[i] = src;
1761 else
1762 {
1763 rtx first, second;
1764
1765 /* TODO: const_wide_int can have sizes other than this... */
1766 gcc_assert (2 * len == ssize);
1767 split_double (src, &first, &second);
1768 if (i)
1769 tmps[i] = second;
1770 else
1771 tmps[i] = first;
1772 }
1773 }
1774 else if (REG_P (src) && GET_MODE (src) == mode)
1775 tmps[i] = src;
1776 else
1777 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1778 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1779 mode, mode);
1780
1781 if (shift)
1782 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1783 shift, tmps[i], 0);
1784 }
1785 }
1786
1787 /* Emit code to move a block SRC of type TYPE to a block DST,
1788 where DST is non-consecutive registers represented by a PARALLEL.
1789 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1790 if not known. */
1791
1792 void
1793 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1794 {
1795 rtx *tmps;
1796 int i;
1797
1798 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1799 emit_group_load_1 (tmps, dst, src, type, ssize);
1800
1801 /* Copy the extracted pieces into the proper (probable) hard regs. */
1802 for (i = 0; i < XVECLEN (dst, 0); i++)
1803 {
1804 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1805 if (d == NULL)
1806 continue;
1807 emit_move_insn (d, tmps[i]);
1808 }
1809 }
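
/* An illustrative sketch, not part of the compiler proper: a caller with
   a 16-byte structure in memory SRC_MEM and an ABI that returns it in two
   DImode registers could build the PARALLEL by hand and load it as below.
   The register numbers, the offsets and the 16-byte size are invented for
   the example; real callers normally get the PARALLEL from the target's
   function-value or argument-passing hooks.

     rtx reg0 = gen_rtx_REG (DImode, 0);
     rtx reg1 = gen_rtx_REG (DImode, 1);
     rtvec v = gen_rtvec (2,
                          gen_rtx_EXPR_LIST (VOIDmode, reg0, const0_rtx),
                          gen_rtx_EXPR_LIST (VOIDmode, reg1, GEN_INT (8)));
     rtx par = gen_rtx_PARALLEL (BLKmode, v);
     emit_group_load (par, src_mem, type, 16);  */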
1810
1811 /* Similar, but load SRC into new pseudos in a format that looks like
1812 PARALLEL. This can later be fed to emit_group_move to get things
1813 in the right place. */
1814
1815 rtx
1816 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1817 {
1818 rtvec vec;
1819 int i;
1820
1821 vec = rtvec_alloc (XVECLEN (parallel, 0));
1822 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1823
1824 /* Convert the vector to look just like the original PARALLEL, except
1825 with the computed values. */
1826 for (i = 0; i < XVECLEN (parallel, 0); i++)
1827 {
1828 rtx e = XVECEXP (parallel, 0, i);
1829 rtx d = XEXP (e, 0);
1830
1831 if (d)
1832 {
1833 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1834 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1835 }
1836 RTVEC_ELT (vec, i) = e;
1837 }
1838
1839 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1840 }
1841
1842 /* Emit code to move a block SRC to block DST, where SRC and DST are
1843 non-consecutive groups of registers, each represented by a PARALLEL. */
1844
1845 void
1846 emit_group_move (rtx dst, rtx src)
1847 {
1848 int i;
1849
1850 gcc_assert (GET_CODE (src) == PARALLEL
1851 && GET_CODE (dst) == PARALLEL
1852 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1853
1854 /* Skip first entry if NULL. */
1855 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1856 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1857 XEXP (XVECEXP (src, 0, i), 0));
1858 }
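
/* An illustrative sketch of how the two-step variant is used: load the
   pieces into fresh pseudos first, emit whatever other code is needed,
   and only then move them into the hard registers.  HARD_PAR, SRC_MEM,
   TYPE and SIZE are placeholders for data the caller already has.

     rtx tmp_par = emit_group_load_into_temps (hard_par, src_mem, type, size);
     ... emit code that must not clobber the hard registers yet ...
     emit_group_move (hard_par, tmp_par);  */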
1859
1860 /* Move a group of registers represented by a PARALLEL into pseudos. */
1861
1862 rtx
1863 emit_group_move_into_temps (rtx src)
1864 {
1865 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1866 int i;
1867
1868 for (i = 0; i < XVECLEN (src, 0); i++)
1869 {
1870 rtx e = XVECEXP (src, 0, i);
1871 rtx d = XEXP (e, 0);
1872
1873 if (d)
1874 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1875 RTVEC_ELT (vec, i) = e;
1876 }
1877
1878 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1879 }
1880
1881 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1882 where SRC is non-consecutive registers represented by a PARALLEL.
1883 SSIZE represents the total size of block ORIG_DST, or -1 if not
1884 known. */
1885
1886 void
1887 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1888 {
1889 rtx *tmps, dst;
1890 int start, finish, i;
1891 machine_mode m = GET_MODE (orig_dst);
1892
1893 gcc_assert (GET_CODE (src) == PARALLEL);
1894
1895 if (!SCALAR_INT_MODE_P (m)
1896 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1897 {
1898 machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1899 if (imode == BLKmode)
1900 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1901 else
1902 dst = gen_reg_rtx (imode);
1903 emit_group_store (dst, src, type, ssize);
1904 if (imode != BLKmode)
1905 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1906 emit_move_insn (orig_dst, dst);
1907 return;
1908 }
1909
1910 /* Check for a NULL entry, used to indicate that the parameter goes
1911 both on the stack and in registers. */
1912 if (XEXP (XVECEXP (src, 0, 0), 0))
1913 start = 0;
1914 else
1915 start = 1;
1916 finish = XVECLEN (src, 0);
1917
1918 tmps = XALLOCAVEC (rtx, finish);
1919
1920 /* Copy the (probable) hard regs into pseudos. */
1921 for (i = start; i < finish; i++)
1922 {
1923 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1924 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1925 {
1926 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1927 emit_move_insn (tmps[i], reg);
1928 }
1929 else
1930 tmps[i] = reg;
1931 }
1932
1933 /* If we won't be storing directly into memory, protect the real destination
1934 from strange tricks we might play. */
1935 dst = orig_dst;
1936 if (GET_CODE (dst) == PARALLEL)
1937 {
1938 rtx temp;
1939
1940 /* We can get a PARALLEL dst if there is a conditional expression in
1941 a return statement. In that case, the dst and src are the same,
1942 so no action is necessary. */
1943 if (rtx_equal_p (dst, src))
1944 return;
1945
1946 /* It is unclear if we can ever reach here, but we may as well handle
1947 it. Allocate a temporary, and split this into a store/load to/from
1948 the temporary. */
1949 temp = assign_stack_temp (GET_MODE (dst), ssize);
1950 emit_group_store (temp, src, type, ssize);
1951 emit_group_load (dst, temp, type, ssize);
1952 return;
1953 }
1954 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1955 {
1956 machine_mode outer = GET_MODE (dst);
1957 machine_mode inner;
1958 HOST_WIDE_INT bytepos;
1959 bool done = false;
1960 rtx temp;
1961
1962 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1963 dst = gen_reg_rtx (outer);
1964
1965 /* Make life a bit easier for combine. */
1966 /* If the first element of the vector is the low part
1967 of the destination mode, use a paradoxical subreg to
1968 initialize the destination. */
1969 if (start < finish)
1970 {
1971 inner = GET_MODE (tmps[start]);
1972 bytepos = subreg_lowpart_offset (inner, outer);
1973 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1974 {
1975 temp = simplify_gen_subreg (outer, tmps[start],
1976 inner, 0);
1977 if (temp)
1978 {
1979 emit_move_insn (dst, temp);
1980 done = true;
1981 start++;
1982 }
1983 }
1984 }
1985
1986 /* If the first element wasn't the low part, try the last. */
1987 if (!done
1988 && start < finish - 1)
1989 {
1990 inner = GET_MODE (tmps[finish - 1]);
1991 bytepos = subreg_lowpart_offset (inner, outer);
1992 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1993 {
1994 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1995 inner, 0);
1996 if (temp)
1997 {
1998 emit_move_insn (dst, temp);
1999 done = true;
2000 finish--;
2001 }
2002 }
2003 }
2004
2005 /* Otherwise, simply initialize the result to zero. */
2006 if (!done)
2007 emit_move_insn (dst, CONST0_RTX (outer));
2008 }
2009
2010 /* Process the pieces. */
2011 for (i = start; i < finish; i++)
2012 {
2013 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2014 machine_mode mode = GET_MODE (tmps[i]);
2015 unsigned int bytelen = GET_MODE_SIZE (mode);
2016 unsigned int adj_bytelen;
2017 rtx dest = dst;
2018
2019 /* Handle trailing fragments that run over the size of the struct. */
2020 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2021 adj_bytelen = ssize - bytepos;
2022 else
2023 adj_bytelen = bytelen;
2024
2025 if (GET_CODE (dst) == CONCAT)
2026 {
2027 if (bytepos + adj_bytelen
2028 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2029 dest = XEXP (dst, 0);
2030 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2031 {
2032 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2033 dest = XEXP (dst, 1);
2034 }
2035 else
2036 {
2037 machine_mode dest_mode = GET_MODE (dest);
2038 machine_mode tmp_mode = GET_MODE (tmps[i]);
2039
2040 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2041
2042 if (GET_MODE_ALIGNMENT (dest_mode)
2043 >= GET_MODE_ALIGNMENT (tmp_mode))
2044 {
2045 dest = assign_stack_temp (dest_mode,
2046 GET_MODE_SIZE (dest_mode));
2047 emit_move_insn (adjust_address (dest,
2048 tmp_mode,
2049 bytepos),
2050 tmps[i]);
2051 dst = dest;
2052 }
2053 else
2054 {
2055 dest = assign_stack_temp (tmp_mode,
2056 GET_MODE_SIZE (tmp_mode));
2057 emit_move_insn (dest, tmps[i]);
2058 dst = adjust_address (dest, dest_mode, bytepos);
2059 }
2060 break;
2061 }
2062 }
2063
2064 /* Handle trailing fragments that run over the size of the struct. */
2065 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2066 {
2067 /* store_bit_field always takes its value from the lsb.
2068 Move the fragment to the lsb if it's not already there. */
2069 if (
2070 #ifdef BLOCK_REG_PADDING
2071 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2072 == (BYTES_BIG_ENDIAN ? upward : downward)
2073 #else
2074 BYTES_BIG_ENDIAN
2075 #endif
2076 )
2077 {
2078 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2079 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2080 shift, tmps[i], 0);
2081 }
2082
2083 /* Make sure not to write past the end of the struct. */
2084 store_bit_field (dest,
2085 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2086 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2087 VOIDmode, tmps[i]);
2088 }
2089
2090 /* Optimize the access just a bit. */
2091 else if (MEM_P (dest)
2092 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2093 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2094 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2095 && bytelen == GET_MODE_SIZE (mode))
2096 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2097
2098 else
2099 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2100 0, 0, mode, tmps[i]);
2101 }
2102
2103 /* Copy from the pseudo into the (probable) hard reg. */
2104 if (orig_dst != dst)
2105 emit_move_insn (orig_dst, dst);
2106 }
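
/* An illustrative sketch: spilling a two-register return value described
   by a PARALLEL into a stack temporary.  VALUE_PAR is a placeholder for
   the PARALLEL obtained from the target's function-value hook, and the
   16-byte size is invented for the example.

     rtx slot = assign_stack_temp (BLKmode, 16);
     emit_group_store (slot, value_par, type, 16);  */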
2107
2108 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2109 of the value stored in X. */
2110
2111 rtx
2112 maybe_emit_group_store (rtx x, tree type)
2113 {
2114 machine_mode mode = TYPE_MODE (type);
2115 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2116 if (GET_CODE (x) == PARALLEL)
2117 {
2118 rtx result = gen_reg_rtx (mode);
2119 emit_group_store (result, x, type, int_size_in_bytes (type));
2120 return result;
2121 }
2122 return x;
2123 }
2124
2125 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2126
2127 This is used on targets that return BLKmode values in registers. */
2128
2129 void
2130 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2131 {
2132 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2133 rtx src = NULL, dst = NULL;
2134 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2135 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2136 machine_mode mode = GET_MODE (srcreg);
2137 machine_mode tmode = GET_MODE (target);
2138 machine_mode copy_mode;
2139
2140 /* BLKmode registers created in the back-end shouldn't have survived. */
2141 gcc_assert (mode != BLKmode);
2142
2143 /* If the structure doesn't take up a whole number of words, see whether
2144 SRCREG is padded on the left or on the right. If it's on the left,
2145 set PADDING_CORRECTION to the number of bits to skip.
2146
2147 In most ABIs, the structure will be returned at the least significant
2148 end of the register, which translates to right padding on little-endian
2149 targets and left padding on big-endian targets. The opposite
2150 holds if the structure is returned at the most significant
2151 end of the register. */
2152 if (bytes % UNITS_PER_WORD != 0
2153 && (targetm.calls.return_in_msb (type)
2154 ? !BYTES_BIG_ENDIAN
2155 : BYTES_BIG_ENDIAN))
2156 padding_correction
2157 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2158
2159 /* We can use a single move if we have an exact mode for the size. */
2160 else if (MEM_P (target)
2161 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2162 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2163 && bytes == GET_MODE_SIZE (mode))
2164 {
2165 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2166 return;
2167 }
2168
2169 /* And if we additionally have the same mode for a register. */
2170 else if (REG_P (target)
2171 && GET_MODE (target) == mode
2172 && bytes == GET_MODE_SIZE (mode))
2173 {
2174 emit_move_insn (target, srcreg);
2175 return;
2176 }
2177
2178 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2179 into a new pseudo which is a full word. */
2180 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2181 {
2182 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2183 mode = word_mode;
2184 }
2185
2186 /* Copy the structure BITSIZE bits at a time. If the target lives in
2187 memory, take care of not reading/writing past its end by selecting
2188 a copy mode suited to BITSIZE. This should always be possible given
2189 how it is computed.
2190
2191 If the target lives in a register, make sure not to select a copy mode
2192 larger than the mode of the register.
2193
2194 We could probably emit more efficient code for machines which do not use
2195 strict alignment, but it doesn't seem worth the effort at the current
2196 time. */
2197
2198 copy_mode = word_mode;
2199 if (MEM_P (target))
2200 {
2201 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2202 if (mem_mode != BLKmode)
2203 copy_mode = mem_mode;
2204 }
2205 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2206 copy_mode = tmode;
2207
2208 for (bitpos = 0, xbitpos = padding_correction;
2209 bitpos < bytes * BITS_PER_UNIT;
2210 bitpos += bitsize, xbitpos += bitsize)
2211 {
2212 /* We need a new source operand each time xbitpos is on a
2213 word boundary and when xbitpos == padding_correction
2214 (the first time through). */
2215 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2216 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2217
2218 /* We need a new destination operand each time bitpos is on
2219 a word boundary. */
2220 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2221 dst = target;
2222 else if (bitpos % BITS_PER_WORD == 0)
2223 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2224
2225 /* Use xbitpos for the source extraction (right justified) and
2226 bitpos for the destination store (left justified). */
2227 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2228 extract_bit_field (src, bitsize,
2229 xbitpos % BITS_PER_WORD, 1,
2230 NULL_RTX, copy_mode, copy_mode));
2231 }
2232 }
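
/* An illustrative sketch: a typical caller expands a call whose BLKmode
   return value arrives in a register and spills it to a temporary.
   HARD_RETURN_REG is a placeholder for whatever the target's
   function-value hook returned.

     rtx target = assign_stack_temp (BLKmode, int_size_in_bytes (type));
     copy_blkmode_from_reg (target, hard_return_reg, type);  */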
2233
2234 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2235 register if it contains any data, otherwise return null.
2236
2237 This is used on targets that return BLKmode values in registers. */
2238
2239 rtx
2240 copy_blkmode_to_reg (machine_mode mode, tree src)
2241 {
2242 int i, n_regs;
2243 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2244 unsigned int bitsize;
2245 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2246 machine_mode dst_mode;
2247
2248 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2249
2250 x = expand_normal (src);
2251
2252 bytes = int_size_in_bytes (TREE_TYPE (src));
2253 if (bytes == 0)
2254 return NULL_RTX;
2255
2256 /* If the structure doesn't take up a whole number of words, see
2257 whether the register value should be padded on the left or on
2258 the right. Set PADDING_CORRECTION to the number of padding
2259 bits needed on the left side.
2260
2261 In most ABIs, the structure will be returned at the least significant
2262 end of the register, which translates to right padding on little-endian
2263 targets and left padding on big-endian targets. The opposite
2264 holds if the structure is returned at the most significant
2265 end of the register. */
2266 if (bytes % UNITS_PER_WORD != 0
2267 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2268 ? !BYTES_BIG_ENDIAN
2269 : BYTES_BIG_ENDIAN))
2270 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2271 * BITS_PER_UNIT));
2272
2273 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2274 dst_words = XALLOCAVEC (rtx, n_regs);
2275 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2276
2277 /* Copy the structure BITSIZE bits at a time. */
2278 for (bitpos = 0, xbitpos = padding_correction;
2279 bitpos < bytes * BITS_PER_UNIT;
2280 bitpos += bitsize, xbitpos += bitsize)
2281 {
2282 /* We need a new destination pseudo each time xbitpos is
2283 on a word boundary and when xbitpos == padding_correction
2284 (the first time through). */
2285 if (xbitpos % BITS_PER_WORD == 0
2286 || xbitpos == padding_correction)
2287 {
2288 /* Generate an appropriate register. */
2289 dst_word = gen_reg_rtx (word_mode);
2290 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2291
2292 /* Clear the destination before we move anything into it. */
2293 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2294 }
2295
2296 /* We need a new source operand each time bitpos is on a word
2297 boundary. */
2298 if (bitpos % BITS_PER_WORD == 0)
2299 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2300
2301 /* Use bitpos for the source extraction (left justified) and
2302 xbitpos for the destination store (right justified). */
2303 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2304 0, 0, word_mode,
2305 extract_bit_field (src_word, bitsize,
2306 bitpos % BITS_PER_WORD, 1,
2307 NULL_RTX, word_mode, word_mode));
2308 }
2309
2310 if (mode == BLKmode)
2311 {
2312 /* Find the smallest integer mode large enough to hold the
2313 entire structure. */
2314 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2315 mode != VOIDmode;
2316 mode = GET_MODE_WIDER_MODE (mode))
2317 /* Have we found a large enough mode? */
2318 if (GET_MODE_SIZE (mode) >= bytes)
2319 break;
2320
2321 /* A suitable mode should have been found. */
2322 gcc_assert (mode != VOIDmode);
2323 }
2324
2325 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2326 dst_mode = word_mode;
2327 else
2328 dst_mode = mode;
2329 dst = gen_reg_rtx (dst_mode);
2330
2331 for (i = 0; i < n_regs; i++)
2332 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2333
2334 if (mode != dst_mode)
2335 dst = gen_lowpart (mode, dst);
2336
2337 return dst;
2338 }
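
/* An illustrative sketch: the return-statement expander uses this routine
   roughly as below.  RETURN_REG and RETVAL_EXPR are placeholders for the
   caller's data.

     rtx val = copy_blkmode_to_reg (GET_MODE (return_reg), retval_expr);
     if (val)
       emit_move_insn (return_reg, val);  */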
2339
2340 /* Add a USE expression for REG to the (possibly empty) list pointed
2341 to by CALL_FUSAGE. REG must denote a hard register. */
2342
2343 void
2344 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2345 {
2346 gcc_assert (REG_P (reg));
2347
2348 if (!HARD_REGISTER_P (reg))
2349 return;
2350
2351 *call_fusage
2352 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2353 }
2354
2355 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2356 to by CALL_FUSAGE. REG must denote a hard register. */
2357
2358 void
2359 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2360 {
2361 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2362
2363 *call_fusage
2364 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2365 }
2366
2367 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2368 starting at REGNO. All of these registers must be hard registers. */
2369
2370 void
2371 use_regs (rtx *call_fusage, int regno, int nregs)
2372 {
2373 int i;
2374
2375 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2376
2377 for (i = 0; i < nregs; i++)
2378 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2379 }
2380
2381 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2382 PARALLEL REGS. This is for calls that pass values in multiple
2383 non-contiguous locations. The Irix 6 ABI has examples of this. */
2384
2385 void
2386 use_group_regs (rtx *call_fusage, rtx regs)
2387 {
2388 int i;
2389
2390 for (i = 0; i < XVECLEN (regs, 0); i++)
2391 {
2392 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2393
2394 /* A NULL entry means the parameter goes both on the stack and in
2395 registers. This can also be a MEM for targets that pass values
2396 partially on the stack and partially in registers. */
2397 if (reg != 0 && REG_P (reg))
2398 use_reg (call_fusage, reg);
2399 }
2400 }
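
/* An illustrative sketch: the call expander accumulates these notes in a
   local list and attaches it to the call insn, roughly as below.  The
   register number and mode are invented; the real ones come from the
   target's argument-passing hooks.

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, gen_rtx_REG (SImode, 0));
     use_group_regs (&call_fusage, arg_parallel);
     ...
     add_function_usage_to (call_insn, call_fusage);  */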
2401
2402 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2403 assignment and the code of the expression on the RHS is CODE. Return
2404 NULL otherwise. */
2405
2406 static gimple
2407 get_def_for_expr (tree name, enum tree_code code)
2408 {
2409 gimple def_stmt;
2410
2411 if (TREE_CODE (name) != SSA_NAME)
2412 return NULL;
2413
2414 def_stmt = get_gimple_for_ssa_name (name);
2415 if (!def_stmt
2416 || gimple_assign_rhs_code (def_stmt) != code)
2417 return NULL;
2418
2419 return def_stmt;
2420 }
2421
2422 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2423 assignment and the class of the expression on the RHS is CLASS. Return
2424 NULL otherwise. */
2425
2426 static gimple
2427 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2428 {
2429 gimple def_stmt;
2430
2431 if (TREE_CODE (name) != SSA_NAME)
2432 return NULL;
2433
2434 def_stmt = get_gimple_for_ssa_name (name);
2435 if (!def_stmt
2436 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2437 return NULL;
2438
2439 return def_stmt;
2440 }
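
/* An illustrative sketch: expansion code uses these helpers to look
   through an SSA name at the statement that defines it, e.g. to detect
   a multiplication feeding the operand it is about to expand.  TREEOP0
   is a placeholder for an operand tree the caller already has.

     gimple def = get_def_for_expr (treeop0, MULT_EXPR);
     if (def)
       {
         tree rhs1 = gimple_assign_rhs1 (def);
         tree rhs2 = gimple_assign_rhs2 (def);
         ... expand the combined operation ...
       }  */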
2441 \f
2442
2443 /* Determine whether the LEN bytes generated by CONSTFUN can be
2444 stored to memory using several move instructions. CONSTFUNDATA is
2445 a pointer which will be passed as an argument in every CONSTFUN call.
2446 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2447 a memset operation and false if it's a copy of a constant string.
2448 Return nonzero if a call to store_by_pieces should succeed. */
2449
2450 int
2451 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2452 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2453 void *constfundata, unsigned int align, bool memsetp)
2454 {
2455 unsigned HOST_WIDE_INT l;
2456 unsigned int max_size;
2457 HOST_WIDE_INT offset = 0;
2458 machine_mode mode;
2459 enum insn_code icode;
2460 int reverse;
2461 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2462 rtx cst ATTRIBUTE_UNUSED;
2463
2464 if (len == 0)
2465 return 1;
2466
2467 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2468 memsetp
2469 ? SET_BY_PIECES
2470 : STORE_BY_PIECES,
2471 optimize_insn_for_speed_p ()))
2472 return 0;
2473
2474 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2475
2476 /* We would first store what we can in the largest integer mode, then go to
2477 successively smaller modes. */
2478
2479 for (reverse = 0;
2480 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2481 reverse++)
2482 {
2483 l = len;
2484 max_size = STORE_MAX_PIECES + 1;
2485 while (max_size > 1 && l > 0)
2486 {
2487 mode = widest_int_mode_for_size (max_size);
2488
2489 if (mode == VOIDmode)
2490 break;
2491
2492 icode = optab_handler (mov_optab, mode);
2493 if (icode != CODE_FOR_nothing
2494 && align >= GET_MODE_ALIGNMENT (mode))
2495 {
2496 unsigned int size = GET_MODE_SIZE (mode);
2497
2498 while (l >= size)
2499 {
2500 if (reverse)
2501 offset -= size;
2502
2503 cst = (*constfun) (constfundata, offset, mode);
2504 if (!targetm.legitimate_constant_p (mode, cst))
2505 return 0;
2506
2507 if (!reverse)
2508 offset += size;
2509
2510 l -= size;
2511 }
2512 }
2513
2514 max_size = GET_MODE_SIZE (mode);
2515 }
2516
2517 /* The code above should have handled everything. */
2518 gcc_assert (!l);
2519 }
2520
2521 return 1;
2522 }
2523
2524 /* Generate several move instructions to store LEN bytes generated by
2525 CONSTFUN to block TO (a MEM rtx with BLKmode). CONSTFUNDATA is a
2526 pointer which will be passed as an argument in every CONSTFUN call.
2527 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2528 a memset operation and false if it's a copy of a constant string.
2529 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2530 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
2531 stpcpy. */
2532
2533 rtx
2534 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2535 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2536 void *constfundata, unsigned int align, bool memsetp, int endp)
2537 {
2538 machine_mode to_addr_mode = get_address_mode (to);
2539 struct store_by_pieces_d data;
2540
2541 if (len == 0)
2542 {
2543 gcc_assert (endp != 2);
2544 return to;
2545 }
2546
2547 gcc_assert (targetm.use_by_pieces_infrastructure_p
2548 (len, align,
2549 memsetp
2550 ? SET_BY_PIECES
2551 : STORE_BY_PIECES,
2552 optimize_insn_for_speed_p ()));
2553
2554 data.constfun = constfun;
2555 data.constfundata = constfundata;
2556 data.len = len;
2557 data.to = to;
2558 store_by_pieces_1 (&data, align);
2559 if (endp)
2560 {
2561 rtx to1;
2562
2563 gcc_assert (!data.reverse);
2564 if (data.autinc_to)
2565 {
2566 if (endp == 2)
2567 {
2568 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2569 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2570 else
2571 data.to_addr = copy_to_mode_reg (to_addr_mode,
2572 plus_constant (to_addr_mode,
2573 data.to_addr,
2574 -1));
2575 }
2576 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2577 data.offset);
2578 }
2579 else
2580 {
2581 if (endp == 2)
2582 --data.offset;
2583 to1 = adjust_address (data.to, QImode, data.offset);
2584 }
2585 return to1;
2586 }
2587 else
2588 return data.to;
2589 }
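
/* An illustrative sketch of how the two functions above are used in
   tandem when copying a constant string, e.g. by the memcpy builtin
   expander.  READ_STR is a placeholder for a callback with the
   rtx (*) (void *, HOST_WIDE_INT, machine_mode) signature (builtins.c
   provides builtin_memcpy_read_str for this purpose).

     if (can_store_by_pieces (len, read_str, (void *) str, dest_align, false))
       dest_mem = store_by_pieces (dest_mem, len, read_str, (void *) str,
                                   dest_align, false, 0);
     else
       ... fall back to a library call ...  */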
2590
2591 /* Generate several move instructions to clear LEN bytes of block TO (a MEM
2592 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2593
2594 static void
2595 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2596 {
2597 struct store_by_pieces_d data;
2598
2599 if (len == 0)
2600 return;
2601
2602 data.constfun = clear_by_pieces_1;
2603 data.constfundata = NULL;
2604 data.len = len;
2605 data.to = to;
2606 store_by_pieces_1 (&data, align);
2607 }
2608
2609 /* Callback routine for clear_by_pieces.
2610 Return const0_rtx unconditionally. */
2611
2612 static rtx
2613 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2614 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2615 machine_mode mode ATTRIBUTE_UNUSED)
2616 {
2617 return const0_rtx;
2618 }
2619
2620 /* Subroutine of clear_by_pieces and store_by_pieces.
2621 Generate several move instructions to store LEN bytes of block TO (a MEM
2622 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2623
2624 static void
2625 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2626 unsigned int align ATTRIBUTE_UNUSED)
2627 {
2628 machine_mode to_addr_mode = get_address_mode (data->to);
2629 rtx to_addr = XEXP (data->to, 0);
2630 unsigned int max_size = STORE_MAX_PIECES + 1;
2631 enum insn_code icode;
2632
2633 data->offset = 0;
2634 data->to_addr = to_addr;
2635 data->autinc_to
2636 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2637 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2638
2639 data->explicit_inc_to = 0;
2640 data->reverse
2641 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2642 if (data->reverse)
2643 data->offset = data->len;
2644
2645 /* If storing requires more than two move insns,
2646 copy addresses to registers (to make displacements shorter)
2647 and use post-increment if available. */
2648 if (!data->autinc_to
2649 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2650 {
2651 /* Determine the main mode we'll be using.
2652 MODE might not be used depending on the definitions of the
2653 USE_* macros below. */
2654 machine_mode mode ATTRIBUTE_UNUSED
2655 = widest_int_mode_for_size (max_size);
2656
2657 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2658 {
2659 data->to_addr = copy_to_mode_reg (to_addr_mode,
2660 plus_constant (to_addr_mode,
2661 to_addr,
2662 data->len));
2663 data->autinc_to = 1;
2664 data->explicit_inc_to = -1;
2665 }
2666
2667 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2668 && ! data->autinc_to)
2669 {
2670 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2671 data->autinc_to = 1;
2672 data->explicit_inc_to = 1;
2673 }
2674
2675 if ( !data->autinc_to && CONSTANT_P (to_addr))
2676 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2677 }
2678
2679 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2680
2681 /* First store what we can in the largest integer mode, then go to
2682 successively smaller modes. */
2683
2684 while (max_size > 1 && data->len > 0)
2685 {
2686 machine_mode mode = widest_int_mode_for_size (max_size);
2687
2688 if (mode == VOIDmode)
2689 break;
2690
2691 icode = optab_handler (mov_optab, mode);
2692 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2693 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2694
2695 max_size = GET_MODE_SIZE (mode);
2696 }
2697
2698 /* The code above should have handled everything. */
2699 gcc_assert (!data->len);
2700 }
2701
2702 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2703 with move instructions for mode MODE. GENFUN is the gen_... function
2704 to make a move insn for that mode. DATA has all the other info. */
2705
2706 static void
2707 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2708 struct store_by_pieces_d *data)
2709 {
2710 unsigned int size = GET_MODE_SIZE (mode);
2711 rtx to1, cst;
2712
2713 while (data->len >= size)
2714 {
2715 if (data->reverse)
2716 data->offset -= size;
2717
2718 if (data->autinc_to)
2719 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2720 data->offset);
2721 else
2722 to1 = adjust_address (data->to, mode, data->offset);
2723
2724 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2725 emit_insn (gen_add2_insn (data->to_addr,
2726 gen_int_mode (-(HOST_WIDE_INT) size,
2727 GET_MODE (data->to_addr))));
2728
2729 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2730 emit_insn ((*genfun) (to1, cst));
2731
2732 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2733 emit_insn (gen_add2_insn (data->to_addr,
2734 gen_int_mode (size,
2735 GET_MODE (data->to_addr))));
2736
2737 if (! data->reverse)
2738 data->offset += size;
2739
2740 data->len -= size;
2741 }
2742 }
2743 \f
2744 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2745 its length in bytes. */
2746
2747 rtx
2748 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2749 unsigned int expected_align, HOST_WIDE_INT expected_size,
2750 unsigned HOST_WIDE_INT min_size,
2751 unsigned HOST_WIDE_INT max_size,
2752 unsigned HOST_WIDE_INT probable_max_size)
2753 {
2754 machine_mode mode = GET_MODE (object);
2755 unsigned int align;
2756
2757 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2758
2759 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2760 just move a zero. Otherwise, do this a piece at a time. */
2761 if (mode != BLKmode
2762 && CONST_INT_P (size)
2763 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2764 {
2765 rtx zero = CONST0_RTX (mode);
2766 if (zero != NULL)
2767 {
2768 emit_move_insn (object, zero);
2769 return NULL;
2770 }
2771
2772 if (COMPLEX_MODE_P (mode))
2773 {
2774 zero = CONST0_RTX (GET_MODE_INNER (mode));
2775 if (zero != NULL)
2776 {
2777 write_complex_part (object, zero, 0);
2778 write_complex_part (object, zero, 1);
2779 return NULL;
2780 }
2781 }
2782 }
2783
2784 if (size == const0_rtx)
2785 return NULL;
2786
2787 align = MEM_ALIGN (object);
2788
2789 if (CONST_INT_P (size)
2790 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2791 CLEAR_BY_PIECES,
2792 optimize_insn_for_speed_p ()))
2793 clear_by_pieces (object, INTVAL (size), align);
2794 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2795 expected_align, expected_size,
2796 min_size, max_size, probable_max_size))
2797 ;
2798 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2799 return set_storage_via_libcall (object, size, const0_rtx,
2800 method == BLOCK_OP_TAILCALL);
2801 else
2802 gcc_unreachable ();
2803
2804 return NULL;
2805 }
2806
2807 rtx
2808 clear_storage (rtx object, rtx size, enum block_op_methods method)
2809 {
2810 unsigned HOST_WIDE_INT max, min = 0;
2811 if (GET_CODE (size) == CONST_INT)
2812 min = max = UINTVAL (size);
2813 else
2814 max = GET_MODE_MASK (GET_MODE (size));
2815 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2816 }
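
/* An illustrative sketch: zeroing a BLKmode object OBJ of LEN bytes is
   simply

     clear_storage (obj, GEN_INT (len), BLOCK_OP_NORMAL);

   OBJ and LEN are placeholders; callers with profile-derived size
   information use clear_storage_hints directly.  */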
2817
2818
2819 /* A subroutine of clear_storage. Expand a call to memset.
2820 Return the return value of memset, or 0 otherwise. */
2821
2822 rtx
2823 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2824 {
2825 tree call_expr, fn, object_tree, size_tree, val_tree;
2826 machine_mode size_mode;
2827 rtx retval;
2828
2829 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2830 place those new pseudos into a VAR_DECL and use them later. */
2831
2832 object = copy_addr_to_reg (XEXP (object, 0));
2833
2834 size_mode = TYPE_MODE (sizetype);
2835 size = convert_to_mode (size_mode, size, 1);
2836 size = copy_to_mode_reg (size_mode, size);
2837
2838 /* It is incorrect to use the libcall calling conventions to call
2839 memset in this context. This could be a user call to memset and
2840 the user may wish to examine the return value from memset. For
2841 targets where libcalls and normal calls have different conventions
2842 for returning pointers, we could end up generating incorrect code. */
2843
2844 object_tree = make_tree (ptr_type_node, object);
2845 if (!CONST_INT_P (val))
2846 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2847 size_tree = make_tree (sizetype, size);
2848 val_tree = make_tree (integer_type_node, val);
2849
2850 fn = clear_storage_libcall_fn (true);
2851 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2852 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2853
2854 retval = expand_normal (call_expr);
2855
2856 return retval;
2857 }
2858
2859 /* A subroutine of set_storage_via_libcall. Create the tree node
2860 for the function we use for block clears. */
2861
2862 tree block_clear_fn;
2863
2864 void
2865 init_block_clear_fn (const char *asmspec)
2866 {
2867 if (!block_clear_fn)
2868 {
2869 tree fn, args;
2870
2871 fn = get_identifier ("memset");
2872 args = build_function_type_list (ptr_type_node, ptr_type_node,
2873 integer_type_node, sizetype,
2874 NULL_TREE);
2875
2876 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2877 DECL_EXTERNAL (fn) = 1;
2878 TREE_PUBLIC (fn) = 1;
2879 DECL_ARTIFICIAL (fn) = 1;
2880 TREE_NOTHROW (fn) = 1;
2881 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2882 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2883
2884 block_clear_fn = fn;
2885 }
2886
2887 if (asmspec)
2888 set_user_assembler_name (block_clear_fn, asmspec);
2889 }
2890
2891 static tree
2892 clear_storage_libcall_fn (int for_call)
2893 {
2894 static bool emitted_extern;
2895
2896 if (!block_clear_fn)
2897 init_block_clear_fn (NULL);
2898
2899 if (for_call && !emitted_extern)
2900 {
2901 emitted_extern = true;
2902 make_decl_rtl (block_clear_fn);
2903 }
2904
2905 return block_clear_fn;
2906 }
2907 \f
2908 /* Expand a setmem pattern; return true if successful. */
2909
2910 bool
2911 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2912 unsigned int expected_align, HOST_WIDE_INT expected_size,
2913 unsigned HOST_WIDE_INT min_size,
2914 unsigned HOST_WIDE_INT max_size,
2915 unsigned HOST_WIDE_INT probable_max_size)
2916 {
2917 /* Try the most limited insn first, because there's no point
2918 including more than one in the machine description unless
2919 the more limited one has some advantage. */
2920
2921 machine_mode mode;
2922
2923 if (expected_align < align)
2924 expected_align = align;
2925 if (expected_size != -1)
2926 {
2927 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2928 expected_size = max_size;
2929 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2930 expected_size = min_size;
2931 }
2932
2933 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2934 mode = GET_MODE_WIDER_MODE (mode))
2935 {
2936 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2937
2938 if (code != CODE_FOR_nothing
2939 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2940 here because if SIZE is less than the mode mask, as it is
2941 returned by the macro, it will definitely be less than the
2942 actual mode mask. Since SIZE is within the Pmode address
2943 space, we limit MODE to Pmode. */
2944 && ((CONST_INT_P (size)
2945 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2946 <= (GET_MODE_MASK (mode) >> 1)))
2947 || max_size <= (GET_MODE_MASK (mode) >> 1)
2948 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2949 {
2950 struct expand_operand ops[9];
2951 unsigned int nops;
2952
2953 nops = insn_data[(int) code].n_generator_args;
2954 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2955
2956 create_fixed_operand (&ops[0], object);
2957 /* The check above guarantees that this size conversion is valid. */
2958 create_convert_operand_to (&ops[1], size, mode, true);
2959 create_convert_operand_from (&ops[2], val, byte_mode, true);
2960 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2961 if (nops >= 6)
2962 {
2963 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2964 create_integer_operand (&ops[5], expected_size);
2965 }
2966 if (nops >= 8)
2967 {
2968 create_integer_operand (&ops[6], min_size);
2969 /* If we cannot represent the maximal size,
2970 make the parameter NULL. */
2971 if ((HOST_WIDE_INT) max_size != -1)
2972 create_integer_operand (&ops[7], max_size);
2973 else
2974 create_fixed_operand (&ops[7], NULL);
2975 }
2976 if (nops == 9)
2977 {
2978 /* If we cannot represent the maximal size,
2979 make the parameter NULL. */
2980 if ((HOST_WIDE_INT) probable_max_size != -1)
2981 create_integer_operand (&ops[8], probable_max_size);
2982 else
2983 create_fixed_operand (&ops[8], NULL);
2984 }
2985 if (maybe_expand_insn (code, nops, ops))
2986 return true;
2987 }
2988 }
2989
2990 return false;
2991 }
2992
2993 \f
2994 /* Write to one of the components of the complex value CPLX. Write VAL to
2995 the real part if IMAG_P is false, and the imaginary part if it's true. */
2996
2997 void
2998 write_complex_part (rtx cplx, rtx val, bool imag_p)
2999 {
3000 machine_mode cmode;
3001 machine_mode imode;
3002 unsigned ibitsize;
3003
3004 if (GET_CODE (cplx) == CONCAT)
3005 {
3006 emit_move_insn (XEXP (cplx, imag_p), val);
3007 return;
3008 }
3009
3010 cmode = GET_MODE (cplx);
3011 imode = GET_MODE_INNER (cmode);
3012 ibitsize = GET_MODE_BITSIZE (imode);
3013
3014 /* For MEMs simplify_gen_subreg may generate an invalid new address
3015 because, e.g., the original address is considered mode-dependent
3016 by the target, which restricts simplify_subreg from invoking
3017 adjust_address_nv. Instead of preparing fallback support for an
3018 invalid address, we call adjust_address_nv directly. */
3019 if (MEM_P (cplx))
3020 {
3021 emit_move_insn (adjust_address_nv (cplx, imode,
3022 imag_p ? GET_MODE_SIZE (imode) : 0),
3023 val);
3024 return;
3025 }
3026
3027 /* If the sub-object is at least word sized, then we know that subregging
3028 will work. This special case is important, since store_bit_field
3029 wants to operate on integer modes, and there's rarely an OImode to
3030 correspond to TCmode. */
3031 if (ibitsize >= BITS_PER_WORD
3032 /* For hard regs we have exact predicates. Assume we can split
3033 the original object if it spans an even number of hard regs.
3034 This special case is important for SCmode on 64-bit platforms
3035 where the natural size of floating-point regs is 32-bit. */
3036 || (REG_P (cplx)
3037 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3038 && REG_NREGS (cplx) % 2 == 0))
3039 {
3040 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3041 imag_p ? GET_MODE_SIZE (imode) : 0);
3042 if (part)
3043 {
3044 emit_move_insn (part, val);
3045 return;
3046 }
3047 else
3048 /* simplify_gen_subreg may fail for sub-word MEMs. */
3049 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3050 }
3051
3052 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3053 }
3054
3055 /* Extract one of the components of the complex value CPLX. Extract the
3056 real part if IMAG_P is false, and the imaginary part if it's true. */
3057
3058 static rtx
3059 read_complex_part (rtx cplx, bool imag_p)
3060 {
3061 machine_mode cmode, imode;
3062 unsigned ibitsize;
3063
3064 if (GET_CODE (cplx) == CONCAT)
3065 return XEXP (cplx, imag_p);
3066
3067 cmode = GET_MODE (cplx);
3068 imode = GET_MODE_INNER (cmode);
3069 ibitsize = GET_MODE_BITSIZE (imode);
3070
3071 /* Special case reads from complex constants that got spilled to memory. */
3072 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3073 {
3074 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3075 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3076 {
3077 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3078 if (CONSTANT_CLASS_P (part))
3079 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3080 }
3081 }
3082
3083 /* For MEMs simplify_gen_subreg may generate an invalid new address
3084 because, e.g., the original address is considered mode-dependent
3085 by the target, which restricts simplify_subreg from invoking
3086 adjust_address_nv. Instead of preparing fallback support for an
3087 invalid address, we call adjust_address_nv directly. */
3088 if (MEM_P (cplx))
3089 return adjust_address_nv (cplx, imode,
3090 imag_p ? GET_MODE_SIZE (imode) : 0);
3091
3092 /* If the sub-object is at least word sized, then we know that subregging
3093 will work. This special case is important, since extract_bit_field
3094 wants to operate on integer modes, and there's rarely an OImode to
3095 correspond to TCmode. */
3096 if (ibitsize >= BITS_PER_WORD
3097 /* For hard regs we have exact predicates. Assume we can split
3098 the original object if it spans an even number of hard regs.
3099 This special case is important for SCmode on 64-bit platforms
3100 where the natural size of floating-point regs is 32-bit. */
3101 || (REG_P (cplx)
3102 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3103 && REG_NREGS (cplx) % 2 == 0))
3104 {
3105 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3106 imag_p ? GET_MODE_SIZE (imode) : 0);
3107 if (ret)
3108 return ret;
3109 else
3110 /* simplify_gen_subreg may fail for sub-word MEMs. */
3111 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3112 }
3113
3114 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3115 true, NULL_RTX, imode, imode);
3116 }
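
/* An illustrative sketch: together the two routines above let the
   expander treat a complex value as a pair of scalars, e.g. to copy X
   to TARGET while negating the imaginary part (X and TARGET are
   placeholders for rtxen the caller already owns).

     write_complex_part (target, read_complex_part (x, false), false);
     write_complex_part (target,
                         expand_unop (GET_MODE_INNER (GET_MODE (target)),
                                      neg_optab,
                                      read_complex_part (x, true),
                                      NULL_RTX, 0),
                         true);  */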
3117 \f
3118 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3119 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3120 represented in NEW_MODE. If FORCE is true, this will never happen, as
3121 we'll force-create a SUBREG if needed. */
3122
3123 static rtx
3124 emit_move_change_mode (machine_mode new_mode,
3125 machine_mode old_mode, rtx x, bool force)
3126 {
3127 rtx ret;
3128
3129 if (push_operand (x, GET_MODE (x)))
3130 {
3131 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3132 MEM_COPY_ATTRIBUTES (ret, x);
3133 }
3134 else if (MEM_P (x))
3135 {
3136 /* We don't have to worry about changing the address since the
3137 size in bytes is supposed to be the same. */
3138 if (reload_in_progress)
3139 {
3140 /* Copy the MEM to change the mode and move any
3141 substitutions from the old MEM to the new one. */
3142 ret = adjust_address_nv (x, new_mode, 0);
3143 copy_replacements (x, ret);
3144 }
3145 else
3146 ret = adjust_address (x, new_mode, 0);
3147 }
3148 else
3149 {
3150 /* Note that we do want simplify_subreg's behavior of validating
3151 that the new mode is ok for a hard register. If we were to use
3152 simplify_gen_subreg, we would create the subreg, but would
3153 probably run into the target not being able to implement it. */
3154 /* Except, of course, when FORCE is true, in which case this is exactly
3155 what we want; that is needed for CCmodes on some targets. */
3156 if (force)
3157 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3158 else
3159 ret = simplify_subreg (new_mode, x, old_mode, 0);
3160 }
3161
3162 return ret;
3163 }
3164
3165 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3166 an integer mode of the same size as MODE. Returns the instruction
3167 emitted, or NULL if such a move could not be generated. */
3168
3169 static rtx_insn *
3170 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3171 {
3172 machine_mode imode;
3173 enum insn_code code;
3174
3175 /* There must exist a mode of the exact size we require. */
3176 imode = int_mode_for_mode (mode);
3177 if (imode == BLKmode)
3178 return NULL;
3179
3180 /* The target must support moves in this mode. */
3181 code = optab_handler (mov_optab, imode);
3182 if (code == CODE_FOR_nothing)
3183 return NULL;
3184
3185 x = emit_move_change_mode (imode, mode, x, force);
3186 if (x == NULL_RTX)
3187 return NULL;
3188 y = emit_move_change_mode (imode, mode, y, force);
3189 if (y == NULL_RTX)
3190 return NULL;
3191 return emit_insn (GEN_FCN (code) (x, y));
3192 }
3193
3194 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3195 Return an equivalent MEM that does not use an auto-increment. */
3196
3197 rtx
3198 emit_move_resolve_push (machine_mode mode, rtx x)
3199 {
3200 enum rtx_code code = GET_CODE (XEXP (x, 0));
3201 HOST_WIDE_INT adjust;
3202 rtx temp;
3203
3204 adjust = GET_MODE_SIZE (mode);
3205 #ifdef PUSH_ROUNDING
3206 adjust = PUSH_ROUNDING (adjust);
3207 #endif
3208 if (code == PRE_DEC || code == POST_DEC)
3209 adjust = -adjust;
3210 else if (code == PRE_MODIFY || code == POST_MODIFY)
3211 {
3212 rtx expr = XEXP (XEXP (x, 0), 1);
3213 HOST_WIDE_INT val;
3214
3215 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3216 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3217 val = INTVAL (XEXP (expr, 1));
3218 if (GET_CODE (expr) == MINUS)
3219 val = -val;
3220 gcc_assert (adjust == val || adjust == -val);
3221 adjust = val;
3222 }
3223
3224 /* Do not use anti_adjust_stack, since we don't want to update
3225 stack_pointer_delta. */
3226 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3227 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3228 0, OPTAB_LIB_WIDEN);
3229 if (temp != stack_pointer_rtx)
3230 emit_move_insn (stack_pointer_rtx, temp);
3231
3232 switch (code)
3233 {
3234 case PRE_INC:
3235 case PRE_DEC:
3236 case PRE_MODIFY:
3237 temp = stack_pointer_rtx;
3238 break;
3239 case POST_INC:
3240 case POST_DEC:
3241 case POST_MODIFY:
3242 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3243 break;
3244 default:
3245 gcc_unreachable ();
3246 }
3247
3248 return replace_equiv_address (x, temp);
3249 }
3250
3251 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3252 X is known to satisfy push_operand, and MODE is known to be complex.
3253 Returns the last instruction emitted. */
3254
3255 rtx_insn *
3256 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3257 {
3258 machine_mode submode = GET_MODE_INNER (mode);
3259 bool imag_first;
3260
3261 #ifdef PUSH_ROUNDING
3262 unsigned int submodesize = GET_MODE_SIZE (submode);
3263
3264 /* If we are pushing to the stack, but the size is smaller than what the
3265 machine can push exactly, we need to use move instructions. */
3266 if (PUSH_ROUNDING (submodesize) != submodesize)
3267 {
3268 x = emit_move_resolve_push (mode, x);
3269 return emit_move_insn (x, y);
3270 }
3271 #endif
3272
3273 /* Note that the real part always precedes the imag part in memory
3274 regardless of the machine's endianness. */
3275 switch (GET_CODE (XEXP (x, 0)))
3276 {
3277 case PRE_DEC:
3278 case POST_DEC:
3279 imag_first = true;
3280 break;
3281 case PRE_INC:
3282 case POST_INC:
3283 imag_first = false;
3284 break;
3285 default:
3286 gcc_unreachable ();
3287 }
3288
3289 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3290 read_complex_part (y, imag_first));
3291 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3292 read_complex_part (y, !imag_first));
3293 }
3294
3295 /* A subroutine of emit_move_complex. Perform the move from Y to X
3296 via two moves of the parts. Returns the last instruction emitted. */
3297
3298 rtx_insn *
3299 emit_move_complex_parts (rtx x, rtx y)
3300 {
3301 /* Show the output dies here. This is necessary for SUBREGs
3302 of pseudos since we cannot track their lifetimes correctly;
3303 hard regs shouldn't appear here except as return values. */
3304 if (!reload_completed && !reload_in_progress
3305 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3306 emit_clobber (x);
3307
3308 write_complex_part (x, read_complex_part (y, false), false);
3309 write_complex_part (x, read_complex_part (y, true), true);
3310
3311 return get_last_insn ();
3312 }
3313
3314 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3315 MODE is known to be complex. Returns the last instruction emitted. */
3316
3317 static rtx_insn *
3318 emit_move_complex (machine_mode mode, rtx x, rtx y)
3319 {
3320 bool try_int;
3321
3322 /* Need to take special care for pushes, to maintain proper ordering
3323 of the data, and possibly extra padding. */
3324 if (push_operand (x, mode))
3325 return emit_move_complex_push (mode, x, y);
3326
3327 /* See if we can coerce the target into moving both values at once, except
3328 for floating point where we favor moving as parts if this is easy. */
3329 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3330 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3331 && !(REG_P (x)
3332 && HARD_REGISTER_P (x)
3333 && REG_NREGS (x) == 1)
3334 && !(REG_P (y)
3335 && HARD_REGISTER_P (y)
3336 && REG_NREGS (y) == 1))
3337 try_int = false;
3338 /* Not possible if the values are inherently not adjacent. */
3339 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3340 try_int = false;
3341 /* Is possible if both are registers (or subregs of registers). */
3342 else if (register_operand (x, mode) && register_operand (y, mode))
3343 try_int = true;
3344 /* If one of the operands is a memory, and alignment constraints
3345 are friendly enough, we may be able to do combined memory operations.
3346 We do not attempt this if Y is a constant because that combination is
3347 usually better with the by-parts thing below. */
3348 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3349 && (!STRICT_ALIGNMENT
3350 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3351 try_int = true;
3352 else
3353 try_int = false;
3354
3355 if (try_int)
3356 {
3357 rtx_insn *ret;
3358
3359 /* For memory to memory moves, optimal behavior can be had with the
3360 existing block move logic. */
3361 if (MEM_P (x) && MEM_P (y))
3362 {
3363 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3364 BLOCK_OP_NO_LIBCALL);
3365 return get_last_insn ();
3366 }
3367
3368 ret = emit_move_via_integer (mode, x, y, true);
3369 if (ret)
3370 return ret;
3371 }
3372
3373 return emit_move_complex_parts (x, y);
3374 }
3375
3376 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3377 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3378
3379 static rtx_insn *
3380 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3381 {
3382 rtx_insn *ret;
3383
3384 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3385 if (mode != CCmode)
3386 {
3387 enum insn_code code = optab_handler (mov_optab, CCmode);
3388 if (code != CODE_FOR_nothing)
3389 {
3390 x = emit_move_change_mode (CCmode, mode, x, true);
3391 y = emit_move_change_mode (CCmode, mode, y, true);
3392 return emit_insn (GEN_FCN (code) (x, y));
3393 }
3394 }
3395
3396 /* Otherwise, find the MODE_INT mode of the same width. */
3397 ret = emit_move_via_integer (mode, x, y, false);
3398 gcc_assert (ret != NULL);
3399 return ret;
3400 }
3401
3402 /* Return true if word I of OP lies entirely in the
3403 undefined bits of a paradoxical subreg. */
3404
3405 static bool
3406 undefined_operand_subword_p (const_rtx op, int i)
3407 {
3408 machine_mode innermode, innermostmode;
3409 int offset;
3410 if (GET_CODE (op) != SUBREG)
3411 return false;
3412 innermode = GET_MODE (op);
3413 innermostmode = GET_MODE (SUBREG_REG (op));
3414 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3415 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3416 memory, except for a paradoxical subreg where we define
3417 SUBREG_BYTE to be 0; undo this exception as in
3418 simplify_subreg. */
3419 if (SUBREG_BYTE (op) == 0
3420 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3421 {
3422 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3423 if (WORDS_BIG_ENDIAN)
3424 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3425 if (BYTES_BIG_ENDIAN)
3426 offset += difference % UNITS_PER_WORD;
3427 }
3428 if (offset >= GET_MODE_SIZE (innermostmode)
3429 || offset <= -GET_MODE_SIZE (word_mode))
3430 return true;
3431 return false;
3432 }
3433
3434 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3435 MODE is any multi-word or full-word mode that lacks a move_insn
3436 pattern. Note that you will get better code if you define such
3437 patterns, even if they must turn into multiple assembler instructions. */
3438
3439 static rtx_insn *
3440 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3441 {
3442 rtx_insn *last_insn = 0;
3443 rtx_insn *seq;
3444 rtx inner;
3445 bool need_clobber;
3446 int i;
3447
3448 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3449
3450 /* If X is a push on the stack, do the push now and replace
3451 X with a reference to the stack pointer. */
3452 if (push_operand (x, mode))
3453 x = emit_move_resolve_push (mode, x);
3454
3455 /* If we are in reload, see if either operand is a MEM whose address
3456 is scheduled for replacement. */
3457 if (reload_in_progress && MEM_P (x)
3458 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3459 x = replace_equiv_address_nv (x, inner);
3460 if (reload_in_progress && MEM_P (y)
3461 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3462 y = replace_equiv_address_nv (y, inner);
3463
3464 start_sequence ();
3465
3466 need_clobber = false;
3467 for (i = 0;
3468 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3469 i++)
3470 {
3471 rtx xpart = operand_subword (x, i, 1, mode);
3472 rtx ypart;
3473
3474 /* Do not generate code for a move if it would come entirely
3475 from the undefined bits of a paradoxical subreg. */
3476 if (undefined_operand_subword_p (y, i))
3477 continue;
3478
3479 ypart = operand_subword (y, i, 1, mode);
3480
3481 /* If we can't get a part of Y, put Y into memory if it is a
3482 constant. Otherwise, force it into a register. Then we must
3483 be able to get a part of Y. */
3484 if (ypart == 0 && CONSTANT_P (y))
3485 {
3486 y = use_anchored_address (force_const_mem (mode, y));
3487 ypart = operand_subword (y, i, 1, mode);
3488 }
3489 else if (ypart == 0)
3490 ypart = operand_subword_force (y, i, mode);
3491
3492 gcc_assert (xpart && ypart);
3493
3494 need_clobber |= (GET_CODE (xpart) == SUBREG);
3495
3496 last_insn = emit_move_insn (xpart, ypart);
3497 }
3498
3499 seq = get_insns ();
3500 end_sequence ();
3501
3502 /* Show the output dies here. This is necessary for SUBREGs
3503 of pseudos since we cannot track their lifetimes correctly;
3504 hard regs shouldn't appear here except as return values.
3505 We never want to emit such a clobber after reload. */
3506 if (x != y
3507 && ! (reload_in_progress || reload_completed)
3508 && need_clobber != 0)
3509 emit_clobber (x);
3510
3511 emit_insn (seq);
3512
3513 return last_insn;
3514 }
3515
3516 /* Low level part of emit_move_insn.
3517 Called just like emit_move_insn, but assumes X and Y
3518 are basically valid. */
3519
3520 rtx_insn *
3521 emit_move_insn_1 (rtx x, rtx y)
3522 {
3523 machine_mode mode = GET_MODE (x);
3524 enum insn_code code;
3525
3526 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3527
3528 code = optab_handler (mov_optab, mode);
3529 if (code != CODE_FOR_nothing)
3530 return emit_insn (GEN_FCN (code) (x, y));
3531
3532 /* Expand complex moves by moving real part and imag part. */
3533 if (COMPLEX_MODE_P (mode))
3534 return emit_move_complex (mode, x, y);
3535
3536 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3537 || ALL_FIXED_POINT_MODE_P (mode))
3538 {
3539 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3540
3541 /* If we can't find an integer mode, use multi words. */
3542 if (result)
3543 return result;
3544 else
3545 return emit_move_multi_word (mode, x, y);
3546 }
3547
3548 if (GET_MODE_CLASS (mode) == MODE_CC)
3549 return emit_move_ccmode (mode, x, y);
3550
3551 /* Try using a move pattern for the corresponding integer mode. This is
3552 only safe when simplify_subreg can convert MODE constants into integer
3553 constants. At present, it can only do this reliably if the value
3554 fits within a HOST_WIDE_INT. */
3555 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3556 {
3557 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3558
3559 if (ret)
3560 {
3561 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3562 return ret;
3563 }
3564 }
3565
3566 return emit_move_multi_word (mode, x, y);
3567 }
3568
3569 /* Generate code to copy Y into X.
3570 Both Y and X must have the same mode, except that
3571 Y can be a constant with VOIDmode.
3572 This mode cannot be BLKmode; use emit_block_move for that.
3573
3574 Return the last instruction emitted. */
3575
3576 rtx_insn *
3577 emit_move_insn (rtx x, rtx y)
3578 {
3579 machine_mode mode = GET_MODE (x);
3580 rtx y_cst = NULL_RTX;
3581 rtx_insn *last_insn;
3582 rtx set;
3583
3584 gcc_assert (mode != BLKmode
3585 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3586
3587 if (CONSTANT_P (y))
3588 {
3589 if (optimize
3590 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3591 && (last_insn = compress_float_constant (x, y)))
3592 return last_insn;
3593
3594 y_cst = y;
3595
3596 if (!targetm.legitimate_constant_p (mode, y))
3597 {
3598 y = force_const_mem (mode, y);
3599
3600 /* If the target's cannot_force_const_mem prevented the spill,
3601 assume that the target's move expanders will also take care
3602 of the non-legitimate constant. */
3603 if (!y)
3604 y = y_cst;
3605 else
3606 y = use_anchored_address (y);
3607 }
3608 }
3609
3610 /* If X or Y are memory references, verify that their addresses are valid
3611 for the machine. */
3612 if (MEM_P (x)
3613 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3614 MEM_ADDR_SPACE (x))
3615 && ! push_operand (x, GET_MODE (x))))
3616 x = validize_mem (x);
3617
3618 if (MEM_P (y)
3619 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3620 MEM_ADDR_SPACE (y)))
3621 y = validize_mem (y);
3622
3623 gcc_assert (mode != BLKmode);
3624
3625 last_insn = emit_move_insn_1 (x, y);
3626
3627 if (y_cst && REG_P (x)
3628 && (set = single_set (last_insn)) != NULL_RTX
3629 && SET_DEST (set) == x
3630 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3631 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3632
3633 return last_insn;
3634 }
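
/* As an illustration of typical use: a caller that wants the integer
   constant 42 in a fresh SImode pseudo would typically write

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, gen_int_mode (42, SImode));

   emit_move_insn validates the operands and defers to emit_move_insn_1,
   which uses the target's mov<mode> pattern when one exists and otherwise
   falls back to the integer-mode or multi-word strategies above.  */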
3635
3636 /* Generate the body of an instruction to copy Y into X.
3637 It may be a list of insns, if one insn isn't enough. */
3638
3639 rtx_insn *
3640 gen_move_insn (rtx x, rtx y)
3641 {
3642 rtx_insn *seq;
3643
3644 start_sequence ();
3645 emit_move_insn_1 (x, y);
3646 seq = get_insns ();
3647 end_sequence ();
3648 return seq;
3649 }
3650
3651 /* Same as above, but return rtx (used as a callback, which must have
3652 prototype compatible with other functions returning rtx). */
3653
3654 rtx
3655 gen_move_insn_uncast (rtx x, rtx y)
3656 {
3657 return gen_move_insn (x, y);
3658 }
3659
3660 /* If Y is representable exactly in a narrower mode, and the target can
3661 perform the extension directly from constant or memory, then emit the
3662 move as an extension. */
3663
3664 static rtx_insn *
3665 compress_float_constant (rtx x, rtx y)
3666 {
3667 machine_mode dstmode = GET_MODE (x);
3668 machine_mode orig_srcmode = GET_MODE (y);
3669 machine_mode srcmode;
3670 REAL_VALUE_TYPE r;
3671 int oldcost, newcost;
3672 bool speed = optimize_insn_for_speed_p ();
3673
3674 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3675
3676 if (targetm.legitimate_constant_p (dstmode, y))
3677 oldcost = set_src_cost (y, speed);
3678 else
3679 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3680
3681 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3682 srcmode != orig_srcmode;
3683 srcmode = GET_MODE_WIDER_MODE (srcmode))
3684 {
3685 enum insn_code ic;
3686 rtx trunc_y;
3687 rtx_insn *last_insn;
3688
3689 /* Skip if the target can't extend this way. */
3690 ic = can_extend_p (dstmode, srcmode, 0);
3691 if (ic == CODE_FOR_nothing)
3692 continue;
3693
3694 /* Skip if the narrowed value isn't exact. */
3695 if (! exact_real_truncate (srcmode, &r))
3696 continue;
3697
3698 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3699
3700 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3701 {
3702 /* Skip if the target needs extra instructions to perform
3703 the extension. */
3704 if (!insn_operand_matches (ic, 1, trunc_y))
3705 continue;
3706 /* This is valid, but may not be cheaper than the original. */
3707 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3708 speed);
3709 if (oldcost < newcost)
3710 continue;
3711 }
3712 else if (float_extend_from_mem[dstmode][srcmode])
3713 {
3714 trunc_y = force_const_mem (srcmode, trunc_y);
3715 /* This is valid, but may not be cheaper than the original. */
3716 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3717 speed);
3718 if (oldcost < newcost)
3719 continue;
3720 trunc_y = validize_mem (trunc_y);
3721 }
3722 else
3723 continue;
3724
3725 /* For CSE's benefit, force the compressed constant pool entry
3726 into a new pseudo. This constant may be used in different modes,
3727 and if not, combine will put things back together for us. */
3728 trunc_y = force_reg (srcmode, trunc_y);
3729
3730 /* If x is a hard register, perform the extension into a pseudo,
3731 so that e.g. stack realignment code is aware of it. */
3732 rtx target = x;
3733 if (REG_P (x) && HARD_REGISTER_P (x))
3734 target = gen_reg_rtx (dstmode);
3735
3736 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3737 last_insn = get_last_insn ();
3738
3739 if (REG_P (target))
3740 set_unique_reg_note (last_insn, REG_EQUAL, y);
3741
3742 if (target != x)
3743 return emit_move_insn (x, target);
3744 return last_insn;
3745 }
3746
3747 return NULL;
3748 }
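
/* For instance, when X is a DFmode register and Y is the DFmode constant
   1.0, the value is exactly representable in SFmode; on a target that can
   float-extend an SFmode memory operand directly, the loop above can emit
   roughly

     (set (reg:DF X) (float_extend:DF (mem/u:SF (symbol_ref ...))))

   which reads a smaller constant-pool entry than a plain DFmode load.
   The exact form, and whether it is actually cheaper, are target-dependent;
   this is only an illustration.  */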
3749 \f
3750 /* Pushing data onto the stack. */
3751
3752 /* Push a block of length SIZE (perhaps variable)
3753 and return an rtx to address the beginning of the block.
3754 The value may be virtual_outgoing_args_rtx.
3755
3756 EXTRA is the number of bytes of padding to push in addition to SIZE.
3757 BELOW nonzero means this padding comes at low addresses;
3758 otherwise, the padding comes at high addresses. */
3759
3760 rtx
3761 push_block (rtx size, int extra, int below)
3762 {
3763 rtx temp;
3764
3765 size = convert_modes (Pmode, ptr_mode, size, 1);
3766 if (CONSTANT_P (size))
3767 anti_adjust_stack (plus_constant (Pmode, size, extra));
3768 else if (REG_P (size) && extra == 0)
3769 anti_adjust_stack (size);
3770 else
3771 {
3772 temp = copy_to_mode_reg (Pmode, size);
3773 if (extra != 0)
3774 temp = expand_binop (Pmode, add_optab, temp,
3775 gen_int_mode (extra, Pmode),
3776 temp, 0, OPTAB_LIB_WIDEN);
3777 anti_adjust_stack (temp);
3778 }
3779
3780 if (STACK_GROWS_DOWNWARD)
3781 {
3782 temp = virtual_outgoing_args_rtx;
3783 if (extra != 0 && below)
3784 temp = plus_constant (Pmode, temp, extra);
3785 }
3786 else
3787 {
3788 if (CONST_INT_P (size))
3789 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3790 -INTVAL (size) - (below ? 0 : extra));
3791 else if (extra != 0 && !below)
3792 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3793 negate_rtx (Pmode, plus_constant (Pmode, size,
3794 extra)));
3795 else
3796 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3797 negate_rtx (Pmode, size));
3798 }
3799
3800 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3801 }
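
/* A rough illustration: on a STACK_GROWS_DOWNWARD target,

     rtx blk = push_block (GEN_INT (16), 0, 0);

   anti-adjusts the stack pointer by 16 bytes and returns an address rooted
   at virtual_outgoing_args_rtx, i.e. the low end of the freshly allocated
   block.  Nonzero EXTRA and BELOW only change where the padding ends up,
   as described above.  */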
3802
3803 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3804
3805 static rtx
3806 mem_autoinc_base (rtx mem)
3807 {
3808 if (MEM_P (mem))
3809 {
3810 rtx addr = XEXP (mem, 0);
3811 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3812 return XEXP (addr, 0);
3813 }
3814 return NULL;
3815 }
3816
3817 /* A utility routine used here, in reload, and in try_split. The insns
3818 after PREV up to and including LAST are known to adjust the stack,
3819 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3820 placing notes as appropriate. PREV may be NULL, indicating the
3821 entire insn sequence prior to LAST should be scanned.
3822
3823 The set of allowed stack pointer modifications is small:
3824 (1) One or more auto-inc style memory references (aka pushes),
3825 (2) One or more addition/subtraction with the SP as destination,
3826 (3) A single move insn with the SP as destination,
3827 (4) A call_pop insn,
3828 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3829
3830 Insns in the sequence that do not modify the SP are ignored,
3831 except for noreturn calls.
3832
3833 The return value is the amount of adjustment that can be trivially
3834 verified, via immediate operand or auto-inc. If the adjustment
3835 cannot be trivially extracted, the return value is INT_MIN. */
3836
3837 HOST_WIDE_INT
3838 find_args_size_adjust (rtx_insn *insn)
3839 {
3840 rtx dest, set, pat;
3841 int i;
3842
3843 pat = PATTERN (insn);
3844 set = NULL;
3845
3846 /* Look for a call_pop pattern. */
3847 if (CALL_P (insn))
3848 {
3849 /* We have to allow non-call_pop patterns for the case
3850 of emit_single_push_insn of a TLS address. */
3851 if (GET_CODE (pat) != PARALLEL)
3852 return 0;
3853
3854 /* All call_pop have a stack pointer adjust in the parallel.
3855 The call itself is always first, and the stack adjust is
3856 usually last, so search from the end. */
3857 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3858 {
3859 set = XVECEXP (pat, 0, i);
3860 if (GET_CODE (set) != SET)
3861 continue;
3862 dest = SET_DEST (set);
3863 if (dest == stack_pointer_rtx)
3864 break;
3865 }
3866 /* We'd better have found the stack pointer adjust. */
3867 if (i == 0)
3868 return 0;
3869 /* Fall through to process the extracted SET and DEST
3870 	     as if it were a standalone insn.  */
3871 }
3872 else if (GET_CODE (pat) == SET)
3873 set = pat;
3874 else if ((set = single_set (insn)) != NULL)
3875 ;
3876 else if (GET_CODE (pat) == PARALLEL)
3877 {
3878 /* ??? Some older ports use a parallel with a stack adjust
3879 and a store for a PUSH_ROUNDING pattern, rather than a
3880 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3881 /* ??? See h8300 and m68k, pushqi1. */
3882 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3883 {
3884 set = XVECEXP (pat, 0, i);
3885 if (GET_CODE (set) != SET)
3886 continue;
3887 dest = SET_DEST (set);
3888 if (dest == stack_pointer_rtx)
3889 break;
3890
3891 /* We do not expect an auto-inc of the sp in the parallel. */
3892 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3893 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3894 != stack_pointer_rtx);
3895 }
3896 if (i < 0)
3897 return 0;
3898 }
3899 else
3900 return 0;
3901
3902 dest = SET_DEST (set);
3903
3904 /* Look for direct modifications of the stack pointer. */
3905 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3906 {
3907 /* Look for a trivial adjustment, otherwise assume nothing. */
3908 /* Note that the SPU restore_stack_block pattern refers to
3909 the stack pointer in V4SImode. Consider that non-trivial. */
3910 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3911 && GET_CODE (SET_SRC (set)) == PLUS
3912 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3913 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3914 return INTVAL (XEXP (SET_SRC (set), 1));
3915 /* ??? Reload can generate no-op moves, which will be cleaned
3916 up later. Recognize it and continue searching. */
3917 else if (rtx_equal_p (dest, SET_SRC (set)))
3918 return 0;
3919 else
3920 return HOST_WIDE_INT_MIN;
3921 }
3922 else
3923 {
3924 rtx mem, addr;
3925
3926 /* Otherwise only think about autoinc patterns. */
3927 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3928 {
3929 mem = dest;
3930 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3931 != stack_pointer_rtx);
3932 }
3933 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3934 mem = SET_SRC (set);
3935 else
3936 return 0;
3937
3938 addr = XEXP (mem, 0);
3939 switch (GET_CODE (addr))
3940 {
3941 case PRE_INC:
3942 case POST_INC:
3943 return GET_MODE_SIZE (GET_MODE (mem));
3944 case PRE_DEC:
3945 case POST_DEC:
3946 return -GET_MODE_SIZE (GET_MODE (mem));
3947 case PRE_MODIFY:
3948 case POST_MODIFY:
3949 addr = XEXP (addr, 1);
3950 gcc_assert (GET_CODE (addr) == PLUS);
3951 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3952 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3953 return INTVAL (XEXP (addr, 1));
3954 default:
3955 gcc_unreachable ();
3956 }
3957 }
3958 }
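
/* Examples of adjustments recognized above, assuming a 4-byte pushed mode:

     (set (reg sp) (plus (reg sp) (const_int -16)))   => -16
     (set (mem (pre_dec (reg sp))) (reg r0))          => -4
     (set (reg sp) (reg sp))                          => 0  (no-op move)
     any other modification of sp                     => HOST_WIDE_INT_MIN

   For the auto-inc cases the value is +/- GET_MODE_SIZE of the pushed MEM,
   so the -4 above is only illustrative.  */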
3959
3960 int
3961 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3962 {
3963 int args_size = end_args_size;
3964 bool saw_unknown = false;
3965 rtx_insn *insn;
3966
3967 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3968 {
3969 HOST_WIDE_INT this_delta;
3970
3971 if (!NONDEBUG_INSN_P (insn))
3972 continue;
3973
3974 this_delta = find_args_size_adjust (insn);
3975 if (this_delta == 0)
3976 {
3977 if (!CALL_P (insn)
3978 || ACCUMULATE_OUTGOING_ARGS
3979 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3980 continue;
3981 }
3982
3983 gcc_assert (!saw_unknown);
3984 if (this_delta == HOST_WIDE_INT_MIN)
3985 saw_unknown = true;
3986
3987 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3988 if (STACK_GROWS_DOWNWARD)
3989 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3990
3991 args_size -= this_delta;
3992 }
3993
3994 return saw_unknown ? INT_MIN : args_size;
3995 }
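
/* For example, two consecutive 4-byte pushes on a downward-growing stack
   with END_ARGS_SIZE == 8: walking backwards, the second push gets a
   REG_ARGS_SIZE note of 8 and the first one a note of 4, since each push
   contributes a delta of -4 which is negated for STACK_GROWS_DOWNWARD
   before being subtracted from the running args_size.  (Illustrative
   sketch only.)  */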
3996
3997 #ifdef PUSH_ROUNDING
3998 /* Emit single push insn. */
3999
4000 static void
4001 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4002 {
4003 rtx dest_addr;
4004 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4005 rtx dest;
4006 enum insn_code icode;
4007
4008 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4009   /* If there is a push pattern, use it.  Otherwise try the old way of
4010      throwing a MEM representing the push operation at the move expander.  */
4011 icode = optab_handler (push_optab, mode);
4012 if (icode != CODE_FOR_nothing)
4013 {
4014 struct expand_operand ops[1];
4015
4016 create_input_operand (&ops[0], x, mode);
4017 if (maybe_expand_insn (icode, 1, ops))
4018 return;
4019 }
4020 if (GET_MODE_SIZE (mode) == rounded_size)
4021 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4022 /* If we are to pad downward, adjust the stack pointer first and
4023 then store X into the stack location using an offset. This is
4024 because emit_move_insn does not know how to pad; it does not have
4025      access to the type.  */
4026 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4027 {
4028 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4029 HOST_WIDE_INT offset;
4030
4031 emit_move_insn (stack_pointer_rtx,
4032 expand_binop (Pmode,
4033 STACK_GROWS_DOWNWARD ? sub_optab
4034 : add_optab,
4035 stack_pointer_rtx,
4036 gen_int_mode (rounded_size, Pmode),
4037 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4038
4039 offset = (HOST_WIDE_INT) padding_size;
4040 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
4041 /* We have already decremented the stack pointer, so get the
4042 previous value. */
4043 offset += (HOST_WIDE_INT) rounded_size;
4044
4045 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
4046 /* We have already incremented the stack pointer, so get the
4047 previous value. */
4048 offset -= (HOST_WIDE_INT) rounded_size;
4049
4050 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4051 gen_int_mode (offset, Pmode));
4052 }
4053 else
4054 {
4055 if (STACK_GROWS_DOWNWARD)
4056 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4057 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4058 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4059 Pmode));
4060 else
4061 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4062 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4063 gen_int_mode (rounded_size, Pmode));
4064
4065 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4066 }
4067
4068 dest = gen_rtx_MEM (mode, dest_addr);
4069
4070 if (type != 0)
4071 {
4072 set_mem_attributes (dest, type, 1);
4073
4074 if (cfun->tail_call_marked)
4075 /* Function incoming arguments may overlap with sibling call
4076 outgoing arguments and we cannot allow reordering of reads
4077 from function arguments with stores to outgoing arguments
4078 of sibling calls. */
4079 set_mem_alias_set (dest, 0);
4080 }
4081 emit_move_insn (dest, x);
4082 }
4083
4084 /* Emit and annotate a single push insn. */
4085
4086 static void
4087 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4088 {
4089 int delta, old_delta = stack_pointer_delta;
4090 rtx_insn *prev = get_last_insn ();
4091 rtx_insn *last;
4092
4093 emit_single_push_insn_1 (mode, x, type);
4094
4095 last = get_last_insn ();
4096
4097 /* Notice the common case where we emitted exactly one insn. */
4098 if (PREV_INSN (last) == prev)
4099 {
4100 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4101 return;
4102 }
4103
4104 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4105 gcc_assert (delta == INT_MIN || delta == old_delta);
4106 }
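
/* For example, pushing an SImode value X whose size already equals
   PUSH_ROUNDING (4) on a target with STACK_PUSH_CODE == PRE_DEC yields,
   roughly,

     (set (mem:SI (pre_dec (reg sp))) X)

   while a value that must be padded downward gets an explicit stack
   adjustment followed by a store at an offset, as handled above.  A
   push_optab pattern, when the target provides one, takes precedence over
   both forms.  (Sketch only; details are target-dependent.)  */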
4107 #endif
4108
4109 /* Generate code to push X onto the stack, assuming it has mode MODE and
4110 type TYPE.
4111 MODE is redundant except when X is a CONST_INT (since they don't
4112 carry mode info).
4113 SIZE is an rtx for the size of data to be copied (in bytes),
4114 needed only if X is BLKmode.
4115
4116 ALIGN (in bits) is maximum alignment we can assume.
4117
4118 If PARTIAL and REG are both nonzero, then copy that many of the first
4119 bytes of X into registers starting with REG, and push the rest of X.
4120 The amount of space pushed is decreased by PARTIAL bytes.
4121 REG must be a hard register in this case.
4122    If REG is zero but PARTIAL is not, take all other actions for an
4123 argument partially in registers, but do not actually load any
4124 registers.
4125
4126 EXTRA is the amount in bytes of extra space to leave next to this arg.
4127 This is ignored if an argument block has already been allocated.
4128
4129 On a machine that lacks real push insns, ARGS_ADDR is the address of
4130 the bottom of the argument block for this call. We use indexing off there
4131    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
4132    argument block has not been preallocated.
4133
4134 ARGS_SO_FAR is the size of args previously pushed for this call.
4135
4136 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4137 for arguments passed in registers. If nonzero, it will be the number
4138 of bytes required. */
4139
4140 void
4141 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4142 unsigned int align, int partial, rtx reg, int extra,
4143 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4144 rtx alignment_pad)
4145 {
4146 rtx xinner;
4147 enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
4148
4149 /* Decide where to pad the argument: `downward' for below,
4150 `upward' for above, or `none' for don't pad it.
4151 Default is below for small data on big-endian machines; else above. */
4152 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4153
4154 /* Invert direction if stack is post-decrement.
4155 FIXME: why? */
4156 if (STACK_PUSH_CODE == POST_DEC)
4157 if (where_pad != none)
4158 where_pad = (where_pad == downward ? upward : downward);
4159
4160 xinner = x;
4161
4162 if (mode == BLKmode
4163 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4164 {
4165 /* Copy a block into the stack, entirely or partially. */
4166
4167 rtx temp;
4168 int used;
4169 int offset;
4170 int skip;
4171
4172 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4173 used = partial - offset;
4174
4175 if (mode != BLKmode)
4176 {
4177 /* A value is to be stored in an insufficiently aligned
4178 stack slot; copy via a suitably aligned slot if
4179 necessary. */
4180 size = GEN_INT (GET_MODE_SIZE (mode));
4181 if (!MEM_P (xinner))
4182 {
4183 temp = assign_temp (type, 1, 1);
4184 emit_move_insn (temp, xinner);
4185 xinner = temp;
4186 }
4187 }
4188
4189 gcc_assert (size);
4190
4191 /* USED is now the # of bytes we need not copy to the stack
4192 because registers will take care of them. */
4193
4194 if (partial != 0)
4195 xinner = adjust_address (xinner, BLKmode, used);
4196
4197 /* If the partial register-part of the arg counts in its stack size,
4198 skip the part of stack space corresponding to the registers.
4199 Otherwise, start copying to the beginning of the stack space,
4200 by setting SKIP to 0. */
4201 skip = (reg_parm_stack_space == 0) ? 0 : used;
4202
4203 #ifdef PUSH_ROUNDING
4204 /* Do it with several push insns if that doesn't take lots of insns
4205 and if there is no difficulty with push insns that skip bytes
4206 on the stack for alignment purposes. */
4207 if (args_addr == 0
4208 && PUSH_ARGS
4209 && CONST_INT_P (size)
4210 && skip == 0
4211 && MEM_ALIGN (xinner) >= align
4212 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4213 /* Here we avoid the case of a structure whose weak alignment
4214 forces many pushes of a small amount of data,
4215 and such small pushes do rounding that causes trouble. */
4216 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4217 || align >= BIGGEST_ALIGNMENT
4218 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4219 == (align / BITS_PER_UNIT)))
4220 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4221 {
4222 /* Push padding now if padding above and stack grows down,
4223 or if padding below and stack grows up.
4224 But if space already allocated, this has already been done. */
4225 if (extra && args_addr == 0
4226 && where_pad != none && where_pad != stack_direction)
4227 anti_adjust_stack (GEN_INT (extra));
4228
4229 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4230 }
4231 else
4232 #endif /* PUSH_ROUNDING */
4233 {
4234 rtx target;
4235
4236 /* Otherwise make space on the stack and copy the data
4237 to the address of that space. */
4238
4239 /* Deduct words put into registers from the size we must copy. */
4240 if (partial != 0)
4241 {
4242 if (CONST_INT_P (size))
4243 size = GEN_INT (INTVAL (size) - used);
4244 else
4245 size = expand_binop (GET_MODE (size), sub_optab, size,
4246 gen_int_mode (used, GET_MODE (size)),
4247 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4248 }
4249
4250 /* Get the address of the stack space.
4251 In this case, we do not deal with EXTRA separately.
4252 A single stack adjust will do. */
4253 if (! args_addr)
4254 {
4255 temp = push_block (size, extra, where_pad == downward);
4256 extra = 0;
4257 }
4258 else if (CONST_INT_P (args_so_far))
4259 temp = memory_address (BLKmode,
4260 plus_constant (Pmode, args_addr,
4261 skip + INTVAL (args_so_far)));
4262 else
4263 temp = memory_address (BLKmode,
4264 plus_constant (Pmode,
4265 gen_rtx_PLUS (Pmode,
4266 args_addr,
4267 args_so_far),
4268 skip));
4269
4270 if (!ACCUMULATE_OUTGOING_ARGS)
4271 {
4272 /* If the source is referenced relative to the stack pointer,
4273 copy it to another register to stabilize it. We do not need
4274 to do this if we know that we won't be changing sp. */
4275
4276 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4277 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4278 temp = copy_to_reg (temp);
4279 }
4280
4281 target = gen_rtx_MEM (BLKmode, temp);
4282
4283 /* We do *not* set_mem_attributes here, because incoming arguments
4284 may overlap with sibling call outgoing arguments and we cannot
4285 allow reordering of reads from function arguments with stores
4286 to outgoing arguments of sibling calls. We do, however, want
4287 to record the alignment of the stack slot. */
4288 /* ALIGN may well be better aligned than TYPE, e.g. due to
4289 PARM_BOUNDARY. Assume the caller isn't lying. */
4290 set_mem_align (target, align);
4291
4292 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4293 }
4294 }
4295 else if (partial > 0)
4296 {
4297 /* Scalar partly in registers. */
4298
4299 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4300 int i;
4301 int not_stack;
4302 /* # bytes of start of argument
4303 that we must make space for but need not store. */
4304 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4305 int args_offset = INTVAL (args_so_far);
4306 int skip;
4307
4308 /* Push padding now if padding above and stack grows down,
4309 or if padding below and stack grows up.
4310 But if space already allocated, this has already been done. */
4311 if (extra && args_addr == 0
4312 && where_pad != none && where_pad != stack_direction)
4313 anti_adjust_stack (GEN_INT (extra));
4314
4315 /* If we make space by pushing it, we might as well push
4316 the real data. Otherwise, we can leave OFFSET nonzero
4317 and leave the space uninitialized. */
4318 if (args_addr == 0)
4319 offset = 0;
4320
4321 /* Now NOT_STACK gets the number of words that we don't need to
4322 allocate on the stack. Convert OFFSET to words too. */
4323 not_stack = (partial - offset) / UNITS_PER_WORD;
4324 offset /= UNITS_PER_WORD;
4325
4326 /* If the partial register-part of the arg counts in its stack size,
4327 skip the part of stack space corresponding to the registers.
4328 Otherwise, start copying to the beginning of the stack space,
4329 by setting SKIP to 0. */
4330 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4331
4332 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4333 x = validize_mem (force_const_mem (mode, x));
4334
4335 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4336 SUBREGs of such registers are not allowed. */
4337 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4338 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4339 x = copy_to_reg (x);
4340
4341 /* Loop over all the words allocated on the stack for this arg. */
4342 /* We can do it by words, because any scalar bigger than a word
4343 	 has a size that is a multiple of a word.  */
4344 for (i = size - 1; i >= not_stack; i--)
4345 if (i >= not_stack + offset)
4346 emit_push_insn (operand_subword_force (x, i, mode),
4347 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4348 0, args_addr,
4349 GEN_INT (args_offset + ((i - not_stack + skip)
4350 * UNITS_PER_WORD)),
4351 reg_parm_stack_space, alignment_pad);
4352 }
4353 else
4354 {
4355 rtx addr;
4356 rtx dest;
4357
4358 /* Push padding now if padding above and stack grows down,
4359 or if padding below and stack grows up.
4360 But if space already allocated, this has already been done. */
4361 if (extra && args_addr == 0
4362 && where_pad != none && where_pad != stack_direction)
4363 anti_adjust_stack (GEN_INT (extra));
4364
4365 #ifdef PUSH_ROUNDING
4366 if (args_addr == 0 && PUSH_ARGS)
4367 emit_single_push_insn (mode, x, type);
4368 else
4369 #endif
4370 {
4371 if (CONST_INT_P (args_so_far))
4372 addr
4373 = memory_address (mode,
4374 plus_constant (Pmode, args_addr,
4375 INTVAL (args_so_far)));
4376 else
4377 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4378 args_so_far));
4379 dest = gen_rtx_MEM (mode, addr);
4380
4381 /* We do *not* set_mem_attributes here, because incoming arguments
4382 may overlap with sibling call outgoing arguments and we cannot
4383 allow reordering of reads from function arguments with stores
4384 to outgoing arguments of sibling calls. We do, however, want
4385 to record the alignment of the stack slot. */
4386 /* ALIGN may well be better aligned than TYPE, e.g. due to
4387 PARM_BOUNDARY. Assume the caller isn't lying. */
4388 set_mem_align (dest, align);
4389
4390 emit_move_insn (dest, x);
4391 }
4392 }
4393
4394 /* If part should go in registers, copy that part
4395 into the appropriate registers. Do this now, at the end,
4396 since mem-to-mem copies above may do function calls. */
4397 if (partial > 0 && reg != 0)
4398 {
4399 /* Handle calls that pass values in multiple non-contiguous locations.
4400 The Irix 6 ABI has examples of this. */
4401 if (GET_CODE (reg) == PARALLEL)
4402 emit_group_load (reg, x, type, -1);
4403 else
4404 {
4405 gcc_assert (partial % UNITS_PER_WORD == 0);
4406 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4407 }
4408 }
4409
4410 if (extra && args_addr == 0 && where_pad == stack_direction)
4411 anti_adjust_stack (GEN_INT (extra));
4412
4413 if (alignment_pad && args_addr == 0)
4414 anti_adjust_stack (alignment_pad);
4415 }
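
/* A concrete case, as a sketch: pushing a 12-byte BLKmode argument with
   PARTIAL == 8 and a hard REG means the first 8 bytes travel in registers
   (loaded at the end via move_block_to_reg or emit_group_load), while only
   the remaining 4 bytes are copied to the stack; SKIP then decides whether
   the register part still reserves stack space, depending on
   REG_PARM_STACK_SPACE.  */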
4416 \f
4417 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4418 operations. */
4419
4420 static rtx
4421 get_subtarget (rtx x)
4422 {
4423 return (optimize
4424 || x == 0
4425 /* Only registers can be subtargets. */
4426 || !REG_P (x)
4427 /* Don't use hard regs to avoid extending their life. */
4428 || REGNO (x) < FIRST_PSEUDO_REGISTER
4429 ? 0 : x);
4430 }
4431
4432 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4433 FIELD is a bitfield. Returns true if the optimization was successful,
4434 and there's nothing else to do. */
4435
4436 static bool
4437 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4438 unsigned HOST_WIDE_INT bitpos,
4439 unsigned HOST_WIDE_INT bitregion_start,
4440 unsigned HOST_WIDE_INT bitregion_end,
4441 machine_mode mode1, rtx str_rtx,
4442 tree to, tree src)
4443 {
4444 machine_mode str_mode = GET_MODE (str_rtx);
4445 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4446 tree op0, op1;
4447 rtx value, result;
4448 optab binop;
4449 gimple srcstmt;
4450 enum tree_code code;
4451
4452 if (mode1 != VOIDmode
4453 || bitsize >= BITS_PER_WORD
4454 || str_bitsize > BITS_PER_WORD
4455 || TREE_SIDE_EFFECTS (to)
4456 || TREE_THIS_VOLATILE (to))
4457 return false;
4458
4459 STRIP_NOPS (src);
4460 if (TREE_CODE (src) != SSA_NAME)
4461 return false;
4462 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4463 return false;
4464
4465 srcstmt = get_gimple_for_ssa_name (src);
4466 if (!srcstmt
4467 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4468 return false;
4469
4470 code = gimple_assign_rhs_code (srcstmt);
4471
4472 op0 = gimple_assign_rhs1 (srcstmt);
4473
4474 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4475 to find its initialization. Hopefully the initialization will
4476 be from a bitfield load. */
4477 if (TREE_CODE (op0) == SSA_NAME)
4478 {
4479 gimple op0stmt = get_gimple_for_ssa_name (op0);
4480
4481 /* We want to eventually have OP0 be the same as TO, which
4482 should be a bitfield. */
4483 if (!op0stmt
4484 || !is_gimple_assign (op0stmt)
4485 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4486 return false;
4487 op0 = gimple_assign_rhs1 (op0stmt);
4488 }
4489
4490 op1 = gimple_assign_rhs2 (srcstmt);
4491
4492 if (!operand_equal_p (to, op0, 0))
4493 return false;
4494
4495 if (MEM_P (str_rtx))
4496 {
4497 unsigned HOST_WIDE_INT offset1;
4498
4499 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4500 str_mode = word_mode;
4501 str_mode = get_best_mode (bitsize, bitpos,
4502 bitregion_start, bitregion_end,
4503 MEM_ALIGN (str_rtx), str_mode, 0);
4504 if (str_mode == VOIDmode)
4505 return false;
4506 str_bitsize = GET_MODE_BITSIZE (str_mode);
4507
4508 offset1 = bitpos;
4509 bitpos %= str_bitsize;
4510 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4511 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4512 }
4513 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4514 return false;
4515
4516 /* If the bit field covers the whole REG/MEM, store_field
4517 will likely generate better code. */
4518 if (bitsize >= str_bitsize)
4519 return false;
4520
4521 /* We can't handle fields split across multiple entities. */
4522 if (bitpos + bitsize > str_bitsize)
4523 return false;
4524
4525 if (BYTES_BIG_ENDIAN)
4526 bitpos = str_bitsize - bitpos - bitsize;
4527
4528 switch (code)
4529 {
4530 case PLUS_EXPR:
4531 case MINUS_EXPR:
4532 /* For now, just optimize the case of the topmost bitfield
4533 where we don't need to do any masking and also
4534 	   1-bit bitfields where xor can be used.
4535 We might win by one instruction for the other bitfields
4536 too if insv/extv instructions aren't used, so that
4537 can be added later. */
4538 if (bitpos + bitsize != str_bitsize
4539 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4540 break;
4541
4542 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4543 value = convert_modes (str_mode,
4544 TYPE_MODE (TREE_TYPE (op1)), value,
4545 TYPE_UNSIGNED (TREE_TYPE (op1)));
4546
4547 /* We may be accessing data outside the field, which means
4548 we can alias adjacent data. */
4549 if (MEM_P (str_rtx))
4550 {
4551 str_rtx = shallow_copy_rtx (str_rtx);
4552 set_mem_alias_set (str_rtx, 0);
4553 set_mem_expr (str_rtx, 0);
4554 }
4555
4556 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4557 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4558 {
4559 value = expand_and (str_mode, value, const1_rtx, NULL);
4560 binop = xor_optab;
4561 }
4562 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4563 result = expand_binop (str_mode, binop, str_rtx,
4564 value, str_rtx, 1, OPTAB_WIDEN);
4565 if (result != str_rtx)
4566 emit_move_insn (str_rtx, result);
4567 return true;
4568
4569 case BIT_IOR_EXPR:
4570 case BIT_XOR_EXPR:
4571 if (TREE_CODE (op1) != INTEGER_CST)
4572 break;
4573 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4574 value = convert_modes (str_mode,
4575 TYPE_MODE (TREE_TYPE (op1)), value,
4576 TYPE_UNSIGNED (TREE_TYPE (op1)));
4577
4578 /* We may be accessing data outside the field, which means
4579 we can alias adjacent data. */
4580 if (MEM_P (str_rtx))
4581 {
4582 str_rtx = shallow_copy_rtx (str_rtx);
4583 set_mem_alias_set (str_rtx, 0);
4584 set_mem_expr (str_rtx, 0);
4585 }
4586
4587 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4588 if (bitpos + bitsize != str_bitsize)
4589 {
4590 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4591 str_mode);
4592 value = expand_and (str_mode, value, mask, NULL_RTX);
4593 }
4594 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4595 result = expand_binop (str_mode, binop, str_rtx,
4596 value, str_rtx, 1, OPTAB_WIDEN);
4597 if (result != str_rtx)
4598 emit_move_insn (str_rtx, result);
4599 return true;
4600
4601 default:
4602 break;
4603 }
4604
4605 return false;
4606 }
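
/* Typical statements that reach the fast paths above, as a sketch:

     struct { unsigned f : 1; unsigned g : 7; } s;
     s.f += 1;      - a 1-bit PLUS becomes an XOR of the containing word
     s.g |= 3;      - the constant is masked, shifted into place and IOR'd

   so the read-modify-write happens directly on the word containing the
   bitfield, without extracting the field first (exact behaviour depends
   on the field's position within the word, as checked above).  */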
4607
4608 /* In the C++ memory model, consecutive bit fields in a structure are
4609 considered one memory location.
4610
4611 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4612 returns the bit range of consecutive bits in which this COMPONENT_REF
4613 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4614 and *OFFSET may be adjusted in the process.
4615
4616 If the access does not need to be restricted, 0 is returned in both
4617 *BITSTART and *BITEND. */
4618
4619 static void
4620 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4621 unsigned HOST_WIDE_INT *bitend,
4622 tree exp,
4623 HOST_WIDE_INT *bitpos,
4624 tree *offset)
4625 {
4626 HOST_WIDE_INT bitoffset;
4627 tree field, repr;
4628
4629 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4630
4631 field = TREE_OPERAND (exp, 1);
4632 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4633 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4634 need to limit the range we can access. */
4635 if (!repr)
4636 {
4637 *bitstart = *bitend = 0;
4638 return;
4639 }
4640
4641 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4642 part of a larger bit field, then the representative does not serve any
4643 useful purpose. This can occur in Ada. */
4644 if (handled_component_p (TREE_OPERAND (exp, 0)))
4645 {
4646 machine_mode rmode;
4647 HOST_WIDE_INT rbitsize, rbitpos;
4648 tree roffset;
4649 int unsignedp;
4650 int volatilep = 0;
4651 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4652 &roffset, &rmode, &unsignedp, &volatilep, false);
4653 if ((rbitpos % BITS_PER_UNIT) != 0)
4654 {
4655 *bitstart = *bitend = 0;
4656 return;
4657 }
4658 }
4659
4660 /* Compute the adjustment to bitpos from the offset of the field
4661 relative to the representative. DECL_FIELD_OFFSET of field and
4662 repr are the same by construction if they are not constants,
4663 see finish_bitfield_layout. */
4664 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4665 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4666 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4667 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4668 else
4669 bitoffset = 0;
4670 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4671 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4672
4673 /* If the adjustment is larger than bitpos, we would have a negative bit
4674 position for the lower bound and this may wreak havoc later. Adjust
4675 offset and bitpos to make the lower bound non-negative in that case. */
4676 if (bitoffset > *bitpos)
4677 {
4678 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4679 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4680
4681 *bitpos += adjust;
4682 if (*offset == NULL_TREE)
4683 *offset = size_int (-adjust / BITS_PER_UNIT);
4684 else
4685 *offset
4686 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4687 *bitstart = 0;
4688 }
4689 else
4690 *bitstart = *bitpos - bitoffset;
4691
4692 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4693 }
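
/* For example, in

     struct S { int a : 3; int b : 5; char c; };

   the adjacent bitfields a and b share one DECL_BIT_FIELD_REPRESENTATIVE,
   so a store to b may only touch bits within that representative;
   *BITSTART and *BITEND describe exactly that range, and the separately
   declared c lies outside it.  The precise layout is of course target-
   and ABI-dependent.  */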
4694
4695 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4696 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4697 DECL_RTL was not set yet, return NORTL. */
4698
4699 static inline bool
4700 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4701 {
4702 if (TREE_CODE (addr) != ADDR_EXPR)
4703 return false;
4704
4705 tree base = TREE_OPERAND (addr, 0);
4706
4707 if (!DECL_P (base)
4708 || TREE_ADDRESSABLE (base)
4709 || DECL_MODE (base) == BLKmode)
4710 return false;
4711
4712 if (!DECL_RTL_SET_P (base))
4713 return nortl;
4714
4715 return (!MEM_P (DECL_RTL (base)));
4716 }
4717
4718 /* Returns true if the MEM_REF REF refers to an object that does not
4719 reside in memory and has non-BLKmode. */
4720
4721 static inline bool
4722 mem_ref_refers_to_non_mem_p (tree ref)
4723 {
4724 tree base = TREE_OPERAND (ref, 0);
4725 return addr_expr_of_non_mem_decl_p_1 (base, false);
4726 }
4727
4728 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4729 is true, try generating a nontemporal store. */
4730
4731 void
4732 expand_assignment (tree to, tree from, bool nontemporal)
4733 {
4734 rtx to_rtx = 0;
4735 rtx result;
4736 machine_mode mode;
4737 unsigned int align;
4738 enum insn_code icode;
4739
4740 /* Don't crash if the lhs of the assignment was erroneous. */
4741 if (TREE_CODE (to) == ERROR_MARK)
4742 {
4743 expand_normal (from);
4744 return;
4745 }
4746
4747 /* Optimize away no-op moves without side-effects. */
4748 if (operand_equal_p (to, from, 0))
4749 return;
4750
4751 /* Handle misaligned stores. */
4752 mode = TYPE_MODE (TREE_TYPE (to));
4753 if ((TREE_CODE (to) == MEM_REF
4754 || TREE_CODE (to) == TARGET_MEM_REF)
4755 && mode != BLKmode
4756 && !mem_ref_refers_to_non_mem_p (to)
4757 && ((align = get_object_alignment (to))
4758 < GET_MODE_ALIGNMENT (mode))
4759 && (((icode = optab_handler (movmisalign_optab, mode))
4760 != CODE_FOR_nothing)
4761 || SLOW_UNALIGNED_ACCESS (mode, align)))
4762 {
4763 rtx reg, mem;
4764
4765 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4766 reg = force_not_mem (reg);
4767 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4768
4769 if (icode != CODE_FOR_nothing)
4770 {
4771 struct expand_operand ops[2];
4772
4773 create_fixed_operand (&ops[0], mem);
4774 create_input_operand (&ops[1], reg, mode);
4775 /* The movmisalign<mode> pattern cannot fail, else the assignment
4776 would silently be omitted. */
4777 expand_insn (icode, 2, ops);
4778 }
4779 else
4780 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4781 return;
4782 }
4783
4784 /* Assignment of a structure component needs special treatment
4785 if the structure component's rtx is not simply a MEM.
4786 Assignment of an array element at a constant index, and assignment of
4787 an array element in an unaligned packed structure field, has the same
4788 problem. Same for (partially) storing into a non-memory object. */
4789 if (handled_component_p (to)
4790 || (TREE_CODE (to) == MEM_REF
4791 && mem_ref_refers_to_non_mem_p (to))
4792 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4793 {
4794 machine_mode mode1;
4795 HOST_WIDE_INT bitsize, bitpos;
4796 unsigned HOST_WIDE_INT bitregion_start = 0;
4797 unsigned HOST_WIDE_INT bitregion_end = 0;
4798 tree offset;
4799 int unsignedp;
4800 int volatilep = 0;
4801 tree tem;
4802
4803 push_temp_slots ();
4804 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4805 &unsignedp, &volatilep, true);
4806
4807 /* Make sure bitpos is not negative, it can wreak havoc later. */
4808 if (bitpos < 0)
4809 {
4810 gcc_assert (offset == NULL_TREE);
4811 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4812 ? 3 : exact_log2 (BITS_PER_UNIT)));
4813 bitpos &= BITS_PER_UNIT - 1;
4814 }
4815
4816 if (TREE_CODE (to) == COMPONENT_REF
4817 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4818 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4819 /* The C++ memory model naturally applies to byte-aligned fields.
4820 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4821 BITSIZE are not byte-aligned, there is no need to limit the range
4822 we can access. This can occur with packed structures in Ada. */
4823 else if (bitsize > 0
4824 && bitsize % BITS_PER_UNIT == 0
4825 && bitpos % BITS_PER_UNIT == 0)
4826 {
4827 bitregion_start = bitpos;
4828 bitregion_end = bitpos + bitsize - 1;
4829 }
4830
4831 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4832
4833 /* If the field has a mode, we want to access it in the
4834 field's mode, not the computed mode.
4835 If a MEM has VOIDmode (external with incomplete type),
4836 use BLKmode for it instead. */
4837 if (MEM_P (to_rtx))
4838 {
4839 if (mode1 != VOIDmode)
4840 to_rtx = adjust_address (to_rtx, mode1, 0);
4841 else if (GET_MODE (to_rtx) == VOIDmode)
4842 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4843 }
4844
4845 if (offset != 0)
4846 {
4847 machine_mode address_mode;
4848 rtx offset_rtx;
4849
4850 if (!MEM_P (to_rtx))
4851 {
4852 /* We can get constant negative offsets into arrays with broken
4853 user code. Translate this to a trap instead of ICEing. */
4854 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4855 expand_builtin_trap ();
4856 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4857 }
4858
4859 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4860 address_mode = get_address_mode (to_rtx);
4861 if (GET_MODE (offset_rtx) != address_mode)
4862 {
4863 /* We cannot be sure that the RTL in offset_rtx is valid outside
4864 of a memory address context, so force it into a register
4865 before attempting to convert it to the desired mode. */
4866 offset_rtx = force_operand (offset_rtx, NULL_RTX);
4867 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4868 }
4869
4870 /* If we have an expression in OFFSET_RTX and a non-zero
4871 byte offset in BITPOS, adding the byte offset before the
4872 OFFSET_RTX results in better intermediate code, which makes
4873 later rtl optimization passes perform better.
4874
4875 We prefer intermediate code like this:
4876
4877 r124:DI=r123:DI+0x18
4878 [r124:DI]=r121:DI
4879
4880 ... instead of ...
4881
4882 r124:DI=r123:DI+0x10
4883 [r124:DI+0x8]=r121:DI
4884
4885 This is only done for aligned data values, as these can
4886 be expected to result in single move instructions. */
4887 if (mode1 != VOIDmode
4888 && bitpos != 0
4889 && bitsize > 0
4890 && (bitpos % bitsize) == 0
4891 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4892 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4893 {
4894 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4895 bitregion_start = 0;
4896 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4897 bitregion_end -= bitpos;
4898 bitpos = 0;
4899 }
4900
4901 to_rtx = offset_address (to_rtx, offset_rtx,
4902 highest_pow2_factor_for_target (to,
4903 offset));
4904 }
4905
4906 /* No action is needed if the target is not a memory and the field
4907 lies completely outside that target. This can occur if the source
4908 code contains an out-of-bounds access to a small array. */
4909 if (!MEM_P (to_rtx)
4910 && GET_MODE (to_rtx) != BLKmode
4911 && (unsigned HOST_WIDE_INT) bitpos
4912 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4913 {
4914 expand_normal (from);
4915 result = NULL;
4916 }
4917 /* Handle expand_expr of a complex value returning a CONCAT. */
4918 else if (GET_CODE (to_rtx) == CONCAT)
4919 {
4920 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4921 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4922 && bitpos == 0
4923 && bitsize == mode_bitsize)
4924 result = store_expr (from, to_rtx, false, nontemporal);
4925 else if (bitsize == mode_bitsize / 2
4926 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4927 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4928 nontemporal);
4929 else if (bitpos + bitsize <= mode_bitsize / 2)
4930 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4931 bitregion_start, bitregion_end,
4932 mode1, from,
4933 get_alias_set (to), nontemporal);
4934 else if (bitpos >= mode_bitsize / 2)
4935 result = store_field (XEXP (to_rtx, 1), bitsize,
4936 bitpos - mode_bitsize / 2,
4937 bitregion_start, bitregion_end,
4938 mode1, from,
4939 get_alias_set (to), nontemporal);
4940 else if (bitpos == 0 && bitsize == mode_bitsize)
4941 {
4942 rtx from_rtx;
4943 result = expand_normal (from);
4944 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4945 TYPE_MODE (TREE_TYPE (from)), 0);
4946 emit_move_insn (XEXP (to_rtx, 0),
4947 read_complex_part (from_rtx, false));
4948 emit_move_insn (XEXP (to_rtx, 1),
4949 read_complex_part (from_rtx, true));
4950 }
4951 else
4952 {
4953 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4954 GET_MODE_SIZE (GET_MODE (to_rtx)));
4955 write_complex_part (temp, XEXP (to_rtx, 0), false);
4956 write_complex_part (temp, XEXP (to_rtx, 1), true);
4957 result = store_field (temp, bitsize, bitpos,
4958 bitregion_start, bitregion_end,
4959 mode1, from,
4960 get_alias_set (to), nontemporal);
4961 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4962 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4963 }
4964 }
4965 else
4966 {
4967 if (MEM_P (to_rtx))
4968 {
4969 /* If the field is at offset zero, we could have been given the
4970 DECL_RTX of the parent struct. Don't munge it. */
4971 to_rtx = shallow_copy_rtx (to_rtx);
4972 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4973 if (volatilep)
4974 MEM_VOLATILE_P (to_rtx) = 1;
4975 }
4976
4977 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4978 bitregion_start, bitregion_end,
4979 mode1,
4980 to_rtx, to, from))
4981 result = NULL;
4982 else
4983 result = store_field (to_rtx, bitsize, bitpos,
4984 bitregion_start, bitregion_end,
4985 mode1, from,
4986 get_alias_set (to), nontemporal);
4987 }
4988
4989 if (result)
4990 preserve_temp_slots (result);
4991 pop_temp_slots ();
4992 return;
4993 }
4994
4995 /* If the rhs is a function call and its value is not an aggregate,
4996 call the function before we start to compute the lhs.
4997 This is needed for correct code for cases such as
4998 val = setjmp (buf) on machines where reference to val
4999 requires loading up part of an address in a separate insn.
5000
5001 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5002    since it might be a promoted variable where the zero- or sign-extension
5003 needs to be done. Handling this in the normal way is safe because no
5004 computation is done before the call. The same is true for SSA names. */
5005 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5006 && COMPLETE_TYPE_P (TREE_TYPE (from))
5007 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5008 && ! (((TREE_CODE (to) == VAR_DECL
5009 || TREE_CODE (to) == PARM_DECL
5010 || TREE_CODE (to) == RESULT_DECL)
5011 && REG_P (DECL_RTL (to)))
5012 || TREE_CODE (to) == SSA_NAME))
5013 {
5014 rtx value;
5015 rtx bounds;
5016
5017 push_temp_slots ();
5018 value = expand_normal (from);
5019
5020 /* Split value and bounds to store them separately. */
5021 chkp_split_slot (value, &value, &bounds);
5022
5023 if (to_rtx == 0)
5024 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5025
5026 /* Handle calls that return values in multiple non-contiguous locations.
5027 The Irix 6 ABI has examples of this. */
5028 if (GET_CODE (to_rtx) == PARALLEL)
5029 {
5030 if (GET_CODE (value) == PARALLEL)
5031 emit_group_move (to_rtx, value);
5032 else
5033 emit_group_load (to_rtx, value, TREE_TYPE (from),
5034 int_size_in_bytes (TREE_TYPE (from)));
5035 }
5036 else if (GET_CODE (value) == PARALLEL)
5037 emit_group_store (to_rtx, value, TREE_TYPE (from),
5038 int_size_in_bytes (TREE_TYPE (from)));
5039 else if (GET_MODE (to_rtx) == BLKmode)
5040 {
5041 /* Handle calls that return BLKmode values in registers. */
5042 if (REG_P (value))
5043 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5044 else
5045 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5046 }
5047 else
5048 {
5049 if (POINTER_TYPE_P (TREE_TYPE (to)))
5050 value = convert_memory_address_addr_space
5051 (GET_MODE (to_rtx), value,
5052 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5053
5054 emit_move_insn (to_rtx, value);
5055 }
5056
5057 /* Store bounds if required. */
5058 if (bounds
5059 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5060 {
5061 gcc_assert (MEM_P (to_rtx));
5062 chkp_emit_bounds_store (bounds, value, to_rtx);
5063 }
5064
5065 preserve_temp_slots (to_rtx);
5066 pop_temp_slots ();
5067 return;
5068 }
5069
5070 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5071 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5072
5073 /* Don't move directly into a return register. */
5074 if (TREE_CODE (to) == RESULT_DECL
5075 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5076 {
5077 rtx temp;
5078
5079 push_temp_slots ();
5080
5081 /* If the source is itself a return value, it still is in a pseudo at
5082 this point so we can move it back to the return register directly. */
5083 if (REG_P (to_rtx)
5084 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5085 && TREE_CODE (from) != CALL_EXPR)
5086 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5087 else
5088 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5089
5090 /* Handle calls that return values in multiple non-contiguous locations.
5091 The Irix 6 ABI has examples of this. */
5092 if (GET_CODE (to_rtx) == PARALLEL)
5093 {
5094 if (GET_CODE (temp) == PARALLEL)
5095 emit_group_move (to_rtx, temp);
5096 else
5097 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5098 int_size_in_bytes (TREE_TYPE (from)));
5099 }
5100 else if (temp)
5101 emit_move_insn (to_rtx, temp);
5102
5103 preserve_temp_slots (to_rtx);
5104 pop_temp_slots ();
5105 return;
5106 }
5107
5108   /* In case we are returning the contents of an object that overlaps
5109      the place where the value is being stored, use a safe function when
5110      copying a value through a pointer into a structure value return block.  */
5111 if (TREE_CODE (to) == RESULT_DECL
5112 && TREE_CODE (from) == INDIRECT_REF
5113 && ADDR_SPACE_GENERIC_P
5114 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5115 && refs_may_alias_p (to, from)
5116 && cfun->returns_struct
5117 && !cfun->returns_pcc_struct)
5118 {
5119 rtx from_rtx, size;
5120
5121 push_temp_slots ();
5122 size = expr_size (from);
5123 from_rtx = expand_normal (from);
5124
5125 emit_library_call (memmove_libfunc, LCT_NORMAL,
5126 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5127 XEXP (from_rtx, 0), Pmode,
5128 convert_to_mode (TYPE_MODE (sizetype),
5129 size, TYPE_UNSIGNED (sizetype)),
5130 TYPE_MODE (sizetype));
5131
5132 preserve_temp_slots (to_rtx);
5133 pop_temp_slots ();
5134 return;
5135 }
5136
5137 /* Compute FROM and store the value in the rtx we got. */
5138
5139 push_temp_slots ();
5140 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, to);
5141 preserve_temp_slots (result);
5142 pop_temp_slots ();
5143 return;
5144 }
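
/* As an example of the misaligned-store path near the top of this
   function: a store such as

     char buf[8];
     *(int *) (buf + 1) = x;

   may reach expand_assignment as a MEM_REF whose known alignment is less
   than that of SImode; depending on SLOW_UNALIGNED_ACCESS and on whether
   the target provides movmisalign<mode>, the value is then stored via that
   pattern or via store_bit_field rather than a plain move.  (Rough sketch;
   whether this path triggers depends on the target and on what earlier
   passes could prove about the alignment of buf.)  */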
5145
5146 /* Emits a nontemporal store insn that moves FROM to TO.  Returns true if this
5147 succeeded, false otherwise. */
5148
5149 bool
5150 emit_storent_insn (rtx to, rtx from)
5151 {
5152 struct expand_operand ops[2];
5153 machine_mode mode = GET_MODE (to);
5154 enum insn_code code = optab_handler (storent_optab, mode);
5155
5156 if (code == CODE_FOR_nothing)
5157 return false;
5158
5159 create_fixed_operand (&ops[0], to);
5160 create_input_operand (&ops[1], from, mode);
5161 return maybe_expand_insn (code, 2, ops);
5162 }
5163
5164 /* Generate code for computing expression EXP,
5165 and storing the value into TARGET.
5166
5167 If the mode is BLKmode then we may return TARGET itself.
5168    It turns out that in BLKmode it doesn't cause a problem,
5169 because C has no operators that could combine two different
5170 assignments into the same BLKmode object with different values
5171 with no sequence point. Will other languages need this to
5172 be more thorough?
5173
5174 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5175 stack, and block moves may need to be treated specially.
5176
5177 If NONTEMPORAL is true, try using a nontemporal store instruction.
5178
5179 If BTARGET is not NULL then computed bounds of EXP are
5180 associated with BTARGET. */
5181
5182 rtx
5183 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5184 bool nontemporal, tree btarget)
5185 {
5186 rtx temp;
5187 rtx alt_rtl = NULL_RTX;
5188 location_t loc = curr_insn_location ();
5189
5190 if (VOID_TYPE_P (TREE_TYPE (exp)))
5191 {
5192 /* C++ can generate ?: expressions with a throw expression in one
5193 branch and an rvalue in the other. Here, we resolve attempts to
5194 store the throw expression's nonexistent result. */
5195 gcc_assert (!call_param_p);
5196 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5197 return NULL_RTX;
5198 }
5199 if (TREE_CODE (exp) == COMPOUND_EXPR)
5200 {
5201 /* Perform first part of compound expression, then assign from second
5202 part. */
5203 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5204 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5205 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5206 call_param_p, nontemporal, btarget);
5207 }
5208 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5209 {
5210       /* For a conditional expression, get a safe form of the target.  Then
5211 test the condition, doing the appropriate assignment on either
5212 side. This avoids the creation of unnecessary temporaries.
5213 For non-BLKmode, it is more efficient not to do this. */
5214
5215 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5216
5217 do_pending_stack_adjust ();
5218 NO_DEFER_POP;
5219 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5220 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5221 nontemporal, btarget);
5222 emit_jump_insn (gen_jump (lab2));
5223 emit_barrier ();
5224 emit_label (lab1);
5225 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5226 nontemporal, btarget);
5227 emit_label (lab2);
5228 OK_DEFER_POP;
5229
5230 return NULL_RTX;
5231 }
5232 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5233 /* If this is a scalar in a register that is stored in a wider mode
5234 than the declared mode, compute the result into its declared mode
5235 and then convert to the wider mode. Our value is the computed
5236 expression. */
5237 {
5238 rtx inner_target = 0;
5239
5240 /* We can do the conversion inside EXP, which will often result
5241 in some optimizations. Do the conversion in two steps: first
5242 	 change the signedness, if needed, then the extension.  But don't
5243 do this if the type of EXP is a subtype of something else
5244 since then the conversion might involve more than just
5245 converting modes. */
5246 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5247 && TREE_TYPE (TREE_TYPE (exp)) == 0
5248 && GET_MODE_PRECISION (GET_MODE (target))
5249 == TYPE_PRECISION (TREE_TYPE (exp)))
5250 {
5251 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5252 TYPE_UNSIGNED (TREE_TYPE (exp))))
5253 {
5254 /* Some types, e.g. Fortran's logical*4, won't have a signed
5255 version, so use the mode instead. */
5256 tree ntype
5257 = (signed_or_unsigned_type_for
5258 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5259 if (ntype == NULL)
5260 ntype = lang_hooks.types.type_for_mode
5261 (TYPE_MODE (TREE_TYPE (exp)),
5262 SUBREG_PROMOTED_SIGN (target));
5263
5264 exp = fold_convert_loc (loc, ntype, exp);
5265 }
5266
5267 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5268 (GET_MODE (SUBREG_REG (target)),
5269 SUBREG_PROMOTED_SIGN (target)),
5270 exp);
5271
5272 inner_target = SUBREG_REG (target);
5273 }
5274
5275 temp = expand_expr (exp, inner_target, VOIDmode,
5276 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5277
5278 /* Handle bounds returned by call. */
5279 if (TREE_CODE (exp) == CALL_EXPR)
5280 {
5281 rtx bounds;
5282 chkp_split_slot (temp, &temp, &bounds);
5283 if (bounds && btarget)
5284 {
5285 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5286 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5287 chkp_set_rtl_bounds (btarget, tmp);
5288 }
5289 }
5290
5291 /* If TEMP is a VOIDmode constant, use convert_modes to make
5292 sure that we properly convert it. */
5293 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5294 {
5295 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5296 temp, SUBREG_PROMOTED_SIGN (target));
5297 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5298 GET_MODE (target), temp,
5299 SUBREG_PROMOTED_SIGN (target));
5300 }
5301
5302 convert_move (SUBREG_REG (target), temp,
5303 SUBREG_PROMOTED_SIGN (target));
5304
5305 return NULL_RTX;
5306 }
5307 else if ((TREE_CODE (exp) == STRING_CST
5308 || (TREE_CODE (exp) == MEM_REF
5309 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5310 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5311 == STRING_CST
5312 && integer_zerop (TREE_OPERAND (exp, 1))))
5313 && !nontemporal && !call_param_p
5314 && MEM_P (target))
5315 {
5316 /* Optimize initialization of an array with a STRING_CST. */
5317 HOST_WIDE_INT exp_len, str_copy_len;
5318 rtx dest_mem;
5319 tree str = TREE_CODE (exp) == STRING_CST
5320 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5321
5322 exp_len = int_expr_size (exp);
5323 if (exp_len <= 0)
5324 goto normal_expr;
5325
5326 if (TREE_STRING_LENGTH (str) <= 0)
5327 goto normal_expr;
5328
5329 str_copy_len = strlen (TREE_STRING_POINTER (str));
5330 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5331 goto normal_expr;
5332
5333 str_copy_len = TREE_STRING_LENGTH (str);
5334 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5335 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5336 {
5337 str_copy_len += STORE_MAX_PIECES - 1;
5338 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5339 }
5340 str_copy_len = MIN (str_copy_len, exp_len);
5341 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5342 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5343 MEM_ALIGN (target), false))
5344 goto normal_expr;
5345
5346 dest_mem = target;
5347
5348 dest_mem = store_by_pieces (dest_mem,
5349 str_copy_len, builtin_strncpy_read_str,
5350 CONST_CAST (char *,
5351 TREE_STRING_POINTER (str)),
5352 MEM_ALIGN (target), false,
5353 exp_len > str_copy_len ? 1 : 0);
5354 if (exp_len > str_copy_len)
5355 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5356 GEN_INT (exp_len - str_copy_len),
5357 BLOCK_OP_NORMAL);
5358 return NULL_RTX;
5359 }
5360 else
5361 {
5362 rtx tmp_target;
5363
5364 normal_expr:
5365 /* If we want to use a nontemporal store, force the value to
5366 a register first. */
5367 tmp_target = nontemporal ? NULL_RTX : target;
5368 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5369 (call_param_p
5370 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5371 &alt_rtl, false);
5372
5373 /* Handle bounds returned by call. */
5374 if (TREE_CODE (exp) == CALL_EXPR)
5375 {
5376 rtx bounds;
5377 chkp_split_slot (temp, &temp, &bounds);
5378 if (bounds && btarget)
5379 {
5380 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5381 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5382 chkp_set_rtl_bounds (btarget, tmp);
5383 }
5384 }
5385 }
5386
5387 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5388 the same as that of TARGET, adjust the constant. This is needed, for
5389 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5390 only a word-sized value. */
5391 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5392 && TREE_CODE (exp) != ERROR_MARK
5393 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5394 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5395 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5396
5397 /* If value was not generated in the target, store it there.
5398 Convert the value to TARGET's type first if necessary and emit the
5399 pending increments that have been queued when expanding EXP.
5400 Note that we cannot emit the whole queue blindly because this will
5401 effectively disable the POST_INC optimization later.
5402
5403 If TEMP and TARGET compare equal according to rtx_equal_p, but
5404 one or both of them are volatile memory refs, we have to distinguish
5405 two cases:
5406 - expand_expr has used TARGET. In this case, we must not generate
5407 another copy. This can be detected by TARGET being equal according
5408 to == .
5409 - expand_expr has not used TARGET - that means that the source just
5410 happens to have the same RTX form. Since temp will have been created
5411 by expand_expr, it will compare unequal according to == .
5412 We must generate a copy in this case, to reach the correct number
5413 of volatile memory references. */
5414
5415 if ((! rtx_equal_p (temp, target)
5416 || (temp != target && (side_effects_p (temp)
5417 || side_effects_p (target))))
5418 && TREE_CODE (exp) != ERROR_MARK
5419 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5420 but TARGET is not a valid memory reference, TEMP will differ
5421 from TARGET although it is really the same location. */
5422 && !(alt_rtl
5423 && rtx_equal_p (alt_rtl, target)
5424 && !side_effects_p (alt_rtl)
5425 && !side_effects_p (target))
5426 /* If there's nothing to copy, don't bother. Don't call
5427 expr_size unless necessary, because some front ends' (e.g. C++'s)
5428 expr_size hook must not be given objects that are not
5429 supposed to be bit-copied or bit-initialized. */
5430 && expr_size (exp) != const0_rtx)
5431 {
5432 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5433 {
5434 if (GET_MODE (target) == BLKmode)
5435 {
5436 /* Handle calls that return BLKmode values in registers. */
5437 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5438 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5439 else
5440 store_bit_field (target,
5441 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5442 0, 0, 0, GET_MODE (temp), temp);
5443 }
5444 else
5445 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5446 }
5447
5448 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5449 {
5450 /* Handle copying a string constant into an array. The string
5451 constant may be shorter than the array. So copy just the string's
5452 actual length, and clear the rest. First get the size of the data
5453 type of the string, which is actually the size of the target. */
5454 rtx size = expr_size (exp);
5455
5456 if (CONST_INT_P (size)
5457 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5458 emit_block_move (target, temp, size,
5459 (call_param_p
5460 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5461 else
5462 {
5463 machine_mode pointer_mode
5464 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5465 machine_mode address_mode = get_address_mode (target);
5466
5467 /* Compute the size of the data to copy from the string. */
5468 tree copy_size
5469 = size_binop_loc (loc, MIN_EXPR,
5470 make_tree (sizetype, size),
5471 size_int (TREE_STRING_LENGTH (exp)));
5472 rtx copy_size_rtx
5473 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5474 (call_param_p
5475 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5476 rtx_code_label *label = 0;
5477
5478 /* Copy that much. */
5479 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5480 TYPE_UNSIGNED (sizetype));
5481 emit_block_move (target, temp, copy_size_rtx,
5482 (call_param_p
5483 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5484
5485 /* Figure out how much is left in TARGET that we have to clear.
5486 Do all calculations in pointer_mode. */
5487 if (CONST_INT_P (copy_size_rtx))
5488 {
5489 size = plus_constant (address_mode, size,
5490 -INTVAL (copy_size_rtx));
5491 target = adjust_address (target, BLKmode,
5492 INTVAL (copy_size_rtx));
5493 }
5494 else
5495 {
5496 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5497 copy_size_rtx, NULL_RTX, 0,
5498 OPTAB_LIB_WIDEN);
5499
5500 if (GET_MODE (copy_size_rtx) != address_mode)
5501 copy_size_rtx = convert_to_mode (address_mode,
5502 copy_size_rtx,
5503 TYPE_UNSIGNED (sizetype));
5504
5505 target = offset_address (target, copy_size_rtx,
5506 highest_pow2_factor (copy_size));
5507 label = gen_label_rtx ();
5508 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5509 GET_MODE (size), 0, label);
5510 }
5511
5512 if (size != const0_rtx)
5513 clear_storage (target, size, BLOCK_OP_NORMAL);
5514
5515 if (label)
5516 emit_label (label);
5517 }
5518 }
5519 /* Handle calls that return values in multiple non-contiguous locations.
5520 The Irix 6 ABI has examples of this. */
5521 else if (GET_CODE (target) == PARALLEL)
5522 {
5523 if (GET_CODE (temp) == PARALLEL)
5524 emit_group_move (target, temp);
5525 else
5526 emit_group_load (target, temp, TREE_TYPE (exp),
5527 int_size_in_bytes (TREE_TYPE (exp)));
5528 }
5529 else if (GET_CODE (temp) == PARALLEL)
5530 emit_group_store (target, temp, TREE_TYPE (exp),
5531 int_size_in_bytes (TREE_TYPE (exp)));
5532 else if (GET_MODE (temp) == BLKmode)
5533 emit_block_move (target, temp, expr_size (exp),
5534 (call_param_p
5535 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5536 /* If we emit a nontemporal store, there is nothing else to do. */
5537 else if (nontemporal && emit_storent_insn (target, temp))
5538 ;
5539 else
5540 {
5541 temp = force_operand (temp, target);
5542 if (temp != target)
5543 emit_move_insn (target, temp);
5544 }
5545 }
5546
5547 return NULL_RTX;
5548 }
5549
5550 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5551 rtx
5552 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5553 {
5554 return store_expr_with_bounds (exp, target, call_param_p, nontemporal, NULL);
5555 }
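
/* Illustrative usage (a sketch, not a new entry point): store_constructor
   below calls, e.g., store_expr (lo_index, index_r, 0, false) to expand
   LO_INDEX and leave its value in the pseudo INDEX_R, with no
   stack-parameter or nontemporal handling; like store_expr_with_bounds
   it returns NULL_RTX once the store has been emitted. */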
5556 \f
5557 /* Return true if field F of structure TYPE is a flexible array. */
5558
5559 static bool
5560 flexible_array_member_p (const_tree f, const_tree type)
5561 {
5562 const_tree tf;
5563
5564 tf = TREE_TYPE (f);
5565 return (DECL_CHAIN (f) == NULL
5566 && TREE_CODE (tf) == ARRAY_TYPE
5567 && TYPE_DOMAIN (tf)
5568 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5569 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5570 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5571 && int_size_in_bytes (type) >= 0);
5572 }
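
/* For instance, given the (hypothetical) declaration

     struct s { int len; char data[]; };

   the field DATA satisfies flexible_array_member_p: it is the last field,
   its type is an ARRAY_TYPE whose domain has a zero lower bound and no
   upper bound, and the enclosing struct still has a non-negative size. */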
5573
5574 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5575 must have in order for it to completely initialize a value of type TYPE.
5576 Return -1 if the number isn't known.
5577
5578 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5579
5580 static HOST_WIDE_INT
5581 count_type_elements (const_tree type, bool for_ctor_p)
5582 {
5583 switch (TREE_CODE (type))
5584 {
5585 case ARRAY_TYPE:
5586 {
5587 tree nelts;
5588
5589 nelts = array_type_nelts (type);
5590 if (nelts && tree_fits_uhwi_p (nelts))
5591 {
5592 unsigned HOST_WIDE_INT n;
5593
5594 n = tree_to_uhwi (nelts) + 1;
5595 if (n == 0 || for_ctor_p)
5596 return n;
5597 else
5598 return n * count_type_elements (TREE_TYPE (type), false);
5599 }
5600 return for_ctor_p ? -1 : 1;
5601 }
5602
5603 case RECORD_TYPE:
5604 {
5605 unsigned HOST_WIDE_INT n;
5606 tree f;
5607
5608 n = 0;
5609 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5610 if (TREE_CODE (f) == FIELD_DECL)
5611 {
5612 if (!for_ctor_p)
5613 n += count_type_elements (TREE_TYPE (f), false);
5614 else if (!flexible_array_member_p (f, type))
5615 /* Don't count flexible arrays, which are not supposed
5616 to be initialized. */
5617 n += 1;
5618 }
5619
5620 return n;
5621 }
5622
5623 case UNION_TYPE:
5624 case QUAL_UNION_TYPE:
5625 {
5626 tree f;
5627 HOST_WIDE_INT n, m;
5628
5629 gcc_assert (!for_ctor_p);
5630 /* Estimate the number of scalars in each field and pick the
5631 maximum. Other estimates would do instead; the idea is simply
5632 to make sure that the estimate is not sensitive to the ordering
5633 of the fields. */
5634 n = 1;
5635 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5636 if (TREE_CODE (f) == FIELD_DECL)
5637 {
5638 m = count_type_elements (TREE_TYPE (f), false);
5639 /* If the field doesn't span the whole union, add an extra
5640 scalar for the rest. */
5641 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5642 TYPE_SIZE (type)) != 1)
5643 m++;
5644 if (n < m)
5645 n = m;
5646 }
5647 return n;
5648 }
5649
5650 case COMPLEX_TYPE:
5651 return 2;
5652
5653 case VECTOR_TYPE:
5654 return TYPE_VECTOR_SUBPARTS (type);
5655
5656 case INTEGER_TYPE:
5657 case REAL_TYPE:
5658 case FIXED_POINT_TYPE:
5659 case ENUMERAL_TYPE:
5660 case BOOLEAN_TYPE:
5661 case POINTER_TYPE:
5662 case OFFSET_TYPE:
5663 case REFERENCE_TYPE:
5664 case NULLPTR_TYPE:
5665 return 1;
5666
5667 case ERROR_MARK:
5668 return 0;
5669
5670 case VOID_TYPE:
5671 case METHOD_TYPE:
5672 case FUNCTION_TYPE:
5673 case LANG_TYPE:
5674 default:
5675 gcc_unreachable ();
5676 }
5677 }
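
/* Worked example (hypothetical type, for illustration only):

     struct p { int x; int y[3]; };

   count_type_elements (P, true) returns 2, the number of top-level
   elements a constructor must supply to initialize P completely, while
   count_type_elements (P, false) returns 4, an estimate of the total
   number of scalars (X plus the three elements of Y). */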
5678
5679 /* Helper for categorize_ctor_elements. Identical interface. */
5680
5681 static bool
5682 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5683 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5684 {
5685 unsigned HOST_WIDE_INT idx;
5686 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5687 tree value, purpose, elt_type;
5688
5689 /* Whether CTOR is a valid constant initializer, in accordance with what
5690 initializer_constant_valid_p does. If inferred from the constructor
5691 elements, true until proven otherwise. */
5692 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5693 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5694
5695 nz_elts = 0;
5696 init_elts = 0;
5697 num_fields = 0;
5698 elt_type = NULL_TREE;
5699
5700 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5701 {
5702 HOST_WIDE_INT mult = 1;
5703
5704 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5705 {
5706 tree lo_index = TREE_OPERAND (purpose, 0);
5707 tree hi_index = TREE_OPERAND (purpose, 1);
5708
5709 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5710 mult = (tree_to_uhwi (hi_index)
5711 - tree_to_uhwi (lo_index) + 1);
5712 }
5713 num_fields += mult;
5714 elt_type = TREE_TYPE (value);
5715
5716 switch (TREE_CODE (value))
5717 {
5718 case CONSTRUCTOR:
5719 {
5720 HOST_WIDE_INT nz = 0, ic = 0;
5721
5722 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5723 p_complete);
5724
5725 nz_elts += mult * nz;
5726 init_elts += mult * ic;
5727
5728 if (const_from_elts_p && const_p)
5729 const_p = const_elt_p;
5730 }
5731 break;
5732
5733 case INTEGER_CST:
5734 case REAL_CST:
5735 case FIXED_CST:
5736 if (!initializer_zerop (value))
5737 nz_elts += mult;
5738 init_elts += mult;
5739 break;
5740
5741 case STRING_CST:
5742 nz_elts += mult * TREE_STRING_LENGTH (value);
5743 init_elts += mult * TREE_STRING_LENGTH (value);
5744 break;
5745
5746 case COMPLEX_CST:
5747 if (!initializer_zerop (TREE_REALPART (value)))
5748 nz_elts += mult;
5749 if (!initializer_zerop (TREE_IMAGPART (value)))
5750 nz_elts += mult;
5751 init_elts += mult;
5752 break;
5753
5754 case VECTOR_CST:
5755 {
5756 unsigned i;
5757 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5758 {
5759 tree v = VECTOR_CST_ELT (value, i);
5760 if (!initializer_zerop (v))
5761 nz_elts += mult;
5762 init_elts += mult;
5763 }
5764 }
5765 break;
5766
5767 default:
5768 {
5769 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5770 nz_elts += mult * tc;
5771 init_elts += mult * tc;
5772
5773 if (const_from_elts_p && const_p)
5774 const_p = initializer_constant_valid_p (value, elt_type)
5775 != NULL_TREE;
5776 }
5777 break;
5778 }
5779 }
5780
5781 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5782 num_fields, elt_type))
5783 *p_complete = false;
5784
5785 *p_nz_elts += nz_elts;
5786 *p_init_elts += init_elts;
5787
5788 return const_p;
5789 }
5790
5791 /* Examine CTOR to discover:
5792 * how many scalar fields are set to nonzero values,
5793 and place it in *P_NZ_ELTS;
5794 * how many scalar fields in total are in CTOR,
5795 and place it in *P_INIT_ELTS;
5796 * whether the constructor is complete -- in the sense that every
5797 meaningful byte is explicitly given a value --
5798 and place it in *P_COMPLETE.
5799
5800 Return whether or not CTOR is a valid static constant initializer, the same
5801 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5802
5803 bool
5804 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5805 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5806 {
5807 *p_nz_elts = 0;
5808 *p_init_elts = 0;
5809 *p_complete = true;
5810
5811 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5812 }
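
/* As a sketch of the interface: for a CONSTRUCTOR of an int[4] whose four
   elements are the INTEGER_CSTs 0, 5, 0 and 7, this sets *P_NZ_ELTS to 2
   and *P_INIT_ELTS to 4, leaves *P_COMPLETE true because every element is
   explicitly present, and, since every element is a constant, reports a
   valid constant initializer. */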
5813
5814 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5815 of which had type LAST_TYPE. Each element was itself a complete
5816 initializer, in the sense that every meaningful byte was explicitly
5817 given a value. Return true if the same is true for the constructor
5818 as a whole. */
5819
5820 bool
5821 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5822 const_tree last_type)
5823 {
5824 if (TREE_CODE (type) == UNION_TYPE
5825 || TREE_CODE (type) == QUAL_UNION_TYPE)
5826 {
5827 if (num_elts == 0)
5828 return false;
5829
5830 gcc_assert (num_elts == 1 && last_type);
5831
5832 /* ??? We could look at each element of the union, and find the
5833 largest element, which would avoid comparing the size of the
5834 initialized element against any tail padding in the union.
5835 Doesn't seem worth the effort... */
5836 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5837 }
5838
5839 return count_type_elements (type, true) == num_elts;
5840 }
5841
5842 /* Return 1 if EXP contains mostly (3/4) zeros. */
5843
5844 static int
5845 mostly_zeros_p (const_tree exp)
5846 {
5847 if (TREE_CODE (exp) == CONSTRUCTOR)
5848 {
5849 HOST_WIDE_INT nz_elts, init_elts;
5850 bool complete_p;
5851
5852 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5853 return !complete_p || nz_elts < init_elts / 4;
5854 }
5855
5856 return initializer_zerop (exp);
5857 }
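
/* For example, a complete CONSTRUCTOR for an int[8] with one nonzero
   element yields nz_elts == 1 < init_elts / 4 == 2 and so counts as
   mostly zero, whereas one with two nonzero elements does not. */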
5858
5859 /* Return 1 if EXP contains all zeros. */
5860
5861 static int
5862 all_zeros_p (const_tree exp)
5863 {
5864 if (TREE_CODE (exp) == CONSTRUCTOR)
5865 {
5866 HOST_WIDE_INT nz_elts, init_elts;
5867 bool complete_p;
5868
5869 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5870 return nz_elts == 0;
5871 }
5872
5873 return initializer_zerop (exp);
5874 }
5875 \f
5876 /* Helper function for store_constructor.
5877 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5878 CLEARED is as for store_constructor.
5879 ALIAS_SET is the alias set to use for any stores.
5880
5881 This provides a recursive shortcut back to store_constructor when it isn't
5882 necessary to go through store_field. This is so that we can pass through
5883 the cleared field to let store_constructor know that we may not have to
5884 clear a substructure if the outer structure has already been cleared. */
5885
5886 static void
5887 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5888 HOST_WIDE_INT bitpos, machine_mode mode,
5889 tree exp, int cleared, alias_set_type alias_set)
5890 {
5891 if (TREE_CODE (exp) == CONSTRUCTOR
5892 /* We can only call store_constructor recursively if the size and
5893 bit position are on a byte boundary. */
5894 && bitpos % BITS_PER_UNIT == 0
5895 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5896 /* If we have a nonzero bitpos for a register target, then we just
5897 let store_field do the bitfield handling. This is unlikely to
5898 generate unnecessary clear instructions anyway. */
5899 && (bitpos == 0 || MEM_P (target)))
5900 {
5901 if (MEM_P (target))
5902 target
5903 = adjust_address (target,
5904 GET_MODE (target) == BLKmode
5905 || 0 != (bitpos
5906 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5907 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5908
5909
5910 /* Update the alias set, if required. */
5911 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5912 && MEM_ALIAS_SET (target) != 0)
5913 {
5914 target = copy_rtx (target);
5915 set_mem_alias_set (target, alias_set);
5916 }
5917
5918 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5919 }
5920 else
5921 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5922 }
5923
5924
5925 /* Returns the number of FIELD_DECLs in TYPE. */
5926
5927 static int
5928 fields_length (const_tree type)
5929 {
5930 tree t = TYPE_FIELDS (type);
5931 int count = 0;
5932
5933 for (; t; t = DECL_CHAIN (t))
5934 if (TREE_CODE (t) == FIELD_DECL)
5935 ++count;
5936
5937 return count;
5938 }
5939
5940
5941 /* Store the value of constructor EXP into the rtx TARGET.
5942 TARGET is either a REG or a MEM; we know it cannot conflict, since
5943 safe_from_p has been called.
5944 CLEARED is true if TARGET is known to have been zeroed.
5945 SIZE is the number of bytes of TARGET we are allowed to modify: this
5946 may not be the same as the size of EXP if we are assigning to a field
5947 which has been packed to exclude padding bits. */
5948
5949 static void
5950 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5951 {
5952 tree type = TREE_TYPE (exp);
5953 #ifdef WORD_REGISTER_OPERATIONS
5954 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5955 #endif
5956
5957 switch (TREE_CODE (type))
5958 {
5959 case RECORD_TYPE:
5960 case UNION_TYPE:
5961 case QUAL_UNION_TYPE:
5962 {
5963 unsigned HOST_WIDE_INT idx;
5964 tree field, value;
5965
5966 /* If size is zero or the target is already cleared, do nothing. */
5967 if (size == 0 || cleared)
5968 cleared = 1;
5969 /* We either clear the aggregate or indicate the value is dead. */
5970 else if ((TREE_CODE (type) == UNION_TYPE
5971 || TREE_CODE (type) == QUAL_UNION_TYPE)
5972 && ! CONSTRUCTOR_ELTS (exp))
5973 /* If the constructor is empty, clear the union. */
5974 {
5975 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5976 cleared = 1;
5977 }
5978
5979 /* If we are building a static constructor into a register,
5980 set the initial value as zero so we can fold the value into
5981 a constant. But if more than one register is involved,
5982 this probably loses. */
5983 else if (REG_P (target) && TREE_STATIC (exp)
5984 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5985 {
5986 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5987 cleared = 1;
5988 }
5989
5990 /* If the constructor has fewer fields than the structure or
5991 if we are initializing the structure to mostly zeros, clear
5992 the whole structure first. Don't do this if TARGET is a
5993 register whose mode size isn't equal to SIZE since
5994 clear_storage can't handle this case. */
5995 else if (size > 0
5996 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5997 != fields_length (type))
5998 || mostly_zeros_p (exp))
5999 && (!REG_P (target)
6000 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6001 == size)))
6002 {
6003 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6004 cleared = 1;
6005 }
6006
6007 if (REG_P (target) && !cleared)
6008 emit_clobber (target);
6009
6010 /* Store each element of the constructor into the
6011 corresponding field of TARGET. */
6012 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6013 {
6014 machine_mode mode;
6015 HOST_WIDE_INT bitsize;
6016 HOST_WIDE_INT bitpos = 0;
6017 tree offset;
6018 rtx to_rtx = target;
6019
6020 /* Just ignore missing fields. We cleared the whole
6021 structure, above, if any fields are missing. */
6022 if (field == 0)
6023 continue;
6024
6025 if (cleared && initializer_zerop (value))
6026 continue;
6027
6028 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6029 bitsize = tree_to_uhwi (DECL_SIZE (field));
6030 else
6031 bitsize = -1;
6032
6033 mode = DECL_MODE (field);
6034 if (DECL_BIT_FIELD (field))
6035 mode = VOIDmode;
6036
6037 offset = DECL_FIELD_OFFSET (field);
6038 if (tree_fits_shwi_p (offset)
6039 && tree_fits_shwi_p (bit_position (field)))
6040 {
6041 bitpos = int_bit_position (field);
6042 offset = 0;
6043 }
6044 else
6045 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6046
6047 if (offset)
6048 {
6049 machine_mode address_mode;
6050 rtx offset_rtx;
6051
6052 offset
6053 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6054 make_tree (TREE_TYPE (exp),
6055 target));
6056
6057 offset_rtx = expand_normal (offset);
6058 gcc_assert (MEM_P (to_rtx));
6059
6060 address_mode = get_address_mode (to_rtx);
6061 if (GET_MODE (offset_rtx) != address_mode)
6062 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6063
6064 to_rtx = offset_address (to_rtx, offset_rtx,
6065 highest_pow2_factor (offset));
6066 }
6067
6068 #ifdef WORD_REGISTER_OPERATIONS
6069 /* If this initializes a field that is smaller than a
6070 word, at the start of a word, try to widen it to a full
6071 word. This special case allows us to output C++ member
6072 function initializations in a form that the optimizers
6073 can understand. */
6074 if (REG_P (target)
6075 && bitsize < BITS_PER_WORD
6076 && bitpos % BITS_PER_WORD == 0
6077 && GET_MODE_CLASS (mode) == MODE_INT
6078 && TREE_CODE (value) == INTEGER_CST
6079 && exp_size >= 0
6080 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6081 {
6082 tree type = TREE_TYPE (value);
6083
6084 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6085 {
6086 type = lang_hooks.types.type_for_mode
6087 (word_mode, TYPE_UNSIGNED (type));
6088 value = fold_convert (type, value);
6089 }
6090
6091 if (BYTES_BIG_ENDIAN)
6092 value
6093 = fold_build2 (LSHIFT_EXPR, type, value,
6094 build_int_cst (type,
6095 BITS_PER_WORD - bitsize));
6096 bitsize = BITS_PER_WORD;
6097 mode = word_mode;
6098 }
6099 #endif
6100
6101 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6102 && DECL_NONADDRESSABLE_P (field))
6103 {
6104 to_rtx = copy_rtx (to_rtx);
6105 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6106 }
6107
6108 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6109 value, cleared,
6110 get_alias_set (TREE_TYPE (field)));
6111 }
6112 break;
6113 }
6114 case ARRAY_TYPE:
6115 {
6116 tree value, index;
6117 unsigned HOST_WIDE_INT i;
6118 int need_to_clear;
6119 tree domain;
6120 tree elttype = TREE_TYPE (type);
6121 int const_bounds_p;
6122 HOST_WIDE_INT minelt = 0;
6123 HOST_WIDE_INT maxelt = 0;
6124
6125 domain = TYPE_DOMAIN (type);
6126 const_bounds_p = (TYPE_MIN_VALUE (domain)
6127 && TYPE_MAX_VALUE (domain)
6128 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6129 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6130
6131 /* If we have constant bounds for the range of the type, get them. */
6132 if (const_bounds_p)
6133 {
6134 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6135 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6136 }
6137
6138 /* If the constructor has fewer elements than the array, clear
6139 the whole array first. Similarly if this is a static
6140 constructor of a non-BLKmode object. */
6141 if (cleared)
6142 need_to_clear = 0;
6143 else if (REG_P (target) && TREE_STATIC (exp))
6144 need_to_clear = 1;
6145 else
6146 {
6147 unsigned HOST_WIDE_INT idx;
6148 tree index, value;
6149 HOST_WIDE_INT count = 0, zero_count = 0;
6150 need_to_clear = ! const_bounds_p;
6151
6152 /* This loop is a more accurate version of the loop in
6153 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6154 is also needed to check for missing elements. */
6155 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6156 {
6157 HOST_WIDE_INT this_node_count;
6158
6159 if (need_to_clear)
6160 break;
6161
6162 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6163 {
6164 tree lo_index = TREE_OPERAND (index, 0);
6165 tree hi_index = TREE_OPERAND (index, 1);
6166
6167 if (! tree_fits_uhwi_p (lo_index)
6168 || ! tree_fits_uhwi_p (hi_index))
6169 {
6170 need_to_clear = 1;
6171 break;
6172 }
6173
6174 this_node_count = (tree_to_uhwi (hi_index)
6175 - tree_to_uhwi (lo_index) + 1);
6176 }
6177 else
6178 this_node_count = 1;
6179
6180 count += this_node_count;
6181 if (mostly_zeros_p (value))
6182 zero_count += this_node_count;
6183 }
6184
6185 /* Clear the entire array first if there are any missing
6186 elements, or if the incidence of zero elements is >=
6187 75%. */
6188 if (! need_to_clear
6189 && (count < maxelt - minelt + 1
6190 || 4 * zero_count >= 3 * count))
6191 need_to_clear = 1;
6192 }
6193
6194 if (need_to_clear && size > 0)
6195 {
6196 if (REG_P (target))
6197 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6198 else
6199 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6200 cleared = 1;
6201 }
6202
6203 if (!cleared && REG_P (target))
6204 /* Inform later passes that the old value is dead. */
6205 emit_clobber (target);
6206
6207 /* Store each element of the constructor into the
6208 corresponding element of TARGET, determined by counting the
6209 elements. */
6210 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6211 {
6212 machine_mode mode;
6213 HOST_WIDE_INT bitsize;
6214 HOST_WIDE_INT bitpos;
6215 rtx xtarget = target;
6216
6217 if (cleared && initializer_zerop (value))
6218 continue;
6219
6220 mode = TYPE_MODE (elttype);
6221 if (mode == BLKmode)
6222 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6223 ? tree_to_uhwi (TYPE_SIZE (elttype))
6224 : -1);
6225 else
6226 bitsize = GET_MODE_BITSIZE (mode);
6227
6228 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6229 {
6230 tree lo_index = TREE_OPERAND (index, 0);
6231 tree hi_index = TREE_OPERAND (index, 1);
6232 rtx index_r, pos_rtx;
6233 HOST_WIDE_INT lo, hi, count;
6234 tree position;
6235
6236 /* If the range is constant and "small", unroll the loop. */
6237 if (const_bounds_p
6238 && tree_fits_shwi_p (lo_index)
6239 && tree_fits_shwi_p (hi_index)
6240 && (lo = tree_to_shwi (lo_index),
6241 hi = tree_to_shwi (hi_index),
6242 count = hi - lo + 1,
6243 (!MEM_P (target)
6244 || count <= 2
6245 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6246 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6247 <= 40 * 8)))))
6248 {
6249 lo -= minelt; hi -= minelt;
6250 for (; lo <= hi; lo++)
6251 {
6252 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6253
6254 if (MEM_P (target)
6255 && !MEM_KEEP_ALIAS_SET_P (target)
6256 && TREE_CODE (type) == ARRAY_TYPE
6257 && TYPE_NONALIASED_COMPONENT (type))
6258 {
6259 target = copy_rtx (target);
6260 MEM_KEEP_ALIAS_SET_P (target) = 1;
6261 }
6262
6263 store_constructor_field
6264 (target, bitsize, bitpos, mode, value, cleared,
6265 get_alias_set (elttype));
6266 }
6267 }
6268 else
6269 {
6270 rtx_code_label *loop_start = gen_label_rtx ();
6271 rtx_code_label *loop_end = gen_label_rtx ();
6272 tree exit_cond;
6273
6274 expand_normal (hi_index);
6275
6276 index = build_decl (EXPR_LOCATION (exp),
6277 VAR_DECL, NULL_TREE, domain);
6278 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6279 SET_DECL_RTL (index, index_r);
6280 store_expr (lo_index, index_r, 0, false);
6281
6282 /* Build the head of the loop. */
6283 do_pending_stack_adjust ();
6284 emit_label (loop_start);
6285
6286 /* Assign value to element index. */
6287 position =
6288 fold_convert (ssizetype,
6289 fold_build2 (MINUS_EXPR,
6290 TREE_TYPE (index),
6291 index,
6292 TYPE_MIN_VALUE (domain)));
6293
6294 position =
6295 size_binop (MULT_EXPR, position,
6296 fold_convert (ssizetype,
6297 TYPE_SIZE_UNIT (elttype)));
6298
6299 pos_rtx = expand_normal (position);
6300 xtarget = offset_address (target, pos_rtx,
6301 highest_pow2_factor (position));
6302 xtarget = adjust_address (xtarget, mode, 0);
6303 if (TREE_CODE (value) == CONSTRUCTOR)
6304 store_constructor (value, xtarget, cleared,
6305 bitsize / BITS_PER_UNIT);
6306 else
6307 store_expr (value, xtarget, 0, false);
6308
6309 /* Generate a conditional jump to exit the loop. */
6310 exit_cond = build2 (LT_EXPR, integer_type_node,
6311 index, hi_index);
6312 jumpif (exit_cond, loop_end, -1);
6313
6314 /* Update the loop counter, and jump to the head of
6315 the loop. */
6316 expand_assignment (index,
6317 build2 (PLUS_EXPR, TREE_TYPE (index),
6318 index, integer_one_node),
6319 false);
6320
6321 emit_jump (loop_start);
6322
6323 /* Build the end of the loop. */
6324 emit_label (loop_end);
6325 }
6326 }
6327 else if ((index != 0 && ! tree_fits_shwi_p (index))
6328 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6329 {
6330 tree position;
6331
6332 if (index == 0)
6333 index = ssize_int (1);
6334
6335 if (minelt)
6336 index = fold_convert (ssizetype,
6337 fold_build2 (MINUS_EXPR,
6338 TREE_TYPE (index),
6339 index,
6340 TYPE_MIN_VALUE (domain)));
6341
6342 position =
6343 size_binop (MULT_EXPR, index,
6344 fold_convert (ssizetype,
6345 TYPE_SIZE_UNIT (elttype)));
6346 xtarget = offset_address (target,
6347 expand_normal (position),
6348 highest_pow2_factor (position));
6349 xtarget = adjust_address (xtarget, mode, 0);
6350 store_expr (value, xtarget, 0, false);
6351 }
6352 else
6353 {
6354 if (index != 0)
6355 bitpos = ((tree_to_shwi (index) - minelt)
6356 * tree_to_uhwi (TYPE_SIZE (elttype)));
6357 else
6358 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6359
6360 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6361 && TREE_CODE (type) == ARRAY_TYPE
6362 && TYPE_NONALIASED_COMPONENT (type))
6363 {
6364 target = copy_rtx (target);
6365 MEM_KEEP_ALIAS_SET_P (target) = 1;
6366 }
6367 store_constructor_field (target, bitsize, bitpos, mode, value,
6368 cleared, get_alias_set (elttype));
6369 }
6370 }
6371 break;
6372 }
6373
6374 case VECTOR_TYPE:
6375 {
6376 unsigned HOST_WIDE_INT idx;
6377 constructor_elt *ce;
6378 int i;
6379 int need_to_clear;
6380 int icode = CODE_FOR_nothing;
6381 tree elttype = TREE_TYPE (type);
6382 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6383 machine_mode eltmode = TYPE_MODE (elttype);
6384 HOST_WIDE_INT bitsize;
6385 HOST_WIDE_INT bitpos;
6386 rtvec vector = NULL;
6387 unsigned n_elts;
6388 alias_set_type alias;
6389
6390 gcc_assert (eltmode != BLKmode);
6391
6392 n_elts = TYPE_VECTOR_SUBPARTS (type);
6393 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6394 {
6395 machine_mode mode = GET_MODE (target);
6396
6397 icode = (int) optab_handler (vec_init_optab, mode);
6398 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6399 if (icode != CODE_FOR_nothing)
6400 {
6401 tree value;
6402
6403 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6404 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6405 {
6406 icode = CODE_FOR_nothing;
6407 break;
6408 }
6409 }
6410 if (icode != CODE_FOR_nothing)
6411 {
6412 unsigned int i;
6413
6414 vector = rtvec_alloc (n_elts);
6415 for (i = 0; i < n_elts; i++)
6416 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6417 }
6418 }
6419
6420 /* If the constructor has fewer elements than the vector,
6421 clear the whole vector first. Similarly if this is a static
6422 constructor of a non-BLKmode object. */
6423 if (cleared)
6424 need_to_clear = 0;
6425 else if (REG_P (target) && TREE_STATIC (exp))
6426 need_to_clear = 1;
6427 else
6428 {
6429 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6430 tree value;
6431
6432 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6433 {
6434 int n_elts_here = tree_to_uhwi
6435 (int_const_binop (TRUNC_DIV_EXPR,
6436 TYPE_SIZE (TREE_TYPE (value)),
6437 TYPE_SIZE (elttype)));
6438
6439 count += n_elts_here;
6440 if (mostly_zeros_p (value))
6441 zero_count += n_elts_here;
6442 }
6443
6444 /* Clear the entire vector first if there are any missing elements,
6445 or if the incidence of zero elements is >= 75%. */
6446 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6447 }
6448
6449 if (need_to_clear && size > 0 && !vector)
6450 {
6451 if (REG_P (target))
6452 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6453 else
6454 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6455 cleared = 1;
6456 }
6457
6458 /* Inform later passes that the old value is dead. */
6459 if (!cleared && !vector && REG_P (target))
6460 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6461
6462 if (MEM_P (target))
6463 alias = MEM_ALIAS_SET (target);
6464 else
6465 alias = get_alias_set (elttype);
6466
6467 /* Store each element of the constructor into the corresponding
6468 element of TARGET, determined by counting the elements. */
6469 for (idx = 0, i = 0;
6470 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6471 idx++, i += bitsize / elt_size)
6472 {
6473 HOST_WIDE_INT eltpos;
6474 tree value = ce->value;
6475
6476 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6477 if (cleared && initializer_zerop (value))
6478 continue;
6479
6480 if (ce->index)
6481 eltpos = tree_to_uhwi (ce->index);
6482 else
6483 eltpos = i;
6484
6485 if (vector)
6486 {
6487 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6488 elements. */
6489 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6490 RTVEC_ELT (vector, eltpos)
6491 = expand_normal (value);
6492 }
6493 else
6494 {
6495 machine_mode value_mode =
6496 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6497 ? TYPE_MODE (TREE_TYPE (value))
6498 : eltmode;
6499 bitpos = eltpos * elt_size;
6500 store_constructor_field (target, bitsize, bitpos, value_mode,
6501 value, cleared, alias);
6502 }
6503 }
6504
6505 if (vector)
6506 emit_insn (GEN_FCN (icode)
6507 (target,
6508 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6509 break;
6510 }
6511
6512 default:
6513 gcc_unreachable ();
6514 }
6515 }
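
/* As an illustration of the clearing strategy above: for a CONSTRUCTOR of
   an int[8] that supplies only a single nonzero element (say at index 2),
   COUNT is 1 while the domain has 8 elements, so NEED_TO_CLEAR is set; the
   whole object is cleared first (clear_storage for memory, a CONST0_RTX
   move for a register target) and then only that one element is written
   through store_constructor_field. */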
6516
6517 /* Store the value of EXP (an expression tree)
6518 into a subfield of TARGET which has mode MODE and occupies
6519 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6520 If MODE is VOIDmode, it means that we are storing into a bit-field.
6521
6522 BITREGION_START is bitpos of the first bitfield in this region.
6523 BITREGION_END is the bitpos of the ending bitfield in this region.
6524 These two fields are 0, if the C++ memory model does not apply,
6525 or we are not interested in keeping track of bitfield regions.
6526
6527 Always return const0_rtx unless we have something particular to
6528 return.
6529
6530 ALIAS_SET is the alias set for the destination. This value will
6531 (in general) be different from that for TARGET, since TARGET is a
6532 reference to the containing structure.
6533
6534 If NONTEMPORAL is true, try generating a nontemporal store. */
6535
6536 static rtx
6537 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6538 unsigned HOST_WIDE_INT bitregion_start,
6539 unsigned HOST_WIDE_INT bitregion_end,
6540 machine_mode mode, tree exp,
6541 alias_set_type alias_set, bool nontemporal)
6542 {
6543 if (TREE_CODE (exp) == ERROR_MARK)
6544 return const0_rtx;
6545
6546 /* If we have nothing to store, do nothing unless the expression has
6547 side-effects. */
6548 if (bitsize == 0)
6549 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6550
6551 if (GET_CODE (target) == CONCAT)
6552 {
6553 /* We're storing into a struct containing a single __complex. */
6554
6555 gcc_assert (!bitpos);
6556 return store_expr (exp, target, 0, nontemporal);
6557 }
6558
6559 /* If the structure is in a register or if the component
6560 is a bit field, we cannot use addressing to access it.
6561 Use bit-field techniques or SUBREG to store in it. */
6562
6563 if (mode == VOIDmode
6564 || (mode != BLKmode && ! direct_store[(int) mode]
6565 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6566 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6567 || REG_P (target)
6568 || GET_CODE (target) == SUBREG
6569 /* If the field isn't aligned enough to store as an ordinary memref,
6570 store it as a bit field. */
6571 || (mode != BLKmode
6572 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6573 || bitpos % GET_MODE_ALIGNMENT (mode))
6574 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6575 || (bitpos % BITS_PER_UNIT != 0)))
6576 || (bitsize >= 0 && mode != BLKmode
6577 && GET_MODE_BITSIZE (mode) > bitsize)
6578 /* If the RHS and field are a constant size and the size of the
6579 RHS isn't the same size as the bitfield, we must use bitfield
6580 operations. */
6581 || (bitsize >= 0
6582 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6583 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6584 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6585 decl we must use bitfield operations. */
6586 || (bitsize >= 0
6587 && TREE_CODE (exp) == MEM_REF
6588 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6589 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6590 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6591 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6592 {
6593 rtx temp;
6594 gimple nop_def;
6595
6596 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6597 implies a mask operation. If the precision is the same size as
6598 the field we're storing into, that mask is redundant. This is
6599 particularly common with bit field assignments generated by the
6600 C front end. */
6601 nop_def = get_def_for_expr (exp, NOP_EXPR);
6602 if (nop_def)
6603 {
6604 tree type = TREE_TYPE (exp);
6605 if (INTEGRAL_TYPE_P (type)
6606 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6607 && bitsize == TYPE_PRECISION (type))
6608 {
6609 tree op = gimple_assign_rhs1 (nop_def);
6610 type = TREE_TYPE (op);
6611 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6612 exp = op;
6613 }
6614 }
6615
6616 temp = expand_normal (exp);
6617
6618 /* If BITSIZE is narrower than the size of the type of EXP
6619 we will be narrowing TEMP. Normally, what's wanted are the
6620 low-order bits. However, if EXP's type is a record and this is
6621 big-endian machine, we want the upper BITSIZE bits. */
6622 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6623 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6624 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6625 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6626 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6627 NULL_RTX, 1);
6628
6629 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6630 if (mode != VOIDmode && mode != BLKmode
6631 && mode != TYPE_MODE (TREE_TYPE (exp)))
6632 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6633
6634 /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6635 are both BLKmode, both must be in memory and BITPOS must be aligned
6636 on a byte boundary. If so, we simply do a block copy. Likewise for
6637 a BLKmode-like TARGET. */
6638 if (GET_CODE (temp) != PARALLEL
6639 && GET_MODE (temp) == BLKmode
6640 && (GET_MODE (target) == BLKmode
6641 || (MEM_P (target)
6642 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6643 && (bitpos % BITS_PER_UNIT) == 0
6644 && (bitsize % BITS_PER_UNIT) == 0)))
6645 {
6646 gcc_assert (MEM_P (target) && MEM_P (temp)
6647 && (bitpos % BITS_PER_UNIT) == 0);
6648
6649 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6650 emit_block_move (target, temp,
6651 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6652 / BITS_PER_UNIT),
6653 BLOCK_OP_NORMAL);
6654
6655 return const0_rtx;
6656 }
6657
6658 /* Handle calls that return values in multiple non-contiguous locations.
6659 The Irix 6 ABI has examples of this. */
6660 if (GET_CODE (temp) == PARALLEL)
6661 {
6662 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6663 rtx temp_target;
6664 if (mode == BLKmode || mode == VOIDmode)
6665 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6666 temp_target = gen_reg_rtx (mode);
6667 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6668 temp = temp_target;
6669 }
6670 else if (mode == BLKmode)
6671 {
6672 /* Handle calls that return BLKmode values in registers. */
6673 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6674 {
6675 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6676 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6677 temp = temp_target;
6678 }
6679 else
6680 {
6681 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6682 rtx temp_target;
6683 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6684 temp_target = gen_reg_rtx (mode);
6685 temp_target
6686 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6687 temp_target, mode, mode);
6688 temp = temp_target;
6689 }
6690 }
6691
6692 /* Store the value in the bitfield. */
6693 store_bit_field (target, bitsize, bitpos,
6694 bitregion_start, bitregion_end,
6695 mode, temp);
6696
6697 return const0_rtx;
6698 }
6699 else
6700 {
6701 /* Now build a reference to just the desired component. */
6702 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6703
6704 if (to_rtx == target)
6705 to_rtx = copy_rtx (to_rtx);
6706
6707 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6708 set_mem_alias_set (to_rtx, alias_set);
6709
6710 return store_expr (exp, to_rtx, 0, nontemporal);
6711 }
6712 }
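
/* For instance, when store_field is asked to store into a bit-field (MODE
   passed as VOIDmode) or into a byte-misaligned BITPOS of a MEM target, it
   takes the bit-field branch above: the value is expanded, converted to
   MODE if needed, and written with store_bit_field; for an aligned,
   ordinary component it instead forms the subobject with adjust_address
   and defers to store_expr. */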
6713 \f
6714 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6715 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6716 codes and find the ultimate containing object, which we return.
6717
6718 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6719 bit position, and *PUNSIGNEDP to the signedness of the field.
6720 If the position of the field is variable, we store a tree
6721 giving the variable offset (in units) in *POFFSET.
6722 This offset is in addition to the bit position.
6723 If the position is not variable, we store 0 in *POFFSET.
6724
6725 If any of the extraction expressions is volatile,
6726 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6727
6728 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6729 Otherwise, it is a mode that can be used to access the field.
6730
6731 If the field describes a variable-sized object, *PMODE is set to
6732 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6733 this case, but the address of the object can be found.
6734
6735 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6736 look through nodes that serve as markers of a greater alignment than
6737 the one that can be deduced from the expression. These nodes make it
6738 possible for front-ends to prevent temporaries from being created by
6739 the middle-end on alignment considerations. For that purpose, the
6740 normal operating mode at high-level is to always pass FALSE so that
6741 the ultimate containing object is really returned; moreover, the
6742 associated predicate handled_component_p will always return TRUE
6743 on these nodes, thus indicating that they are essentially handled
6744 by get_inner_reference. TRUE should only be passed when the caller
6745 is scanning the expression in order to build another representation
6746 and specifically knows how to handle these nodes; as such, this is
6747 the normal operating mode in the RTL expanders. */
6748
6749 tree
6750 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6751 HOST_WIDE_INT *pbitpos, tree *poffset,
6752 machine_mode *pmode, int *punsignedp,
6753 int *pvolatilep, bool keep_aligning)
6754 {
6755 tree size_tree = 0;
6756 machine_mode mode = VOIDmode;
6757 bool blkmode_bitfield = false;
6758 tree offset = size_zero_node;
6759 offset_int bit_offset = 0;
6760
6761 /* First get the mode, signedness, and size. We do this from just the
6762 outermost expression. */
6763 *pbitsize = -1;
6764 if (TREE_CODE (exp) == COMPONENT_REF)
6765 {
6766 tree field = TREE_OPERAND (exp, 1);
6767 size_tree = DECL_SIZE (field);
6768 if (flag_strict_volatile_bitfields > 0
6769 && TREE_THIS_VOLATILE (exp)
6770 && DECL_BIT_FIELD_TYPE (field)
6771 && DECL_MODE (field) != BLKmode)
6772 /* Volatile bitfields should be accessed in the mode of the
6773 field's type, not the mode computed based on the bit
6774 size. */
6775 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6776 else if (!DECL_BIT_FIELD (field))
6777 mode = DECL_MODE (field);
6778 else if (DECL_MODE (field) == BLKmode)
6779 blkmode_bitfield = true;
6780
6781 *punsignedp = DECL_UNSIGNED (field);
6782 }
6783 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6784 {
6785 size_tree = TREE_OPERAND (exp, 1);
6786 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6787 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6788
6789 /* For vector types, with the correct size of access, use the mode of
6790 the inner type. */
6791 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6792 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6793 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6794 mode = TYPE_MODE (TREE_TYPE (exp));
6795 }
6796 else
6797 {
6798 mode = TYPE_MODE (TREE_TYPE (exp));
6799 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6800
6801 if (mode == BLKmode)
6802 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6803 else
6804 *pbitsize = GET_MODE_BITSIZE (mode);
6805 }
6806
6807 if (size_tree != 0)
6808 {
6809 if (! tree_fits_uhwi_p (size_tree))
6810 mode = BLKmode, *pbitsize = -1;
6811 else
6812 *pbitsize = tree_to_uhwi (size_tree);
6813 }
6814
6815 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6816 and find the ultimate containing object. */
6817 while (1)
6818 {
6819 switch (TREE_CODE (exp))
6820 {
6821 case BIT_FIELD_REF:
6822 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6823 break;
6824
6825 case COMPONENT_REF:
6826 {
6827 tree field = TREE_OPERAND (exp, 1);
6828 tree this_offset = component_ref_field_offset (exp);
6829
6830 /* If this field hasn't been filled in yet, don't go past it.
6831 This should only happen when folding expressions made during
6832 type construction. */
6833 if (this_offset == 0)
6834 break;
6835
6836 offset = size_binop (PLUS_EXPR, offset, this_offset);
6837 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6838
6839 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6840 }
6841 break;
6842
6843 case ARRAY_REF:
6844 case ARRAY_RANGE_REF:
6845 {
6846 tree index = TREE_OPERAND (exp, 1);
6847 tree low_bound = array_ref_low_bound (exp);
6848 tree unit_size = array_ref_element_size (exp);
6849
6850 /* We assume all arrays have sizes that are a multiple of a byte.
6851 First subtract the lower bound, if any, in the type of the
6852 index, then convert to sizetype and multiply by the size of
6853 the array element. */
6854 if (! integer_zerop (low_bound))
6855 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6856 index, low_bound);
6857
6858 offset = size_binop (PLUS_EXPR, offset,
6859 size_binop (MULT_EXPR,
6860 fold_convert (sizetype, index),
6861 unit_size));
6862 }
6863 break;
6864
6865 case REALPART_EXPR:
6866 break;
6867
6868 case IMAGPART_EXPR:
6869 bit_offset += *pbitsize;
6870 break;
6871
6872 case VIEW_CONVERT_EXPR:
6873 if (keep_aligning && STRICT_ALIGNMENT
6874 && (TYPE_ALIGN (TREE_TYPE (exp))
6875 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6876 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6877 < BIGGEST_ALIGNMENT)
6878 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6879 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6880 goto done;
6881 break;
6882
6883 case MEM_REF:
6884 /* Hand back the decl for MEM[&decl, off]. */
6885 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6886 {
6887 tree off = TREE_OPERAND (exp, 1);
6888 if (!integer_zerop (off))
6889 {
6890 offset_int boff, coff = mem_ref_offset (exp);
6891 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6892 bit_offset += boff;
6893 }
6894 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6895 }
6896 goto done;
6897
6898 default:
6899 goto done;
6900 }
6901
6902 /* If any reference in the chain is volatile, the effect is volatile. */
6903 if (TREE_THIS_VOLATILE (exp))
6904 *pvolatilep = 1;
6905
6906 exp = TREE_OPERAND (exp, 0);
6907 }
6908 done:
6909
6910 /* If OFFSET is constant, see if we can return the whole thing as a
6911 constant bit position. Make sure to handle overflow during
6912 this conversion. */
6913 if (TREE_CODE (offset) == INTEGER_CST)
6914 {
6915 offset_int tem = wi::sext (wi::to_offset (offset),
6916 TYPE_PRECISION (sizetype));
6917 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6918 tem += bit_offset;
6919 if (wi::fits_shwi_p (tem))
6920 {
6921 *pbitpos = tem.to_shwi ();
6922 *poffset = offset = NULL_TREE;
6923 }
6924 }
6925
6926 /* Otherwise, split it up. */
6927 if (offset)
6928 {
6929 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6930 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
6931 {
6932 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6933 offset_int tem = bit_offset.and_not (mask);
6934 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6935 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6936 bit_offset -= tem;
6937 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6938 offset = size_binop (PLUS_EXPR, offset,
6939 wide_int_to_tree (sizetype, tem));
6940 }
6941
6942 *pbitpos = bit_offset.to_shwi ();
6943 *poffset = offset;
6944 }
6945
6946 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6947 if (mode == VOIDmode
6948 && blkmode_bitfield
6949 && (*pbitpos % BITS_PER_UNIT) == 0
6950 && (*pbitsize % BITS_PER_UNIT) == 0)
6951 *pmode = BLKmode;
6952 else
6953 *pmode = mode;
6954
6955 return exp;
6956 }
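
/* Sketch of the interface: for a COMPONENT_REF X.F where F is a bit-field
   declared with a width of 5 bits and a constant position, this returns
   the expression for X, sets *PBITSIZE to 5, *PBITPOS to F's constant bit
   position, *POFFSET to 0 (NULL_TREE), and *PMODE to VOIDmode because F is
   a non-BLKmode bit-field; *PUNSIGNEDP follows DECL_UNSIGNED (F). */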
6957
6958 /* Return a tree of sizetype representing the size, in bytes, of the element
6959 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6960
6961 tree
6962 array_ref_element_size (tree exp)
6963 {
6964 tree aligned_size = TREE_OPERAND (exp, 3);
6965 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6966 location_t loc = EXPR_LOCATION (exp);
6967
6968 /* If a size was specified in the ARRAY_REF, it's the size measured
6969 in alignment units of the element type. So multiply by that value. */
6970 if (aligned_size)
6971 {
6972 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6973 sizetype from another type of the same width and signedness. */
6974 if (TREE_TYPE (aligned_size) != sizetype)
6975 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6976 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6977 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6978 }
6979
6980 /* Otherwise, take the size from that of the element type. Substitute
6981 any PLACEHOLDER_EXPR that we have. */
6982 else
6983 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6984 }
6985
6986 /* Return a tree representing the lower bound of the array mentioned in
6987 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6988
6989 tree
6990 array_ref_low_bound (tree exp)
6991 {
6992 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6993
6994 /* If a lower bound is specified in EXP, use it. */
6995 if (TREE_OPERAND (exp, 2))
6996 return TREE_OPERAND (exp, 2);
6997
6998 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6999 substituting for a PLACEHOLDER_EXPR as needed. */
7000 if (domain_type && TYPE_MIN_VALUE (domain_type))
7001 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
7002
7003 /* Otherwise, return a zero of the appropriate type. */
7004 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
7005 }
7006
7007 /* Returns true if REF is an array reference to an array at the end of
7008 a structure. If this is the case, the array may be allocated larger
7009 than its upper bound implies. */
7010
7011 bool
7012 array_at_struct_end_p (tree ref)
7013 {
7014 if (TREE_CODE (ref) != ARRAY_REF
7015 && TREE_CODE (ref) != ARRAY_RANGE_REF)
7016 return false;
7017
7018 while (handled_component_p (ref))
7019 {
7020 /* If the reference chain contains a component reference to a
7021 non-union type and there follows another field the reference
7022 is not at the end of a structure. */
7023 if (TREE_CODE (ref) == COMPONENT_REF
7024 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
7025 {
7026 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
7027 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
7028 nextf = DECL_CHAIN (nextf);
7029 if (nextf)
7030 return false;
7031 }
7032
7033 ref = TREE_OPERAND (ref, 0);
7034 }
7035
7036 /* If the reference is based on a declared entity, the size of the array
7037 is constrained by its given domain. */
7038 if (DECL_P (ref))
7039 return false;
7040
7041 return true;
7042 }
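
/* For example (hypothetical declarations): with

     struct s { int n; int tail[1]; } *p;

   the reference p->tail[i] is an array-at-struct-end reference, because
   TAIL is the last field and the base is a pointer dereference whose
   allocation size is unknown; for a declared object struct s v; the
   reference v.tail[i] is not, since the access is ultimately based on the
   declaration V, whose size is fixed. */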
7043
7044 /* Return a tree representing the upper bound of the array mentioned in
7045 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
7046
7047 tree
7048 array_ref_up_bound (tree exp)
7049 {
7050 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7051
7052 /* If there is a domain type and it has an upper bound, use it, substituting
7053 for a PLACEHOLDER_EXPR as needed. */
7054 if (domain_type && TYPE_MAX_VALUE (domain_type))
7055 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
7056
7057 /* Otherwise fail. */
7058 return NULL_TREE;
7059 }
7060
7061 /* Return a tree representing the offset, in bytes, of the field referenced
7062 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
7063
7064 tree
7065 component_ref_field_offset (tree exp)
7066 {
7067 tree aligned_offset = TREE_OPERAND (exp, 2);
7068 tree field = TREE_OPERAND (exp, 1);
7069 location_t loc = EXPR_LOCATION (exp);
7070
7071 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
7072 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
7073 value. */
7074 if (aligned_offset)
7075 {
7076 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
7077 sizetype from another type of the same width and signedness. */
7078 if (TREE_TYPE (aligned_offset) != sizetype)
7079 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
7080 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7081 size_int (DECL_OFFSET_ALIGN (field)
7082 / BITS_PER_UNIT));
7083 }
7084
7085 /* Otherwise, take the offset from that of the field. Substitute
7086 any PLACEHOLDER_EXPR that we have. */
7087 else
7088 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7089 }
7090
7091 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7092
7093 static unsigned HOST_WIDE_INT
7094 target_align (const_tree target)
7095 {
7096 /* We might have a chain of nested references with intermediate misaligning
7097 bitfield components, so we need to recurse to find out. */
7098
7099 unsigned HOST_WIDE_INT this_align, outer_align;
7100
7101 switch (TREE_CODE (target))
7102 {
7103 case BIT_FIELD_REF:
7104 return 1;
7105
7106 case COMPONENT_REF:
7107 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7108 outer_align = target_align (TREE_OPERAND (target, 0));
7109 return MIN (this_align, outer_align);
7110
7111 case ARRAY_REF:
7112 case ARRAY_RANGE_REF:
7113 this_align = TYPE_ALIGN (TREE_TYPE (target));
7114 outer_align = target_align (TREE_OPERAND (target, 0));
7115 return MIN (this_align, outer_align);
7116
7117 CASE_CONVERT:
7118 case NON_LVALUE_EXPR:
7119 case VIEW_CONVERT_EXPR:
7120 this_align = TYPE_ALIGN (TREE_TYPE (target));
7121 outer_align = target_align (TREE_OPERAND (target, 0));
7122 return MAX (this_align, outer_align);
7123
7124 default:
7125 return TYPE_ALIGN (TREE_TYPE (target));
7126 }
7127 }
7128
7129 \f
7130 /* Given an rtx VALUE that may contain additions and multiplications, return
7131 an equivalent value that just refers to a register, memory, or constant.
7132 This is done by generating instructions to perform the arithmetic and
7133 returning a pseudo-register containing the value.
7134
7135 The returned value may be a REG, SUBREG, MEM or constant. */
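/* A hedged usage sketch (hypothetical): force an address-style arithmetic
   rtx into an operand, where `base' is assumed to be a pseudo of mode Pmode.

     rtx sum = gen_rtx_PLUS (Pmode, base, GEN_INT (12));
     rtx op = force_operand (sum, NULL_RTX);

   The addition is emitted as insns and OP ends up as a pseudo (or simpler
   rtx) holding the sum.  */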
7136
7137 rtx
7138 force_operand (rtx value, rtx target)
7139 {
7140 rtx op1, op2;
7141 /* Use subtarget as the target for operand 0 of a binary operation. */
7142 rtx subtarget = get_subtarget (target);
7143 enum rtx_code code = GET_CODE (value);
7144
7145 /* Check for subreg applied to an expression produced by loop optimizer. */
7146 if (code == SUBREG
7147 && !REG_P (SUBREG_REG (value))
7148 && !MEM_P (SUBREG_REG (value)))
7149 {
7150 value
7151 = simplify_gen_subreg (GET_MODE (value),
7152 force_reg (GET_MODE (SUBREG_REG (value)),
7153 force_operand (SUBREG_REG (value),
7154 NULL_RTX)),
7155 GET_MODE (SUBREG_REG (value)),
7156 SUBREG_BYTE (value));
7157 code = GET_CODE (value);
7158 }
7159
7160 /* Check for a PIC address load. */
7161 if ((code == PLUS || code == MINUS)
7162 && XEXP (value, 0) == pic_offset_table_rtx
7163 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7164 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7165 || GET_CODE (XEXP (value, 1)) == CONST))
7166 {
7167 if (!subtarget)
7168 subtarget = gen_reg_rtx (GET_MODE (value));
7169 emit_move_insn (subtarget, value);
7170 return subtarget;
7171 }
7172
7173 if (ARITHMETIC_P (value))
7174 {
7175 op2 = XEXP (value, 1);
7176 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7177 subtarget = 0;
7178 if (code == MINUS && CONST_INT_P (op2))
7179 {
7180 code = PLUS;
7181 op2 = negate_rtx (GET_MODE (value), op2);
7182 }
7183
7184 /* Check for an addition with OP2 a constant integer and our first
7185 operand a PLUS of a virtual register and something else. In that
7186 case, we want to emit the sum of the virtual register and the
7187 constant first and then add the other value. This allows virtual
7188 register instantiation to simply modify the constant rather than
7189 creating another one around this addition. */
7190 if (code == PLUS && CONST_INT_P (op2)
7191 && GET_CODE (XEXP (value, 0)) == PLUS
7192 && REG_P (XEXP (XEXP (value, 0), 0))
7193 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7194 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7195 {
7196 rtx temp = expand_simple_binop (GET_MODE (value), code,
7197 XEXP (XEXP (value, 0), 0), op2,
7198 subtarget, 0, OPTAB_LIB_WIDEN);
7199 return expand_simple_binop (GET_MODE (value), code, temp,
7200 force_operand (XEXP (XEXP (value,
7201 0), 1), 0),
7202 target, 0, OPTAB_LIB_WIDEN);
7203 }
7204
7205 op1 = force_operand (XEXP (value, 0), subtarget);
7206 op2 = force_operand (op2, NULL_RTX);
7207 switch (code)
7208 {
7209 case MULT:
7210 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7211 case DIV:
7212 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7213 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7214 target, 1, OPTAB_LIB_WIDEN);
7215 else
7216 return expand_divmod (0,
7217 FLOAT_MODE_P (GET_MODE (value))
7218 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7219 GET_MODE (value), op1, op2, target, 0);
7220 case MOD:
7221 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7222 target, 0);
7223 case UDIV:
7224 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7225 target, 1);
7226 case UMOD:
7227 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7228 target, 1);
7229 case ASHIFTRT:
7230 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7231 target, 0, OPTAB_LIB_WIDEN);
7232 default:
7233 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7234 target, 1, OPTAB_LIB_WIDEN);
7235 }
7236 }
7237 if (UNARY_P (value))
7238 {
7239 if (!target)
7240 target = gen_reg_rtx (GET_MODE (value));
7241 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7242 switch (code)
7243 {
7244 case ZERO_EXTEND:
7245 case SIGN_EXTEND:
7246 case TRUNCATE:
7247 case FLOAT_EXTEND:
7248 case FLOAT_TRUNCATE:
7249 convert_move (target, op1, code == ZERO_EXTEND);
7250 return target;
7251
7252 case FIX:
7253 case UNSIGNED_FIX:
7254 expand_fix (target, op1, code == UNSIGNED_FIX);
7255 return target;
7256
7257 case FLOAT:
7258 case UNSIGNED_FLOAT:
7259 expand_float (target, op1, code == UNSIGNED_FLOAT);
7260 return target;
7261
7262 default:
7263 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7264 }
7265 }
7266
7267 #ifdef INSN_SCHEDULING
7268 /* On machines that have insn scheduling, we want all memory references to be
7269 explicit, so we need to deal with such paradoxical SUBREGs. */
7270 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7271 value
7272 = simplify_gen_subreg (GET_MODE (value),
7273 force_reg (GET_MODE (SUBREG_REG (value)),
7274 force_operand (SUBREG_REG (value),
7275 NULL_RTX)),
7276 GET_MODE (SUBREG_REG (value)),
7277 SUBREG_BYTE (value));
7278 #endif
7279
7280 return value;
7281 }
7282 \f
7283 /* Subroutine of expand_expr: return nonzero iff there is no way that
7284 EXP can reference X, which is being modified. TOP_P is nonzero if this
7285 call is going to be used to determine whether we need a temporary
7286 for EXP, as opposed to a recursive call to this function.
7287
7288 It is always safe for this routine to return zero since it merely
7289 searches for optimization opportunities. */
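/* Typical use, as in expand_operands further down in this file: drop a
   proposed target when the other operand might reference it,

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   so that expansion falls back to a fresh temporary.  */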
7290
7291 int
7292 safe_from_p (const_rtx x, tree exp, int top_p)
7293 {
7294 rtx exp_rtl = 0;
7295 int i, nops;
7296
7297 if (x == 0
7298 /* If EXP has varying size, we MUST use a target since we currently
7299 have no way of allocating temporaries of variable size
7300 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7301 So we assume here that something at a higher level has prevented a
7302 clash. This is somewhat bogus, but the best we can do. Only
7303 do this when X is BLKmode and when we are at the top level. */
7304 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7305 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7306 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7307 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7308 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7309 != INTEGER_CST)
7310 && GET_MODE (x) == BLKmode)
7311 /* If X is in the outgoing argument area, it is always safe. */
7312 || (MEM_P (x)
7313 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7314 || (GET_CODE (XEXP (x, 0)) == PLUS
7315 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7316 return 1;
7317
7318 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7319 find the underlying pseudo. */
7320 if (GET_CODE (x) == SUBREG)
7321 {
7322 x = SUBREG_REG (x);
7323 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7324 return 0;
7325 }
7326
7327 /* Now look at our tree code and possibly recurse. */
7328 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7329 {
7330 case tcc_declaration:
7331 exp_rtl = DECL_RTL_IF_SET (exp);
7332 break;
7333
7334 case tcc_constant:
7335 return 1;
7336
7337 case tcc_exceptional:
7338 if (TREE_CODE (exp) == TREE_LIST)
7339 {
7340 while (1)
7341 {
7342 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7343 return 0;
7344 exp = TREE_CHAIN (exp);
7345 if (!exp)
7346 return 1;
7347 if (TREE_CODE (exp) != TREE_LIST)
7348 return safe_from_p (x, exp, 0);
7349 }
7350 }
7351 else if (TREE_CODE (exp) == CONSTRUCTOR)
7352 {
7353 constructor_elt *ce;
7354 unsigned HOST_WIDE_INT idx;
7355
7356 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7357 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7358 || !safe_from_p (x, ce->value, 0))
7359 return 0;
7360 return 1;
7361 }
7362 else if (TREE_CODE (exp) == ERROR_MARK)
7363 return 1; /* An already-visited SAVE_EXPR? */
7364 else
7365 return 0;
7366
7367 case tcc_statement:
7368 /* The only case we look at here is the DECL_INITIAL inside a
7369 DECL_EXPR. */
7370 return (TREE_CODE (exp) != DECL_EXPR
7371 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7372 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7373 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7374
7375 case tcc_binary:
7376 case tcc_comparison:
7377 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7378 return 0;
7379 /* Fall through. */
7380
7381 case tcc_unary:
7382 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7383
7384 case tcc_expression:
7385 case tcc_reference:
7386 case tcc_vl_exp:
7387 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7388 the expression. If it is set, we conflict iff we are that rtx or
7389 both are in memory. Otherwise, we check all operands of the
7390 expression recursively. */
7391
7392 switch (TREE_CODE (exp))
7393 {
7394 case ADDR_EXPR:
7395 /* If the operand is static or we are static, we can't conflict.
7396 Likewise if we don't conflict with the operand at all. */
7397 if (staticp (TREE_OPERAND (exp, 0))
7398 || TREE_STATIC (exp)
7399 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7400 return 1;
7401
7402 /* Otherwise, the only way this can conflict is if we are taking
7403 the address of a DECL whose address is part of X, which is
7404 very rare. */
7405 exp = TREE_OPERAND (exp, 0);
7406 if (DECL_P (exp))
7407 {
7408 if (!DECL_RTL_SET_P (exp)
7409 || !MEM_P (DECL_RTL (exp)))
7410 return 0;
7411 else
7412 exp_rtl = XEXP (DECL_RTL (exp), 0);
7413 }
7414 break;
7415
7416 case MEM_REF:
7417 if (MEM_P (x)
7418 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7419 get_alias_set (exp)))
7420 return 0;
7421 break;
7422
7423 case CALL_EXPR:
7424 /* Assume that the call will clobber all hard registers and
7425 all of memory. */
7426 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7427 || MEM_P (x))
7428 return 0;
7429 break;
7430
7431 case WITH_CLEANUP_EXPR:
7432 case CLEANUP_POINT_EXPR:
7433 /* Lowered by gimplify.c. */
7434 gcc_unreachable ();
7435
7436 case SAVE_EXPR:
7437 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7438
7439 default:
7440 break;
7441 }
7442
7443 /* If we have an rtx, we do not need to scan our operands. */
7444 if (exp_rtl)
7445 break;
7446
7447 nops = TREE_OPERAND_LENGTH (exp);
7448 for (i = 0; i < nops; i++)
7449 if (TREE_OPERAND (exp, i) != 0
7450 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7451 return 0;
7452
7453 break;
7454
7455 case tcc_type:
7456 /* Should never get a type here. */
7457 gcc_unreachable ();
7458 }
7459
7460 /* If we have an rtl, find any enclosed object. Then see if we conflict
7461 with it. */
7462 if (exp_rtl)
7463 {
7464 if (GET_CODE (exp_rtl) == SUBREG)
7465 {
7466 exp_rtl = SUBREG_REG (exp_rtl);
7467 if (REG_P (exp_rtl)
7468 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7469 return 0;
7470 }
7471
7472 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7473 are memory and they conflict. */
7474 return ! (rtx_equal_p (x, exp_rtl)
7475 || (MEM_P (x) && MEM_P (exp_rtl)
7476 && true_dependence (exp_rtl, VOIDmode, x)));
7477 }
7478
7479 /* If we reach here, it is safe. */
7480 return 1;
7481 }
7482
7483 \f
7484 /* Return the highest power of two that EXP is known to be a multiple of.
7485 This is used in updating alignment of MEMs in array references. */
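/* For example, for a constant EXP of 48 (binary 110000) tree_ctz reports
   four trailing zero bits, so the result is 1 << 4 == 16; as the code below
   shows, results are capped at BIGGEST_ALIGNMENT.  */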
7486
7487 unsigned HOST_WIDE_INT
7488 highest_pow2_factor (const_tree exp)
7489 {
7490 unsigned HOST_WIDE_INT ret;
7491 int trailing_zeros = tree_ctz (exp);
7492 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7493 return BIGGEST_ALIGNMENT;
7494 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7495 if (ret > BIGGEST_ALIGNMENT)
7496 return BIGGEST_ALIGNMENT;
7497 return ret;
7498 }
7499
7500 /* Similar, except that the alignment requirements of TARGET are
7501 taken into account. Assume it is at least as aligned as its
7502 type, unless it is a COMPONENT_REF in which case the layout of
7503 the structure gives the alignment. */
7504
7505 static unsigned HOST_WIDE_INT
7506 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7507 {
7508 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7509 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7510
7511 return MAX (factor, talign);
7512 }
7513 \f
7514 /* Convert the tree comparison code TCODE to the rtl one where the
7515 signedness is UNSIGNEDP. */
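/* For example, LT_EXPR maps to LTU when UNSIGNEDP is nonzero and to LT
   otherwise, while the unordered variants (UNLT_EXPR and friends) map to
   their rtx counterparts regardless of signedness.  */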
7516
7517 static enum rtx_code
7518 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7519 {
7520 enum rtx_code code;
7521 switch (tcode)
7522 {
7523 case EQ_EXPR:
7524 code = EQ;
7525 break;
7526 case NE_EXPR:
7527 code = NE;
7528 break;
7529 case LT_EXPR:
7530 code = unsignedp ? LTU : LT;
7531 break;
7532 case LE_EXPR:
7533 code = unsignedp ? LEU : LE;
7534 break;
7535 case GT_EXPR:
7536 code = unsignedp ? GTU : GT;
7537 break;
7538 case GE_EXPR:
7539 code = unsignedp ? GEU : GE;
7540 break;
7541 case UNORDERED_EXPR:
7542 code = UNORDERED;
7543 break;
7544 case ORDERED_EXPR:
7545 code = ORDERED;
7546 break;
7547 case UNLT_EXPR:
7548 code = UNLT;
7549 break;
7550 case UNLE_EXPR:
7551 code = UNLE;
7552 break;
7553 case UNGT_EXPR:
7554 code = UNGT;
7555 break;
7556 case UNGE_EXPR:
7557 code = UNGE;
7558 break;
7559 case UNEQ_EXPR:
7560 code = UNEQ;
7561 break;
7562 case LTGT_EXPR:
7563 code = LTGT;
7564 break;
7565
7566 default:
7567 gcc_unreachable ();
7568 }
7569 return code;
7570 }
7571
7572 /* Subroutine of expand_expr. Expand the two operands of a binary
7573 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7574 The value may be stored in TARGET if TARGET is nonzero. The
7575 MODIFIER argument is as documented by expand_expr. */
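/* Usage sketch, mirroring the calls made later in this file:

     rtx op0, op1;
     expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);

   where TREEOP0/TREEOP1 are the gimple operands and SUBTARGET is only a
   suggestion that is dropped if it is not safe from TREEOP1.  */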
7576
7577 void
7578 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7579 enum expand_modifier modifier)
7580 {
7581 if (! safe_from_p (target, exp1, 1))
7582 target = 0;
7583 if (operand_equal_p (exp0, exp1, 0))
7584 {
7585 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7586 *op1 = copy_rtx (*op0);
7587 }
7588 else
7589 {
7590 /* If we need to preserve evaluation order, copy exp0 into its own
7591 temporary variable so that it can't be clobbered by exp1. */
7592 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7593 exp0 = save_expr (exp0);
7594 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7595 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7596 }
7597 }
7598
7599 \f
7600 /* Return a MEM that contains constant EXP. DEFER is as for
7601 output_constant_def and MODIFIER is as for expand_expr. */
7602
7603 static rtx
7604 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7605 {
7606 rtx mem;
7607
7608 mem = output_constant_def (exp, defer);
7609 if (modifier != EXPAND_INITIALIZER)
7610 mem = use_anchored_address (mem);
7611 return mem;
7612 }
7613
7614 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7615 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7616
7617 static rtx
7618 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7619 enum expand_modifier modifier, addr_space_t as)
7620 {
7621 rtx result, subtarget;
7622 tree inner, offset;
7623 HOST_WIDE_INT bitsize, bitpos;
7624 int volatilep, unsignedp;
7625 machine_mode mode1;
7626
7627 /* If we are taking the address of a constant and are at the top level,
7628 we have to use output_constant_def since we can't call force_const_mem
7629 at top level. */
7630 /* ??? This should be considered a front-end bug. We should not be
7631 generating ADDR_EXPR of something that isn't an LVALUE. The only
7632 exception here is STRING_CST. */
7633 if (CONSTANT_CLASS_P (exp))
7634 {
7635 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7636 if (modifier < EXPAND_SUM)
7637 result = force_operand (result, target);
7638 return result;
7639 }
7640
7641 /* Everything must be something allowed by is_gimple_addressable. */
7642 switch (TREE_CODE (exp))
7643 {
7644 case INDIRECT_REF:
7645 /* This case will happen via recursion for &a->b. */
7646 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7647
7648 case MEM_REF:
7649 {
7650 tree tem = TREE_OPERAND (exp, 0);
7651 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7652 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7653 return expand_expr (tem, target, tmode, modifier);
7654 }
7655
7656 case CONST_DECL:
7657 /* Expand the initializer like constants above. */
7658 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7659 0, modifier), 0);
7660 if (modifier < EXPAND_SUM)
7661 result = force_operand (result, target);
7662 return result;
7663
7664 case REALPART_EXPR:
7665 /* The real part of the complex number is always first, therefore
7666 the address is the same as the address of the parent object. */
7667 offset = 0;
7668 bitpos = 0;
7669 inner = TREE_OPERAND (exp, 0);
7670 break;
7671
7672 case IMAGPART_EXPR:
7673 /* The imaginary part of the complex number is always second.
7674 The expression is therefore always offset by the size of the
7675 scalar type. */
7676 offset = 0;
7677 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7678 inner = TREE_OPERAND (exp, 0);
7679 break;
7680
7681 case COMPOUND_LITERAL_EXPR:
7682 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7683 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7684 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7685 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7686 the initializers aren't gimplified. */
7687 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7688 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7689 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7690 target, tmode, modifier, as);
7691 /* FALLTHRU */
7692 default:
7693 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7694 expand_expr, as that can have various side effects; LABEL_DECLs for
7695 example, may not have their DECL_RTL set yet. Expand the rtl of
7696 CONSTRUCTORs too, which should yield a memory reference for the
7697 constructor's contents. Assume language specific tree nodes can
7698 be expanded in some interesting way. */
7699 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7700 if (DECL_P (exp)
7701 || TREE_CODE (exp) == CONSTRUCTOR
7702 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7703 {
7704 result = expand_expr (exp, target, tmode,
7705 modifier == EXPAND_INITIALIZER
7706 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7707
7708 /* If the DECL isn't in memory, then the DECL wasn't properly
7709 marked TREE_ADDRESSABLE, which will be either a front-end
7710 or a tree optimizer bug. */
7711
7712 if (TREE_ADDRESSABLE (exp)
7713 && ! MEM_P (result)
7714 && ! targetm.calls.allocate_stack_slots_for_args ())
7715 {
7716 error ("local frame unavailable (naked function?)");
7717 return result;
7718 }
7719 else
7720 gcc_assert (MEM_P (result));
7721 result = XEXP (result, 0);
7722
7723 /* ??? Is this needed anymore? */
7724 if (DECL_P (exp))
7725 TREE_USED (exp) = 1;
7726
7727 if (modifier != EXPAND_INITIALIZER
7728 && modifier != EXPAND_CONST_ADDRESS
7729 && modifier != EXPAND_SUM)
7730 result = force_operand (result, target);
7731 return result;
7732 }
7733
7734 /* Pass FALSE as the last argument to get_inner_reference although
7735 we are expanding to RTL. The rationale is that we know how to
7736 handle "aligning nodes" here: we can just bypass them because
7737 they won't change the final object whose address will be returned
7738 (they actually exist only for that purpose). */
7739 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7740 &mode1, &unsignedp, &volatilep, false);
7741 break;
7742 }
7743
7744 /* We must have made progress. */
7745 gcc_assert (inner != exp);
7746
7747 subtarget = offset || bitpos ? NULL_RTX : target;
7748 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7749 inner alignment, force the inner to be sufficiently aligned. */
7750 if (CONSTANT_CLASS_P (inner)
7751 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7752 {
7753 inner = copy_node (inner);
7754 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7755 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7756 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7757 }
7758 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7759
7760 if (offset)
7761 {
7762 rtx tmp;
7763
7764 if (modifier != EXPAND_NORMAL)
7765 result = force_operand (result, NULL);
7766 tmp = expand_expr (offset, NULL_RTX, tmode,
7767 modifier == EXPAND_INITIALIZER
7768 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7769
7770 /* expand_expr is allowed to return an object in a mode other
7771 than TMODE. If it did, we need to convert. */
7772 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7773 tmp = convert_modes (tmode, GET_MODE (tmp),
7774 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7775 result = convert_memory_address_addr_space (tmode, result, as);
7776 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7777
7778 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7779 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7780 else
7781 {
7782 subtarget = bitpos ? NULL_RTX : target;
7783 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7784 1, OPTAB_LIB_WIDEN);
7785 }
7786 }
7787
7788 if (bitpos)
7789 {
7790 /* Someone beforehand should have rejected taking the address
7791 of such an object. */
7792 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7793
7794 result = convert_memory_address_addr_space (tmode, result, as);
7795 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7796 if (modifier < EXPAND_SUM)
7797 result = force_operand (result, target);
7798 }
7799
7800 return result;
7801 }
7802
7803 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7804 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7805
7806 static rtx
7807 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7808 enum expand_modifier modifier)
7809 {
7810 addr_space_t as = ADDR_SPACE_GENERIC;
7811 machine_mode address_mode = Pmode;
7812 machine_mode pointer_mode = ptr_mode;
7813 machine_mode rmode;
7814 rtx result;
7815
7816 /* Target mode of VOIDmode says "whatever's natural". */
7817 if (tmode == VOIDmode)
7818 tmode = TYPE_MODE (TREE_TYPE (exp));
7819
7820 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7821 {
7822 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7823 address_mode = targetm.addr_space.address_mode (as);
7824 pointer_mode = targetm.addr_space.pointer_mode (as);
7825 }
7826
7827 /* We can get called with some Weird Things if the user does silliness
7828 like "(short) &a". In that case, convert_memory_address won't do
7829 the right thing, so ignore the given target mode. */
7830 if (tmode != address_mode && tmode != pointer_mode)
7831 tmode = address_mode;
7832
7833 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7834 tmode, modifier, as);
7835
7836 /* Despite expand_expr's claims about ignoring TMODE when it is not
7837 strictly convenient, things break if we don't honor it. Note
7838 that combined with the above, we only do this for pointer modes. */
7839 rmode = GET_MODE (result);
7840 if (rmode == VOIDmode)
7841 rmode = tmode;
7842 if (rmode != tmode)
7843 result = convert_memory_address_addr_space (tmode, result, as);
7844
7845 return result;
7846 }
7847
7848 /* Generate code for computing CONSTRUCTOR EXP.
7849 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7850 is TRUE, instead of creating a temporary variable in memory
7851 NULL is returned and the caller needs to handle it differently. */
7852
7853 static rtx
7854 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7855 bool avoid_temp_mem)
7856 {
7857 tree type = TREE_TYPE (exp);
7858 machine_mode mode = TYPE_MODE (type);
7859
7860 /* Try to avoid creating a temporary at all. This is possible
7861 if all of the initializer is zero.
7862 FIXME: try to handle all [0..255] initializers we can handle
7863 with memset. */
7864 if (TREE_STATIC (exp)
7865 && !TREE_ADDRESSABLE (exp)
7866 && target != 0 && mode == BLKmode
7867 && all_zeros_p (exp))
7868 {
7869 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7870 return target;
7871 }
7872
7873 /* All elts simple constants => refer to a constant in memory. But
7874 if this is a non-BLKmode mode, let it store a field at a time
7875 since that should make a CONST_INT, CONST_WIDE_INT or
7876 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7877 use, it is best to store directly into the target unless the type
7878 is large enough that memcpy will be used. If we are making an
7879 initializer and all operands are constant, put it in memory as
7880 well.
7881
7882 FIXME: Avoid trying to fill vector constructors piece-meal.
7883 Output them with output_constant_def below unless we're sure
7884 they're zeros. This should go away when vector initializers
7885 are treated like VECTOR_CST instead of arrays. */
7886 if ((TREE_STATIC (exp)
7887 && ((mode == BLKmode
7888 && ! (target != 0 && safe_from_p (target, exp, 1)))
7889 || TREE_ADDRESSABLE (exp)
7890 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7891 && (! can_move_by_pieces
7892 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7893 TYPE_ALIGN (type)))
7894 && ! mostly_zeros_p (exp))))
7895 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7896 && TREE_CONSTANT (exp)))
7897 {
7898 rtx constructor;
7899
7900 if (avoid_temp_mem)
7901 return NULL_RTX;
7902
7903 constructor = expand_expr_constant (exp, 1, modifier);
7904
7905 if (modifier != EXPAND_CONST_ADDRESS
7906 && modifier != EXPAND_INITIALIZER
7907 && modifier != EXPAND_SUM)
7908 constructor = validize_mem (constructor);
7909
7910 return constructor;
7911 }
7912
7913 /* Handle calls that pass values in multiple non-contiguous
7914 locations. The Irix 6 ABI has examples of this. */
7915 if (target == 0 || ! safe_from_p (target, exp, 1)
7916 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7917 {
7918 if (avoid_temp_mem)
7919 return NULL_RTX;
7920
7921 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7922 }
7923
7924 store_constructor (exp, target, 0, int_expr_size (exp));
7925 return target;
7926 }
7927
7928
7929 /* expand_expr: generate code for computing expression EXP.
7930 An rtx for the computed value is returned. The value is never null.
7931 In the case of a void EXP, const0_rtx is returned.
7932
7933 The value may be stored in TARGET if TARGET is nonzero.
7934 TARGET is just a suggestion; callers must assume that
7935 the rtx returned may not be the same as TARGET.
7936
7937 If TARGET is CONST0_RTX, it means that the value will be ignored.
7938
7939 If TMODE is not VOIDmode, it suggests generating the
7940 result in mode TMODE. But this is done only when convenient.
7941 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7942 TMODE is just a suggestion; callers must assume that
7943 the rtx returned may not have mode TMODE.
7944
7945 Note that TARGET may have neither TMODE nor MODE. In that case, it
7946 probably will not be used.
7947
7948 If MODIFIER is EXPAND_SUM then when EXP is an addition
7949 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7950 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7951 products as above, or REG or MEM, or constant.
7952 Ordinarily in such cases we would output mul or add instructions
7953 and then return a pseudo reg containing the sum.
7954
7955 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7956 it also marks a label as absolutely required (it can't be dead).
7957 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7958 This is used for outputting expressions used in initializers.
7959
7960 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7961 with a constant address even if that address is not normally legitimate.
7962 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7963
7964 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7965 a call parameter. Such targets require special care as we haven't yet
7966 marked TARGET so that it's safe from being trashed by libcalls. We
7967 don't want to use TARGET for anything but the final result;
7968 intermediate values must go elsewhere. Additionally, calls to
7969 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7970
7971 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7972 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7973 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7974 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7975 recursively.
7976
7977 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7978 In this case, we don't adjust a returned MEM rtx that wouldn't be
7979 sufficiently aligned for its mode; instead, it's up to the caller
7980 to deal with it afterwards. This is used to make sure that unaligned
7981 base objects for which out-of-bounds accesses are supported, for
7982 example record types with trailing arrays, aren't realigned behind
7983 the back of the caller.
7984 The normal operating mode is to pass FALSE for this parameter. */
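/* A hedged illustration of the EXPAND_SUM contract described above
   (hypothetical caller, not taken from this file):

     rtx addr = expand_expr (exp, NULL_RTX, Pmode, EXPAND_SUM);
     addr = memory_address (mode, addr);

   The first call may return a (plus (reg) (const_int)) style sum rather
   than a single pseudo; memory_address is assumed here as one way to
   legitimize such a sum before forming a MEM.  */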
7985
7986 rtx
7987 expand_expr_real (tree exp, rtx target, machine_mode tmode,
7988 enum expand_modifier modifier, rtx *alt_rtl,
7989 bool inner_reference_p)
7990 {
7991 rtx ret;
7992
7993 /* Handle ERROR_MARK before anybody tries to access its type. */
7994 if (TREE_CODE (exp) == ERROR_MARK
7995 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7996 {
7997 ret = CONST0_RTX (tmode);
7998 return ret ? ret : const0_rtx;
7999 }
8000
8001 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
8002 inner_reference_p);
8003 return ret;
8004 }
8005
8006 /* Try to expand the conditional expression which is represented by
8007 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
8008 return the rtl reg which represents the result. Otherwise return
8009 NULL_RTX. */
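/* For gimple such as  x = a < b ? c : d;  this attempts to emit a single
   conditional move via emit_conditional_move instead of the branch-based
   expansion.  */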
8010
8011 static rtx
8012 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8013 tree treeop1 ATTRIBUTE_UNUSED,
8014 tree treeop2 ATTRIBUTE_UNUSED)
8015 {
8016 rtx insn;
8017 rtx op00, op01, op1, op2;
8018 enum rtx_code comparison_code;
8019 machine_mode comparison_mode;
8020 gimple srcstmt;
8021 rtx temp;
8022 tree type = TREE_TYPE (treeop1);
8023 int unsignedp = TYPE_UNSIGNED (type);
8024 machine_mode mode = TYPE_MODE (type);
8025 machine_mode orig_mode = mode;
8026
8027 /* If we cannot do a conditional move on the mode, try doing it
8028 with the promoted mode. */
8029 if (!can_conditionally_move_p (mode))
8030 {
8031 mode = promote_mode (type, mode, &unsignedp);
8032 if (!can_conditionally_move_p (mode))
8033 return NULL_RTX;
8034 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
8035 }
8036 else
8037 temp = assign_temp (type, 0, 1);
8038
8039 start_sequence ();
8040 expand_operands (treeop1, treeop2,
8041 temp, &op1, &op2, EXPAND_NORMAL);
8042
8043 if (TREE_CODE (treeop0) == SSA_NAME
8044 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8045 {
8046 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8047 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8048 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8049 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8050 comparison_mode = TYPE_MODE (type);
8051 unsignedp = TYPE_UNSIGNED (type);
8052 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8053 }
8054 else if (COMPARISON_CLASS_P (treeop0))
8055 {
8056 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8057 enum tree_code cmpcode = TREE_CODE (treeop0);
8058 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8059 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8060 unsignedp = TYPE_UNSIGNED (type);
8061 comparison_mode = TYPE_MODE (type);
8062 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8063 }
8064 else
8065 {
8066 op00 = expand_normal (treeop0);
8067 op01 = const0_rtx;
8068 comparison_code = NE;
8069 comparison_mode = GET_MODE (op00);
8070 if (comparison_mode == VOIDmode)
8071 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8072 }
8073
8074 if (GET_MODE (op1) != mode)
8075 op1 = gen_lowpart (mode, op1);
8076
8077 if (GET_MODE (op2) != mode)
8078 op2 = gen_lowpart (mode, op2);
8079
8080 /* Try to emit the conditional move. */
8081 insn = emit_conditional_move (temp, comparison_code,
8082 op00, op01, comparison_mode,
8083 op1, op2, mode,
8084 unsignedp);
8085
8086 /* If we could do the conditional move, emit the sequence,
8087 and return. */
8088 if (insn)
8089 {
8090 rtx_insn *seq = get_insns ();
8091 end_sequence ();
8092 emit_insn (seq);
8093 return convert_modes (orig_mode, mode, temp, 0);
8094 }
8095
8096 /* Otherwise discard the sequence and fall back to code with
8097 branches. */
8098 end_sequence ();
8099 return NULL_RTX;
8100 }
8101
8102 rtx
8103 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8104 enum expand_modifier modifier)
8105 {
8106 rtx op0, op1, op2, temp;
8107 rtx_code_label *lab;
8108 tree type;
8109 int unsignedp;
8110 machine_mode mode;
8111 enum tree_code code = ops->code;
8112 optab this_optab;
8113 rtx subtarget, original_target;
8114 int ignore;
8115 bool reduce_bit_field;
8116 location_t loc = ops->location;
8117 tree treeop0, treeop1, treeop2;
8118 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8119 ? reduce_to_bit_field_precision ((expr), \
8120 target, \
8121 type) \
8122 : (expr))
8123
8124 type = ops->type;
8125 mode = TYPE_MODE (type);
8126 unsignedp = TYPE_UNSIGNED (type);
8127
8128 treeop0 = ops->op0;
8129 treeop1 = ops->op1;
8130 treeop2 = ops->op2;
8131
8132 /* We should be called only on simple (binary or unary) expressions,
8133 exactly those that are valid in gimple expressions that aren't
8134 GIMPLE_SINGLE_RHS (or invalid). */
8135 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8136 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8137 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8138
8139 ignore = (target == const0_rtx
8140 || ((CONVERT_EXPR_CODE_P (code)
8141 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8142 && TREE_CODE (type) == VOID_TYPE));
8143
8144 /* We should be called only if we need the result. */
8145 gcc_assert (!ignore);
8146
8147 /* An operation in what may be a bit-field type needs the
8148 result to be reduced to the precision of the bit-field type,
8149 which is narrower than that of the type's mode. */
8150 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8151 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
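  /* For instance, arithmetic carried out in the type of a 3-bit unsigned
     bit-field still uses the containing machine mode, so its result must be
     masked back down to 3 bits; the REDUCE_BIT_FIELD wrapper above performs
     that reduction.  */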
8152
8153 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8154 target = 0;
8155
8156 /* Use subtarget as the target for operand 0 of a binary operation. */
8157 subtarget = get_subtarget (target);
8158 original_target = target;
8159
8160 switch (code)
8161 {
8162 case NON_LVALUE_EXPR:
8163 case PAREN_EXPR:
8164 CASE_CONVERT:
8165 if (treeop0 == error_mark_node)
8166 return const0_rtx;
8167
8168 if (TREE_CODE (type) == UNION_TYPE)
8169 {
8170 tree valtype = TREE_TYPE (treeop0);
8171
8172 /* If both input and output are BLKmode, this conversion isn't doing
8173 anything except possibly changing the memory attributes. */
8174 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8175 {
8176 rtx result = expand_expr (treeop0, target, tmode,
8177 modifier);
8178
8179 result = copy_rtx (result);
8180 set_mem_attributes (result, type, 0);
8181 return result;
8182 }
8183
8184 if (target == 0)
8185 {
8186 if (TYPE_MODE (type) != BLKmode)
8187 target = gen_reg_rtx (TYPE_MODE (type));
8188 else
8189 target = assign_temp (type, 1, 1);
8190 }
8191
8192 if (MEM_P (target))
8193 /* Store data into beginning of memory target. */
8194 store_expr (treeop0,
8195 adjust_address (target, TYPE_MODE (valtype), 0),
8196 modifier == EXPAND_STACK_PARM,
8197 false);
8198
8199 else
8200 {
8201 gcc_assert (REG_P (target));
8202
8203 /* Store this field into a union of the proper type. */
8204 store_field (target,
8205 MIN ((int_size_in_bytes (TREE_TYPE
8206 (treeop0))
8207 * BITS_PER_UNIT),
8208 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8209 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8210 }
8211
8212 /* Return the entire union. */
8213 return target;
8214 }
8215
8216 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8217 {
8218 op0 = expand_expr (treeop0, target, VOIDmode,
8219 modifier);
8220
8221 /* If the signedness of the conversion differs and OP0 is
8222 a promoted SUBREG, clear that indication since we now
8223 have to do the proper extension. */
8224 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8225 && GET_CODE (op0) == SUBREG)
8226 SUBREG_PROMOTED_VAR_P (op0) = 0;
8227
8228 return REDUCE_BIT_FIELD (op0);
8229 }
8230
8231 op0 = expand_expr (treeop0, NULL_RTX, mode,
8232 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8233 if (GET_MODE (op0) == mode)
8234 ;
8235
8236 /* If OP0 is a constant, just convert it into the proper mode. */
8237 else if (CONSTANT_P (op0))
8238 {
8239 tree inner_type = TREE_TYPE (treeop0);
8240 machine_mode inner_mode = GET_MODE (op0);
8241
8242 if (inner_mode == VOIDmode)
8243 inner_mode = TYPE_MODE (inner_type);
8244
8245 if (modifier == EXPAND_INITIALIZER)
8246 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8247 subreg_lowpart_offset (mode,
8248 inner_mode));
8249 else
8250 op0 = convert_modes (mode, inner_mode, op0,
8251 TYPE_UNSIGNED (inner_type));
8252 }
8253
8254 else if (modifier == EXPAND_INITIALIZER)
8255 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8256
8257 else if (target == 0)
8258 op0 = convert_to_mode (mode, op0,
8259 TYPE_UNSIGNED (TREE_TYPE
8260 (treeop0)));
8261 else
8262 {
8263 convert_move (target, op0,
8264 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8265 op0 = target;
8266 }
8267
8268 return REDUCE_BIT_FIELD (op0);
8269
8270 case ADDR_SPACE_CONVERT_EXPR:
8271 {
8272 tree treeop0_type = TREE_TYPE (treeop0);
8273 addr_space_t as_to;
8274 addr_space_t as_from;
8275
8276 gcc_assert (POINTER_TYPE_P (type));
8277 gcc_assert (POINTER_TYPE_P (treeop0_type));
8278
8279 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8280 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8281
8282 /* Conversions between pointers to the same address space should
8283 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8284 gcc_assert (as_to != as_from);
8285
8286 /* Ask target code to handle conversion between pointers
8287 to overlapping address spaces. */
8288 if (targetm.addr_space.subset_p (as_to, as_from)
8289 || targetm.addr_space.subset_p (as_from, as_to))
8290 {
8291 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8292 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8293 gcc_assert (op0);
8294 return op0;
8295 }
8296
8297 /* For disjoint address spaces, converting anything but
8298 a null pointer invokes undefined behaviour. We simply
8299 always return a null pointer here. */
8300 return CONST0_RTX (mode);
8301 }
8302
8303 case POINTER_PLUS_EXPR:
8304 /* Even though the sizetype mode and the pointer's mode can be different,
8305 expand is able to handle this correctly and get the correct result out
8306 of the PLUS_EXPR code. */
8307 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8308 if sizetype precision is smaller than pointer precision. */
8309 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8310 treeop1 = fold_convert_loc (loc, type,
8311 fold_convert_loc (loc, ssizetype,
8312 treeop1));
8313 /* If sizetype precision is larger than pointer precision, truncate the
8314 offset to have matching modes. */
8315 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8316 treeop1 = fold_convert_loc (loc, type, treeop1);
8317
8318 case PLUS_EXPR:
8319 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8320 something else, make sure we add the register to the constant and
8321 then to the other thing. This case can occur during strength
8322 reduction and doing it this way will produce better code if the
8323 frame pointer or argument pointer is eliminated.
8324
8325 fold-const.c will ensure that the constant is always in the inner
8326 PLUS_EXPR, so the only case we need to do anything about is if
8327 sp, ap, or fp is our second argument, in which case we must swap
8328 the innermost first argument and our second argument. */
8329
8330 if (TREE_CODE (treeop0) == PLUS_EXPR
8331 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8332 && TREE_CODE (treeop1) == VAR_DECL
8333 && (DECL_RTL (treeop1) == frame_pointer_rtx
8334 || DECL_RTL (treeop1) == stack_pointer_rtx
8335 || DECL_RTL (treeop1) == arg_pointer_rtx))
8336 {
8337 gcc_unreachable ();
8338 }
8339
8340 /* If the result is to be ptr_mode and we are adding an integer to
8341 something, we might be forming a constant. So try to use
8342 plus_constant. If it produces a sum and we can't accept it,
8343 use force_operand. This allows P = &ARR[const] to generate
8344 efficient code on machines where a SYMBOL_REF is not a valid
8345 address.
8346
8347 If this is an EXPAND_SUM call, always return the sum. */
8348 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8349 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8350 {
8351 if (modifier == EXPAND_STACK_PARM)
8352 target = 0;
8353 if (TREE_CODE (treeop0) == INTEGER_CST
8354 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8355 && TREE_CONSTANT (treeop1))
8356 {
8357 rtx constant_part;
8358 HOST_WIDE_INT wc;
8359 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8360
8361 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8362 EXPAND_SUM);
8363 /* Use wi::shwi to ensure that the constant is
8364 truncated according to the mode of OP1, then sign extended
8365 to a HOST_WIDE_INT. Using the constant directly can result
8366 in non-canonical RTL in a 64x32 cross compile. */
8367 wc = TREE_INT_CST_LOW (treeop0);
8368 constant_part =
8369 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8370 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8371 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8372 op1 = force_operand (op1, target);
8373 return REDUCE_BIT_FIELD (op1);
8374 }
8375
8376 else if (TREE_CODE (treeop1) == INTEGER_CST
8377 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8378 && TREE_CONSTANT (treeop0))
8379 {
8380 rtx constant_part;
8381 HOST_WIDE_INT wc;
8382 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8383
8384 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8385 (modifier == EXPAND_INITIALIZER
8386 ? EXPAND_INITIALIZER : EXPAND_SUM));
8387 if (! CONSTANT_P (op0))
8388 {
8389 op1 = expand_expr (treeop1, NULL_RTX,
8390 VOIDmode, modifier);
8391 /* Return a PLUS if modifier says it's OK. */
8392 if (modifier == EXPAND_SUM
8393 || modifier == EXPAND_INITIALIZER)
8394 return simplify_gen_binary (PLUS, mode, op0, op1);
8395 goto binop2;
8396 }
8397 /* Use wi::shwi to ensure that the constant is
8398 truncated according to the mode of OP1, then sign extended
8399 to a HOST_WIDE_INT. Using the constant directly can result
8400 in non-canonical RTL in a 64x32 cross compile. */
8401 wc = TREE_INT_CST_LOW (treeop1);
8402 constant_part
8403 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8404 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8405 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8406 op0 = force_operand (op0, target);
8407 return REDUCE_BIT_FIELD (op0);
8408 }
8409 }
8410
8411 /* Use TER (temporary expression replacement) to expand pointer addition
8412 of a negated value as pointer subtraction. */
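/* E.g. for gimple of the form  tmp_1 = -n_2;  ptr_3 = ptr_4 + tmp_1;
   the code below rewrites the addition as  ptr_4 - n_2.  */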
8413 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8414 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8415 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8416 && TREE_CODE (treeop1) == SSA_NAME
8417 && TYPE_MODE (TREE_TYPE (treeop0))
8418 == TYPE_MODE (TREE_TYPE (treeop1)))
8419 {
8420 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8421 if (def)
8422 {
8423 treeop1 = gimple_assign_rhs1 (def);
8424 code = MINUS_EXPR;
8425 goto do_minus;
8426 }
8427 }
8428
8429 /* No sense saving up arithmetic to be done
8430 if it's all in the wrong mode to form part of an address.
8431 And force_operand won't know whether to sign-extend or
8432 zero-extend. */
8433 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8434 || mode != ptr_mode)
8435 {
8436 expand_operands (treeop0, treeop1,
8437 subtarget, &op0, &op1, EXPAND_NORMAL);
8438 if (op0 == const0_rtx)
8439 return op1;
8440 if (op1 == const0_rtx)
8441 return op0;
8442 goto binop2;
8443 }
8444
8445 expand_operands (treeop0, treeop1,
8446 subtarget, &op0, &op1, modifier);
8447 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8448
8449 case MINUS_EXPR:
8450 do_minus:
8451 /* For initializers, we are allowed to return a MINUS of two
8452 symbolic constants. Here we handle all cases when both operands
8453 are constant. */
8454 /* Handle difference of two symbolic constants,
8455 for the sake of an initializer. */
8456 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8457 && really_constant_p (treeop0)
8458 && really_constant_p (treeop1))
8459 {
8460 expand_operands (treeop0, treeop1,
8461 NULL_RTX, &op0, &op1, modifier);
8462
8463 /* If the last operand is a CONST_INT, use plus_constant of
8464 the negated constant. Else make the MINUS. */
8465 if (CONST_INT_P (op1))
8466 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8467 -INTVAL (op1)));
8468 else
8469 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8470 }
8471
8472 /* No sense saving up arithmetic to be done
8473 if it's all in the wrong mode to form part of an address.
8474 And force_operand won't know whether to sign-extend or
8475 zero-extend. */
8476 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8477 || mode != ptr_mode)
8478 goto binop;
8479
8480 expand_operands (treeop0, treeop1,
8481 subtarget, &op0, &op1, modifier);
8482
8483 /* Convert A - const to A + (-const). */
8484 if (CONST_INT_P (op1))
8485 {
8486 op1 = negate_rtx (mode, op1);
8487 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8488 }
8489
8490 goto binop2;
8491
8492 case WIDEN_MULT_PLUS_EXPR:
8493 case WIDEN_MULT_MINUS_EXPR:
8494 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8495 op2 = expand_normal (treeop2);
8496 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8497 target, unsignedp);
8498 return target;
8499
8500 case WIDEN_MULT_EXPR:
8501 /* If first operand is constant, swap them.
8502 Thus the following special case checks need only
8503 check the second operand. */
8504 if (TREE_CODE (treeop0) == INTEGER_CST)
8505 {
8506 tree t1 = treeop0;
8507 treeop0 = treeop1;
8508 treeop1 = t1;
8509 }
8510
8511 /* First, check if we have a multiplication of one signed and one
8512 unsigned operand. */
8513 if (TREE_CODE (treeop1) != INTEGER_CST
8514 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8515 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8516 {
8517 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8518 this_optab = usmul_widen_optab;
8519 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8520 != CODE_FOR_nothing)
8521 {
8522 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8523 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8524 EXPAND_NORMAL);
8525 else
8526 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8527 EXPAND_NORMAL);
8528 /* op0 and op1 might still be constant, despite the above
8529 != INTEGER_CST check. Handle it. */
8530 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8531 {
8532 op0 = convert_modes (innermode, mode, op0, true);
8533 op1 = convert_modes (innermode, mode, op1, false);
8534 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8535 target, unsignedp));
8536 }
8537 goto binop3;
8538 }
8539 }
8540 /* Check for a multiplication with matching signedness. */
8541 else if ((TREE_CODE (treeop1) == INTEGER_CST
8542 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8543 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8544 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8545 {
8546 tree op0type = TREE_TYPE (treeop0);
8547 machine_mode innermode = TYPE_MODE (op0type);
8548 bool zextend_p = TYPE_UNSIGNED (op0type);
8549 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8550 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8551
8552 if (TREE_CODE (treeop0) != INTEGER_CST)
8553 {
8554 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8555 != CODE_FOR_nothing)
8556 {
8557 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8558 EXPAND_NORMAL);
8559 /* op0 and op1 might still be constant, despite the above
8560 != INTEGER_CST check. Handle it. */
8561 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8562 {
8563 widen_mult_const:
8564 op0 = convert_modes (innermode, mode, op0, zextend_p);
8565 op1
8566 = convert_modes (innermode, mode, op1,
8567 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8568 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8569 target,
8570 unsignedp));
8571 }
8572 temp = expand_widening_mult (mode, op0, op1, target,
8573 unsignedp, this_optab);
8574 return REDUCE_BIT_FIELD (temp);
8575 }
8576 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8577 != CODE_FOR_nothing
8578 && innermode == word_mode)
8579 {
8580 rtx htem, hipart;
8581 op0 = expand_normal (treeop0);
8582 if (TREE_CODE (treeop1) == INTEGER_CST)
8583 op1 = convert_modes (innermode, mode,
8584 expand_normal (treeop1),
8585 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8586 else
8587 op1 = expand_normal (treeop1);
8588 /* op0 and op1 might still be constant, despite the above
8589 != INTEGER_CST check. Handle it. */
8590 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8591 goto widen_mult_const;
8592 temp = expand_binop (mode, other_optab, op0, op1, target,
8593 unsignedp, OPTAB_LIB_WIDEN);
8594 hipart = gen_highpart (innermode, temp);
8595 htem = expand_mult_highpart_adjust (innermode, hipart,
8596 op0, op1, hipart,
8597 zextend_p);
8598 if (htem != hipart)
8599 emit_move_insn (hipart, htem);
8600 return REDUCE_BIT_FIELD (temp);
8601 }
8602 }
8603 }
8604 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8605 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8606 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8607 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8608
8609 case FMA_EXPR:
8610 {
8611 optab opt = fma_optab;
8612 gimple def0, def2;
8613
8614 /* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
8615 call. */
8616 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8617 {
8618 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8619 tree call_expr;
8620
8621 gcc_assert (fn != NULL_TREE);
8622 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8623 return expand_builtin (call_expr, target, subtarget, mode, false);
8624 }
8625
8626 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8627 /* The multiplication is commutative - look at its 2nd operand
8628 if the first isn't fed by a negate. */
8629 if (!def0)
8630 {
8631 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8632 /* Swap operands if the 2nd operand is fed by a negate. */
8633 if (def0)
8634 {
8635 tree tem = treeop0;
8636 treeop0 = treeop1;
8637 treeop1 = tem;
8638 }
8639 }
8640 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8641
8642 op0 = op2 = NULL;
8643
8644 if (def0 && def2
8645 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8646 {
8647 opt = fnms_optab;
8648 op0 = expand_normal (gimple_assign_rhs1 (def0));
8649 op2 = expand_normal (gimple_assign_rhs1 (def2));
8650 }
8651 else if (def0
8652 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8653 {
8654 opt = fnma_optab;
8655 op0 = expand_normal (gimple_assign_rhs1 (def0));
8656 }
8657 else if (def2
8658 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8659 {
8660 opt = fms_optab;
8661 op2 = expand_normal (gimple_assign_rhs1 (def2));
8662 }
8663
8664 if (op0 == NULL)
8665 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8666 if (op2 == NULL)
8667 op2 = expand_normal (treeop2);
8668 op1 = expand_normal (treeop1);
8669
8670 return expand_ternary_op (TYPE_MODE (type), opt,
8671 op0, op1, op2, target, 0);
8672 }
8673
8674 case MULT_EXPR:
8675 /* If this is a fixed-point operation, then we cannot use the code
8676 below because "expand_mult" doesn't support sat/no-sat fixed-point
8677 multiplications. */
8678 if (ALL_FIXED_POINT_MODE_P (mode))
8679 goto binop;
8680
8681 /* If first operand is constant, swap them.
8682 Thus the following special case checks need only
8683 check the second operand. */
8684 if (TREE_CODE (treeop0) == INTEGER_CST)
8685 {
8686 tree t1 = treeop0;
8687 treeop0 = treeop1;
8688 treeop1 = t1;
8689 }
8690
8691 /* Attempt to return something suitable for generating an
8692 indexed address, for machines that support that. */
8693
8694 if (modifier == EXPAND_SUM && mode == ptr_mode
8695 && tree_fits_shwi_p (treeop1))
8696 {
8697 tree exp1 = treeop1;
8698
8699 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8700 EXPAND_SUM);
8701
8702 if (!REG_P (op0))
8703 op0 = force_operand (op0, NULL_RTX);
8704 if (!REG_P (op0))
8705 op0 = copy_to_mode_reg (mode, op0);
8706
8707 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8708 gen_int_mode (tree_to_shwi (exp1),
8709 TYPE_MODE (TREE_TYPE (exp1)))));
8710 }
8711
8712 if (modifier == EXPAND_STACK_PARM)
8713 target = 0;
8714
8715 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8716 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8717
8718 case TRUNC_DIV_EXPR:
8719 case FLOOR_DIV_EXPR:
8720 case CEIL_DIV_EXPR:
8721 case ROUND_DIV_EXPR:
8722 case EXACT_DIV_EXPR:
8723 /* If this is a fixed-point operation, then we cannot use the code
8724 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8725 divisions. */
8726 if (ALL_FIXED_POINT_MODE_P (mode))
8727 goto binop;
8728
8729 if (modifier == EXPAND_STACK_PARM)
8730 target = 0;
8731 /* Possible optimization: compute the dividend with EXPAND_SUM
8732 then, if the divisor is constant, we can optimize the case
8733 where some terms of the dividend have coefficients divisible by it. */
8734 expand_operands (treeop0, treeop1,
8735 subtarget, &op0, &op1, EXPAND_NORMAL);
8736 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8737
8738 case RDIV_EXPR:
8739 goto binop;
8740
8741 case MULT_HIGHPART_EXPR:
8742 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8743 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8744 gcc_assert (temp);
8745 return temp;
8746
8747 case TRUNC_MOD_EXPR:
8748 case FLOOR_MOD_EXPR:
8749 case CEIL_MOD_EXPR:
8750 case ROUND_MOD_EXPR:
8751 if (modifier == EXPAND_STACK_PARM)
8752 target = 0;
8753 expand_operands (treeop0, treeop1,
8754 subtarget, &op0, &op1, EXPAND_NORMAL);
8755 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8756
8757 case FIXED_CONVERT_EXPR:
8758 op0 = expand_normal (treeop0);
8759 if (target == 0 || modifier == EXPAND_STACK_PARM)
8760 target = gen_reg_rtx (mode);
8761
8762 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8763 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8764 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8765 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8766 else
8767 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8768 return target;
8769
8770 case FIX_TRUNC_EXPR:
8771 op0 = expand_normal (treeop0);
8772 if (target == 0 || modifier == EXPAND_STACK_PARM)
8773 target = gen_reg_rtx (mode);
8774 expand_fix (target, op0, unsignedp);
8775 return target;
8776
8777 case FLOAT_EXPR:
8778 op0 = expand_normal (treeop0);
8779 if (target == 0 || modifier == EXPAND_STACK_PARM)
8780 target = gen_reg_rtx (mode);
8781 /* expand_float can't figure out what to do if FROM has VOIDmode.
8782 So give it the correct mode. With -O, cse will optimize this. */
8783 if (GET_MODE (op0) == VOIDmode)
8784 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8785 op0);
8786 expand_float (target, op0,
8787 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8788 return target;
8789
8790 case NEGATE_EXPR:
8791 op0 = expand_expr (treeop0, subtarget,
8792 VOIDmode, EXPAND_NORMAL);
8793 if (modifier == EXPAND_STACK_PARM)
8794 target = 0;
8795 temp = expand_unop (mode,
8796 optab_for_tree_code (NEGATE_EXPR, type,
8797 optab_default),
8798 op0, target, 0);
8799 gcc_assert (temp);
8800 return REDUCE_BIT_FIELD (temp);
8801
8802 case ABS_EXPR:
8803 op0 = expand_expr (treeop0, subtarget,
8804 VOIDmode, EXPAND_NORMAL);
8805 if (modifier == EXPAND_STACK_PARM)
8806 target = 0;
8807
8808 /* ABS_EXPR is not valid for complex arguments. */
8809 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8810 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8811
8812 /* Unsigned abs is simply the operand. Testing here means we don't
8813 risk generating incorrect code below. */
8814 if (TYPE_UNSIGNED (type))
8815 return op0;
8816
8817 return expand_abs (mode, op0, target, unsignedp,
8818 safe_from_p (target, treeop0, 1));
8819
8820 case MAX_EXPR:
8821 case MIN_EXPR:
8822 target = original_target;
8823 if (target == 0
8824 || modifier == EXPAND_STACK_PARM
8825 || (MEM_P (target) && MEM_VOLATILE_P (target))
8826 || GET_MODE (target) != mode
8827 || (REG_P (target)
8828 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8829 target = gen_reg_rtx (mode);
8830 expand_operands (treeop0, treeop1,
8831 target, &op0, &op1, EXPAND_NORMAL);
8832
8833 /* First try to do it with a special MIN or MAX instruction.
8834 If that does not win, use a conditional jump to select the proper
8835 value. */
8836 this_optab = optab_for_tree_code (code, type, optab_default);
8837 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8838 OPTAB_WIDEN);
8839 if (temp != 0)
8840 return temp;
8841
8842 /* At this point, a MEM target is no longer useful; we will get better
8843 code without it. */
8844
8845 if (! REG_P (target))
8846 target = gen_reg_rtx (mode);
8847
8848 /* If op1 was placed in target, swap op0 and op1. */
8849 if (target != op0 && target == op1)
8850 std::swap (op0, op1);
8851
8852 /* We generate better code and avoid problems with op1 mentioning
8853 target by forcing op1 into a pseudo if it isn't a constant. */
8854 if (! CONSTANT_P (op1))
8855 op1 = force_reg (mode, op1);
8856
8857 {
8858 enum rtx_code comparison_code;
8859 rtx cmpop1 = op1;
8860
8861 if (code == MAX_EXPR)
8862 comparison_code = unsignedp ? GEU : GE;
8863 else
8864 comparison_code = unsignedp ? LEU : LE;
8865
8866 /* Canonicalize to comparisons against 0. */
8867 if (op1 == const1_rtx)
8868 {
8869 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8870 or (a != 0 ? a : 1) for unsigned.
8871 For MIN we are safe converting (a <= 1 ? a : 1)
8872 into (a <= 0 ? a : 1) */
8873 cmpop1 = const0_rtx;
8874 if (code == MAX_EXPR)
8875 comparison_code = unsignedp ? NE : GT;
8876 }
8877 if (op1 == constm1_rtx && !unsignedp)
8878 {
8879 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8880 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8881 cmpop1 = const0_rtx;
8882 if (code == MIN_EXPR)
8883 comparison_code = LT;
8884 }
8885
8886 /* Use a conditional move if possible. */
8887 if (can_conditionally_move_p (mode))
8888 {
8889 rtx insn;
8890
8891 start_sequence ();
8892
8893 /* Try to emit the conditional move. */
8894 insn = emit_conditional_move (target, comparison_code,
8895 op0, cmpop1, mode,
8896 op0, op1, mode,
8897 unsignedp);
8898
8899 /* If we could do the conditional move, emit the sequence,
8900 and return. */
8901 if (insn)
8902 {
8903 rtx_insn *seq = get_insns ();
8904 end_sequence ();
8905 emit_insn (seq);
8906 return target;
8907 }
8908
8909 /* Otherwise discard the sequence and fall back to code with
8910 branches. */
8911 end_sequence ();
8912 }
8913
8914 if (target != op0)
8915 emit_move_insn (target, op0);
8916
8917 lab = gen_label_rtx ();
8918 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8919 unsignedp, mode, NULL_RTX, NULL, lab,
8920 -1);
8921 }
8922 emit_move_insn (target, op1);
8923 emit_label (lab);
8924 return target;
8925
8926 case BIT_NOT_EXPR:
8927 op0 = expand_expr (treeop0, subtarget,
8928 VOIDmode, EXPAND_NORMAL);
8929 if (modifier == EXPAND_STACK_PARM)
8930 target = 0;
8931 /* In case we have to reduce the result to bitfield precision
8932 for an unsigned bitfield, expand this as XOR with a proper constant
8933 instead. */
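/* A minimal sketch, assuming a 3-bit unsigned bit-field held in SImode:
   the mask is (1 << 3) - 1 = 0x7, so op0 = 0b101 yields temp = 0b010,
   which already lies within the 3-bit precision, whereas a plain
   one's complement in SImode would produce 0xfffffffa and need a
   separate truncation.  */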
8934 if (reduce_bit_field && TYPE_UNSIGNED (type))
8935 {
8936 wide_int mask = wi::mask (TYPE_PRECISION (type),
8937 false, GET_MODE_PRECISION (mode));
8938
8939 temp = expand_binop (mode, xor_optab, op0,
8940 immed_wide_int_const (mask, mode),
8941 target, 1, OPTAB_LIB_WIDEN);
8942 }
8943 else
8944 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8945 gcc_assert (temp);
8946 return temp;
8947
8948 /* ??? Can optimize bitwise operations with one arg constant.
8949 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8950 and (a bitwise1 b) bitwise2 b (etc)
8951 but that is probably not worthwhile. */
8952
8953 case BIT_AND_EXPR:
8954 case BIT_IOR_EXPR:
8955 case BIT_XOR_EXPR:
8956 goto binop;
8957
8958 case LROTATE_EXPR:
8959 case RROTATE_EXPR:
8960 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8961 || (GET_MODE_PRECISION (TYPE_MODE (type))
8962 == TYPE_PRECISION (type)));
8963 /* fall through */
8964
8965 case LSHIFT_EXPR:
8966 case RSHIFT_EXPR:
8967 /* If this is a fixed-point operation, then we cannot use the code
8968 below because "expand_shift" doesn't support sat/no-sat fixed-point
8969 shifts. */
8970 if (ALL_FIXED_POINT_MODE_P (mode))
8971 goto binop;
8972
8973 if (! safe_from_p (subtarget, treeop1, 1))
8974 subtarget = 0;
8975 if (modifier == EXPAND_STACK_PARM)
8976 target = 0;
8977 op0 = expand_expr (treeop0, subtarget,
8978 VOIDmode, EXPAND_NORMAL);
8979 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8980 unsignedp);
8981 if (code == LSHIFT_EXPR)
8982 temp = REDUCE_BIT_FIELD (temp);
8983 return temp;
8984
8985 /* Could determine the answer when only additive constants differ. Also,
8986 the addition of one can be handled by changing the condition. */
8987 case LT_EXPR:
8988 case LE_EXPR:
8989 case GT_EXPR:
8990 case GE_EXPR:
8991 case EQ_EXPR:
8992 case NE_EXPR:
8993 case UNORDERED_EXPR:
8994 case ORDERED_EXPR:
8995 case UNLT_EXPR:
8996 case UNLE_EXPR:
8997 case UNGT_EXPR:
8998 case UNGE_EXPR:
8999 case UNEQ_EXPR:
9000 case LTGT_EXPR:
9001 {
9002 temp = do_store_flag (ops,
9003 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9004 tmode != VOIDmode ? tmode : mode);
9005 if (temp)
9006 return temp;
9007
9008 /* Use a compare and a jump for BLKmode comparisons, or for function
9009 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
9010
9011 if ((target == 0
9012 || modifier == EXPAND_STACK_PARM
9013 || ! safe_from_p (target, treeop0, 1)
9014 || ! safe_from_p (target, treeop1, 1)
9015 /* Make sure we don't have a hard reg (such as function's return
9016 value) live across basic blocks, if not optimizing. */
9017 || (!optimize && REG_P (target)
9018 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9019 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9020
9021 emit_move_insn (target, const0_rtx);
9022
9023 rtx_code_label *lab1 = gen_label_rtx ();
9024 jumpifnot_1 (code, treeop0, treeop1, lab1, -1);
9025
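/* For a signed 1-bit result type the only nonzero value is -1, so
   "true" must be stored as constm1_rtx; e.g. a bit-field declared
   "signed int b : 1" can hold only 0 and -1.  */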
9026 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9027 emit_move_insn (target, constm1_rtx);
9028 else
9029 emit_move_insn (target, const1_rtx);
9030
9031 emit_label (lab1);
9032 return target;
9033 }
9034 case COMPLEX_EXPR:
9035 /* Get the rtx code of the operands. */
9036 op0 = expand_normal (treeop0);
9037 op1 = expand_normal (treeop1);
9038
9039 if (!target)
9040 target = gen_reg_rtx (TYPE_MODE (type));
9041 else
9042 /* If target overlaps with op1, then either we need to force
9043 op1 into a pseudo (if target also overlaps with op0),
9044 or write the complex parts in reverse order. */
9045 switch (GET_CODE (target))
9046 {
9047 case CONCAT:
9048 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9049 {
9050 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9051 {
9052 complex_expr_force_op1:
9053 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9054 emit_move_insn (temp, op1);
9055 op1 = temp;
9056 break;
9057 }
9058 complex_expr_swap_order:
9059 /* Move the imaginary (op1) and real (op0) parts to their
9060 location. */
9061 write_complex_part (target, op1, true);
9062 write_complex_part (target, op0, false);
9063
9064 return target;
9065 }
9066 break;
9067 case MEM:
9068 temp = adjust_address_nv (target,
9069 GET_MODE_INNER (GET_MODE (target)), 0);
9070 if (reg_overlap_mentioned_p (temp, op1))
9071 {
9072 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9073 temp = adjust_address_nv (target, imode,
9074 GET_MODE_SIZE (imode));
9075 if (reg_overlap_mentioned_p (temp, op0))
9076 goto complex_expr_force_op1;
9077 goto complex_expr_swap_order;
9078 }
9079 break;
9080 default:
9081 if (reg_overlap_mentioned_p (target, op1))
9082 {
9083 if (reg_overlap_mentioned_p (target, op0))
9084 goto complex_expr_force_op1;
9085 goto complex_expr_swap_order;
9086 }
9087 break;
9088 }
9089
9090 /* Move the real (op0) and imaginary (op1) parts to their location. */
9091 write_complex_part (target, op0, false);
9092 write_complex_part (target, op1, true);
9093
9094 return target;
9095
9096 case WIDEN_SUM_EXPR:
9097 {
9098 tree oprnd0 = treeop0;
9099 tree oprnd1 = treeop1;
9100
9101 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9102 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9103 target, unsignedp);
9104 return target;
9105 }
9106
9107 case REDUC_MAX_EXPR:
9108 case REDUC_MIN_EXPR:
9109 case REDUC_PLUS_EXPR:
9110 {
9111 op0 = expand_normal (treeop0);
9112 this_optab = optab_for_tree_code (code, type, optab_default);
9113 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9114
9115 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9116 {
9117 struct expand_operand ops[2];
9118 enum insn_code icode = optab_handler (this_optab, vec_mode);
9119
9120 create_output_operand (&ops[0], target, mode);
9121 create_input_operand (&ops[1], op0, vec_mode);
9122 if (maybe_expand_insn (icode, 2, ops))
9123 {
9124 target = ops[0].value;
9125 if (GET_MODE (target) != mode)
9126 return gen_lowpart (tmode, target);
9127 return target;
9128 }
9129 }
9130 /* Fall back to optab with vector result, and then extract scalar. */
9131 this_optab = scalar_reduc_to_vector (this_optab, type);
9132 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9133 gcc_assert (temp);
9134 /* The tree code produces a scalar result, but (somewhat by convention)
9135 the optab produces a vector with the result in element 0 if
9136 little-endian, or element N-1 if big-endian. So pull the scalar
9137 result out of that element. */
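/* As a concrete sketch, assume the input vector has mode V4SImode on a
   little-endian target: index is 0 and bitsize is 32, so the scalar is
   taken from bits [0, 31] of the vector result; on a big-endian target
   index is 3 and the same scalar sits at bit offset 3 * 32 = 96.  */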
9138 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9139 int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9140 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9141 target, mode, mode);
9142 gcc_assert (temp);
9143 return temp;
9144 }
9145
9146 case VEC_UNPACK_HI_EXPR:
9147 case VEC_UNPACK_LO_EXPR:
9148 {
9149 op0 = expand_normal (treeop0);
9150 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9151 target, unsignedp);
9152 gcc_assert (temp);
9153 return temp;
9154 }
9155
9156 case VEC_UNPACK_FLOAT_HI_EXPR:
9157 case VEC_UNPACK_FLOAT_LO_EXPR:
9158 {
9159 op0 = expand_normal (treeop0);
9160 /* The signedness is determined from the input operand. */
9161 temp = expand_widen_pattern_expr
9162 (ops, op0, NULL_RTX, NULL_RTX,
9163 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9164
9165 gcc_assert (temp);
9166 return temp;
9167 }
9168
9169 case VEC_WIDEN_MULT_HI_EXPR:
9170 case VEC_WIDEN_MULT_LO_EXPR:
9171 case VEC_WIDEN_MULT_EVEN_EXPR:
9172 case VEC_WIDEN_MULT_ODD_EXPR:
9173 case VEC_WIDEN_LSHIFT_HI_EXPR:
9174 case VEC_WIDEN_LSHIFT_LO_EXPR:
9175 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9176 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9177 target, unsignedp);
9178 gcc_assert (target);
9179 return target;
9180
9181 case VEC_PACK_TRUNC_EXPR:
9182 case VEC_PACK_SAT_EXPR:
9183 case VEC_PACK_FIX_TRUNC_EXPR:
9184 mode = TYPE_MODE (TREE_TYPE (treeop0));
9185 goto binop;
9186
9187 case VEC_PERM_EXPR:
9188 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9189 op2 = expand_normal (treeop2);
9190
9191 /* Careful here: if the target doesn't support integral vector modes,
9192 a constant selection vector could wind up smooshed into a normal
9193 integral constant. */
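/* E.g. a four-element QImode selector such as { 3, 2, 1, 0 } may have
   been folded into a single SImode CONST_INT; the simplify_subreg call
   below rebuilds the CONST_VECTOR form that expand_vec_perm expects.  */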
9194 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9195 {
9196 tree sel_type = TREE_TYPE (treeop2);
9197 machine_mode vmode
9198 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9199 TYPE_VECTOR_SUBPARTS (sel_type));
9200 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9201 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9202 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9203 }
9204 else
9205 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9206
9207 temp = expand_vec_perm (mode, op0, op1, op2, target);
9208 gcc_assert (temp);
9209 return temp;
9210
9211 case DOT_PROD_EXPR:
9212 {
9213 tree oprnd0 = treeop0;
9214 tree oprnd1 = treeop1;
9215 tree oprnd2 = treeop2;
9216 rtx op2;
9217
9218 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9219 op2 = expand_normal (oprnd2);
9220 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9221 target, unsignedp);
9222 return target;
9223 }
9224
9225 case SAD_EXPR:
9226 {
9227 tree oprnd0 = treeop0;
9228 tree oprnd1 = treeop1;
9229 tree oprnd2 = treeop2;
9230 rtx op2;
9231
9232 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9233 op2 = expand_normal (oprnd2);
9234 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9235 target, unsignedp);
9236 return target;
9237 }
9238
9239 case REALIGN_LOAD_EXPR:
9240 {
9241 tree oprnd0 = treeop0;
9242 tree oprnd1 = treeop1;
9243 tree oprnd2 = treeop2;
9244 rtx op2;
9245
9246 this_optab = optab_for_tree_code (code, type, optab_default);
9247 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9248 op2 = expand_normal (oprnd2);
9249 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9250 target, unsignedp);
9251 gcc_assert (temp);
9252 return temp;
9253 }
9254
9255 case COND_EXPR:
9256 {
9257 /* A COND_EXPR with its type being VOID_TYPE represents a
9258 conditional jump and is handled in
9259 expand_gimple_cond_expr. */
9260 gcc_assert (!VOID_TYPE_P (type));
9261
9262 /* Note that COND_EXPRs whose type is a structure or union
9263 are required to be constructed to contain assignments of
9264 a temporary variable, so that we can evaluate them here
9265 for side effect only. If type is void, we must do likewise. */
9266
9267 gcc_assert (!TREE_ADDRESSABLE (type)
9268 && !ignore
9269 && TREE_TYPE (treeop1) != void_type_node
9270 && TREE_TYPE (treeop2) != void_type_node);
9271
9272 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9273 if (temp)
9274 return temp;
9275
9276 /* If we are not to produce a result, we have no target. Otherwise,
9277 if a target was specified use it; it will not be used as an
9278 intermediate target unless it is safe. If no target, use a
9279 temporary. */
9280
9281 if (modifier != EXPAND_STACK_PARM
9282 && original_target
9283 && safe_from_p (original_target, treeop0, 1)
9284 && GET_MODE (original_target) == mode
9285 && !MEM_P (original_target))
9286 temp = original_target;
9287 else
9288 temp = assign_temp (type, 0, 1);
9289
9290 do_pending_stack_adjust ();
9291 NO_DEFER_POP;
9292 rtx_code_label *lab0 = gen_label_rtx ();
9293 rtx_code_label *lab1 = gen_label_rtx ();
9294 jumpifnot (treeop0, lab0, -1);
9295 store_expr (treeop1, temp,
9296 modifier == EXPAND_STACK_PARM,
9297 false);
9298
9299 emit_jump_insn (gen_jump (lab1));
9300 emit_barrier ();
9301 emit_label (lab0);
9302 store_expr (treeop2, temp,
9303 modifier == EXPAND_STACK_PARM,
9304 false);
9305
9306 emit_label (lab1);
9307 OK_DEFER_POP;
9308 return temp;
9309 }
9310
9311 case VEC_COND_EXPR:
9312 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9313 return target;
9314
9315 default:
9316 gcc_unreachable ();
9317 }
9318
9319 /* Here to do an ordinary binary operator. */
9320 binop:
9321 expand_operands (treeop0, treeop1,
9322 subtarget, &op0, &op1, EXPAND_NORMAL);
9323 binop2:
9324 this_optab = optab_for_tree_code (code, type, optab_default);
9325 binop3:
9326 if (modifier == EXPAND_STACK_PARM)
9327 target = 0;
9328 temp = expand_binop (mode, this_optab, op0, op1, target,
9329 unsignedp, OPTAB_LIB_WIDEN);
9330 gcc_assert (temp);
9331 /* Bitwise operations do not need bitfield reduction as we expect their
9332 operands to be properly truncated. */
9333 if (code == BIT_XOR_EXPR
9334 || code == BIT_AND_EXPR
9335 || code == BIT_IOR_EXPR)
9336 return temp;
9337 return REDUCE_BIT_FIELD (temp);
9338 }
9339 #undef REDUCE_BIT_FIELD
9340
9341
9342 /* Return TRUE if expression STMT is suitable for replacement.
9343 Never consider memory loads as replaceable, because those don't ever lead
9344 into constant expressions. */
9345
9346 static bool
9347 stmt_is_replaceable_p (gimple stmt)
9348 {
9349 if (ssa_is_replaceable_p (stmt))
9350 {
9351 /* Don't move around loads. */
9352 if (!gimple_assign_single_p (stmt)
9353 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9354 return true;
9355 }
9356 return false;
9357 }
9358
9359 rtx
9360 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9361 enum expand_modifier modifier, rtx *alt_rtl,
9362 bool inner_reference_p)
9363 {
9364 rtx op0, op1, temp, decl_rtl;
9365 tree type;
9366 int unsignedp;
9367 machine_mode mode;
9368 enum tree_code code = TREE_CODE (exp);
9369 rtx subtarget, original_target;
9370 int ignore;
9371 tree context;
9372 bool reduce_bit_field;
9373 location_t loc = EXPR_LOCATION (exp);
9374 struct separate_ops ops;
9375 tree treeop0, treeop1, treeop2;
9376 tree ssa_name = NULL_TREE;
9377 gimple g;
9378
9379 type = TREE_TYPE (exp);
9380 mode = TYPE_MODE (type);
9381 unsignedp = TYPE_UNSIGNED (type);
9382
9383 treeop0 = treeop1 = treeop2 = NULL_TREE;
9384 if (!VL_EXP_CLASS_P (exp))
9385 switch (TREE_CODE_LENGTH (code))
9386 {
9387 default:
9388 case 3: treeop2 = TREE_OPERAND (exp, 2);
9389 case 2: treeop1 = TREE_OPERAND (exp, 1);
9390 case 1: treeop0 = TREE_OPERAND (exp, 0);
9391 case 0: break;
9392 }
9393 ops.code = code;
9394 ops.type = type;
9395 ops.op0 = treeop0;
9396 ops.op1 = treeop1;
9397 ops.op2 = treeop2;
9398 ops.location = loc;
9399
9400 ignore = (target == const0_rtx
9401 || ((CONVERT_EXPR_CODE_P (code)
9402 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9403 && TREE_CODE (type) == VOID_TYPE));
9404
9405 /* An operation in what may be a bit-field type needs the
9406 result to be reduced to the precision of the bit-field type,
9407 which is narrower than that of the type's mode. */
9408 reduce_bit_field = (!ignore
9409 && INTEGRAL_TYPE_P (type)
9410 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
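/* For instance, assuming an operation on a signed bit-field declared
   "int f : 5": TYPE_PRECISION (type) is 5 while the mode is SImode with
   a precision of 32, so reduce_bit_field is set and results are later
   brought back into the 5-bit range (e.g. 15 + 1 is sign-extended from
   bit 4 and becomes -16 rather than 16).  */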
9411
9412 /* If we are going to ignore this result, we need only do something
9413 if there is a side-effect somewhere in the expression. If there
9414 is, short-circuit the most common cases here. Note that we must
9415 not call expand_expr with anything but const0_rtx in case this
9416 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9417
9418 if (ignore)
9419 {
9420 if (! TREE_SIDE_EFFECTS (exp))
9421 return const0_rtx;
9422
9423 /* Ensure we reference a volatile object even if value is ignored, but
9424 don't do this if all we are doing is taking its address. */
9425 if (TREE_THIS_VOLATILE (exp)
9426 && TREE_CODE (exp) != FUNCTION_DECL
9427 && mode != VOIDmode && mode != BLKmode
9428 && modifier != EXPAND_CONST_ADDRESS)
9429 {
9430 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9431 if (MEM_P (temp))
9432 copy_to_reg (temp);
9433 return const0_rtx;
9434 }
9435
9436 if (TREE_CODE_CLASS (code) == tcc_unary
9437 || code == BIT_FIELD_REF
9438 || code == COMPONENT_REF
9439 || code == INDIRECT_REF)
9440 return expand_expr (treeop0, const0_rtx, VOIDmode,
9441 modifier);
9442
9443 else if (TREE_CODE_CLASS (code) == tcc_binary
9444 || TREE_CODE_CLASS (code) == tcc_comparison
9445 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9446 {
9447 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9448 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9449 return const0_rtx;
9450 }
9451
9452 target = 0;
9453 }
9454
9455 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9456 target = 0;
9457
9458 /* Use subtarget as the target for operand 0 of a binary operation. */
9459 subtarget = get_subtarget (target);
9460 original_target = target;
9461
9462 switch (code)
9463 {
9464 case LABEL_DECL:
9465 {
9466 tree function = decl_function_context (exp);
9467
9468 temp = label_rtx (exp);
9469 temp = gen_rtx_LABEL_REF (Pmode, temp);
9470
9471 if (function != current_function_decl
9472 && function != 0)
9473 LABEL_REF_NONLOCAL_P (temp) = 1;
9474
9475 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9476 return temp;
9477 }
9478
9479 case SSA_NAME:
9480 /* ??? ivopts calls the expander without any preparation from
9481 out-of-ssa, so fake instructions as if this were an access to the
9482 base variable. This unnecessarily allocates a pseudo; see how we can
9483 reuse it if partition base vars have it set already. */
9484 if (!currently_expanding_to_rtl)
9485 {
9486 tree var = SSA_NAME_VAR (exp);
9487 if (var && DECL_RTL_SET_P (var))
9488 return DECL_RTL (var);
9489 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9490 LAST_VIRTUAL_REGISTER + 1);
9491 }
9492
9493 g = get_gimple_for_ssa_name (exp);
9494 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9495 if (g == NULL
9496 && modifier == EXPAND_INITIALIZER
9497 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9498 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9499 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9500 g = SSA_NAME_DEF_STMT (exp);
9501 if (g)
9502 {
9503 rtx r;
9504 ops.code = gimple_assign_rhs_code (g);
9505 switch (get_gimple_rhs_class (ops.code))
9506 {
9507 case GIMPLE_TERNARY_RHS:
9508 ops.op2 = gimple_assign_rhs3 (g);
9509 /* Fallthru */
9510 case GIMPLE_BINARY_RHS:
9511 ops.op1 = gimple_assign_rhs2 (g);
9512
9513 /* Try to expand conditional compare. */
9514 if (targetm.gen_ccmp_first)
9515 {
9516 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9517 r = expand_ccmp_expr (g);
9518 if (r)
9519 break;
9520 }
9521 /* Fallthru */
9522 case GIMPLE_UNARY_RHS:
9523 ops.op0 = gimple_assign_rhs1 (g);
9524 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9525 ops.location = gimple_location (g);
9526 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9527 break;
9528 case GIMPLE_SINGLE_RHS:
9529 {
9530 location_t saved_loc = curr_insn_location ();
9531 set_curr_insn_location (gimple_location (g));
9532 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9533 tmode, modifier, NULL, inner_reference_p);
9534 set_curr_insn_location (saved_loc);
9535 break;
9536 }
9537 default:
9538 gcc_unreachable ();
9539 }
9540 if (REG_P (r) && !REG_EXPR (r))
9541 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9542 return r;
9543 }
9544
9545 ssa_name = exp;
9546 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9547 exp = SSA_NAME_VAR (ssa_name);
9548 goto expand_decl_rtl;
9549
9550 case PARM_DECL:
9551 case VAR_DECL:
9552 /* If a static var's type was incomplete when the decl was written,
9553 but the type is complete now, lay out the decl now. */
9554 if (DECL_SIZE (exp) == 0
9555 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9556 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9557 layout_decl (exp, 0);
9558
9559 /* ... fall through ... */
9560
9561 case FUNCTION_DECL:
9562 case RESULT_DECL:
9563 decl_rtl = DECL_RTL (exp);
9564 expand_decl_rtl:
9565 gcc_assert (decl_rtl);
9566 decl_rtl = copy_rtx (decl_rtl);
9567 /* Record writes to register variables. */
9568 if (modifier == EXPAND_WRITE
9569 && REG_P (decl_rtl)
9570 && HARD_REGISTER_P (decl_rtl))
9571 add_to_hard_reg_set (&crtl->asm_clobbers,
9572 GET_MODE (decl_rtl), REGNO (decl_rtl));
9573
9574 /* Ensure the variable is marked as used even if it doesn't go through
9575 a parser. If it hasn't been used yet, write out an external
9576 definition. */
9577 TREE_USED (exp) = 1;
9578
9579 /* Show we haven't gotten RTL for this yet. */
9580 temp = 0;
9581
9582 /* Variables inherited from containing functions should have
9583 been lowered by this point. */
9584 context = decl_function_context (exp);
9585 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9586 || context == current_function_decl
9587 || TREE_STATIC (exp)
9588 || DECL_EXTERNAL (exp)
9589 /* ??? C++ creates functions that are not TREE_STATIC. */
9590 || TREE_CODE (exp) == FUNCTION_DECL);
9591
9592 /* This is the case of an array whose size is to be determined
9593 from its initializer, while the initializer is still being parsed.
9594 ??? We aren't parsing while expanding anymore. */
9595
9596 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9597 temp = validize_mem (decl_rtl);
9598
9599 /* If DECL_RTL is memory, we are in the normal case and the
9600 address is not valid, get the address into a register. */
9601
9602 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9603 {
9604 if (alt_rtl)
9605 *alt_rtl = decl_rtl;
9606 decl_rtl = use_anchored_address (decl_rtl);
9607 if (modifier != EXPAND_CONST_ADDRESS
9608 && modifier != EXPAND_SUM
9609 && !memory_address_addr_space_p (DECL_MODE (exp),
9610 XEXP (decl_rtl, 0),
9611 MEM_ADDR_SPACE (decl_rtl)))
9612 temp = replace_equiv_address (decl_rtl,
9613 copy_rtx (XEXP (decl_rtl, 0)));
9614 }
9615
9616 /* If we got something, return it. But first, set the alignment
9617 if the address is a register. */
9618 if (temp != 0)
9619 {
9620 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9621 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9622
9623 return temp;
9624 }
9625
9626 /* If the mode of DECL_RTL does not match that of the decl,
9627 there are two cases: we are dealing with a BLKmode value
9628 that is returned in a register, or we are dealing with
9629 a promoted value. In the latter case, return a SUBREG
9630 of the wanted mode, but mark it so that we know that it
9631 was already extended. */
9632 if (REG_P (decl_rtl)
9633 && DECL_MODE (exp) != BLKmode
9634 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9635 {
9636 machine_mode pmode;
9637
9638 /* Get the signedness to be used for this variable. Ensure we get
9639 the same mode we got when the variable was declared. */
9640 if (code == SSA_NAME
9641 && (g = SSA_NAME_DEF_STMT (ssa_name))
9642 && gimple_code (g) == GIMPLE_CALL
9643 && !gimple_call_internal_p (g))
9644 pmode = promote_function_mode (type, mode, &unsignedp,
9645 gimple_call_fntype (g),
9646 2);
9647 else
9648 pmode = promote_decl_mode (exp, &unsignedp);
9649 gcc_assert (GET_MODE (decl_rtl) == pmode);
9650
9651 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9652 SUBREG_PROMOTED_VAR_P (temp) = 1;
9653 SUBREG_PROMOTED_SET (temp, unsignedp);
9654 return temp;
9655 }
9656
9657 return decl_rtl;
9658
9659 case INTEGER_CST:
9660 /* Given that TYPE_PRECISION (type) is not always equal to
9661 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9662 the former to the latter according to the signedness of the
9663 type. */
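/* A small sketch of why this matters: assume a 3-bit signed type whose
   mode is QImode.  The constant -1 carries only 3 bits of precision;
   wide_int::from with SIGNED extends it to the 8-bit mode precision as
   0xff, i.e. -1 in QImode, whereas taking the 3 low bits verbatim would
   yield 0x07, i.e. +7.  */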
9664 temp = immed_wide_int_const (wide_int::from
9665 (exp,
9666 GET_MODE_PRECISION (TYPE_MODE (type)),
9667 TYPE_SIGN (type)),
9668 TYPE_MODE (type));
9669 return temp;
9670
9671 case VECTOR_CST:
9672 {
9673 tree tmp = NULL_TREE;
9674 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9675 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9676 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9677 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9678 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9679 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9680 return const_vector_from_tree (exp);
9681 if (GET_MODE_CLASS (mode) == MODE_INT)
9682 {
9683 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9684 if (type_for_mode)
9685 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9686 }
9687 if (!tmp)
9688 {
9689 vec<constructor_elt, va_gc> *v;
9690 unsigned i;
9691 vec_alloc (v, VECTOR_CST_NELTS (exp));
9692 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9693 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9694 tmp = build_constructor (type, v);
9695 }
9696 return expand_expr (tmp, ignore ? const0_rtx : target,
9697 tmode, modifier);
9698 }
9699
9700 case CONST_DECL:
9701 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9702
9703 case REAL_CST:
9704 /* If optimized, generate immediate CONST_DOUBLE
9705 which will be turned into memory by reload if necessary.
9706
9707 We used to force a register so that loop.c could see it. But
9708 this does not allow gen_* patterns to perform optimizations with
9709 the constants. It also produces two insns in cases like "x = 1.0;".
9710 On most machines, floating-point constants are not permitted in
9711 many insns, so we'd end up copying it to a register in any case.
9712
9713 Now, we do the copying in expand_binop, if appropriate. */
9714 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9715 TYPE_MODE (TREE_TYPE (exp)));
9716
9717 case FIXED_CST:
9718 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9719 TYPE_MODE (TREE_TYPE (exp)));
9720
9721 case COMPLEX_CST:
9722 /* Handle evaluating a complex constant in a CONCAT target. */
9723 if (original_target && GET_CODE (original_target) == CONCAT)
9724 {
9725 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9726 rtx rtarg, itarg;
9727
9728 rtarg = XEXP (original_target, 0);
9729 itarg = XEXP (original_target, 1);
9730
9731 /* Move the real and imaginary parts separately. */
9732 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9733 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9734
9735 if (op0 != rtarg)
9736 emit_move_insn (rtarg, op0);
9737 if (op1 != itarg)
9738 emit_move_insn (itarg, op1);
9739
9740 return original_target;
9741 }
9742
9743 /* ... fall through ... */
9744
9745 case STRING_CST:
9746 temp = expand_expr_constant (exp, 1, modifier);
9747
9748 /* temp contains a constant address.
9749 On RISC machines where a constant address isn't valid,
9750 make some insns to get that address into a register. */
9751 if (modifier != EXPAND_CONST_ADDRESS
9752 && modifier != EXPAND_INITIALIZER
9753 && modifier != EXPAND_SUM
9754 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9755 MEM_ADDR_SPACE (temp)))
9756 return replace_equiv_address (temp,
9757 copy_rtx (XEXP (temp, 0)));
9758 return temp;
9759
9760 case SAVE_EXPR:
9761 {
9762 tree val = treeop0;
9763 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9764 inner_reference_p);
9765
9766 if (!SAVE_EXPR_RESOLVED_P (exp))
9767 {
9768 /* We can indeed still hit this case, typically via builtin
9769 expanders calling save_expr immediately before expanding
9770 something. Assume this means that we only have to deal
9771 with non-BLKmode values. */
9772 gcc_assert (GET_MODE (ret) != BLKmode);
9773
9774 val = build_decl (curr_insn_location (),
9775 VAR_DECL, NULL, TREE_TYPE (exp));
9776 DECL_ARTIFICIAL (val) = 1;
9777 DECL_IGNORED_P (val) = 1;
9778 treeop0 = val;
9779 TREE_OPERAND (exp, 0) = treeop0;
9780 SAVE_EXPR_RESOLVED_P (exp) = 1;
9781
9782 if (!CONSTANT_P (ret))
9783 ret = copy_to_reg (ret);
9784 SET_DECL_RTL (val, ret);
9785 }
9786
9787 return ret;
9788 }
9789
9790
9791 case CONSTRUCTOR:
9792 /* If we don't need the result, just ensure we evaluate any
9793 subexpressions. */
9794 if (ignore)
9795 {
9796 unsigned HOST_WIDE_INT idx;
9797 tree value;
9798
9799 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9800 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9801
9802 return const0_rtx;
9803 }
9804
9805 return expand_constructor (exp, target, modifier, false);
9806
9807 case TARGET_MEM_REF:
9808 {
9809 addr_space_t as
9810 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9811 enum insn_code icode;
9812 unsigned int align;
9813
9814 op0 = addr_for_mem_ref (exp, as, true);
9815 op0 = memory_address_addr_space (mode, op0, as);
9816 temp = gen_rtx_MEM (mode, op0);
9817 set_mem_attributes (temp, exp, 0);
9818 set_mem_addr_space (temp, as);
9819 align = get_object_alignment (exp);
9820 if (modifier != EXPAND_WRITE
9821 && modifier != EXPAND_MEMORY
9822 && mode != BLKmode
9823 && align < GET_MODE_ALIGNMENT (mode)
9824 /* If the target does not have special handling for unaligned
9825 loads of this mode then it can use regular moves for them. */
9826 && ((icode = optab_handler (movmisalign_optab, mode))
9827 != CODE_FOR_nothing))
9828 {
9829 struct expand_operand ops[2];
9830
9831 /* We've already validated the memory, and we're creating a
9832 new pseudo destination. The predicates really can't fail,
9833 nor can the generator. */
9834 create_output_operand (&ops[0], NULL_RTX, mode);
9835 create_fixed_operand (&ops[1], temp);
9836 expand_insn (icode, 2, ops);
9837 temp = ops[0].value;
9838 }
9839 return temp;
9840 }
9841
9842 case MEM_REF:
9843 {
9844 addr_space_t as
9845 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9846 machine_mode address_mode;
9847 tree base = TREE_OPERAND (exp, 0);
9848 gimple def_stmt;
9849 enum insn_code icode;
9850 unsigned align;
9851 /* Handle expansion of non-aliased memory with non-BLKmode. That
9852 might end up in a register. */
9853 if (mem_ref_refers_to_non_mem_p (exp))
9854 {
9855 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9856 base = TREE_OPERAND (base, 0);
9857 if (offset == 0
9858 && tree_fits_uhwi_p (TYPE_SIZE (type))
9859 && (GET_MODE_BITSIZE (DECL_MODE (base))
9860 == tree_to_uhwi (TYPE_SIZE (type))))
9861 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9862 target, tmode, modifier);
9863 if (TYPE_MODE (type) == BLKmode)
9864 {
9865 temp = assign_stack_temp (DECL_MODE (base),
9866 GET_MODE_SIZE (DECL_MODE (base)));
9867 store_expr (base, temp, 0, false);
9868 temp = adjust_address (temp, BLKmode, offset);
9869 set_mem_size (temp, int_size_in_bytes (type));
9870 return temp;
9871 }
9872 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9873 bitsize_int (offset * BITS_PER_UNIT));
9874 return expand_expr (exp, target, tmode, modifier);
9875 }
9876 address_mode = targetm.addr_space.address_mode (as);
9877 base = TREE_OPERAND (exp, 0);
9878 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9879 {
9880 tree mask = gimple_assign_rhs2 (def_stmt);
9881 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9882 gimple_assign_rhs1 (def_stmt), mask);
9883 TREE_OPERAND (exp, 0) = base;
9884 }
9885 align = get_object_alignment (exp);
9886 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9887 op0 = memory_address_addr_space (mode, op0, as);
9888 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9889 {
9890 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9891 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9892 op0 = memory_address_addr_space (mode, op0, as);
9893 }
9894 temp = gen_rtx_MEM (mode, op0);
9895 set_mem_attributes (temp, exp, 0);
9896 set_mem_addr_space (temp, as);
9897 if (TREE_THIS_VOLATILE (exp))
9898 MEM_VOLATILE_P (temp) = 1;
9899 if (modifier != EXPAND_WRITE
9900 && modifier != EXPAND_MEMORY
9901 && !inner_reference_p
9902 && mode != BLKmode
9903 && align < GET_MODE_ALIGNMENT (mode))
9904 {
9905 if ((icode = optab_handler (movmisalign_optab, mode))
9906 != CODE_FOR_nothing)
9907 {
9908 struct expand_operand ops[2];
9909
9910 /* We've already validated the memory, and we're creating a
9911 new pseudo destination. The predicates really can't fail,
9912 nor can the generator. */
9913 create_output_operand (&ops[0], NULL_RTX, mode);
9914 create_fixed_operand (&ops[1], temp);
9915 expand_insn (icode, 2, ops);
9916 temp = ops[0].value;
9917 }
9918 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9919 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9920 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9921 (modifier == EXPAND_STACK_PARM
9922 ? NULL_RTX : target),
9923 mode, mode);
9924 }
9925 return temp;
9926 }
9927
9928 case ARRAY_REF:
9929
9930 {
9931 tree array = treeop0;
9932 tree index = treeop1;
9933 tree init;
9934
9935 /* Fold an expression like: "foo"[2].
9936 This is not done in fold so it won't happen inside &.
9937 Don't fold if this is for wide characters since it's too
9938 difficult to do correctly and this is a very rare case. */
9939
9940 if (modifier != EXPAND_CONST_ADDRESS
9941 && modifier != EXPAND_INITIALIZER
9942 && modifier != EXPAND_MEMORY)
9943 {
9944 tree t = fold_read_from_constant_string (exp);
9945
9946 if (t)
9947 return expand_expr (t, target, tmode, modifier);
9948 }
9949
9950 /* If this is a constant index into a constant array,
9951 just get the value from the array. Handle both the cases when
9952 we have an explicit constructor and when our operand is a variable
9953 that was declared const. */
9954
9955 if (modifier != EXPAND_CONST_ADDRESS
9956 && modifier != EXPAND_INITIALIZER
9957 && modifier != EXPAND_MEMORY
9958 && TREE_CODE (array) == CONSTRUCTOR
9959 && ! TREE_SIDE_EFFECTS (array)
9960 && TREE_CODE (index) == INTEGER_CST)
9961 {
9962 unsigned HOST_WIDE_INT ix;
9963 tree field, value;
9964
9965 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9966 field, value)
9967 if (tree_int_cst_equal (field, index))
9968 {
9969 if (!TREE_SIDE_EFFECTS (value))
9970 return expand_expr (fold (value), target, tmode, modifier);
9971 break;
9972 }
9973 }
9974
9975 else if (optimize >= 1
9976 && modifier != EXPAND_CONST_ADDRESS
9977 && modifier != EXPAND_INITIALIZER
9978 && modifier != EXPAND_MEMORY
9979 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9980 && TREE_CODE (index) == INTEGER_CST
9981 && (TREE_CODE (array) == VAR_DECL
9982 || TREE_CODE (array) == CONST_DECL)
9983 && (init = ctor_for_folding (array)) != error_mark_node)
9984 {
9985 if (init == NULL_TREE)
9986 {
9987 tree value = build_zero_cst (type);
9988 if (TREE_CODE (value) == CONSTRUCTOR)
9989 {
9990 /* If VALUE is a CONSTRUCTOR, this optimization is only
9991 useful if this doesn't store the CONSTRUCTOR into
9992 memory. If it does, it is more efficient to just
9993 load the data from the array directly. */
9994 rtx ret = expand_constructor (value, target,
9995 modifier, true);
9996 if (ret == NULL_RTX)
9997 value = NULL_TREE;
9998 }
9999
10000 if (value)
10001 return expand_expr (value, target, tmode, modifier);
10002 }
10003 else if (TREE_CODE (init) == CONSTRUCTOR)
10004 {
10005 unsigned HOST_WIDE_INT ix;
10006 tree field, value;
10007
10008 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10009 field, value)
10010 if (tree_int_cst_equal (field, index))
10011 {
10012 if (TREE_SIDE_EFFECTS (value))
10013 break;
10014
10015 if (TREE_CODE (value) == CONSTRUCTOR)
10016 {
10017 /* If VALUE is a CONSTRUCTOR, this
10018 optimization is only useful if
10019 this doesn't store the CONSTRUCTOR
10020 into memory. If it does, it is more
10021 efficient to just load the data from
10022 the array directly. */
10023 rtx ret = expand_constructor (value, target,
10024 modifier, true);
10025 if (ret == NULL_RTX)
10026 break;
10027 }
10028
10029 return
10030 expand_expr (fold (value), target, tmode, modifier);
10031 }
10032 }
10033 else if (TREE_CODE (init) == STRING_CST)
10034 {
10035 tree low_bound = array_ref_low_bound (exp);
10036 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10037
10038 /* Optimize the special case of a zero lower bound.
10039
10040 We convert the lower bound to sizetype to avoid problems
10041 with constant folding. E.g. suppose the lower bound is
10042 1 and its mode is QI. Without the conversion
10043 (ARRAY + (INDEX - (unsigned char)1))
10044 becomes
10045 (ARRAY + (-(unsigned char)1) + INDEX)
10046 which becomes
10047 (ARRAY + 255 + INDEX). Oops! */
10048 if (!integer_zerop (low_bound))
10049 index1 = size_diffop_loc (loc, index1,
10050 fold_convert_loc (loc, sizetype,
10051 low_bound));
10052
10053 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10054 {
10055 tree type = TREE_TYPE (TREE_TYPE (init));
10056 machine_mode mode = TYPE_MODE (type);
10057
10058 if (GET_MODE_CLASS (mode) == MODE_INT
10059 && GET_MODE_SIZE (mode) == 1)
10060 return gen_int_mode (TREE_STRING_POINTER (init)
10061 [TREE_INT_CST_LOW (index1)],
10062 mode);
10063 }
10064 }
10065 }
10066 }
10067 goto normal_inner_ref;
10068
10069 case COMPONENT_REF:
10070 /* If the operand is a CONSTRUCTOR, we can just extract the
10071 appropriate field if it is present. */
10072 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10073 {
10074 unsigned HOST_WIDE_INT idx;
10075 tree field, value;
10076
10077 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10078 idx, field, value)
10079 if (field == treeop1
10080 /* We can normally use the value of the field in the
10081 CONSTRUCTOR. However, if this is a bitfield in
10082 an integral mode that we can fit in a HOST_WIDE_INT,
10083 we must mask only the number of bits in the bitfield,
10084 since this is done implicitly by the constructor. If
10085 the bitfield does not meet either of those conditions,
10086 we can't do this optimization. */
10087 && (! DECL_BIT_FIELD (field)
10088 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10089 && (GET_MODE_PRECISION (DECL_MODE (field))
10090 <= HOST_BITS_PER_WIDE_INT))))
10091 {
10092 if (DECL_BIT_FIELD (field)
10093 && modifier == EXPAND_STACK_PARM)
10094 target = 0;
10095 op0 = expand_expr (value, target, tmode, modifier);
10096 if (DECL_BIT_FIELD (field))
10097 {
10098 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10099 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10100
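/* As a worked sketch, suppose bitsize is 8, imode is HImode and the
   expanded value is 0x1234: the unsigned branch masks with
   (1 << 8) - 1 = 0xff and yields 0x34, while the signed branch shifts
   left by 16 - 8 = 8 (giving 0x3400) and arithmetically right by 8,
   yielding 0x34, or a sign-extended negative value had bit 7 been
   set.  */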
10101 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10102 {
10103 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10104 imode);
10105 op0 = expand_and (imode, op0, op1, target);
10106 }
10107 else
10108 {
10109 int count = GET_MODE_PRECISION (imode) - bitsize;
10110
10111 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10112 target, 0);
10113 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10114 target, 0);
10115 }
10116 }
10117
10118 return op0;
10119 }
10120 }
10121 goto normal_inner_ref;
10122
10123 case BIT_FIELD_REF:
10124 case ARRAY_RANGE_REF:
10125 normal_inner_ref:
10126 {
10127 machine_mode mode1, mode2;
10128 HOST_WIDE_INT bitsize, bitpos;
10129 tree offset;
10130 int volatilep = 0, must_force_mem;
10131 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10132 &mode1, &unsignedp, &volatilep, true);
10133 rtx orig_op0, memloc;
10134 bool clear_mem_expr = false;
10135
10136 /* If we got back the original object, something is wrong. Perhaps
10137 we are evaluating an expression too early. In any event, don't
10138 infinitely recurse. */
10139 gcc_assert (tem != exp);
10140
10141 /* If TEM's type is a union of variable size, pass TARGET to the inner
10142 computation, since it will need a temporary and TARGET is known
10143 to be suitable for that. This occurs in unchecked conversion in Ada. */
10144 orig_op0 = op0
10145 = expand_expr_real (tem,
10146 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10147 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10148 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10149 != INTEGER_CST)
10150 && modifier != EXPAND_STACK_PARM
10151 ? target : NULL_RTX),
10152 VOIDmode,
10153 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10154 NULL, true);
10155
10156 /* If the field has a mode, we want to access it in the
10157 field's mode, not the computed mode.
10158 If a MEM has VOIDmode (external with incomplete type),
10159 use BLKmode for it instead. */
10160 if (MEM_P (op0))
10161 {
10162 if (mode1 != VOIDmode)
10163 op0 = adjust_address (op0, mode1, 0);
10164 else if (GET_MODE (op0) == VOIDmode)
10165 op0 = adjust_address (op0, BLKmode, 0);
10166 }
10167
10168 mode2
10169 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10170
10171 /* If we have either an offset, a BLKmode result, or a reference
10172 outside the underlying object, we must force it to memory.
10173 Such a case can occur in Ada if we have unchecked conversion
10174 of an expression from a scalar type to an aggregate type or
10175 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10176 passed a partially uninitialized object or a view-conversion
10177 to a larger size. */
10178 must_force_mem = (offset
10179 || mode1 == BLKmode
10180 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10181
10182 /* Handle CONCAT first. */
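/* For instance, if OP0 is a DCmode CONCAT of two DFmode pseudos, a
   reference to the imaginary part arrives with bitpos == 64 and
   bitsize == 64, so the arm below that matches bitpos against the
   first part's width simply rewrites OP0 to XEXP (op0, 1) and resets
   bitpos to 0.  */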
10183 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10184 {
10185 if (bitpos == 0
10186 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10187 return op0;
10188 if (bitpos == 0
10189 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10190 && bitsize)
10191 {
10192 op0 = XEXP (op0, 0);
10193 mode2 = GET_MODE (op0);
10194 }
10195 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10196 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10197 && bitpos
10198 && bitsize)
10199 {
10200 op0 = XEXP (op0, 1);
10201 bitpos = 0;
10202 mode2 = GET_MODE (op0);
10203 }
10204 else
10205 /* Otherwise force into memory. */
10206 must_force_mem = 1;
10207 }
10208
10209 /* If this is a constant, put it in a register if it is a legitimate
10210 constant and we don't need a memory reference. */
10211 if (CONSTANT_P (op0)
10212 && mode2 != BLKmode
10213 && targetm.legitimate_constant_p (mode2, op0)
10214 && !must_force_mem)
10215 op0 = force_reg (mode2, op0);
10216
10217 /* Otherwise, if this is a constant, try to force it to the constant
10218 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10219 is a legitimate constant. */
10220 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10221 op0 = validize_mem (memloc);
10222
10223 /* Otherwise, if this is a constant or the object is not in memory
10224 and needs to be, put it there. */
10225 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10226 {
10227 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10228 emit_move_insn (memloc, op0);
10229 op0 = memloc;
10230 clear_mem_expr = true;
10231 }
10232
10233 if (offset)
10234 {
10235 machine_mode address_mode;
10236 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10237 EXPAND_SUM);
10238
10239 gcc_assert (MEM_P (op0));
10240
10241 address_mode = get_address_mode (op0);
10242 if (GET_MODE (offset_rtx) != address_mode)
10243 {
10244 /* We cannot be sure that the RTL in offset_rtx is valid outside
10245 of a memory address context, so force it into a register
10246 before attempting to convert it to the desired mode. */
10247 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10248 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10249 }
10250
10251 /* See the comment in expand_assignment for the rationale. */
10252 if (mode1 != VOIDmode
10253 && bitpos != 0
10254 && bitsize > 0
10255 && (bitpos % bitsize) == 0
10256 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10257 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10258 {
10259 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10260 bitpos = 0;
10261 }
10262
10263 op0 = offset_address (op0, offset_rtx,
10264 highest_pow2_factor (offset));
10265 }
10266
10267 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10268 record its alignment as BIGGEST_ALIGNMENT. */
10269 if (MEM_P (op0) && bitpos == 0 && offset != 0
10270 && is_aligning_offset (offset, tem))
10271 set_mem_align (op0, BIGGEST_ALIGNMENT);
10272
10273 /* Don't forget about volatility even if this is a bitfield. */
10274 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10275 {
10276 if (op0 == orig_op0)
10277 op0 = copy_rtx (op0);
10278
10279 MEM_VOLATILE_P (op0) = 1;
10280 }
10281
10282 /* In cases where an aligned union has an unaligned object
10283 as a field, we might be extracting a BLKmode value from
10284 an integer-mode (e.g., SImode) object. Handle this case
10285 by doing the extract into an object as wide as the field
10286 (which we know to be the width of a basic mode), then
10287 storing into memory, and changing the mode to BLKmode. */
10288 if (mode1 == VOIDmode
10289 || REG_P (op0) || GET_CODE (op0) == SUBREG
10290 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10291 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10292 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10293 && modifier != EXPAND_CONST_ADDRESS
10294 && modifier != EXPAND_INITIALIZER
10295 && modifier != EXPAND_MEMORY)
10296 /* If the bitfield is volatile and the bitsize
10297 is narrower than the access size of the bitfield,
10298 we need to extract bitfields from the access. */
10299 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10300 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10301 && mode1 != BLKmode
10302 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10303 /* If the field isn't aligned enough to fetch as a memref,
10304 fetch it as a bit field. */
10305 || (mode1 != BLKmode
10306 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10307 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10308 || (MEM_P (op0)
10309 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10310 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10311 && modifier != EXPAND_MEMORY
10312 && ((modifier == EXPAND_CONST_ADDRESS
10313 || modifier == EXPAND_INITIALIZER)
10314 ? STRICT_ALIGNMENT
10315 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10316 || (bitpos % BITS_PER_UNIT != 0)))
10317 /* If the type and the field are a constant size and the
10318 size of the type isn't the same size as the bitfield,
10319 we must use bitfield operations. */
10320 || (bitsize >= 0
10321 && TYPE_SIZE (TREE_TYPE (exp))
10322 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10323 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10324 bitsize)))
10325 {
10326 machine_mode ext_mode = mode;
10327
10328 if (ext_mode == BLKmode
10329 && ! (target != 0 && MEM_P (op0)
10330 && MEM_P (target)
10331 && bitpos % BITS_PER_UNIT == 0))
10332 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10333
10334 if (ext_mode == BLKmode)
10335 {
10336 if (target == 0)
10337 target = assign_temp (type, 1, 1);
10338
10339 /* ??? Unlike the similar test a few lines below, this one is
10340 very likely obsolete. */
10341 if (bitsize == 0)
10342 return target;
10343
10344 /* In this case, BITPOS must start at a byte boundary and
10345 TARGET, if specified, must be a MEM. */
10346 gcc_assert (MEM_P (op0)
10347 && (!target || MEM_P (target))
10348 && !(bitpos % BITS_PER_UNIT));
10349
10350 emit_block_move (target,
10351 adjust_address (op0, VOIDmode,
10352 bitpos / BITS_PER_UNIT),
10353 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10354 / BITS_PER_UNIT),
10355 (modifier == EXPAND_STACK_PARM
10356 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10357
10358 return target;
10359 }
10360
10361 /* If we have nothing to extract, the result will be 0 for targets
10362 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10363 return 0 for the sake of consistency, as reading a zero-sized
10364 bitfield is valid in Ada and the value is fully specified. */
10365 if (bitsize == 0)
10366 return const0_rtx;
10367
10368 op0 = validize_mem (op0);
10369
10370 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10371 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10372
10373 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10374 (modifier == EXPAND_STACK_PARM
10375 ? NULL_RTX : target),
10376 ext_mode, ext_mode);
10377
10378 /* If the result is a record type and BITSIZE is narrower than
10379 the mode of OP0, an integral mode, and this is a big endian
10380 machine, we must put the field into the high-order bits. */
10381 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10382 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10383 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10384 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10385 GET_MODE_BITSIZE (GET_MODE (op0))
10386 - bitsize, op0, 1);
10387
10388 /* If the result type is BLKmode, store the data into a temporary
10389 of the appropriate type, but with the mode corresponding to the
10390 mode for the data we have (op0's mode). */
10391 if (mode == BLKmode)
10392 {
10393 rtx new_rtx
10394 = assign_stack_temp_for_type (ext_mode,
10395 GET_MODE_BITSIZE (ext_mode),
10396 type);
10397 emit_move_insn (new_rtx, op0);
10398 op0 = copy_rtx (new_rtx);
10399 PUT_MODE (op0, BLKmode);
10400 }
10401
10402 return op0;
10403 }
10404
10405 /* If the result is BLKmode, use that to access the object
10406 now as well. */
10407 if (mode == BLKmode)
10408 mode1 = BLKmode;
10409
10410 /* Get a reference to just this component. */
10411 if (modifier == EXPAND_CONST_ADDRESS
10412 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10413 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10414 else
10415 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10416
10417 if (op0 == orig_op0)
10418 op0 = copy_rtx (op0);
10419
10420 set_mem_attributes (op0, exp, 0);
10421
10422 if (REG_P (XEXP (op0, 0)))
10423 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10424
10425 /* If op0 is a temporary because the original expression was forced
10426 to memory, clear MEM_EXPR so that the original expression cannot
10427 be marked as addressable through MEM_EXPR of the temporary. */
10428 if (clear_mem_expr)
10429 set_mem_expr (op0, NULL_TREE);
10430
10431 MEM_VOLATILE_P (op0) |= volatilep;
10432 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10433 || modifier == EXPAND_CONST_ADDRESS
10434 || modifier == EXPAND_INITIALIZER)
10435 return op0;
10436
10437 if (target == 0)
10438 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10439
10440 convert_move (target, op0, unsignedp);
10441 return target;
10442 }
10443
10444 case OBJ_TYPE_REF:
10445 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10446
10447 case CALL_EXPR:
10448 /* All valid uses of __builtin_va_arg_pack () are removed during
10449 inlining. */
10450 if (CALL_EXPR_VA_ARG_PACK (exp))
10451 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10452 {
10453 tree fndecl = get_callee_fndecl (exp), attr;
10454
10455 if (fndecl
10456 && (attr = lookup_attribute ("error",
10457 DECL_ATTRIBUTES (fndecl))) != NULL)
10458 error ("%Kcall to %qs declared with attribute error: %s",
10459 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10460 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10461 if (fndecl
10462 && (attr = lookup_attribute ("warning",
10463 DECL_ATTRIBUTES (fndecl))) != NULL)
10464 warning_at (tree_nonartificial_location (exp),
10465 0, "%Kcall to %qs declared with attribute warning: %s",
10466 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10467 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10468
10469 /* Check for a built-in function. */
10470 if (fndecl && DECL_BUILT_IN (fndecl))
10471 {
10472 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10473 if (CALL_WITH_BOUNDS_P (exp))
10474 return expand_builtin_with_bounds (exp, target, subtarget,
10475 tmode, ignore);
10476 else
10477 return expand_builtin (exp, target, subtarget, tmode, ignore);
10478 }
10479 }
10480 return expand_call (exp, target, ignore);
10481
10482 case VIEW_CONVERT_EXPR:
10483 op0 = NULL_RTX;
10484
10485 /* If we are converting to BLKmode, try to avoid an intermediate
10486 temporary by fetching an inner memory reference. */
10487 if (mode == BLKmode
10488 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10489 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10490 && handled_component_p (treeop0))
10491 {
10492 machine_mode mode1;
10493 HOST_WIDE_INT bitsize, bitpos;
10494 tree offset;
10495 int unsignedp;
10496 int volatilep = 0;
10497 tree tem
10498 = get_inner_reference (treeop0, &bitsize, &bitpos,
10499 &offset, &mode1, &unsignedp, &volatilep,
10500 true);
10501 rtx orig_op0;
10502
10503 /* ??? We should work harder and deal with non-zero offsets. */
10504 if (!offset
10505 && (bitpos % BITS_PER_UNIT) == 0
10506 && bitsize >= 0
10507 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10508 {
10509 /* See the normal_inner_ref case for the rationale. */
10510 orig_op0
10511 = expand_expr_real (tem,
10512 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10513 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10514 != INTEGER_CST)
10515 && modifier != EXPAND_STACK_PARM
10516 ? target : NULL_RTX),
10517 VOIDmode,
10518 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10519 NULL, true);
10520
10521 if (MEM_P (orig_op0))
10522 {
10523 op0 = orig_op0;
10524
10525 /* Get a reference to just this component. */
10526 if (modifier == EXPAND_CONST_ADDRESS
10527 || modifier == EXPAND_SUM
10528 || modifier == EXPAND_INITIALIZER)
10529 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10530 else
10531 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10532
10533 if (op0 == orig_op0)
10534 op0 = copy_rtx (op0);
10535
10536 set_mem_attributes (op0, treeop0, 0);
10537 if (REG_P (XEXP (op0, 0)))
10538 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10539
10540 MEM_VOLATILE_P (op0) |= volatilep;
10541 }
10542 }
10543 }
10544
10545 if (!op0)
10546 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10547 NULL, inner_reference_p);
10548
10549 /* If the input and output modes are both the same, we are done. */
10550 if (mode == GET_MODE (op0))
10551 ;
10552       /* If neither mode is BLKmode and both modes are the same size,
10553 then we can use gen_lowpart. */
10554 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10555 && (GET_MODE_PRECISION (mode)
10556 == GET_MODE_PRECISION (GET_MODE (op0)))
10557 && !COMPLEX_MODE_P (GET_MODE (op0)))
10558 {
10559 if (GET_CODE (op0) == SUBREG)
10560 op0 = force_reg (GET_MODE (op0), op0);
10561 temp = gen_lowpart_common (mode, op0);
10562 if (temp)
10563 op0 = temp;
10564 else
10565 {
10566 if (!REG_P (op0) && !MEM_P (op0))
10567 op0 = force_reg (GET_MODE (op0), op0);
10568 op0 = gen_lowpart (mode, op0);
10569 }
10570 }
10571 /* If both types are integral, convert from one mode to the other. */
10572 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10573 op0 = convert_modes (mode, GET_MODE (op0), op0,
10574 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10575 /* If the output type is a bit-field type, do an extraction. */
10576 else if (reduce_bit_field)
10577 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10578 TYPE_UNSIGNED (type), NULL_RTX,
10579 mode, mode);
10580 /* As a last resort, spill op0 to memory, and reload it in a
10581 different mode. */
10582 else if (!MEM_P (op0))
10583 {
10584 /* If the operand is not a MEM, force it into memory. Since we
10585 are going to be changing the mode of the MEM, don't call
10586 force_const_mem for constants because we don't allow pool
10587 constants to change mode. */
10588 tree inner_type = TREE_TYPE (treeop0);
10589
10590 gcc_assert (!TREE_ADDRESSABLE (exp));
10591
10592 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10593 target
10594 = assign_stack_temp_for_type
10595 (TYPE_MODE (inner_type),
10596 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10597
10598 emit_move_insn (target, op0);
10599 op0 = target;
10600 }
10601
10602 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10603 output type is such that the operand is known to be aligned, indicate
10604 that it is. Otherwise, we need only be concerned about alignment for
10605 non-BLKmode results. */
10606 if (MEM_P (op0))
10607 {
10608 enum insn_code icode;
10609
10610 if (TYPE_ALIGN_OK (type))
10611 {
10612 /* ??? Copying the MEM without substantially changing it might
10613 run afoul of the code handling volatile memory references in
10614 store_expr, which assumes that TARGET is returned unmodified
10615 if it has been used. */
10616 op0 = copy_rtx (op0);
10617 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10618 }
10619 else if (modifier != EXPAND_WRITE
10620 && modifier != EXPAND_MEMORY
10621 && !inner_reference_p
10622 && mode != BLKmode
10623 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10624 {
10625 	      /* If the target has special handling for unaligned loads of
10626 		 this mode, use it.  */
10627 if ((icode = optab_handler (movmisalign_optab, mode))
10628 != CODE_FOR_nothing)
10629 {
10630 rtx reg, insn;
10631
10632 op0 = adjust_address (op0, mode, 0);
10633 /* We've already validated the memory, and we're creating a
10634 new pseudo destination. The predicates really can't
10635 fail. */
10636 reg = gen_reg_rtx (mode);
10637
10638 /* Nor can the insn generator. */
10639 insn = GEN_FCN (icode) (reg, op0);
10640 emit_insn (insn);
10641 return reg;
10642 }
10643 else if (STRICT_ALIGNMENT)
10644 {
10645 tree inner_type = TREE_TYPE (treeop0);
10646 HOST_WIDE_INT temp_size
10647 = MAX (int_size_in_bytes (inner_type),
10648 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10649 rtx new_rtx
10650 = assign_stack_temp_for_type (mode, temp_size, type);
10651 rtx new_with_op0_mode
10652 = adjust_address (new_rtx, GET_MODE (op0), 0);
10653
10654 gcc_assert (!TREE_ADDRESSABLE (exp));
10655
10656 if (GET_MODE (op0) == BLKmode)
10657 emit_block_move (new_with_op0_mode, op0,
10658 GEN_INT (GET_MODE_SIZE (mode)),
10659 (modifier == EXPAND_STACK_PARM
10660 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10661 else
10662 emit_move_insn (new_with_op0_mode, op0);
10663
10664 op0 = new_rtx;
10665 }
10666 }
10667
10668 op0 = adjust_address (op0, mode, 0);
10669 }
10670
10671 return op0;
10672
10673 case MODIFY_EXPR:
10674 {
10675 tree lhs = treeop0;
10676 tree rhs = treeop1;
10677 gcc_assert (ignore);
10678
10679 	/* Check for |= or &= of a bitfield of size one into another bitfield
10680 	   of size one.  In this case (unless we need the result of the
10681 	   assignment) we can do this more efficiently with a test
10682 	   followed by an assignment, if necessary.
10683
10684 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10685 things change so we do, this code should be enhanced to
10686 support it. */
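	/* Illustrative example (hypothetical one-bit fields s.f and t.f,
	   not taken from any particular source): the transformation below
	   expands

	      s.f |= t.f;

	   roughly as

	      if (t.f)
	        s.f = 1;

	   and s.f &= t.f; as the analogous jump around a store of 0,
	   avoiding a read-modify-write of the destination bitfield.  */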
10687 if (TREE_CODE (lhs) == COMPONENT_REF
10688 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10689 || TREE_CODE (rhs) == BIT_AND_EXPR)
10690 && TREE_OPERAND (rhs, 0) == lhs
10691 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10692 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10693 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10694 {
10695 rtx_code_label *label = gen_label_rtx ();
10696 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10697 do_jump (TREE_OPERAND (rhs, 1),
10698 value ? label : 0,
10699 value ? 0 : label, -1);
10700 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10701 false);
10702 do_pending_stack_adjust ();
10703 emit_label (label);
10704 return const0_rtx;
10705 }
10706
10707 expand_assignment (lhs, rhs, false);
10708 return const0_rtx;
10709 }
10710
10711 case ADDR_EXPR:
10712 return expand_expr_addr_expr (exp, target, tmode, modifier);
10713
10714 case REALPART_EXPR:
10715 op0 = expand_normal (treeop0);
10716 return read_complex_part (op0, false);
10717
10718 case IMAGPART_EXPR:
10719 op0 = expand_normal (treeop0);
10720 return read_complex_part (op0, true);
10721
10722 case RETURN_EXPR:
10723 case LABEL_EXPR:
10724 case GOTO_EXPR:
10725 case SWITCH_EXPR:
10726 case ASM_EXPR:
10727 /* Expanded in cfgexpand.c. */
10728 gcc_unreachable ();
10729
10730 case TRY_CATCH_EXPR:
10731 case CATCH_EXPR:
10732 case EH_FILTER_EXPR:
10733 case TRY_FINALLY_EXPR:
10734 /* Lowered by tree-eh.c. */
10735 gcc_unreachable ();
10736
10737 case WITH_CLEANUP_EXPR:
10738 case CLEANUP_POINT_EXPR:
10739 case TARGET_EXPR:
10740 case CASE_LABEL_EXPR:
10741 case VA_ARG_EXPR:
10742 case BIND_EXPR:
10743 case INIT_EXPR:
10744 case CONJ_EXPR:
10745 case COMPOUND_EXPR:
10746 case PREINCREMENT_EXPR:
10747 case PREDECREMENT_EXPR:
10748 case POSTINCREMENT_EXPR:
10749 case POSTDECREMENT_EXPR:
10750 case LOOP_EXPR:
10751 case EXIT_EXPR:
10752 case COMPOUND_LITERAL_EXPR:
10753 /* Lowered by gimplify.c. */
10754 gcc_unreachable ();
10755
10756 case FDESC_EXPR:
10757     case FDESC_EXPR:
      /* Function descriptors are not valid except as
10758 initialization constants, and should not be expanded. */
10759 gcc_unreachable ();
10760
10761 case WITH_SIZE_EXPR:
10762 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10763 have pulled out the size to use in whatever context it needed. */
10764 return expand_expr_real (treeop0, original_target, tmode,
10765 modifier, alt_rtl, inner_reference_p);
10766
10767 default:
10768 return expand_expr_real_2 (&ops, target, tmode, modifier);
10769 }
10770 }
10771 \f
10772 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10773 signedness of TYPE), possibly returning the result in TARGET. */
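/* For instance (an illustrative sketch only), reducing an SImode value X
   to an unsigned 5-bit precision amounts to X & 0x1f, while reducing it
   to a signed 5-bit precision is done as (X << 27) >> 27 with an
   arithmetic right shift, so the result is sign-extended from bit 4.  */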
10774 static rtx
10775 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10776 {
10777 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10778 if (target && GET_MODE (target) != GET_MODE (exp))
10779 target = 0;
10780 /* For constant values, reduce using build_int_cst_type. */
10781 if (CONST_INT_P (exp))
10782 {
10783 HOST_WIDE_INT value = INTVAL (exp);
10784 tree t = build_int_cst_type (type, value);
10785 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10786 }
10787 else if (TYPE_UNSIGNED (type))
10788 {
10789 machine_mode mode = GET_MODE (exp);
10790 rtx mask = immed_wide_int_const
10791 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10792 return expand_and (mode, exp, mask, target);
10793 }
10794 else
10795 {
10796 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10797 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10798 exp, count, target, 0);
10799 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10800 exp, count, target, 0);
10801 }
10802 }
10803 \f
10804 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
10805    when applied to the address of EXP, produces an address known to be
10806 aligned more than BIGGEST_ALIGNMENT. */
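/* For example (illustrative), an OFFSET of the form

      (- (sizetype) &EXP) & (ALIGN - 1)

   matches when, roughly, ALIGN is a power of 2 larger than
   BIGGEST_ALIGNMENT / BITS_PER_UNIT; adding such an offset to &EXP
   rounds the address up to the next ALIGN-byte boundary, so the sum is
   known to be at least ALIGN-aligned.  */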
10807
10808 static int
10809 is_aligning_offset (const_tree offset, const_tree exp)
10810 {
10811 /* Strip off any conversions. */
10812 while (CONVERT_EXPR_P (offset))
10813 offset = TREE_OPERAND (offset, 0);
10814
10815 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10816    a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
10817 if (TREE_CODE (offset) != BIT_AND_EXPR
10818 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10819 || compare_tree_int (TREE_OPERAND (offset, 1),
10820 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10821 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10822 return 0;
10823
10824 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10825 It must be NEGATE_EXPR. Then strip any more conversions. */
10826 offset = TREE_OPERAND (offset, 0);
10827 while (CONVERT_EXPR_P (offset))
10828 offset = TREE_OPERAND (offset, 0);
10829
10830 if (TREE_CODE (offset) != NEGATE_EXPR)
10831 return 0;
10832
10833 offset = TREE_OPERAND (offset, 0);
10834 while (CONVERT_EXPR_P (offset))
10835 offset = TREE_OPERAND (offset, 0);
10836
10837 /* This must now be the address of EXP. */
10838 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10839 }
10840 \f
10841 /* Return the tree node if ARG corresponds to a string constant, or zero
10842 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10843 in bytes within the string that ARG is accessing. The type of the
10844 offset will be `sizetype'. */
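/* For instance (illustrative), given ARG of the form &"hello"[2] or
   "hello" + 2, this returns the STRING_CST for "hello" and sets
   *PTR_OFFSET to a sizetype constant 2; a VAR_DECL or CONST_DECL whose
   initializer is a string literal is handled similarly, with the
   initializer returned instead.  */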
10845
10846 tree
10847 string_constant (tree arg, tree *ptr_offset)
10848 {
10849 tree array, offset, lower_bound;
10850 STRIP_NOPS (arg);
10851
10852 if (TREE_CODE (arg) == ADDR_EXPR)
10853 {
10854 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10855 {
10856 *ptr_offset = size_zero_node;
10857 return TREE_OPERAND (arg, 0);
10858 }
10859 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10860 {
10861 array = TREE_OPERAND (arg, 0);
10862 offset = size_zero_node;
10863 }
10864 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10865 {
10866 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10867 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10868 if (TREE_CODE (array) != STRING_CST
10869 && TREE_CODE (array) != VAR_DECL)
10870 return 0;
10871
10872 /* Check if the array has a nonzero lower bound. */
10873 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10874 if (!integer_zerop (lower_bound))
10875 {
10876 /* If the offset and base aren't both constants, return 0. */
10877 if (TREE_CODE (lower_bound) != INTEGER_CST)
10878 return 0;
10879 if (TREE_CODE (offset) != INTEGER_CST)
10880 return 0;
10881 /* Adjust offset by the lower bound. */
10882 offset = size_diffop (fold_convert (sizetype, offset),
10883 fold_convert (sizetype, lower_bound));
10884 }
10885 }
10886 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10887 {
10888 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10889 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10890 if (TREE_CODE (array) != ADDR_EXPR)
10891 return 0;
10892 array = TREE_OPERAND (array, 0);
10893 if (TREE_CODE (array) != STRING_CST
10894 && TREE_CODE (array) != VAR_DECL)
10895 return 0;
10896 }
10897 else
10898 return 0;
10899 }
10900 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10901 {
10902 tree arg0 = TREE_OPERAND (arg, 0);
10903 tree arg1 = TREE_OPERAND (arg, 1);
10904
10905 STRIP_NOPS (arg0);
10906 STRIP_NOPS (arg1);
10907
10908 if (TREE_CODE (arg0) == ADDR_EXPR
10909 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10910 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10911 {
10912 array = TREE_OPERAND (arg0, 0);
10913 offset = arg1;
10914 }
10915 else if (TREE_CODE (arg1) == ADDR_EXPR
10916 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10917 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10918 {
10919 array = TREE_OPERAND (arg1, 0);
10920 offset = arg0;
10921 }
10922 else
10923 return 0;
10924 }
10925 else
10926 return 0;
10927
10928 if (TREE_CODE (array) == STRING_CST)
10929 {
10930 *ptr_offset = fold_convert (sizetype, offset);
10931 return array;
10932 }
10933 else if (TREE_CODE (array) == VAR_DECL
10934 || TREE_CODE (array) == CONST_DECL)
10935 {
10936 int length;
10937 tree init = ctor_for_folding (array);
10938
10939 /* Variables initialized to string literals can be handled too. */
10940 if (init == error_mark_node
10941 || !init
10942 || TREE_CODE (init) != STRING_CST)
10943 return 0;
10944
10945 /* Avoid const char foo[4] = "abcde"; */
10946 if (DECL_SIZE_UNIT (array) == NULL_TREE
10947 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10948 || (length = TREE_STRING_LENGTH (init)) <= 0
10949 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10950 return 0;
10951
10952       /* If the variable is bigger than the string literal, OFFSET must be
10953 	 constant and inside the bounds of the string literal.  */
10954 offset = fold_convert (sizetype, offset);
10955 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10956 && (! tree_fits_uhwi_p (offset)
10957 || compare_tree_int (offset, length) >= 0))
10958 return 0;
10959
10960 *ptr_offset = offset;
10961 return init;
10962 }
10963
10964 return 0;
10965 }
10966 \f
10967 /* Generate code to calculate OPS, an exploded expression,
10968    using a store-flag instruction, and return an rtx for the result.
10969 OPS reflects a comparison.
10970
10971 If TARGET is nonzero, store the result there if convenient.
10972
10973 Return zero if there is no suitable set-flag instruction
10974 available on this machine.
10975
10976 Once expand_expr has been called on the arguments of the comparison,
10977 we are committed to doing the store flag, since it is not safe to
10978 re-evaluate the expression. We emit the store-flag insn by calling
10979 emit_store_flag, but only expand the arguments if we have a reason
10980 to believe that emit_store_flag will be successful. If we think that
10981 it will, but it isn't, we have to simulate the store-flag with a
10982 set/jump/set sequence. */
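/* Illustrative sketch: for a signed comparison X < Y this boils down to

      emit_store_flag_force (target, LT, op0, op1, operand_mode, 0, 1);

   which, when the target has a usable cstore pattern, sets TARGET to 1
   if the comparison holds and to 0 otherwise (or to -1/0 when the
   result type is a one-bit signed type, see the final argument passed
   below).  */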
10983
10984 static rtx
10985 do_store_flag (sepops ops, rtx target, machine_mode mode)
10986 {
10987 enum rtx_code code;
10988 tree arg0, arg1, type;
10989 tree tem;
10990 machine_mode operand_mode;
10991 int unsignedp;
10992 rtx op0, op1;
10993 rtx subtarget = target;
10994 location_t loc = ops->location;
10995
10996 arg0 = ops->op0;
10997 arg1 = ops->op1;
10998
10999 /* Don't crash if the comparison was erroneous. */
11000 if (arg0 == error_mark_node || arg1 == error_mark_node)
11001 return const0_rtx;
11002
11003 type = TREE_TYPE (arg0);
11004 operand_mode = TYPE_MODE (type);
11005 unsignedp = TYPE_UNSIGNED (type);
11006
11007 /* We won't bother with BLKmode store-flag operations because it would mean
11008 passing a lot of information to emit_store_flag. */
11009 if (operand_mode == BLKmode)
11010 return 0;
11011
11012 /* We won't bother with store-flag operations involving function pointers
11013 when function pointers must be canonicalized before comparisons. */
11014 #ifdef HAVE_canonicalize_funcptr_for_compare
11015 if (HAVE_canonicalize_funcptr_for_compare
11016 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
11017 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
11018 == FUNCTION_TYPE))
11019 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
11020 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
11021 == FUNCTION_TYPE))))
11022 return 0;
11023 #endif
11024
11025 STRIP_NOPS (arg0);
11026 STRIP_NOPS (arg1);
11027
11028   /* For vector-typed comparisons, emit code to generate the desired
11029 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
11030 expander for this. */
11031 if (TREE_CODE (ops->type) == VECTOR_TYPE)
11032 {
11033 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11034 tree if_true = constant_boolean_node (true, ops->type);
11035 tree if_false = constant_boolean_node (false, ops->type);
11036 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
11037 }
11038
11039 /* Get the rtx comparison code to use. We know that EXP is a comparison
11040 operation of some type. Some comparisons against 1 and -1 can be
11041 converted to comparisons with zero. Do so here so that the tests
11042 below will be aware that we have a comparison with zero. These
11043 tests will not catch constants in the first operand, but constants
11044 are rarely passed as the first operand. */
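  /* For example, X < 1 becomes X <= 0, X >= 1 becomes X > 0, and for
     signed X, X <= -1 becomes X < 0 while X > -1 becomes X >= 0.  */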
11045
11046 switch (ops->code)
11047 {
11048 case EQ_EXPR:
11049 code = EQ;
11050 break;
11051 case NE_EXPR:
11052 code = NE;
11053 break;
11054 case LT_EXPR:
11055 if (integer_onep (arg1))
11056 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11057 else
11058 code = unsignedp ? LTU : LT;
11059 break;
11060 case LE_EXPR:
11061 if (! unsignedp && integer_all_onesp (arg1))
11062 arg1 = integer_zero_node, code = LT;
11063 else
11064 code = unsignedp ? LEU : LE;
11065 break;
11066 case GT_EXPR:
11067 if (! unsignedp && integer_all_onesp (arg1))
11068 arg1 = integer_zero_node, code = GE;
11069 else
11070 code = unsignedp ? GTU : GT;
11071 break;
11072 case GE_EXPR:
11073 if (integer_onep (arg1))
11074 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11075 else
11076 code = unsignedp ? GEU : GE;
11077 break;
11078
11079 case UNORDERED_EXPR:
11080 code = UNORDERED;
11081 break;
11082 case ORDERED_EXPR:
11083 code = ORDERED;
11084 break;
11085 case UNLT_EXPR:
11086 code = UNLT;
11087 break;
11088 case UNLE_EXPR:
11089 code = UNLE;
11090 break;
11091 case UNGT_EXPR:
11092 code = UNGT;
11093 break;
11094 case UNGE_EXPR:
11095 code = UNGE;
11096 break;
11097 case UNEQ_EXPR:
11098 code = UNEQ;
11099 break;
11100 case LTGT_EXPR:
11101 code = LTGT;
11102 break;
11103
11104 default:
11105 gcc_unreachable ();
11106 }
11107
11108 /* Put a constant second. */
11109 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11110 || TREE_CODE (arg0) == FIXED_CST)
11111 {
11112 tem = arg0; arg0 = arg1; arg1 = tem;
11113 code = swap_condition (code);
11114 }
11115
11116 /* If this is an equality or inequality test of a single bit, we can
11117 do this by shifting the bit being tested to the low-order bit and
11118 masking the result with the constant 1. If the condition was EQ,
11119 we xor it with 1. This does not require an scc insn and is faster
11120 than an scc insn even if we have it.
11121
11122 The code to make this transformation was moved into fold_single_bit_test,
11123 so we just call into the folder and expand its result. */
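  /* For instance (illustrative), (X & 8) != 0 expands roughly as
     (X >> 3) & 1, and (X & 8) == 0 as ((X >> 3) & 1) ^ 1, with no
     comparison or store-flag instruction needed.  */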
11124
11125 if ((code == NE || code == EQ)
11126 && integer_zerop (arg1)
11127 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11128 {
11129 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11130 if (srcstmt
11131 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11132 {
11133 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11134 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11135 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11136 gimple_assign_rhs1 (srcstmt),
11137 gimple_assign_rhs2 (srcstmt));
11138 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11139 if (temp)
11140 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11141 }
11142 }
11143
11144 if (! get_subtarget (target)
11145 || GET_MODE (subtarget) != operand_mode)
11146 subtarget = 0;
11147
11148 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11149
11150 if (target == 0)
11151 target = gen_reg_rtx (mode);
11152
11153 /* Try a cstore if possible. */
11154 return emit_store_flag_force (target, code, op0, op1,
11155 operand_mode, unsignedp,
11156 (TYPE_PRECISION (ops->type) == 1
11157 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11158 }
11159 \f
11160
11161 /* Stubs in case we haven't got a casesi insn. */
11162 #ifndef HAVE_casesi
11163 # define HAVE_casesi 0
11164 # define gen_casesi(a, b, c, d, e) (0)
11165 # define CODE_FOR_casesi CODE_FOR_nothing
11166 #endif
11167
11168 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11169 0 otherwise (i.e. if there is no casesi instruction).
11170
11171 DEFAULT_PROBABILITY is the probability of jumping to the default
11172 label. */
11173 int
11174 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11175 rtx table_label, rtx default_label, rtx fallback_label,
11176 int default_probability)
11177 {
11178 struct expand_operand ops[5];
11179 machine_mode index_mode = SImode;
11180 rtx op1, op2, index;
11181
11182 if (! HAVE_casesi)
11183 return 0;
11184
11185 /* Convert the index to SImode. */
11186 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11187 {
11188 machine_mode omode = TYPE_MODE (index_type);
11189 rtx rangertx = expand_normal (range);
11190
11191 /* We must handle the endpoints in the original mode. */
11192 index_expr = build2 (MINUS_EXPR, index_type,
11193 index_expr, minval);
11194 minval = integer_zero_node;
11195 index = expand_normal (index_expr);
11196 if (default_label)
11197 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11198 omode, 1, default_label,
11199 default_probability);
11200 /* Now we can safely truncate. */
11201 index = convert_to_mode (index_mode, index, 0);
11202 }
11203 else
11204 {
11205 if (TYPE_MODE (index_type) != index_mode)
11206 {
11207 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11208 index_expr = fold_convert (index_type, index_expr);
11209 }
11210
11211 index = expand_normal (index_expr);
11212 }
11213
11214 do_pending_stack_adjust ();
11215
11216 op1 = expand_normal (minval);
11217 op2 = expand_normal (range);
11218
11219 create_input_operand (&ops[0], index, index_mode);
11220 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11221 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11222 create_fixed_operand (&ops[3], table_label);
11223 create_fixed_operand (&ops[4], (default_label
11224 ? default_label
11225 : fallback_label));
11226 expand_jump_insn (CODE_FOR_casesi, 5, ops);
11227 return 1;
11228 }
11229
11230 /* Attempt to generate a tablejump instruction; same concept. */
11231 #ifndef HAVE_tablejump
11232 #define HAVE_tablejump 0
11233 #define gen_tablejump(x, y) (0)
11234 #endif
11235
11236 /* Subroutine of the next function.
11237
11238 INDEX is the value being switched on, with the lowest value
11239 in the table already subtracted.
11240 MODE is its expected mode (needed if INDEX is constant).
11241 RANGE is the length of the jump table.
11242 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11243
11244 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11245 index value is out of range.
11246 DEFAULT_PROBABILITY is the probability of jumping to
11247 the default label. */
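/* Illustrative sketch (pseudo-code) of the dispatch emitted below:

      if ((unsigned) index > range)        only when DEFAULT_LABEL is given
        goto default_label;
      goto *(table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE));

   where the final jump loads a CASE_VECTOR_MODE entry from the table
   and transfers control to it (after PIC adjustment if needed).  */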
11248
11249 static void
11250 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11251 rtx default_label, int default_probability)
11252 {
11253 rtx temp, vector;
11254
11255 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11256 cfun->cfg->max_jumptable_ents = INTVAL (range);
11257
11258 /* Do an unsigned comparison (in the proper mode) between the index
11259 expression and the value which represents the length of the range.
11260 Since we just finished subtracting the lower bound of the range
11261 from the index expression, this comparison allows us to simultaneously
11262 check that the original index expression value is both greater than
11263 or equal to the minimum value of the range and less than or equal to
11264 the maximum value of the range. */
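  /* For example (illustrative), with a case range of 10..20 the lower
     bound 10 has already been subtracted; an original value of 5 wraps
     around to a huge unsigned number, so a single unsigned comparison
     against the range length 10 rejects values below the range as well
     as values above it.  */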
11265
11266 if (default_label)
11267 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11268 default_label, default_probability);
11269
11270
11271 /* If index is in range, it must fit in Pmode.
11272 Convert to Pmode so we can index with it. */
11273 if (mode != Pmode)
11274 index = convert_to_mode (Pmode, index, 1);
11275
11276   /* Don't let a MEM slip through, because then the INDEX that comes
11277 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11278 and break_out_memory_refs will go to work on it and mess it up. */
11279 #ifdef PIC_CASE_VECTOR_ADDRESS
11280 if (flag_pic && !REG_P (index))
11281 index = copy_to_mode_reg (Pmode, index);
11282 #endif
11283
11284 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11285 GET_MODE_SIZE, because this indicates how large insns are. The other
11286 uses should all be Pmode, because they are addresses. This code
11287 could fail if addresses and insns are not the same size. */
11288 index = simplify_gen_binary (MULT, Pmode, index,
11289 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11290 Pmode));
11291 index = simplify_gen_binary (PLUS, Pmode, index,
11292 gen_rtx_LABEL_REF (Pmode, table_label));
11293
11294 #ifdef PIC_CASE_VECTOR_ADDRESS
11295 if (flag_pic)
11296 index = PIC_CASE_VECTOR_ADDRESS (index);
11297 else
11298 #endif
11299 index = memory_address (CASE_VECTOR_MODE, index);
11300 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11301 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11302 convert_move (temp, vector, 0);
11303
11304 emit_jump_insn (gen_tablejump (temp, table_label));
11305
11306 /* If we are generating PIC code or if the table is PC-relative, the
11307 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11308 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11309 emit_barrier ();
11310 }
11311
11312 int
11313 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11314 rtx table_label, rtx default_label, int default_probability)
11315 {
11316 rtx index;
11317
11318 if (! HAVE_tablejump)
11319 return 0;
11320
11321 index_expr = fold_build2 (MINUS_EXPR, index_type,
11322 fold_convert (index_type, index_expr),
11323 fold_convert (index_type, minval));
11324 index = expand_normal (index_expr);
11325 do_pending_stack_adjust ();
11326
11327 do_tablejump (index, TYPE_MODE (index_type),
11328 convert_modes (TYPE_MODE (index_type),
11329 TYPE_MODE (TREE_TYPE (range)),
11330 expand_normal (range),
11331 TYPE_UNSIGNED (TREE_TYPE (range))),
11332 table_label, default_label, default_probability);
11333 return 1;
11334 }
11335
11336 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11337 static rtx
11338 const_vector_from_tree (tree exp)
11339 {
11340 rtvec v;
11341 unsigned i;
11342 int units;
11343 tree elt;
11344 machine_mode inner, mode;
11345
11346 mode = TYPE_MODE (TREE_TYPE (exp));
11347
11348 if (initializer_zerop (exp))
11349 return CONST0_RTX (mode);
11350
11351 units = GET_MODE_NUNITS (mode);
11352 inner = GET_MODE_INNER (mode);
11353
11354 v = rtvec_alloc (units);
11355
11356 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11357 {
11358 elt = VECTOR_CST_ELT (exp, i);
11359
11360 if (TREE_CODE (elt) == REAL_CST)
11361 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11362 inner);
11363 else if (TREE_CODE (elt) == FIXED_CST)
11364 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11365 inner);
11366 else
11367 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11368 }
11369
11370 return gen_rtx_CONST_VECTOR (mode, v);
11371 }
11372
11373 /* Build a decl for a personality function given a language prefix. */
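/* For example, a C++ front end passing the prefix "gxx" on a target
   using DWARF-2 unwind info ends up declaring __gxx_personality_v0;
   the "_sj0" and "_seh0" suffixes below are used for SJLJ and SEH
   unwinding respectively.  */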
11374
11375 tree
11376 build_personality_function (const char *lang)
11377 {
11378 const char *unwind_and_version;
11379 tree decl, type;
11380 char *name;
11381
11382 switch (targetm_common.except_unwind_info (&global_options))
11383 {
11384 case UI_NONE:
11385 return NULL;
11386 case UI_SJLJ:
11387 unwind_and_version = "_sj0";
11388 break;
11389 case UI_DWARF2:
11390 case UI_TARGET:
11391 unwind_and_version = "_v0";
11392 break;
11393 case UI_SEH:
11394 unwind_and_version = "_seh0";
11395 break;
11396 default:
11397 gcc_unreachable ();
11398 }
11399
11400 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11401
11402 type = build_function_type_list (integer_type_node, integer_type_node,
11403 long_long_unsigned_type_node,
11404 ptr_type_node, ptr_type_node, NULL_TREE);
11405 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11406 get_identifier (name), type);
11407 DECL_ARTIFICIAL (decl) = 1;
11408 DECL_EXTERNAL (decl) = 1;
11409 TREE_PUBLIC (decl) = 1;
11410
11411 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11412 are the flags assigned by targetm.encode_section_info. */
11413 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11414
11415 return decl;
11416 }
11417
11418 /* Extracts the personality function of DECL and returns the corresponding
11419 libfunc. */
11420
11421 rtx
11422 get_personality_function (tree decl)
11423 {
11424 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11425 enum eh_personality_kind pk;
11426
11427 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11428 if (pk == eh_personality_none)
11429 return NULL;
11430
11431 if (!personality
11432 && pk == eh_personality_any)
11433 personality = lang_hooks.eh_personality ();
11434
11435 if (pk == eh_personality_lang)
11436 gcc_assert (personality != NULL_TREE);
11437
11438 return XEXP (DECL_RTL (personality), 0);
11439 }
11440
11441 /* Returns a tree for the size of EXP in bytes. */
11442
11443 static tree
11444 tree_expr_size (const_tree exp)
11445 {
11446 if (DECL_P (exp)
11447 && DECL_SIZE_UNIT (exp) != 0)
11448 return DECL_SIZE_UNIT (exp);
11449 else
11450 return size_in_bytes (TREE_TYPE (exp));
11451 }
11452
11453 /* Return an rtx for the size in bytes of the value of EXP. */
11454
11455 rtx
11456 expr_size (tree exp)
11457 {
11458 tree size;
11459
11460 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11461 size = TREE_OPERAND (exp, 1);
11462 else
11463 {
11464 size = tree_expr_size (exp);
11465 gcc_assert (size);
11466 gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11467 }
11468
11469 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11470 }
11471
11472 /* Return a wide integer for the size in bytes of the value of EXP, or -1
11473 if the size can vary or is larger than an integer. */
11474
11475 static HOST_WIDE_INT
11476 int_expr_size (tree exp)
11477 {
11478 tree size;
11479
11480 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11481 size = TREE_OPERAND (exp, 1);
11482 else
11483 {
11484 size = tree_expr_size (exp);
11485 gcc_assert (size);
11486 }
11487
11488 if (size == 0 || !tree_fits_shwi_p (size))
11489 return -1;
11490
11491 return tree_to_shwi (size);
11492 }
11493
11494 #include "gt-expr.h"