gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "attribs.h"
30 #include "varasm.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "hashtab.h"
36 #include "hash-set.h"
37 #include "vec.h"
38 #include "input.h"
39 #include "function.h"
40 #include "insn-config.h"
41 #include "insn-attr.h"
42 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
43 #include "expr.h"
44 #include "insn-codes.h"
45 #include "optabs.h"
46 #include "libfuncs.h"
47 #include "recog.h"
48 #include "reload.h"
49 #include "typeclass.h"
50 #include "toplev.h"
51 #include "langhooks.h"
52 #include "intl.h"
53 #include "tm_p.h"
54 #include "tree-iterator.h"
55 #include "predict.h"
56 #include "dominance.h"
57 #include "cfg.h"
58 #include "basic-block.h"
59 #include "tree-ssa-alias.h"
60 #include "internal-fn.h"
61 #include "gimple-expr.h"
62 #include "is-a.h"
63 #include "gimple.h"
64 #include "gimple-ssa.h"
65 #include "hash-map.h"
66 #include "plugin-api.h"
67 #include "ipa-ref.h"
68 #include "cgraph.h"
69 #include "tree-ssanames.h"
70 #include "target.h"
71 #include "common/common-target.h"
72 #include "timevar.h"
73 #include "df.h"
74 #include "diagnostic.h"
75 #include "tree-ssa-live.h"
76 #include "tree-outof-ssa.h"
77 #include "target-globals.h"
78 #include "params.h"
79 #include "tree-ssa-address.h"
80 #include "cfgexpand.h"
81 #include "builtins.h"
82 #include "tree-chkp.h"
83 #include "rtl-chkp.h"
84
85 #ifndef STACK_PUSH_CODE
86 #ifdef STACK_GROWS_DOWNWARD
87 #define STACK_PUSH_CODE PRE_DEC
88 #else
89 #define STACK_PUSH_CODE PRE_INC
90 #endif
91 #endif
92
93
94 /* If this is nonzero, we do not bother generating VOLATILE
95 around volatile memory references, and we are willing to
96 output indirect addresses. If cse is to follow, we reject
97 indirect addresses so a useful potential cse is generated;
98 if it is used only once, instruction combination will produce
99 the same indirect address eventually. */
100 int cse_not_expected;
101
102 /* This structure is used by move_by_pieces to describe the move to
103 be performed. */
104 struct move_by_pieces_d
105 {
106 rtx to;
107 rtx to_addr;
108 int autinc_to;
109 int explicit_inc_to;
110 rtx from;
111 rtx from_addr;
112 int autinc_from;
113 int explicit_inc_from;
114 unsigned HOST_WIDE_INT len;
115 HOST_WIDE_INT offset;
116 int reverse;
117 };
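/* Field notes (a summary drawn from move_by_pieces and move_by_pieces_1
   below, not separate documentation): TO and FROM are the destination and
   source MEMs, TO_ADDR and FROM_ADDR their (possibly register-copied)
   addresses.  AUTINC_TO / AUTINC_FROM are nonzero when the corresponding
   address uses an auto-increment form; EXPLICIT_INC_TO / EXPLICIT_INC_FROM
   are -1 or +1 when explicit pre-decrement or post-increment add insns must
   be emitted around each move.  LEN is the number of bytes still to copy,
   OFFSET the current byte offset, and REVERSE is nonzero when the copy
   proceeds from the high end toward the low end.  */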
118
119 /* This structure is used by store_by_pieces to describe the clear to
120 be performed. */
121
122 struct store_by_pieces_d
123 {
124 rtx to;
125 rtx to_addr;
126 int autinc_to;
127 int explicit_inc_to;
128 unsigned HOST_WIDE_INT len;
129 HOST_WIDE_INT offset;
130 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
131 void *constfundata;
132 int reverse;
133 };
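/* Note on the callback above: CONSTFUN is invoked as
   (*constfun) (constfundata, offset, mode) and must return an rtx constant
   holding the bytes to store at OFFSET in MODE; clear_by_pieces_1, declared
   below, is the all-zeros instance of it.  */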
134
135 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
136 struct move_by_pieces_d *);
137 static bool block_move_libcall_safe_for_call_parm (void);
138 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
139 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
140 unsigned HOST_WIDE_INT);
141 static tree emit_block_move_libcall_fn (int);
142 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
143 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
144 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
145 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
146 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
147 struct store_by_pieces_d *);
148 static tree clear_storage_libcall_fn (int);
149 static rtx_insn *compress_float_constant (rtx, rtx);
150 static rtx get_subtarget (rtx);
151 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
152 HOST_WIDE_INT, machine_mode,
153 tree, int, alias_set_type);
154 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
155 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
156 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
157 machine_mode, tree, alias_set_type, bool);
158
159 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
160
161 static int is_aligning_offset (const_tree, const_tree);
162 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
163 enum expand_modifier);
164 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
165 static rtx do_store_flag (sepops, rtx, machine_mode);
166 #ifdef PUSH_ROUNDING
167 static void emit_single_push_insn (machine_mode, rtx, tree);
168 #endif
169 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
170 static rtx const_vector_from_tree (tree);
171 static void write_complex_part (rtx, rtx, bool);
172
173 \f
174 /* This is run to set up which modes can be used
175 directly in memory and to initialize the block move optab. It is run
176 at the beginning of compilation and when the target is reinitialized. */
177
178 void
179 init_expr_target (void)
180 {
181 rtx insn, pat;
182 machine_mode mode;
183 int num_clobbers;
184 rtx mem, mem1;
185 rtx reg;
186
187 /* Try indexing by frame ptr and try by stack ptr.
188 It is known that on the Convex the stack ptr isn't a valid index.
189 With luck, one or the other is valid on any machine. */
190 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
191 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
192
193 /* A scratch register we can modify in-place below to avoid
194 useless RTL allocations. */
195 reg = gen_rtx_REG (VOIDmode, -1);
196
197 insn = rtx_alloc (INSN);
198 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
199 PATTERN (insn) = pat;
200
201 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
202 mode = (machine_mode) ((int) mode + 1))
203 {
204 int regno;
205
206 direct_load[(int) mode] = direct_store[(int) mode] = 0;
207 PUT_MODE (mem, mode);
208 PUT_MODE (mem1, mode);
209 PUT_MODE (reg, mode);
210
211 /* See if there is some register that can be used in this mode and
212 directly loaded or stored from memory. */
213
214 if (mode != VOIDmode && mode != BLKmode)
215 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
216 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
217 regno++)
218 {
219 if (! HARD_REGNO_MODE_OK (regno, mode))
220 continue;
221
222 SET_REGNO (reg, regno);
223
224 SET_SRC (pat) = mem;
225 SET_DEST (pat) = reg;
226 if (recog (pat, insn, &num_clobbers) >= 0)
227 direct_load[(int) mode] = 1;
228
229 SET_SRC (pat) = mem1;
230 SET_DEST (pat) = reg;
231 if (recog (pat, insn, &num_clobbers) >= 0)
232 direct_load[(int) mode] = 1;
233
234 SET_SRC (pat) = reg;
235 SET_DEST (pat) = mem;
236 if (recog (pat, insn, &num_clobbers) >= 0)
237 direct_store[(int) mode] = 1;
238
239 SET_SRC (pat) = reg;
240 SET_DEST (pat) = mem1;
241 if (recog (pat, insn, &num_clobbers) >= 0)
242 direct_store[(int) mode] = 1;
243 }
244 }
245
246 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
247
248 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
249 mode = GET_MODE_WIDER_MODE (mode))
250 {
251 machine_mode srcmode;
252 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
253 srcmode = GET_MODE_WIDER_MODE (srcmode))
254 {
255 enum insn_code ic;
256
257 ic = can_extend_p (mode, srcmode, 0);
258 if (ic == CODE_FOR_nothing)
259 continue;
260
261 PUT_MODE (mem, srcmode);
262
263 if (insn_operand_matches (ic, 1, mem))
264 float_extend_from_mem[mode][srcmode] = true;
265 }
266 }
267 }
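/* After init_expr_target has run, direct_load[M] / direct_store[M] are
   nonzero when some hard register can be loaded from or stored to memory
   directly in mode M, and float_extend_from_mem[M][S] records whether a
   float extension from mode S to mode M can take its input straight from
   memory.  (This is a summary of the loops above.)  */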
268
269 /* This is run at the start of compiling a function. */
270
271 void
272 init_expr (void)
273 {
274 memset (&crtl->expr, 0, sizeof (crtl->expr));
275 }
276 \f
277 /* Copy data from FROM to TO, where the machine modes are not the same.
278 Both modes may be integer, or both may be floating, or both may be
279 fixed-point.
280 UNSIGNEDP should be nonzero if FROM is an unsigned type.
281 This causes zero-extension instead of sign-extension. */
282
283 void
284 convert_move (rtx to, rtx from, int unsignedp)
285 {
286 machine_mode to_mode = GET_MODE (to);
287 machine_mode from_mode = GET_MODE (from);
288 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
289 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
290 enum insn_code code;
291 rtx libcall;
292
293 /* rtx code for making an equivalent value. */
294 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
295 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
296
297
298 gcc_assert (to_real == from_real);
299 gcc_assert (to_mode != BLKmode);
300 gcc_assert (from_mode != BLKmode);
301
302 /* If the source and destination are already the same, then there's
303 nothing to do. */
304 if (to == from)
305 return;
306
307 /* If FROM is a SUBREG that indicates that we have already done at least
308 the required extension, strip it. We don't handle such SUBREGs as
309 TO here. */
310
311 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
312 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
313 >= GET_MODE_PRECISION (to_mode))
314 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
315 from = gen_lowpart (to_mode, from), from_mode = to_mode;
316
317 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
318
319 if (to_mode == from_mode
320 || (from_mode == VOIDmode && CONSTANT_P (from)))
321 {
322 emit_move_insn (to, from);
323 return;
324 }
325
326 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
327 {
328 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
329
330 if (VECTOR_MODE_P (to_mode))
331 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
332 else
333 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
334
335 emit_move_insn (to, from);
336 return;
337 }
338
339 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
340 {
341 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
342 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
343 return;
344 }
345
346 if (to_real)
347 {
348 rtx value;
349 rtx_insn *insns;
350 convert_optab tab;
351
352 gcc_assert ((GET_MODE_PRECISION (from_mode)
353 != GET_MODE_PRECISION (to_mode))
354 || (DECIMAL_FLOAT_MODE_P (from_mode)
355 != DECIMAL_FLOAT_MODE_P (to_mode)));
356
357 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
358 /* Conversion between decimal float and binary float, same size. */
359 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
360 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
361 tab = sext_optab;
362 else
363 tab = trunc_optab;
364
365 /* Try converting directly if the insn is supported. */
366
367 code = convert_optab_handler (tab, to_mode, from_mode);
368 if (code != CODE_FOR_nothing)
369 {
370 emit_unop_insn (code, to, from,
371 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
372 return;
373 }
374
375 /* Otherwise use a libcall. */
376 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
377
378 /* Is this conversion implemented yet? */
379 gcc_assert (libcall);
380
381 start_sequence ();
382 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
383 1, from, from_mode);
384 insns = get_insns ();
385 end_sequence ();
386 emit_libcall_block (insns, to, value,
387 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
388 from)
389 : gen_rtx_FLOAT_EXTEND (to_mode, from));
390 return;
391 }
392
393 /* Handle pointer conversion. */ /* SPEE 900220. */
394 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
395 {
396 convert_optab ctab;
397
398 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
399 ctab = trunc_optab;
400 else if (unsignedp)
401 ctab = zext_optab;
402 else
403 ctab = sext_optab;
404
405 if (convert_optab_handler (ctab, to_mode, from_mode)
406 != CODE_FOR_nothing)
407 {
408 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
409 to, from, UNKNOWN);
410 return;
411 }
412 }
413
414 /* Targets are expected to provide conversion insns between PxImode and
415 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
416 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
417 {
418 machine_mode full_mode
419 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
420
421 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
422 != CODE_FOR_nothing);
423
424 if (full_mode != from_mode)
425 from = convert_to_mode (full_mode, from, unsignedp);
426 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
427 to, from, UNKNOWN);
428 return;
429 }
430 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
431 {
432 rtx new_from;
433 machine_mode full_mode
434 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
435 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
436 enum insn_code icode;
437
438 icode = convert_optab_handler (ctab, full_mode, from_mode);
439 gcc_assert (icode != CODE_FOR_nothing);
440
441 if (to_mode == full_mode)
442 {
443 emit_unop_insn (icode, to, from, UNKNOWN);
444 return;
445 }
446
447 new_from = gen_reg_rtx (full_mode);
448 emit_unop_insn (icode, new_from, from, UNKNOWN);
449
450 /* else proceed to integer conversions below. */
451 from_mode = full_mode;
452 from = new_from;
453 }
454
455 /* Make sure both are fixed-point modes or both are not. */
456 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
457 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
458 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
459 {
460 /* If we widen from_mode to to_mode and they are in the same class,
461 we won't saturate the result.
462 Otherwise, always saturate the result to play safe. */
463 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
464 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
465 expand_fixed_convert (to, from, 0, 0);
466 else
467 expand_fixed_convert (to, from, 0, 1);
468 return;
469 }
470
471 /* Now both modes are integers. */
472
473 /* Handle expanding beyond a word. */
474 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
475 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
476 {
477 rtx_insn *insns;
478 rtx lowpart;
479 rtx fill_value;
480 rtx lowfrom;
481 int i;
482 machine_mode lowpart_mode;
483 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
484
485 /* Try converting directly if the insn is supported. */
486 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
487 != CODE_FOR_nothing)
488 {
489 /* If FROM is a SUBREG, put it into a register. Do this
490 so that we always generate the same set of insns for
491 better cse'ing; if an intermediate assignment occurred,
492 we won't be doing the operation directly on the SUBREG. */
493 if (optimize > 0 && GET_CODE (from) == SUBREG)
494 from = force_reg (from_mode, from);
495 emit_unop_insn (code, to, from, equiv_code);
496 return;
497 }
498 /* Next, try converting via full word. */
499 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
500 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
501 != CODE_FOR_nothing))
502 {
503 rtx word_to = gen_reg_rtx (word_mode);
504 if (REG_P (to))
505 {
506 if (reg_overlap_mentioned_p (to, from))
507 from = force_reg (from_mode, from);
508 emit_clobber (to);
509 }
510 convert_move (word_to, from, unsignedp);
511 emit_unop_insn (code, to, word_to, equiv_code);
512 return;
513 }
514
515 /* No special multiword conversion insn; do it by hand. */
516 start_sequence ();
517
518 /* Since we will turn this into a no conflict block, we must ensure that
519 the source does not overlap the target, so force it into an isolated
520 register when it might.  Likewise for any MEM input, since the
521 conversion sequence might require several references to it and we
522 must ensure we're getting the same value every time. */
523
524 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
525 from = force_reg (from_mode, from);
526
527 /* Get a copy of FROM widened to a word, if necessary. */
528 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
529 lowpart_mode = word_mode;
530 else
531 lowpart_mode = from_mode;
532
533 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
534
535 lowpart = gen_lowpart (lowpart_mode, to);
536 emit_move_insn (lowpart, lowfrom);
537
538 /* Compute the value to put in each remaining word. */
539 if (unsignedp)
540 fill_value = const0_rtx;
541 else
542 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
543 LT, lowfrom, const0_rtx,
544 lowpart_mode, 0, -1);
545
546 /* Fill the remaining words. */
547 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
548 {
549 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
550 rtx subword = operand_subword (to, index, 1, to_mode);
551
552 gcc_assert (subword);
553
554 if (fill_value != subword)
555 emit_move_insn (subword, fill_value);
556 }
557
558 insns = get_insns ();
559 end_sequence ();
560
561 emit_insn (insns);
562 return;
563 }
564
565 /* Truncating multi-word to a word or less. */
566 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
567 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
568 {
569 if (!((MEM_P (from)
570 && ! MEM_VOLATILE_P (from)
571 && direct_load[(int) to_mode]
572 && ! mode_dependent_address_p (XEXP (from, 0),
573 MEM_ADDR_SPACE (from)))
574 || REG_P (from)
575 || GET_CODE (from) == SUBREG))
576 from = force_reg (from_mode, from);
577 convert_move (to, gen_lowpart (word_mode, from), 0);
578 return;
579 }
580
581 /* Now follow all the conversions between integers
582 no more than a word long. */
583
584 /* For truncation, usually we can just refer to FROM in a narrower mode. */
585 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
586 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
587 {
588 if (!((MEM_P (from)
589 && ! MEM_VOLATILE_P (from)
590 && direct_load[(int) to_mode]
591 && ! mode_dependent_address_p (XEXP (from, 0),
592 MEM_ADDR_SPACE (from)))
593 || REG_P (from)
594 || GET_CODE (from) == SUBREG))
595 from = force_reg (from_mode, from);
596 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
597 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
598 from = copy_to_reg (from);
599 emit_move_insn (to, gen_lowpart (to_mode, from));
600 return;
601 }
602
603 /* Handle extension. */
604 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
605 {
606 /* Convert directly if that works. */
607 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
608 != CODE_FOR_nothing)
609 {
610 emit_unop_insn (code, to, from, equiv_code);
611 return;
612 }
613 else
614 {
615 machine_mode intermediate;
616 rtx tmp;
617 int shift_amount;
618
619 /* Search for a mode to convert via. */
620 for (intermediate = from_mode; intermediate != VOIDmode;
621 intermediate = GET_MODE_WIDER_MODE (intermediate))
622 if (((can_extend_p (to_mode, intermediate, unsignedp)
623 != CODE_FOR_nothing)
624 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
625 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
626 && (can_extend_p (intermediate, from_mode, unsignedp)
627 != CODE_FOR_nothing))
628 {
629 convert_move (to, convert_to_mode (intermediate, from,
630 unsignedp), unsignedp);
631 return;
632 }
633
634 /* No suitable intermediate mode.
635 Generate what we need with shifts. */
636 shift_amount = (GET_MODE_PRECISION (to_mode)
637 - GET_MODE_PRECISION (from_mode));
638 from = gen_lowpart (to_mode, force_reg (from_mode, from));
639 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
640 to, unsignedp);
641 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
642 to, unsignedp);
643 if (tmp != to)
644 emit_move_insn (to, tmp);
645 return;
646 }
647 }
648
649 /* Support special truncate insns for certain modes. */
650 if (convert_optab_handler (trunc_optab, to_mode,
651 from_mode) != CODE_FOR_nothing)
652 {
653 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
654 to, from, UNKNOWN);
655 return;
656 }
657
658 /* Handle truncation of volatile memrefs, and so on;
659 the things that couldn't be truncated directly,
660 and for which there was no special instruction.
661
662 ??? Code above formerly short-circuited this, for most integer
663 mode pairs, with a force_reg in from_mode followed by a recursive
664 call to this routine. Appears always to have been wrong. */
665 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
666 {
667 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
668 emit_move_insn (to, temp);
669 return;
670 }
671
672 /* Mode combination is not recognized. */
673 gcc_unreachable ();
674 }
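/* Hypothetical usage sketch for convert_move; callers in this file normally
   reach it through convert_to_mode or convert_modes:

     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, src_qi, 1);

   Here src_qi is assumed to be a QImode rtx; UNSIGNEDP == 1 requests
   zero-extension rather than sign-extension, as documented above.  */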
675
676 /* Return an rtx for a value that would result
677 from converting X to mode MODE.
678 Both X and MODE may be floating, or both integer.
679 UNSIGNEDP is nonzero if X is an unsigned value.
680 This can be done by referring to a part of X in place
681 or by copying to a new temporary with conversion. */
682
683 rtx
684 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
685 {
686 return convert_modes (mode, VOIDmode, x, unsignedp);
687 }
688
689 /* Return an rtx for a value that would result
690 from converting X from mode OLDMODE to mode MODE.
691 Both modes may be floating, or both integer.
692 UNSIGNEDP is nonzero if X is an unsigned value.
693
694 This can be done by referring to a part of X in place
695 or by copying to a new temporary with conversion.
696
697 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
698
699 rtx
700 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
701 {
702 rtx temp;
703
704 /* If FROM is a SUBREG that indicates that we have already done at least
705 the required extension, strip it. */
706
707 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
708 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
709 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
710 x = gen_lowpart (mode, SUBREG_REG (x));
711
712 if (GET_MODE (x) != VOIDmode)
713 oldmode = GET_MODE (x);
714
715 if (mode == oldmode)
716 return x;
717
718 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
719 {
720 /* If the caller did not tell us the old mode, then there is not
721 much to do with respect to canonicalization. We have to
722 assume that all the bits are significant. */
723 if (GET_MODE_CLASS (oldmode) != MODE_INT)
724 oldmode = MAX_MODE_INT;
725 wide_int w = wide_int::from (std::make_pair (x, oldmode),
726 GET_MODE_PRECISION (mode),
727 unsignedp ? UNSIGNED : SIGNED);
728 return immed_wide_int_const (w, mode);
729 }
730
731 /* We can do this with a gen_lowpart if both desired and current modes
732 are integer, and this is either a constant integer, a register, or a
733 non-volatile MEM. */
734 if (GET_MODE_CLASS (mode) == MODE_INT
735 && GET_MODE_CLASS (oldmode) == MODE_INT
736 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
737 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
738 || (REG_P (x)
739 && (!HARD_REGISTER_P (x)
740 || HARD_REGNO_MODE_OK (REGNO (x), mode))
741 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
742
743 return gen_lowpart (mode, x);
744
745 /* Converting an integer constant into a vector mode is always equivalent
746 to a subreg operation. */
747 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
748 {
749 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
750 return simplify_gen_subreg (mode, x, oldmode, 0);
751 }
752
753 temp = gen_reg_rtx (mode);
754 convert_move (temp, x, unsignedp);
755 return temp;
756 }
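/* Note (a reading of the constant path above): when X is a CONST_INT or
   CONST_WIDE_INT and MODE is an integer mode, convert_modes emits no insns;
   it simply returns the constant re-canonicalized to MODE's precision via
   wide_int, using OLDMODE (or MAX_MODE_INT when OLDMODE is unknown) to
   decide how to interpret the existing bits.  */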
757 \f
758 /* Return the largest alignment we can use for doing a move (or store)
759 of MAX_PIECES. ALIGN is the largest alignment we could use. */
760
761 static unsigned int
762 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
763 {
764 machine_mode tmode;
765
766 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
767 if (align >= GET_MODE_ALIGNMENT (tmode))
768 align = GET_MODE_ALIGNMENT (tmode);
769 else
770 {
771 machine_mode tmode, xmode;
772
773 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
774 tmode != VOIDmode;
775 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
776 if (GET_MODE_SIZE (tmode) > max_pieces
777 || SLOW_UNALIGNED_ACCESS (tmode, align))
778 break;
779
780 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
781 }
782
783 return align;
784 }
785
786 /* Return the widest integer mode strictly narrower than SIZE bytes.
787 If no such mode exists, return VOIDmode. */
788
789 static machine_mode
790 widest_int_mode_for_size (unsigned int size)
791 {
792 machine_mode tmode, mode = VOIDmode;
793
794 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
795 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
796 if (GET_MODE_SIZE (tmode) < size)
797 mode = tmode;
798
799 return mode;
800 }
801
802 /* Determine whether the LEN bytes can be moved by using several move
803 instructions. Return nonzero if a call to move_by_pieces should
804 succeed. */
805
806 int
807 can_move_by_pieces (unsigned HOST_WIDE_INT len,
808 unsigned int align)
809 {
810 return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
811 optimize_insn_for_speed_p ());
812 }
813
814 /* Generate several move instructions to copy LEN bytes from block FROM to
815 block TO. (These are MEM rtx's with BLKmode).
816
817 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
818 used to push FROM to the stack.
819
820 ALIGN is maximum stack alignment we can assume.
821
822 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
823 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
824 stpcpy. */
825
826 rtx
827 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
828 unsigned int align, int endp)
829 {
830 struct move_by_pieces_d data;
831 machine_mode to_addr_mode;
832 machine_mode from_addr_mode = get_address_mode (from);
833 rtx to_addr, from_addr = XEXP (from, 0);
834 unsigned int max_size = MOVE_MAX_PIECES + 1;
835 enum insn_code icode;
836
837 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
838
839 data.offset = 0;
840 data.from_addr = from_addr;
841 if (to)
842 {
843 to_addr_mode = get_address_mode (to);
844 to_addr = XEXP (to, 0);
845 data.to = to;
846 data.autinc_to
847 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
848 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
849 data.reverse
850 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
851 }
852 else
853 {
854 to_addr_mode = VOIDmode;
855 to_addr = NULL_RTX;
856 data.to = NULL_RTX;
857 data.autinc_to = 1;
858 #ifdef STACK_GROWS_DOWNWARD
859 data.reverse = 1;
860 #else
861 data.reverse = 0;
862 #endif
863 }
864 data.to_addr = to_addr;
865 data.from = from;
866 data.autinc_from
867 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
868 || GET_CODE (from_addr) == POST_INC
869 || GET_CODE (from_addr) == POST_DEC);
870
871 data.explicit_inc_from = 0;
872 data.explicit_inc_to = 0;
873 if (data.reverse) data.offset = len;
874 data.len = len;
875
876 /* If copying requires more than two move insns,
877 copy addresses to registers (to make displacements shorter)
878 and use post-increment if available. */
879 if (!(data.autinc_from && data.autinc_to)
880 && move_by_pieces_ninsns (len, align, max_size) > 2)
881 {
882 /* Find the mode of the largest move...
883 MODE might not be used depending on the definitions of the
884 USE_* macros below. */
885 machine_mode mode ATTRIBUTE_UNUSED
886 = widest_int_mode_for_size (max_size);
887
888 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
889 {
890 data.from_addr = copy_to_mode_reg (from_addr_mode,
891 plus_constant (from_addr_mode,
892 from_addr, len));
893 data.autinc_from = 1;
894 data.explicit_inc_from = -1;
895 }
896 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
897 {
898 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
899 data.autinc_from = 1;
900 data.explicit_inc_from = 1;
901 }
902 if (!data.autinc_from && CONSTANT_P (from_addr))
903 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
904 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
905 {
906 data.to_addr = copy_to_mode_reg (to_addr_mode,
907 plus_constant (to_addr_mode,
908 to_addr, len));
909 data.autinc_to = 1;
910 data.explicit_inc_to = -1;
911 }
912 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
913 {
914 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
915 data.autinc_to = 1;
916 data.explicit_inc_to = 1;
917 }
918 if (!data.autinc_to && CONSTANT_P (to_addr))
919 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
920 }
921
922 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
923
924 /* First move what we can in the largest integer mode, then go to
925 successively smaller modes. */
926
927 while (max_size > 1 && data.len > 0)
928 {
929 machine_mode mode = widest_int_mode_for_size (max_size);
930
931 if (mode == VOIDmode)
932 break;
933
934 icode = optab_handler (mov_optab, mode);
935 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
936 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
937
938 max_size = GET_MODE_SIZE (mode);
939 }
940
941 /* The code above should have handled everything. */
942 gcc_assert (!data.len);
943
944 if (endp)
945 {
946 rtx to1;
947
948 gcc_assert (!data.reverse);
949 if (data.autinc_to)
950 {
951 if (endp == 2)
952 {
953 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
954 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
955 else
956 data.to_addr = copy_to_mode_reg (to_addr_mode,
957 plus_constant (to_addr_mode,
958 data.to_addr,
959 -1));
960 }
961 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
962 data.offset);
963 }
964 else
965 {
966 if (endp == 2)
967 --data.offset;
968 to1 = adjust_address (data.to, QImode, data.offset);
969 }
970 return to1;
971 }
972 else
973 return data.to;
974 }
975
976 /* Return number of insns required to move L bytes by pieces.
977 ALIGN (in bits) is maximum alignment we can assume. */
978
979 unsigned HOST_WIDE_INT
980 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
981 unsigned int max_size)
982 {
983 unsigned HOST_WIDE_INT n_insns = 0;
984
985 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
986
987 while (max_size > 1 && l > 0)
988 {
989 machine_mode mode;
990 enum insn_code icode;
991
992 mode = widest_int_mode_for_size (max_size);
993
994 if (mode == VOIDmode)
995 break;
996
997 icode = optab_handler (mov_optab, mode);
998 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
999 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1000
1001 max_size = GET_MODE_SIZE (mode);
1002 }
1003
1004 gcc_assert (!l);
1005 return n_insns;
1006 }
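/* Worked example (assuming a target where MOVE_MAX_PIECES == 4, alignment
   is sufficient and mov patterns exist for QI/HI/SImode): move_by_pieces
   passes MAX_SIZE == MOVE_MAX_PIECES + 1 == 5, so for L == 11 the loop
   counts 11/4 = 2 SImode moves (leaving 3), then 3/2 = 1 HImode move
   (leaving 1), then 1 QImode move, i.e. 4 insns in total.  */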
1007
1008 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1009 with move instructions for mode MODE. GENFUN is the gen_... function
1010 to make a move insn for that mode. DATA has all the other info. */
1011
1012 static void
1013 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1014 struct move_by_pieces_d *data)
1015 {
1016 unsigned int size = GET_MODE_SIZE (mode);
1017 rtx to1 = NULL_RTX, from1;
1018
1019 while (data->len >= size)
1020 {
1021 if (data->reverse)
1022 data->offset -= size;
1023
1024 if (data->to)
1025 {
1026 if (data->autinc_to)
1027 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1028 data->offset);
1029 else
1030 to1 = adjust_address (data->to, mode, data->offset);
1031 }
1032
1033 if (data->autinc_from)
1034 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1035 data->offset);
1036 else
1037 from1 = adjust_address (data->from, mode, data->offset);
1038
1039 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1040 emit_insn (gen_add2_insn (data->to_addr,
1041 gen_int_mode (-(HOST_WIDE_INT) size,
1042 GET_MODE (data->to_addr))));
1043 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1044 emit_insn (gen_add2_insn (data->from_addr,
1045 gen_int_mode (-(HOST_WIDE_INT) size,
1046 GET_MODE (data->from_addr))));
1047
1048 if (data->to)
1049 emit_insn ((*genfun) (to1, from1));
1050 else
1051 {
1052 #ifdef PUSH_ROUNDING
1053 emit_single_push_insn (mode, from1, NULL);
1054 #else
1055 gcc_unreachable ();
1056 #endif
1057 }
1058
1059 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1060 emit_insn (gen_add2_insn (data->to_addr,
1061 gen_int_mode (size,
1062 GET_MODE (data->to_addr))));
1063 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1064 emit_insn (gen_add2_insn (data->from_addr,
1065 gen_int_mode (size,
1066 GET_MODE (data->from_addr))));
1067
1068 if (! data->reverse)
1069 data->offset += size;
1070
1071 data->len -= size;
1072 }
1073 }
1074 \f
1075 /* Emit code to move a block Y to a block X. This may be done with
1076 string-move instructions, with multiple scalar move instructions,
1077 or with a library call.
1078
1079 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1080 SIZE is an rtx that says how long they are.
1081 ALIGN is the maximum alignment we can assume they have.
1082 METHOD describes what kind of copy this is, and what mechanisms may be used.
1083 MIN_SIZE is the minimal size of the block to move.
1084 MAX_SIZE is the maximal size of the block to move; if it cannot be
1085 represented in unsigned HOST_WIDE_INT, it is a mask of all ones.
1086
1087 Return the address of the new block, if memcpy is called and returns it,
1088 0 otherwise. */
1089
1090 rtx
1091 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1092 unsigned int expected_align, HOST_WIDE_INT expected_size,
1093 unsigned HOST_WIDE_INT min_size,
1094 unsigned HOST_WIDE_INT max_size,
1095 unsigned HOST_WIDE_INT probable_max_size)
1096 {
1097 bool may_use_call;
1098 rtx retval = 0;
1099 unsigned int align;
1100
1101 gcc_assert (size);
1102 if (CONST_INT_P (size)
1103 && INTVAL (size) == 0)
1104 return 0;
1105
1106 switch (method)
1107 {
1108 case BLOCK_OP_NORMAL:
1109 case BLOCK_OP_TAILCALL:
1110 may_use_call = true;
1111 break;
1112
1113 case BLOCK_OP_CALL_PARM:
1114 may_use_call = block_move_libcall_safe_for_call_parm ();
1115
1116 /* Make inhibit_defer_pop nonzero around the library call
1117 to force it to pop the arguments right away. */
1118 NO_DEFER_POP;
1119 break;
1120
1121 case BLOCK_OP_NO_LIBCALL:
1122 may_use_call = false;
1123 break;
1124
1125 default:
1126 gcc_unreachable ();
1127 }
1128
1129 gcc_assert (MEM_P (x) && MEM_P (y));
1130 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1131 gcc_assert (align >= BITS_PER_UNIT);
1132
1133 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1134 block copy is more efficient for other large modes, e.g. DCmode. */
1135 x = adjust_address (x, BLKmode, 0);
1136 y = adjust_address (y, BLKmode, 0);
1137
1138 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1139 can be incorrect is coming from __builtin_memcpy. */
1140 if (CONST_INT_P (size))
1141 {
1142 x = shallow_copy_rtx (x);
1143 y = shallow_copy_rtx (y);
1144 set_mem_size (x, INTVAL (size));
1145 set_mem_size (y, INTVAL (size));
1146 }
1147
1148 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1149 move_by_pieces (x, y, INTVAL (size), align, 0);
1150 else if (emit_block_move_via_movmem (x, y, size, align,
1151 expected_align, expected_size,
1152 min_size, max_size, probable_max_size))
1153 ;
1154 else if (may_use_call
1155 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1156 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1157 {
1158 /* Since x and y are passed to a libcall, mark the corresponding
1159 tree EXPR as addressable. */
1160 tree y_expr = MEM_EXPR (y);
1161 tree x_expr = MEM_EXPR (x);
1162 if (y_expr)
1163 mark_addressable (y_expr);
1164 if (x_expr)
1165 mark_addressable (x_expr);
1166 retval = emit_block_move_via_libcall (x, y, size,
1167 method == BLOCK_OP_TAILCALL);
1168 }
1169
1170 else
1171 emit_block_move_via_loop (x, y, size, align);
1172
1173 if (method == BLOCK_OP_CALL_PARM)
1174 OK_DEFER_POP;
1175
1176 return retval;
1177 }
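/* To summarize the dispatch above: a constant-size copy small enough for
   can_move_by_pieces is expanded inline with move_by_pieces; otherwise the
   target's movmem pattern is tried; failing that, a memcpy libcall is
   emitted when METHOD permits it and both addresses are in the generic
   address space; and as a last resort an explicit byte-copy loop is
   generated.  */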
1178
1179 rtx
1180 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1181 {
1182 unsigned HOST_WIDE_INT max, min = 0;
1183 if (GET_CODE (size) == CONST_INT)
1184 min = max = UINTVAL (size);
1185 else
1186 max = GET_MODE_MASK (GET_MODE (size));
1187 return emit_block_move_hints (x, y, size, method, 0, -1,
1188 min, max, max);
1189 }
1190
1191 /* A subroutine of emit_block_move. Returns true if calling the
1192 block move libcall will not clobber any parameters which may have
1193 already been placed on the stack. */
1194
1195 static bool
1196 block_move_libcall_safe_for_call_parm (void)
1197 {
1198 #if defined (REG_PARM_STACK_SPACE)
1199 tree fn;
1200 #endif
1201
1202 /* If arguments are pushed on the stack, then they're safe. */
1203 if (PUSH_ARGS)
1204 return true;
1205
1206 /* If registers go on the stack anyway, any argument is sure to clobber
1207 an outgoing argument. */
1208 #if defined (REG_PARM_STACK_SPACE)
1209 fn = emit_block_move_libcall_fn (false);
1210 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1211 depend on its argument. */
1212 (void) fn;
1213 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1214 && REG_PARM_STACK_SPACE (fn) != 0)
1215 return false;
1216 #endif
1217
1218 /* If any argument goes in memory, then it might clobber an outgoing
1219 argument. */
1220 {
1221 CUMULATIVE_ARGS args_so_far_v;
1222 cumulative_args_t args_so_far;
1223 tree fn, arg;
1224
1225 fn = emit_block_move_libcall_fn (false);
1226 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1227 args_so_far = pack_cumulative_args (&args_so_far_v);
1228
1229 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1230 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1231 {
1232 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1233 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1234 NULL_TREE, true);
1235 if (!tmp || !REG_P (tmp))
1236 return false;
1237 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1238 return false;
1239 targetm.calls.function_arg_advance (args_so_far, mode,
1240 NULL_TREE, true);
1241 }
1242 }
1243 return true;
1244 }
1245
1246 /* A subroutine of emit_block_move. Expand a movmem pattern;
1247 return true if successful. */
1248
1249 static bool
1250 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1251 unsigned int expected_align, HOST_WIDE_INT expected_size,
1252 unsigned HOST_WIDE_INT min_size,
1253 unsigned HOST_WIDE_INT max_size,
1254 unsigned HOST_WIDE_INT probable_max_size)
1255 {
1256 int save_volatile_ok = volatile_ok;
1257 machine_mode mode;
1258
1259 if (expected_align < align)
1260 expected_align = align;
1261 if (expected_size != -1)
1262 {
1263 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1264 expected_size = probable_max_size;
1265 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1266 expected_size = min_size;
1267 }
1268
1269 /* Since this is a move insn, we don't care about volatility. */
1270 volatile_ok = 1;
1271
1272 /* Try the most limited insn first, because there's no point
1273 including more than one in the machine description unless
1274 the more limited one has some advantage. */
1275
1276 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1277 mode = GET_MODE_WIDER_MODE (mode))
1278 {
1279 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1280
1281 if (code != CODE_FOR_nothing
1282 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1283 here because if SIZE is less than the mode mask, as it is
1284 returned by the macro, it will definitely be less than the
1285 actual mode mask. Since SIZE is within the Pmode address
1286 space, we limit MODE to Pmode. */
1287 && ((CONST_INT_P (size)
1288 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1289 <= (GET_MODE_MASK (mode) >> 1)))
1290 || max_size <= (GET_MODE_MASK (mode) >> 1)
1291 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1292 {
1293 struct expand_operand ops[9];
1294 unsigned int nops;
1295
1296 /* ??? When called via emit_block_move_for_call, it'd be
1297 nice if there were some way to inform the backend, so
1298 that it doesn't fail the expansion because it thinks
1299 emitting the libcall would be more efficient. */
1300 nops = insn_data[(int) code].n_generator_args;
1301 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1302
1303 create_fixed_operand (&ops[0], x);
1304 create_fixed_operand (&ops[1], y);
1305 /* The check above guarantees that this size conversion is valid. */
1306 create_convert_operand_to (&ops[2], size, mode, true);
1307 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1308 if (nops >= 6)
1309 {
1310 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1311 create_integer_operand (&ops[5], expected_size);
1312 }
1313 if (nops >= 8)
1314 {
1315 create_integer_operand (&ops[6], min_size);
1316 /* If we cannot represent the maximal size,
1317 pass NULL for that operand. */
1318 if ((HOST_WIDE_INT) max_size != -1)
1319 create_integer_operand (&ops[7], max_size);
1320 else
1321 create_fixed_operand (&ops[7], NULL);
1322 }
1323 if (nops == 9)
1324 {
1325 /* If we cannot represent the probable maximal size,
1326 pass NULL for that operand. */
1327 if ((HOST_WIDE_INT) probable_max_size != -1)
1328 create_integer_operand (&ops[8], probable_max_size);
1329 else
1330 create_fixed_operand (&ops[8], NULL);
1331 }
1332 if (maybe_expand_insn (code, nops, ops))
1333 {
1334 volatile_ok = save_volatile_ok;
1335 return true;
1336 }
1337 }
1338 }
1339
1340 volatile_ok = save_volatile_ok;
1341 return false;
1342 }
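/* For reference, the operand layout passed to the movmem pattern above is:
   0 = destination MEM, 1 = source MEM, 2 = byte count, 3 = alignment in
   bytes, and, when the pattern accepts them, 4 = expected alignment in
   bytes, 5 = expected size, 6 = minimal size, 7 = maximal size (or NULL),
   8 = probable maximal size (or NULL).  */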
1343
1344 /* A subroutine of emit_block_move. Expand a call to memcpy.
1345 Return the return value from memcpy, 0 otherwise. */
1346
1347 rtx
1348 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1349 {
1350 rtx dst_addr, src_addr;
1351 tree call_expr, fn, src_tree, dst_tree, size_tree;
1352 machine_mode size_mode;
1353 rtx retval;
1354
1355 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1356 pseudos. We can then place those new pseudos into a VAR_DECL and
1357 use them later. */
1358
1359 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1360 src_addr = copy_addr_to_reg (XEXP (src, 0));
1361
1362 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1363 src_addr = convert_memory_address (ptr_mode, src_addr);
1364
1365 dst_tree = make_tree (ptr_type_node, dst_addr);
1366 src_tree = make_tree (ptr_type_node, src_addr);
1367
1368 size_mode = TYPE_MODE (sizetype);
1369
1370 size = convert_to_mode (size_mode, size, 1);
1371 size = copy_to_mode_reg (size_mode, size);
1372
1373 /* It is incorrect to use the libcall calling conventions to call
1374 memcpy in this context. This could be a user call to memcpy and
1375 the user may wish to examine the return value from memcpy. For
1376 targets where libcalls and normal calls have different conventions
1377 for returning pointers, we could end up generating incorrect code. */
1378
1379 size_tree = make_tree (sizetype, size);
1380
1381 fn = emit_block_move_libcall_fn (true);
1382 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1383 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1384
1385 retval = expand_normal (call_expr);
1386
1387 return retval;
1388 }
1389
1390 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1391 for the function we use for block copies. */
1392
1393 static GTY(()) tree block_move_fn;
1394
1395 void
1396 init_block_move_fn (const char *asmspec)
1397 {
1398 if (!block_move_fn)
1399 {
1400 tree args, fn, attrs, attr_args;
1401
1402 fn = get_identifier ("memcpy");
1403 args = build_function_type_list (ptr_type_node, ptr_type_node,
1404 const_ptr_type_node, sizetype,
1405 NULL_TREE);
1406
1407 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1408 DECL_EXTERNAL (fn) = 1;
1409 TREE_PUBLIC (fn) = 1;
1410 DECL_ARTIFICIAL (fn) = 1;
1411 TREE_NOTHROW (fn) = 1;
1412 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1413 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1414
1415 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1416 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1417
1418 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1419
1420 block_move_fn = fn;
1421 }
1422
1423 if (asmspec)
1424 set_user_assembler_name (block_move_fn, asmspec);
1425 }
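/* The "fn spec" attribute string "1" attached above is GCC's encoding for
   "this call returns its first argument" (memcpy returns DST), which the
   alias and return-value machinery can exploit.  */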
1426
1427 static tree
1428 emit_block_move_libcall_fn (int for_call)
1429 {
1430 static bool emitted_extern;
1431
1432 if (!block_move_fn)
1433 init_block_move_fn (NULL);
1434
1435 if (for_call && !emitted_extern)
1436 {
1437 emitted_extern = true;
1438 make_decl_rtl (block_move_fn);
1439 }
1440
1441 return block_move_fn;
1442 }
1443
1444 /* A subroutine of emit_block_move. Copy the data via an explicit
1445 loop. This is used only when libcalls are forbidden. */
1446 /* ??? It'd be nice to copy in hunks larger than QImode. */
1447
1448 static void
1449 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1450 unsigned int align ATTRIBUTE_UNUSED)
1451 {
1452 rtx_code_label *cmp_label, *top_label;
1453 rtx iter, x_addr, y_addr, tmp;
1454 machine_mode x_addr_mode = get_address_mode (x);
1455 machine_mode y_addr_mode = get_address_mode (y);
1456 machine_mode iter_mode;
1457
1458 iter_mode = GET_MODE (size);
1459 if (iter_mode == VOIDmode)
1460 iter_mode = word_mode;
1461
1462 top_label = gen_label_rtx ();
1463 cmp_label = gen_label_rtx ();
1464 iter = gen_reg_rtx (iter_mode);
1465
1466 emit_move_insn (iter, const0_rtx);
1467
1468 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1469 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1470 do_pending_stack_adjust ();
1471
1472 emit_jump (cmp_label);
1473 emit_label (top_label);
1474
1475 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1476 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1477
1478 if (x_addr_mode != y_addr_mode)
1479 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1480 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1481
1482 x = change_address (x, QImode, x_addr);
1483 y = change_address (y, QImode, y_addr);
1484
1485 emit_move_insn (x, y);
1486
1487 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1488 true, OPTAB_LIB_WIDEN);
1489 if (tmp != iter)
1490 emit_move_insn (iter, tmp);
1491
1492 emit_label (cmp_label);
1493
1494 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1495 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1496 }
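/* The RTL emitted by emit_block_move_via_loop corresponds to this sketch
   (pseudo-C, one byte per iteration):

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];
     iter = iter + 1;
   cmp:
     if (iter < size) goto top;

   with the backward branch predicted taken 90% of the time.  */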
1497 \f
1498 /* Copy all or part of a value X into registers starting at REGNO.
1499 The number of registers to be filled is NREGS. */
1500
1501 void
1502 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1503 {
1504 int i;
1505 #ifdef HAVE_load_multiple
1506 rtx pat;
1507 rtx_insn *last;
1508 #endif
1509
1510 if (nregs == 0)
1511 return;
1512
1513 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1514 x = validize_mem (force_const_mem (mode, x));
1515
1516 /* See if the machine can do this with a load multiple insn. */
1517 #ifdef HAVE_load_multiple
1518 if (HAVE_load_multiple)
1519 {
1520 last = get_last_insn ();
1521 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1522 GEN_INT (nregs));
1523 if (pat)
1524 {
1525 emit_insn (pat);
1526 return;
1527 }
1528 else
1529 delete_insns_since (last);
1530 }
1531 #endif
1532
1533 for (i = 0; i < nregs; i++)
1534 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1535 operand_subword_force (x, i, mode));
1536 }
1537
1538 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1539 The number of registers to be filled is NREGS. */
1540
1541 void
1542 move_block_from_reg (int regno, rtx x, int nregs)
1543 {
1544 int i;
1545
1546 if (nregs == 0)
1547 return;
1548
1549 /* See if the machine can do this with a store multiple insn. */
1550 #ifdef HAVE_store_multiple
1551 if (HAVE_store_multiple)
1552 {
1553 rtx_insn *last = get_last_insn ();
1554 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1555 GEN_INT (nregs));
1556 if (pat)
1557 {
1558 emit_insn (pat);
1559 return;
1560 }
1561 else
1562 delete_insns_since (last);
1563 }
1564 #endif
1565
1566 for (i = 0; i < nregs; i++)
1567 {
1568 rtx tem = operand_subword (x, i, 1, BLKmode);
1569
1570 gcc_assert (tem);
1571
1572 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1573 }
1574 }
1575
1576 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1577 ORIG, where ORIG is a non-consecutive group of registers represented by
1578 a PARALLEL. The clone is identical to the original except in that the
1579 original set of registers is replaced by a new set of pseudo registers.
1580 The new set has the same modes as the original set. */
1581
1582 rtx
1583 gen_group_rtx (rtx orig)
1584 {
1585 int i, length;
1586 rtx *tmps;
1587
1588 gcc_assert (GET_CODE (orig) == PARALLEL);
1589
1590 length = XVECLEN (orig, 0);
1591 tmps = XALLOCAVEC (rtx, length);
1592
1593 /* Skip a NULL entry in first slot. */
1594 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1595
1596 if (i)
1597 tmps[0] = 0;
1598
1599 for (; i < length; i++)
1600 {
1601 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1602 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1603
1604 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1605 }
1606
1607 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1608 }
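/* For reference, the PARALLELs handled by the group routines describe a
   value spread over non-consecutive registers; each element is an EXPR_LIST
   pairing a register with its byte offset within the whole value, e.g.
   (hypothetical sketch):

     (parallel [(expr_list (reg:DI 4) (const_int 0))
                (expr_list (reg:DI 5) (const_int 8))])

   A NULL register in the first element means part of the value also lives
   on the stack; gen_group_rtx above and the group load/store routines below
   all skip such an entry.  */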
1609
1610 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1611 except that values are placed in TMPS[i], and must later be moved
1612 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1613
1614 static void
1615 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1616 {
1617 rtx src;
1618 int start, i;
1619 machine_mode m = GET_MODE (orig_src);
1620
1621 gcc_assert (GET_CODE (dst) == PARALLEL);
1622
1623 if (m != VOIDmode
1624 && !SCALAR_INT_MODE_P (m)
1625 && !MEM_P (orig_src)
1626 && GET_CODE (orig_src) != CONCAT)
1627 {
1628 machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1629 if (imode == BLKmode)
1630 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1631 else
1632 src = gen_reg_rtx (imode);
1633 if (imode != BLKmode)
1634 src = gen_lowpart (GET_MODE (orig_src), src);
1635 emit_move_insn (src, orig_src);
1636 /* ...and back again. */
1637 if (imode != BLKmode)
1638 src = gen_lowpart (imode, src);
1639 emit_group_load_1 (tmps, dst, src, type, ssize);
1640 return;
1641 }
1642
1643 /* Check for a NULL entry, used to indicate that the parameter goes
1644 both on the stack and in registers. */
1645 if (XEXP (XVECEXP (dst, 0, 0), 0))
1646 start = 0;
1647 else
1648 start = 1;
1649
1650 /* Process the pieces. */
1651 for (i = start; i < XVECLEN (dst, 0); i++)
1652 {
1653 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1654 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1655 unsigned int bytelen = GET_MODE_SIZE (mode);
1656 int shift = 0;
1657
1658 /* Handle trailing fragments that run over the size of the struct. */
1659 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1660 {
1661 /* Arrange to shift the fragment to where it belongs.
1662 extract_bit_field loads to the lsb of the reg. */
1663 if (
1664 #ifdef BLOCK_REG_PADDING
1665 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1666 == (BYTES_BIG_ENDIAN ? upward : downward)
1667 #else
1668 BYTES_BIG_ENDIAN
1669 #endif
1670 )
1671 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1672 bytelen = ssize - bytepos;
1673 gcc_assert (bytelen > 0);
1674 }
1675
1676 /* If we won't be loading directly from memory, protect the real source
1677 from strange tricks we might play; but make sure that the source can
1678 be loaded directly into the destination. */
1679 src = orig_src;
1680 if (!MEM_P (orig_src)
1681 && (!CONSTANT_P (orig_src)
1682 || (GET_MODE (orig_src) != mode
1683 && GET_MODE (orig_src) != VOIDmode)))
1684 {
1685 if (GET_MODE (orig_src) == VOIDmode)
1686 src = gen_reg_rtx (mode);
1687 else
1688 src = gen_reg_rtx (GET_MODE (orig_src));
1689
1690 emit_move_insn (src, orig_src);
1691 }
1692
1693 /* Optimize the access just a bit. */
1694 if (MEM_P (src)
1695 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1696 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1697 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1698 && bytelen == GET_MODE_SIZE (mode))
1699 {
1700 tmps[i] = gen_reg_rtx (mode);
1701 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1702 }
1703 else if (COMPLEX_MODE_P (mode)
1704 && GET_MODE (src) == mode
1705 && bytelen == GET_MODE_SIZE (mode))
1706 /* Let emit_move_complex do the bulk of the work. */
1707 tmps[i] = src;
1708 else if (GET_CODE (src) == CONCAT)
1709 {
1710 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1711 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1712
1713 if ((bytepos == 0 && bytelen == slen0)
1714 || (bytepos != 0 && bytepos + bytelen <= slen))
1715 {
1716 /* The following assumes that the concatenated objects all
1717 have the same size. In this case, a simple calculation
1718 can be used to determine the object and the bit field
1719 to be extracted. */
1720 tmps[i] = XEXP (src, bytepos / slen0);
1721 if (! CONSTANT_P (tmps[i])
1722 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1723 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1724 (bytepos % slen0) * BITS_PER_UNIT,
1725 1, NULL_RTX, mode, mode);
1726 }
1727 else
1728 {
1729 rtx mem;
1730
1731 gcc_assert (!bytepos);
1732 mem = assign_stack_temp (GET_MODE (src), slen);
1733 emit_move_insn (mem, src);
1734 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1735 0, 1, NULL_RTX, mode, mode);
1736 }
1737 }
1738 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1739 SIMD register, which is currently broken. Until we get GCC
1740 to emit proper RTL for these cases, let's dump to memory. */
1741 else if (VECTOR_MODE_P (GET_MODE (dst))
1742 && REG_P (src))
1743 {
1744 int slen = GET_MODE_SIZE (GET_MODE (src));
1745 rtx mem;
1746
1747 mem = assign_stack_temp (GET_MODE (src), slen);
1748 emit_move_insn (mem, src);
1749 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1750 }
1751 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1752 && XVECLEN (dst, 0) > 1)
1753 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1754 else if (CONSTANT_P (src))
1755 {
1756 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1757
1758 if (len == ssize)
1759 tmps[i] = src;
1760 else
1761 {
1762 rtx first, second;
1763
1764 /* TODO: const_wide_int can have sizes other than this... */
1765 gcc_assert (2 * len == ssize);
1766 split_double (src, &first, &second);
1767 if (i)
1768 tmps[i] = second;
1769 else
1770 tmps[i] = first;
1771 }
1772 }
1773 else if (REG_P (src) && GET_MODE (src) == mode)
1774 tmps[i] = src;
1775 else
1776 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1777 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1778 mode, mode);
1779
1780 if (shift)
1781 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1782 shift, tmps[i], 0);
1783 }
1784 }
1785
1786 /* Emit code to move a block SRC of type TYPE to a block DST,
1787 where DST is non-consecutive registers represented by a PARALLEL.
1788 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1789 if not known. */
1790
1791 void
1792 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1793 {
1794 rtx *tmps;
1795 int i;
1796
1797 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1798 emit_group_load_1 (tmps, dst, src, type, ssize);
1799
1800 /* Copy the extracted pieces into the proper (probable) hard regs. */
1801 for (i = 0; i < XVECLEN (dst, 0); i++)
1802 {
1803 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1804 if (d == NULL)
1805 continue;
1806 emit_move_insn (d, tmps[i]);
1807 }
1808 }
1809
1810 /* Similar, but load SRC into new pseudos in a format that looks like
1811 PARALLEL. This can later be fed to emit_group_move to get things
1812 in the right place. */
1813
1814 rtx
1815 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1816 {
1817 rtvec vec;
1818 int i;
1819
1820 vec = rtvec_alloc (XVECLEN (parallel, 0));
1821 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1822
1823 /* Convert the vector to look just like the original PARALLEL, except
1824 with the computed values. */
1825 for (i = 0; i < XVECLEN (parallel, 0); i++)
1826 {
1827 rtx e = XVECEXP (parallel, 0, i);
1828 rtx d = XEXP (e, 0);
1829
1830 if (d)
1831 {
1832 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1833 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1834 }
1835 RTVEC_ELT (vec, i) = e;
1836 }
1837
1838 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1839 }
1840
1841 /* Emit code to move a block SRC to block DST, where SRC and DST are
1842 non-consecutive groups of registers, each represented by a PARALLEL. */
1843
1844 void
1845 emit_group_move (rtx dst, rtx src)
1846 {
1847 int i;
1848
1849 gcc_assert (GET_CODE (src) == PARALLEL
1850 && GET_CODE (dst) == PARALLEL
1851 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1852
1853 /* Skip first entry if NULL. */
1854 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1855 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1856 XEXP (XVECEXP (src, 0, i), 0));
1857 }
1858
1859 /* Move a group of registers represented by a PARALLEL into pseudos. */
1860
1861 rtx
1862 emit_group_move_into_temps (rtx src)
1863 {
1864 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1865 int i;
1866
1867 for (i = 0; i < XVECLEN (src, 0); i++)
1868 {
1869 rtx e = XVECEXP (src, 0, i);
1870 rtx d = XEXP (e, 0);
1871
1872 if (d)
1873 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1874 RTVEC_ELT (vec, i) = e;
1875 }
1876
1877 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1878 }
1879
1880 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1881 where SRC is non-consecutive registers represented by a PARALLEL.
1882 SSIZE represents the total size of block ORIG_DST, or -1 if not
1883 known. */
1884
1885 void
1886 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1887 {
1888 rtx *tmps, dst;
1889 int start, finish, i;
1890 machine_mode m = GET_MODE (orig_dst);
1891
1892 gcc_assert (GET_CODE (src) == PARALLEL);
1893
1894 if (!SCALAR_INT_MODE_P (m)
1895 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1896 {
1897 machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1898 if (imode == BLKmode)
1899 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1900 else
1901 dst = gen_reg_rtx (imode);
1902 emit_group_store (dst, src, type, ssize);
1903 if (imode != BLKmode)
1904 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1905 emit_move_insn (orig_dst, dst);
1906 return;
1907 }
1908
1909 /* Check for a NULL entry, used to indicate that the parameter goes
1910 both on the stack and in registers. */
1911 if (XEXP (XVECEXP (src, 0, 0), 0))
1912 start = 0;
1913 else
1914 start = 1;
1915 finish = XVECLEN (src, 0);
1916
1917 tmps = XALLOCAVEC (rtx, finish);
1918
1919 /* Copy the (probable) hard regs into pseudos. */
1920 for (i = start; i < finish; i++)
1921 {
1922 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1923 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1924 {
1925 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1926 emit_move_insn (tmps[i], reg);
1927 }
1928 else
1929 tmps[i] = reg;
1930 }
1931
1932 /* If we won't be storing directly into memory, protect the real destination
1933 from strange tricks we might play. */
1934 dst = orig_dst;
1935 if (GET_CODE (dst) == PARALLEL)
1936 {
1937 rtx temp;
1938
1939 /* We can get a PARALLEL dst if there is a conditional expression in
1940 a return statement. In that case, the dst and src are the same,
1941 so no action is necessary. */
1942 if (rtx_equal_p (dst, src))
1943 return;
1944
1945 /* It is unclear if we can ever reach here, but we may as well handle
1946 it. Allocate a temporary, and split this into a store/load to/from
1947 the temporary. */
1948 temp = assign_stack_temp (GET_MODE (dst), ssize);
1949 emit_group_store (temp, src, type, ssize);
1950 emit_group_load (dst, temp, type, ssize);
1951 return;
1952 }
1953 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1954 {
1955 machine_mode outer = GET_MODE (dst);
1956 machine_mode inner;
1957 HOST_WIDE_INT bytepos;
1958 bool done = false;
1959 rtx temp;
1960
1961 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1962 dst = gen_reg_rtx (outer);
1963
1964 /* Make life a bit easier for combine. */
1965 /* If the first element of the vector is the low part
1966 of the destination mode, use a paradoxical subreg to
1967 initialize the destination. */
1968 if (start < finish)
1969 {
1970 inner = GET_MODE (tmps[start]);
1971 bytepos = subreg_lowpart_offset (inner, outer);
1972 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1973 {
1974 temp = simplify_gen_subreg (outer, tmps[start],
1975 inner, 0);
1976 if (temp)
1977 {
1978 emit_move_insn (dst, temp);
1979 done = true;
1980 start++;
1981 }
1982 }
1983 }
1984
1985 /* If the first element wasn't the low part, try the last. */
1986 if (!done
1987 && start < finish - 1)
1988 {
1989 inner = GET_MODE (tmps[finish - 1]);
1990 bytepos = subreg_lowpart_offset (inner, outer);
1991 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1992 {
1993 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1994 inner, 0);
1995 if (temp)
1996 {
1997 emit_move_insn (dst, temp);
1998 done = true;
1999 finish--;
2000 }
2001 }
2002 }
2003
2004 /* Otherwise, simply initialize the result to zero. */
2005 if (!done)
2006 emit_move_insn (dst, CONST0_RTX (outer));
2007 }
2008
2009 /* Process the pieces. */
2010 for (i = start; i < finish; i++)
2011 {
2012 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2013 machine_mode mode = GET_MODE (tmps[i]);
2014 unsigned int bytelen = GET_MODE_SIZE (mode);
2015 unsigned int adj_bytelen;
2016 rtx dest = dst;
2017
2018 /* Handle trailing fragments that run over the size of the struct. */
2019 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2020 adj_bytelen = ssize - bytepos;
2021 else
2022 adj_bytelen = bytelen;
2023
2024 if (GET_CODE (dst) == CONCAT)
2025 {
2026 if (bytepos + adj_bytelen
2027 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2028 dest = XEXP (dst, 0);
2029 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2030 {
2031 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2032 dest = XEXP (dst, 1);
2033 }
2034 else
2035 {
2036 machine_mode dest_mode = GET_MODE (dest);
2037 machine_mode tmp_mode = GET_MODE (tmps[i]);
2038
2039 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2040
2041 if (GET_MODE_ALIGNMENT (dest_mode)
2042 >= GET_MODE_ALIGNMENT (tmp_mode))
2043 {
2044 dest = assign_stack_temp (dest_mode,
2045 GET_MODE_SIZE (dest_mode));
2046 emit_move_insn (adjust_address (dest,
2047 tmp_mode,
2048 bytepos),
2049 tmps[i]);
2050 dst = dest;
2051 }
2052 else
2053 {
2054 dest = assign_stack_temp (tmp_mode,
2055 GET_MODE_SIZE (tmp_mode));
2056 emit_move_insn (dest, tmps[i]);
2057 dst = adjust_address (dest, dest_mode, bytepos);
2058 }
2059 break;
2060 }
2061 }
2062
2063 /* Handle trailing fragments that run over the size of the struct. */
2064 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2065 {
2066 /* store_bit_field always takes its value from the lsb.
2067 Move the fragment to the lsb if it's not already there. */
2068 if (
2069 #ifdef BLOCK_REG_PADDING
2070 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2071 == (BYTES_BIG_ENDIAN ? upward : downward)
2072 #else
2073 BYTES_BIG_ENDIAN
2074 #endif
2075 )
2076 {
2077 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2078 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2079 shift, tmps[i], 0);
2080 }
2081
2082 /* Make sure not to write past the end of the struct. */
2083 store_bit_field (dest,
2084 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2085 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2086 VOIDmode, tmps[i]);
2087 }
2088
2089 /* Optimize the access just a bit. */
2090 else if (MEM_P (dest)
2091 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2092 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2093 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2094 && bytelen == GET_MODE_SIZE (mode))
2095 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2096
2097 else
2098 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2099 0, 0, mode, tmps[i]);
2100 }
2101
2102 /* Copy from the pseudo into the (probable) hard reg. */
2103 if (orig_dst != dst)
2104 emit_move_insn (orig_dst, dst);
2105 }
2106
2107 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2108 of the value stored in X. */
2109
2110 rtx
2111 maybe_emit_group_store (rtx x, tree type)
2112 {
2113 machine_mode mode = TYPE_MODE (type);
2114 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2115 if (GET_CODE (x) == PARALLEL)
2116 {
2117 rtx result = gen_reg_rtx (mode);
2118 emit_group_store (result, x, type, int_size_in_bytes (type));
2119 return result;
2120 }
2121 return x;
2122 }
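
/* Illustrative sketch (hypothetical, not part of the original file): how a
   caller expanding a call might flatten a PARALLEL return value with
   maybe_emit_group_store before using it as an ordinary rtx.  */
#if 0
static rtx
example_flatten_return (rtx call_result, tree return_type)
{
  /* For a PARALLEL this allocates a pseudo of TYPE_MODE (return_type)
     and stores the pieces into it; otherwise CALL_RESULT is returned
     unchanged.  */
  return maybe_emit_group_store (call_result, return_type);
}
#endif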
2123
2124 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2125
2126 This is used on targets that return BLKmode values in registers. */
2127
2128 void
2129 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2130 {
2131 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2132 rtx src = NULL, dst = NULL;
2133 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2134 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2135 machine_mode mode = GET_MODE (srcreg);
2136 machine_mode tmode = GET_MODE (target);
2137 machine_mode copy_mode;
2138
2139 /* BLKmode registers created in the back-end shouldn't have survived. */
2140 gcc_assert (mode != BLKmode);
2141
2142 /* If the structure doesn't take up a whole number of words, see whether
2143 SRCREG is padded on the left or on the right. If it's on the left,
2144 set PADDING_CORRECTION to the number of bits to skip.
2145
2146 In most ABIs, the structure will be returned at the least significant end of
2147 the register, which translates to right padding on little-endian
2148 targets and left padding on big-endian targets. The opposite
2149 holds if the structure is returned at the most significant
2150 end of the register. */
2151 if (bytes % UNITS_PER_WORD != 0
2152 && (targetm.calls.return_in_msb (type)
2153 ? !BYTES_BIG_ENDIAN
2154 : BYTES_BIG_ENDIAN))
2155 padding_correction
2156 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2157
2158 /* We can use a single move if we have an exact mode for the size. */
2159 else if (MEM_P (target)
2160 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2161 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2162 && bytes == GET_MODE_SIZE (mode))
2163 {
2164 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2165 return;
2166 }
2167
2168 /* And if we additionally have the same mode for a register. */
2169 else if (REG_P (target)
2170 && GET_MODE (target) == mode
2171 && bytes == GET_MODE_SIZE (mode))
2172 {
2173 emit_move_insn (target, srcreg);
2174 return;
2175 }
2176
2177 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2178 into a new pseudo which is a full word. */
2179 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2180 {
2181 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2182 mode = word_mode;
2183 }
2184
2185 /* Copy the structure BITSIZE bits at a time. If the target lives in
2186 memory, take care of not reading/writing past its end by selecting
2187 a copy mode suited to BITSIZE. This should always be possible given
2188 how it is computed.
2189
2190 If the target lives in register, make sure not to select a copy mode
2191 larger than the mode of the register.
2192
2193 We could probably emit more efficient code for machines which do not use
2194 strict alignment, but it doesn't seem worth the effort at the current
2195 time. */
2196
2197 copy_mode = word_mode;
2198 if (MEM_P (target))
2199 {
2200 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2201 if (mem_mode != BLKmode)
2202 copy_mode = mem_mode;
2203 }
2204 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2205 copy_mode = tmode;
2206
2207 for (bitpos = 0, xbitpos = padding_correction;
2208 bitpos < bytes * BITS_PER_UNIT;
2209 bitpos += bitsize, xbitpos += bitsize)
2210 {
2211 /* We need a new source operand each time xbitpos is on a
2212 word boundary and when xbitpos == padding_correction
2213 (the first time through). */
2214 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2215 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2216
2217 /* We need a new destination operand each time bitpos is on
2218 a word boundary. */
2219 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2220 dst = target;
2221 else if (bitpos % BITS_PER_WORD == 0)
2222 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2223
2224 /* Use xbitpos for the source extraction (right justified) and
2225 bitpos for the destination store (left justified). */
2226 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2227 extract_bit_field (src, bitsize,
2228 xbitpos % BITS_PER_WORD, 1,
2229 NULL_RTX, copy_mode, copy_mode));
2230 }
2231 }
2232
2233 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2234 register if it contains any data, otherwise return null.
2235
2236 This is used on targets that return BLKmode values in registers. */
2237
2238 rtx
2239 copy_blkmode_to_reg (machine_mode mode, tree src)
2240 {
2241 int i, n_regs;
2242 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2243 unsigned int bitsize;
2244 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2245 machine_mode dst_mode;
2246
2247 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2248
2249 x = expand_normal (src);
2250
2251 bytes = int_size_in_bytes (TREE_TYPE (src));
2252 if (bytes == 0)
2253 return NULL_RTX;
2254
2255 /* If the structure doesn't take up a whole number of words, see
2256 whether the register value should be padded on the left or on
2257 the right. Set PADDING_CORRECTION to the number of padding
2258 bits needed on the left side.
2259
2260 In most ABIs, the structure will be returned at the least significant end of
2261 the register, which translates to right padding on little-endian
2262 targets and left padding on big-endian targets. The opposite
2263 holds if the structure is returned at the most significant
2264 end of the register. */
2265 if (bytes % UNITS_PER_WORD != 0
2266 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2267 ? !BYTES_BIG_ENDIAN
2268 : BYTES_BIG_ENDIAN))
2269 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2270 * BITS_PER_UNIT));
2271
2272 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2273 dst_words = XALLOCAVEC (rtx, n_regs);
2274 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2275
2276 /* Copy the structure BITSIZE bits at a time. */
2277 for (bitpos = 0, xbitpos = padding_correction;
2278 bitpos < bytes * BITS_PER_UNIT;
2279 bitpos += bitsize, xbitpos += bitsize)
2280 {
2281 /* We need a new destination pseudo each time xbitpos is
2282 on a word boundary and when xbitpos == padding_correction
2283 (the first time through). */
2284 if (xbitpos % BITS_PER_WORD == 0
2285 || xbitpos == padding_correction)
2286 {
2287 /* Generate an appropriate register. */
2288 dst_word = gen_reg_rtx (word_mode);
2289 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2290
2291 /* Clear the destination before we move anything into it. */
2292 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2293 }
2294
2295 /* We need a new source operand each time bitpos is on a word
2296 boundary. */
2297 if (bitpos % BITS_PER_WORD == 0)
2298 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2299
2300 /* Use bitpos for the source extraction (left justified) and
2301 xbitpos for the destination store (right justified). */
2302 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2303 0, 0, word_mode,
2304 extract_bit_field (src_word, bitsize,
2305 bitpos % BITS_PER_WORD, 1,
2306 NULL_RTX, word_mode, word_mode));
2307 }
2308
2309 if (mode == BLKmode)
2310 {
2311 /* Find the smallest integer mode large enough to hold the
2312 entire structure. */
2313 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2314 mode != VOIDmode;
2315 mode = GET_MODE_WIDER_MODE (mode))
2316 /* Have we found a large enough mode? */
2317 if (GET_MODE_SIZE (mode) >= bytes)
2318 break;
2319
2320 /* A suitable mode should have been found. */
2321 gcc_assert (mode != VOIDmode);
2322 }
2323
2324 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2325 dst_mode = word_mode;
2326 else
2327 dst_mode = mode;
2328 dst = gen_reg_rtx (dst_mode);
2329
2330 for (i = 0; i < n_regs; i++)
2331 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2332
2333 if (mode != dst_mode)
2334 dst = gen_lowpart (mode, dst);
2335
2336 return dst;
2337 }
2338
2339 /* Add a USE expression for REG to the (possibly empty) list pointed
2340 to by CALL_FUSAGE. REG must denote a hard register. */
2341
2342 void
2343 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2344 {
2345 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2346
2347 *call_fusage
2348 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2349 }
2350
2351 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2352 to by CALL_FUSAGE. REG must denote a hard register. */
2353
2354 void
2355 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2356 {
2357 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2358
2359 *call_fusage
2360 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2361 }
2362
2363 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2364 starting at REGNO. All of these registers must be hard registers. */
2365
2366 void
2367 use_regs (rtx *call_fusage, int regno, int nregs)
2368 {
2369 int i;
2370
2371 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2372
2373 for (i = 0; i < nregs; i++)
2374 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2375 }
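
/* Illustrative sketch (hypothetical): building a call-usage list with
   use_regs for a call that passes an argument in two consecutive hard
   registers starting at REGNO.  */
#if 0
static rtx
example_call_fusage (int regno)
{
  rtx call_fusage = NULL_RTX;

  /* Adds USE expressions for REGNO and REGNO + 1 to the list.  */
  use_regs (&call_fusage, regno, 2);
  return call_fusage;
}
#endif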
2376
2377 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2378 PARALLEL REGS. This is for calls that pass values in multiple
2379 non-contiguous locations. The Irix 6 ABI has examples of this. */
2380
2381 void
2382 use_group_regs (rtx *call_fusage, rtx regs)
2383 {
2384 int i;
2385
2386 for (i = 0; i < XVECLEN (regs, 0); i++)
2387 {
2388 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2389
2390 /* A NULL entry means the parameter goes both on the stack and in
2391 registers. This can also be a MEM for targets that pass values
2392 partially on the stack and partially in registers. */
2393 if (reg != 0 && REG_P (reg))
2394 use_reg (call_fusage, reg);
2395 }
2396 }
2397
2398 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2399 assignment and the code of the expression on the RHS is CODE. Return
2400 NULL otherwise. */
2401
2402 static gimple
2403 get_def_for_expr (tree name, enum tree_code code)
2404 {
2405 gimple def_stmt;
2406
2407 if (TREE_CODE (name) != SSA_NAME)
2408 return NULL;
2409
2410 def_stmt = get_gimple_for_ssa_name (name);
2411 if (!def_stmt
2412 || gimple_assign_rhs_code (def_stmt) != code)
2413 return NULL;
2414
2415 return def_stmt;
2416 }
2417
2418 #ifdef HAVE_conditional_move
2419 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2420 assignment and the class of the expression on the RHS is CLASS. Return
2421 NULL otherwise. */
2422
2423 static gimple
2424 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2425 {
2426 gimple def_stmt;
2427
2428 if (TREE_CODE (name) != SSA_NAME)
2429 return NULL;
2430
2431 def_stmt = get_gimple_for_ssa_name (name);
2432 if (!def_stmt
2433 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2434 return NULL;
2435
2436 return def_stmt;
2437 }
2438 #endif
2439 \f
2440
2441 /* Determine whether the LEN bytes generated by CONSTFUN can be
2442 stored to memory using several move instructions. CONSTFUNDATA is
2443 a pointer which will be passed as an argument in every CONSTFUN call.
2444 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2445 a memset operation and false if it's a copy of a constant string.
2446 Return nonzero if a call to store_by_pieces should succeed. */
2447
2448 int
2449 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2450 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2451 void *constfundata, unsigned int align, bool memsetp)
2452 {
2453 unsigned HOST_WIDE_INT l;
2454 unsigned int max_size;
2455 HOST_WIDE_INT offset = 0;
2456 machine_mode mode;
2457 enum insn_code icode;
2458 int reverse;
2459 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2460 rtx cst ATTRIBUTE_UNUSED;
2461
2462 if (len == 0)
2463 return 1;
2464
2465 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2466 memsetp
2467 ? SET_BY_PIECES
2468 : STORE_BY_PIECES,
2469 optimize_insn_for_speed_p ()))
2470 return 0;
2471
2472 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2473
2474 /* We would first store what we can in the largest integer mode, then go to
2475 successively smaller modes. */
2476
2477 for (reverse = 0;
2478 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2479 reverse++)
2480 {
2481 l = len;
2482 max_size = STORE_MAX_PIECES + 1;
2483 while (max_size > 1 && l > 0)
2484 {
2485 mode = widest_int_mode_for_size (max_size);
2486
2487 if (mode == VOIDmode)
2488 break;
2489
2490 icode = optab_handler (mov_optab, mode);
2491 if (icode != CODE_FOR_nothing
2492 && align >= GET_MODE_ALIGNMENT (mode))
2493 {
2494 unsigned int size = GET_MODE_SIZE (mode);
2495
2496 while (l >= size)
2497 {
2498 if (reverse)
2499 offset -= size;
2500
2501 cst = (*constfun) (constfundata, offset, mode);
2502 if (!targetm.legitimate_constant_p (mode, cst))
2503 return 0;
2504
2505 if (!reverse)
2506 offset += size;
2507
2508 l -= size;
2509 }
2510 }
2511
2512 max_size = GET_MODE_SIZE (mode);
2513 }
2514
2515 /* The code above should have handled everything. */
2516 gcc_assert (!l);
2517 }
2518
2519 return 1;
2520 }
2521
2522 /* Generate several move instructions to store LEN bytes generated by
2523 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2524 pointer which will be passed as an argument in every CONSTFUN call.
2525 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2526 a memset operation and false if it's a copy of a constant string.
2527 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2528 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
2529 stpcpy. */
2530
2531 rtx
2532 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2533 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2534 void *constfundata, unsigned int align, bool memsetp, int endp)
2535 {
2536 machine_mode to_addr_mode = get_address_mode (to);
2537 struct store_by_pieces_d data;
2538
2539 if (len == 0)
2540 {
2541 gcc_assert (endp != 2);
2542 return to;
2543 }
2544
2545 gcc_assert (targetm.use_by_pieces_infrastructure_p
2546 (len, align,
2547 memsetp
2548 ? SET_BY_PIECES
2549 : STORE_BY_PIECES,
2550 optimize_insn_for_speed_p ()));
2551
2552 data.constfun = constfun;
2553 data.constfundata = constfundata;
2554 data.len = len;
2555 data.to = to;
2556 store_by_pieces_1 (&data, align);
2557 if (endp)
2558 {
2559 rtx to1;
2560
2561 gcc_assert (!data.reverse);
2562 if (data.autinc_to)
2563 {
2564 if (endp == 2)
2565 {
2566 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2567 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2568 else
2569 data.to_addr = copy_to_mode_reg (to_addr_mode,
2570 plus_constant (to_addr_mode,
2571 data.to_addr,
2572 -1));
2573 }
2574 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2575 data.offset);
2576 }
2577 else
2578 {
2579 if (endp == 2)
2580 --data.offset;
2581 to1 = adjust_address (data.to, QImode, data.offset);
2582 }
2583 return to1;
2584 }
2585 else
2586 return data.to;
2587 }
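
/* Illustrative sketch (hypothetical): a CONSTFUN callback that yields zero
   for every piece, and a helper that uses it to clear LEN bytes of the
   BLKmode MEM TO when can_store_by_pieces says a piecewise expansion is
   possible.  This mirrors the clear_by_pieces_1 pattern below.  */
#if 0
static rtx
example_zero_piece (void *data ATTRIBUTE_UNUSED,
                    HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                    machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}

static void
example_clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                         unsigned int align)
{
  if (can_store_by_pieces (len, example_zero_piece, NULL, align, true))
    store_by_pieces (to, len, example_zero_piece, NULL, align, true, 0);
}
#endif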
2588
2589 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2590 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2591
2592 static void
2593 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2594 {
2595 struct store_by_pieces_d data;
2596
2597 if (len == 0)
2598 return;
2599
2600 data.constfun = clear_by_pieces_1;
2601 data.constfundata = NULL;
2602 data.len = len;
2603 data.to = to;
2604 store_by_pieces_1 (&data, align);
2605 }
2606
2607 /* Callback routine for clear_by_pieces.
2608 Return const0_rtx unconditionally. */
2609
2610 static rtx
2611 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2612 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2613 machine_mode mode ATTRIBUTE_UNUSED)
2614 {
2615 return const0_rtx;
2616 }
2617
2618 /* Subroutine of clear_by_pieces and store_by_pieces.
2619 Generate several move instructions to store LEN bytes of block TO. (A MEM
2620 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2621
2622 static void
2623 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2624 unsigned int align ATTRIBUTE_UNUSED)
2625 {
2626 machine_mode to_addr_mode = get_address_mode (data->to);
2627 rtx to_addr = XEXP (data->to, 0);
2628 unsigned int max_size = STORE_MAX_PIECES + 1;
2629 enum insn_code icode;
2630
2631 data->offset = 0;
2632 data->to_addr = to_addr;
2633 data->autinc_to
2634 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2635 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2636
2637 data->explicit_inc_to = 0;
2638 data->reverse
2639 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2640 if (data->reverse)
2641 data->offset = data->len;
2642
2643 /* If storing requires more than two move insns,
2644 copy addresses to registers (to make displacements shorter)
2645 and use post-increment if available. */
2646 if (!data->autinc_to
2647 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2648 {
2649 /* Determine the main mode we'll be using.
2650 MODE might not be used depending on the definitions of the
2651 USE_* macros below. */
2652 machine_mode mode ATTRIBUTE_UNUSED
2653 = widest_int_mode_for_size (max_size);
2654
2655 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2656 {
2657 data->to_addr = copy_to_mode_reg (to_addr_mode,
2658 plus_constant (to_addr_mode,
2659 to_addr,
2660 data->len));
2661 data->autinc_to = 1;
2662 data->explicit_inc_to = -1;
2663 }
2664
2665 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2666 && ! data->autinc_to)
2667 {
2668 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2669 data->autinc_to = 1;
2670 data->explicit_inc_to = 1;
2671 }
2672
2673 if ( !data->autinc_to && CONSTANT_P (to_addr))
2674 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2675 }
2676
2677 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2678
2679 /* First store what we can in the largest integer mode, then go to
2680 successively smaller modes. */
2681
2682 while (max_size > 1 && data->len > 0)
2683 {
2684 machine_mode mode = widest_int_mode_for_size (max_size);
2685
2686 if (mode == VOIDmode)
2687 break;
2688
2689 icode = optab_handler (mov_optab, mode);
2690 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2691 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2692
2693 max_size = GET_MODE_SIZE (mode);
2694 }
2695
2696 /* The code above should have handled everything. */
2697 gcc_assert (!data->len);
2698 }
2699
2700 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2701 with move instructions for mode MODE. GENFUN is the gen_... function
2702 to make a move insn for that mode. DATA has all the other info. */
2703
2704 static void
2705 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2706 struct store_by_pieces_d *data)
2707 {
2708 unsigned int size = GET_MODE_SIZE (mode);
2709 rtx to1, cst;
2710
2711 while (data->len >= size)
2712 {
2713 if (data->reverse)
2714 data->offset -= size;
2715
2716 if (data->autinc_to)
2717 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2718 data->offset);
2719 else
2720 to1 = adjust_address (data->to, mode, data->offset);
2721
2722 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2723 emit_insn (gen_add2_insn (data->to_addr,
2724 gen_int_mode (-(HOST_WIDE_INT) size,
2725 GET_MODE (data->to_addr))));
2726
2727 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2728 emit_insn ((*genfun) (to1, cst));
2729
2730 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2731 emit_insn (gen_add2_insn (data->to_addr,
2732 gen_int_mode (size,
2733 GET_MODE (data->to_addr))));
2734
2735 if (! data->reverse)
2736 data->offset += size;
2737
2738 data->len -= size;
2739 }
2740 }
2741 \f
2742 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2743 its length in bytes. */
2744
2745 rtx
2746 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2747 unsigned int expected_align, HOST_WIDE_INT expected_size,
2748 unsigned HOST_WIDE_INT min_size,
2749 unsigned HOST_WIDE_INT max_size,
2750 unsigned HOST_WIDE_INT probable_max_size)
2751 {
2752 machine_mode mode = GET_MODE (object);
2753 unsigned int align;
2754
2755 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2756
2757 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2758 just move a zero. Otherwise, do this a piece at a time. */
2759 if (mode != BLKmode
2760 && CONST_INT_P (size)
2761 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2762 {
2763 rtx zero = CONST0_RTX (mode);
2764 if (zero != NULL)
2765 {
2766 emit_move_insn (object, zero);
2767 return NULL;
2768 }
2769
2770 if (COMPLEX_MODE_P (mode))
2771 {
2772 zero = CONST0_RTX (GET_MODE_INNER (mode));
2773 if (zero != NULL)
2774 {
2775 write_complex_part (object, zero, 0);
2776 write_complex_part (object, zero, 1);
2777 return NULL;
2778 }
2779 }
2780 }
2781
2782 if (size == const0_rtx)
2783 return NULL;
2784
2785 align = MEM_ALIGN (object);
2786
2787 if (CONST_INT_P (size)
2788 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2789 CLEAR_BY_PIECES,
2790 optimize_insn_for_speed_p ()))
2791 clear_by_pieces (object, INTVAL (size), align);
2792 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2793 expected_align, expected_size,
2794 min_size, max_size, probable_max_size))
2795 ;
2796 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2797 return set_storage_via_libcall (object, size, const0_rtx,
2798 method == BLOCK_OP_TAILCALL);
2799 else
2800 gcc_unreachable ();
2801
2802 return NULL;
2803 }
2804
2805 rtx
2806 clear_storage (rtx object, rtx size, enum block_op_methods method)
2807 {
2808 unsigned HOST_WIDE_INT max, min = 0;
2809 if (GET_CODE (size) == CONST_INT)
2810 min = max = UINTVAL (size);
2811 else
2812 max = GET_MODE_MASK (GET_MODE (size));
2813 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2814 }
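
/* Illustrative sketch (hypothetical): zero a BLKmode stack temporary of
   SIZE bytes using clear_storage.  */
#if 0
static rtx
example_zeroed_temp (HOST_WIDE_INT size)
{
  rtx mem = assign_stack_temp (BLKmode, size);

  clear_storage (mem, GEN_INT (size), BLOCK_OP_NORMAL);
  return mem;
}
#endif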
2815
2816
2817 /* A subroutine of clear_storage. Expand a call to memset.
2818 Return the return value of memset, 0 otherwise. */
2819
2820 rtx
2821 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2822 {
2823 tree call_expr, fn, object_tree, size_tree, val_tree;
2824 machine_mode size_mode;
2825 rtx retval;
2826
2827 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2828 place those new pseudos into a VAR_DECL and use them later. */
2829
2830 object = copy_addr_to_reg (XEXP (object, 0));
2831
2832 size_mode = TYPE_MODE (sizetype);
2833 size = convert_to_mode (size_mode, size, 1);
2834 size = copy_to_mode_reg (size_mode, size);
2835
2836 /* It is incorrect to use the libcall calling conventions to call
2837 memset in this context. This could be a user call to memset and
2838 the user may wish to examine the return value from memset. For
2839 targets where libcalls and normal calls have different conventions
2840 for returning pointers, we could end up generating incorrect code. */
2841
2842 object_tree = make_tree (ptr_type_node, object);
2843 if (!CONST_INT_P (val))
2844 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2845 size_tree = make_tree (sizetype, size);
2846 val_tree = make_tree (integer_type_node, val);
2847
2848 fn = clear_storage_libcall_fn (true);
2849 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2850 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2851
2852 retval = expand_normal (call_expr);
2853
2854 return retval;
2855 }
2856
2857 /* A subroutine of set_storage_via_libcall. Create the tree node
2858 for the function we use for block clears. */
2859
2860 tree block_clear_fn;
2861
2862 void
2863 init_block_clear_fn (const char *asmspec)
2864 {
2865 if (!block_clear_fn)
2866 {
2867 tree fn, args;
2868
2869 fn = get_identifier ("memset");
2870 args = build_function_type_list (ptr_type_node, ptr_type_node,
2871 integer_type_node, sizetype,
2872 NULL_TREE);
2873
2874 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2875 DECL_EXTERNAL (fn) = 1;
2876 TREE_PUBLIC (fn) = 1;
2877 DECL_ARTIFICIAL (fn) = 1;
2878 TREE_NOTHROW (fn) = 1;
2879 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2880 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2881
2882 block_clear_fn = fn;
2883 }
2884
2885 if (asmspec)
2886 set_user_assembler_name (block_clear_fn, asmspec);
2887 }
2888
2889 static tree
2890 clear_storage_libcall_fn (int for_call)
2891 {
2892 static bool emitted_extern;
2893
2894 if (!block_clear_fn)
2895 init_block_clear_fn (NULL);
2896
2897 if (for_call && !emitted_extern)
2898 {
2899 emitted_extern = true;
2900 make_decl_rtl (block_clear_fn);
2901 }
2902
2903 return block_clear_fn;
2904 }
2905 \f
2906 /* Expand a setmem pattern; return true if successful. */
2907
2908 bool
2909 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2910 unsigned int expected_align, HOST_WIDE_INT expected_size,
2911 unsigned HOST_WIDE_INT min_size,
2912 unsigned HOST_WIDE_INT max_size,
2913 unsigned HOST_WIDE_INT probable_max_size)
2914 {
2915 /* Try the most limited insn first, because there's no point
2916 including more than one in the machine description unless
2917 the more limited one has some advantage. */
2918
2919 machine_mode mode;
2920
2921 if (expected_align < align)
2922 expected_align = align;
2923 if (expected_size != -1)
2924 {
2925 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2926 expected_size = max_size;
2927 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2928 expected_size = min_size;
2929 }
2930
2931 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2932 mode = GET_MODE_WIDER_MODE (mode))
2933 {
2934 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2935
2936 if (code != CODE_FOR_nothing
2937 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2938 here because if SIZE is less than the mode mask, as it is
2939 returned by the macro, it will definitely be less than the
2940 actual mode mask. Since SIZE is within the Pmode address
2941 space, we limit MODE to Pmode. */
2942 && ((CONST_INT_P (size)
2943 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2944 <= (GET_MODE_MASK (mode) >> 1)))
2945 || max_size <= (GET_MODE_MASK (mode) >> 1)
2946 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2947 {
2948 struct expand_operand ops[9];
2949 unsigned int nops;
2950
2951 nops = insn_data[(int) code].n_generator_args;
2952 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2953
2954 create_fixed_operand (&ops[0], object);
2955 /* The check above guarantees that this size conversion is valid. */
2956 create_convert_operand_to (&ops[1], size, mode, true);
2957 create_convert_operand_from (&ops[2], val, byte_mode, true);
2958 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2959 if (nops >= 6)
2960 {
2961 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2962 create_integer_operand (&ops[5], expected_size);
2963 }
2964 if (nops >= 8)
2965 {
2966 create_integer_operand (&ops[6], min_size);
2967 /* If we cannot represent the maximal size,
2968 pass NULL for the parameter. */
2969 if ((HOST_WIDE_INT) max_size != -1)
2970 create_integer_operand (&ops[7], max_size);
2971 else
2972 create_fixed_operand (&ops[7], NULL);
2973 }
2974 if (nops == 9)
2975 {
2976 /* If we cannot represent the maximal size,
2977 pass NULL for the parameter. */
2978 if ((HOST_WIDE_INT) probable_max_size != -1)
2979 create_integer_operand (&ops[8], probable_max_size);
2980 else
2981 create_fixed_operand (&ops[8], NULL);
2982 }
2983 if (maybe_expand_insn (code, nops, ops))
2984 return true;
2985 }
2986 }
2987
2988 return false;
2989 }
2990
2991 \f
2992 /* Write to one of the components of the complex value CPLX. Write VAL to
2993 the real part if IMAG_P is false, and the imaginary part if it's true. */
2994
2995 static void
2996 write_complex_part (rtx cplx, rtx val, bool imag_p)
2997 {
2998 machine_mode cmode;
2999 machine_mode imode;
3000 unsigned ibitsize;
3001
3002 if (GET_CODE (cplx) == CONCAT)
3003 {
3004 emit_move_insn (XEXP (cplx, imag_p), val);
3005 return;
3006 }
3007
3008 cmode = GET_MODE (cplx);
3009 imode = GET_MODE_INNER (cmode);
3010 ibitsize = GET_MODE_BITSIZE (imode);
3011
3012 /* For MEMs simplify_gen_subreg may generate an invalid new address
3013 because, e.g., the original address is considered mode-dependent
3014 by the target, which restricts simplify_subreg from invoking
3015 adjust_address_nv. Instead of preparing fallback support for an
3016 invalid address, we call adjust_address_nv directly. */
3017 if (MEM_P (cplx))
3018 {
3019 emit_move_insn (adjust_address_nv (cplx, imode,
3020 imag_p ? GET_MODE_SIZE (imode) : 0),
3021 val);
3022 return;
3023 }
3024
3025 /* If the sub-object is at least word sized, then we know that subregging
3026 will work. This special case is important, since store_bit_field
3027 wants to operate on integer modes, and there's rarely an OImode to
3028 correspond to TCmode. */
3029 if (ibitsize >= BITS_PER_WORD
3030 /* For hard regs we have exact predicates. Assume we can split
3031 the original object if it spans an even number of hard regs.
3032 This special case is important for SCmode on 64-bit platforms
3033 where the natural size of floating-point regs is 32-bit. */
3034 || (REG_P (cplx)
3035 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3036 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3037 {
3038 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3039 imag_p ? GET_MODE_SIZE (imode) : 0);
3040 if (part)
3041 {
3042 emit_move_insn (part, val);
3043 return;
3044 }
3045 else
3046 /* simplify_gen_subreg may fail for sub-word MEMs. */
3047 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3048 }
3049
3050 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3051 }
3052
3053 /* Extract one of the components of the complex value CPLX. Extract the
3054 real part if IMAG_P is false, and the imaginary part if it's true. */
3055
3056 static rtx
3057 read_complex_part (rtx cplx, bool imag_p)
3058 {
3059 machine_mode cmode, imode;
3060 unsigned ibitsize;
3061
3062 if (GET_CODE (cplx) == CONCAT)
3063 return XEXP (cplx, imag_p);
3064
3065 cmode = GET_MODE (cplx);
3066 imode = GET_MODE_INNER (cmode);
3067 ibitsize = GET_MODE_BITSIZE (imode);
3068
3069 /* Special case reads from complex constants that got spilled to memory. */
3070 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3071 {
3072 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3073 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3074 {
3075 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3076 if (CONSTANT_CLASS_P (part))
3077 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3078 }
3079 }
3080
3081 /* For MEMs simplify_gen_subreg may generate an invalid new address
3082 because, e.g., the original address is considered mode-dependent
3083 by the target, which restricts simplify_subreg from invoking
3084 adjust_address_nv. Instead of preparing fallback support for an
3085 invalid address, we call adjust_address_nv directly. */
3086 if (MEM_P (cplx))
3087 return adjust_address_nv (cplx, imode,
3088 imag_p ? GET_MODE_SIZE (imode) : 0);
3089
3090 /* If the sub-object is at least word sized, then we know that subregging
3091 will work. This special case is important, since extract_bit_field
3092 wants to operate on integer modes, and there's rarely an OImode to
3093 correspond to TCmode. */
3094 if (ibitsize >= BITS_PER_WORD
3095 /* For hard regs we have exact predicates. Assume we can split
3096 the original object if it spans an even number of hard regs.
3097 This special case is important for SCmode on 64-bit platforms
3098 where the natural size of floating-point regs is 32-bit. */
3099 || (REG_P (cplx)
3100 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3101 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3102 {
3103 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3104 imag_p ? GET_MODE_SIZE (imode) : 0);
3105 if (ret)
3106 return ret;
3107 else
3108 /* simplify_gen_subreg may fail for sub-word MEMs. */
3109 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3110 }
3111
3112 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3113 true, NULL_RTX, imode, imode);
3114 }
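
/* Illustrative sketch (hypothetical): form the complex conjugate of Y in X
   using the part accessors above.  MODE is the scalar (inner) mode of the
   complex operands.  */
#if 0
static void
example_complex_conjugate (rtx x, rtx y, machine_mode mode)
{
  rtx re = read_complex_part (y, false);
  rtx im = read_complex_part (y, true);

  write_complex_part (x, re, false);
  write_complex_part (x, expand_unop (mode, neg_optab, im, NULL_RTX, 0),
                      true);
}
#endif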
3115 \f
3116 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3117 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3118 represented in NEW_MODE. If FORCE is true, this will never happen, as
3119 we'll force-create a SUBREG if needed. */
3120
3121 static rtx
3122 emit_move_change_mode (machine_mode new_mode,
3123 machine_mode old_mode, rtx x, bool force)
3124 {
3125 rtx ret;
3126
3127 if (push_operand (x, GET_MODE (x)))
3128 {
3129 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3130 MEM_COPY_ATTRIBUTES (ret, x);
3131 }
3132 else if (MEM_P (x))
3133 {
3134 /* We don't have to worry about changing the address since the
3135 size in bytes is supposed to be the same. */
3136 if (reload_in_progress)
3137 {
3138 /* Copy the MEM to change the mode and move any
3139 substitutions from the old MEM to the new one. */
3140 ret = adjust_address_nv (x, new_mode, 0);
3141 copy_replacements (x, ret);
3142 }
3143 else
3144 ret = adjust_address (x, new_mode, 0);
3145 }
3146 else
3147 {
3148 /* Note that we do want simplify_subreg's behavior of validating
3149 that the new mode is ok for a hard register. If we were to use
3150 simplify_gen_subreg, we would create the subreg, but would
3151 probably run into the target not being able to implement it. */
3152 /* Except, of course, when FORCE is true, when this is exactly what
3153 we want, which is needed for CCmodes on some targets. */
3154 if (force)
3155 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3156 else
3157 ret = simplify_subreg (new_mode, x, old_mode, 0);
3158 }
3159
3160 return ret;
3161 }
3162
3163 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3164 an integer mode of the same size as MODE. Returns the instruction
3165 emitted, or NULL if such a move could not be generated. */
3166
3167 static rtx_insn *
3168 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3169 {
3170 machine_mode imode;
3171 enum insn_code code;
3172
3173 /* There must exist a mode of the exact size we require. */
3174 imode = int_mode_for_mode (mode);
3175 if (imode == BLKmode)
3176 return NULL;
3177
3178 /* The target must support moves in this mode. */
3179 code = optab_handler (mov_optab, imode);
3180 if (code == CODE_FOR_nothing)
3181 return NULL;
3182
3183 x = emit_move_change_mode (imode, mode, x, force);
3184 if (x == NULL_RTX)
3185 return NULL;
3186 y = emit_move_change_mode (imode, mode, y, force);
3187 if (y == NULL_RTX)
3188 return NULL;
3189 return emit_insn (GEN_FCN (code) (x, y));
3190 }
3191
3192 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3193 Return an equivalent MEM that does not use an auto-increment. */
3194
3195 rtx
3196 emit_move_resolve_push (machine_mode mode, rtx x)
3197 {
3198 enum rtx_code code = GET_CODE (XEXP (x, 0));
3199 HOST_WIDE_INT adjust;
3200 rtx temp;
3201
3202 adjust = GET_MODE_SIZE (mode);
3203 #ifdef PUSH_ROUNDING
3204 adjust = PUSH_ROUNDING (adjust);
3205 #endif
3206 if (code == PRE_DEC || code == POST_DEC)
3207 adjust = -adjust;
3208 else if (code == PRE_MODIFY || code == POST_MODIFY)
3209 {
3210 rtx expr = XEXP (XEXP (x, 0), 1);
3211 HOST_WIDE_INT val;
3212
3213 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3214 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3215 val = INTVAL (XEXP (expr, 1));
3216 if (GET_CODE (expr) == MINUS)
3217 val = -val;
3218 gcc_assert (adjust == val || adjust == -val);
3219 adjust = val;
3220 }
3221
3222 /* Do not use anti_adjust_stack, since we don't want to update
3223 stack_pointer_delta. */
3224 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3225 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3226 0, OPTAB_LIB_WIDEN);
3227 if (temp != stack_pointer_rtx)
3228 emit_move_insn (stack_pointer_rtx, temp);
3229
3230 switch (code)
3231 {
3232 case PRE_INC:
3233 case PRE_DEC:
3234 case PRE_MODIFY:
3235 temp = stack_pointer_rtx;
3236 break;
3237 case POST_INC:
3238 case POST_DEC:
3239 case POST_MODIFY:
3240 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3241 break;
3242 default:
3243 gcc_unreachable ();
3244 }
3245
3246 return replace_equiv_address (x, temp);
3247 }
3248
3249 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3250 X is known to satisfy push_operand, and MODE is known to be complex.
3251 Returns the last instruction emitted. */
3252
3253 rtx_insn *
3254 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3255 {
3256 machine_mode submode = GET_MODE_INNER (mode);
3257 bool imag_first;
3258
3259 #ifdef PUSH_ROUNDING
3260 unsigned int submodesize = GET_MODE_SIZE (submode);
3261
3262 /* In case we output to the stack, but the size is smaller than the
3263 machine can push exactly, we need to use move instructions. */
3264 if (PUSH_ROUNDING (submodesize) != submodesize)
3265 {
3266 x = emit_move_resolve_push (mode, x);
3267 return emit_move_insn (x, y);
3268 }
3269 #endif
3270
3271 /* Note that the real part always precedes the imag part in memory
3272 regardless of machine's endianness. */
3273 switch (GET_CODE (XEXP (x, 0)))
3274 {
3275 case PRE_DEC:
3276 case POST_DEC:
3277 imag_first = true;
3278 break;
3279 case PRE_INC:
3280 case POST_INC:
3281 imag_first = false;
3282 break;
3283 default:
3284 gcc_unreachable ();
3285 }
3286
3287 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3288 read_complex_part (y, imag_first));
3289 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3290 read_complex_part (y, !imag_first));
3291 }
3292
3293 /* A subroutine of emit_move_complex. Perform the move from Y to X
3294 via two moves of the parts. Returns the last instruction emitted. */
3295
3296 rtx_insn *
3297 emit_move_complex_parts (rtx x, rtx y)
3298 {
3299 /* Show the output dies here. This is necessary for SUBREGs
3300 of pseudos since we cannot track their lifetimes correctly;
3301 hard regs shouldn't appear here except as return values. */
3302 if (!reload_completed && !reload_in_progress
3303 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3304 emit_clobber (x);
3305
3306 write_complex_part (x, read_complex_part (y, false), false);
3307 write_complex_part (x, read_complex_part (y, true), true);
3308
3309 return get_last_insn ();
3310 }
3311
3312 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3313 MODE is known to be complex. Returns the last instruction emitted. */
3314
3315 static rtx_insn *
3316 emit_move_complex (machine_mode mode, rtx x, rtx y)
3317 {
3318 bool try_int;
3319
3320 /* Need to take special care for pushes, to maintain proper ordering
3321 of the data, and possibly extra padding. */
3322 if (push_operand (x, mode))
3323 return emit_move_complex_push (mode, x, y);
3324
3325 /* See if we can coerce the target into moving both values at once, except
3326 for floating point where we favor moving as parts if this is easy. */
3327 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3328 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3329 && !(REG_P (x)
3330 && HARD_REGISTER_P (x)
3331 && hard_regno_nregs[REGNO (x)][mode] == 1)
3332 && !(REG_P (y)
3333 && HARD_REGISTER_P (y)
3334 && hard_regno_nregs[REGNO (y)][mode] == 1))
3335 try_int = false;
3336 /* Not possible if the values are inherently not adjacent. */
3337 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3338 try_int = false;
3339 /* Is possible if both are registers (or subregs of registers). */
3340 else if (register_operand (x, mode) && register_operand (y, mode))
3341 try_int = true;
3342 /* If one of the operands is a memory, and alignment constraints
3343 are friendly enough, we may be able to do combined memory operations.
3344 We do not attempt this if Y is a constant because that combination is
3345 usually better with the by-parts thing below. */
3346 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3347 && (!STRICT_ALIGNMENT
3348 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3349 try_int = true;
3350 else
3351 try_int = false;
3352
3353 if (try_int)
3354 {
3355 rtx_insn *ret;
3356
3357 /* For memory to memory moves, optimal behavior can be had with the
3358 existing block move logic. */
3359 if (MEM_P (x) && MEM_P (y))
3360 {
3361 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3362 BLOCK_OP_NO_LIBCALL);
3363 return get_last_insn ();
3364 }
3365
3366 ret = emit_move_via_integer (mode, x, y, true);
3367 if (ret)
3368 return ret;
3369 }
3370
3371 return emit_move_complex_parts (x, y);
3372 }
3373
3374 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3375 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3376
3377 static rtx_insn *
3378 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3379 {
3380 rtx_insn *ret;
3381
3382 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3383 if (mode != CCmode)
3384 {
3385 enum insn_code code = optab_handler (mov_optab, CCmode);
3386 if (code != CODE_FOR_nothing)
3387 {
3388 x = emit_move_change_mode (CCmode, mode, x, true);
3389 y = emit_move_change_mode (CCmode, mode, y, true);
3390 return emit_insn (GEN_FCN (code) (x, y));
3391 }
3392 }
3393
3394 /* Otherwise, find the MODE_INT mode of the same width. */
3395 ret = emit_move_via_integer (mode, x, y, false);
3396 gcc_assert (ret != NULL);
3397 return ret;
3398 }
3399
3400 /* Return true if word I of OP lies entirely in the
3401 undefined bits of a paradoxical subreg. */
3402
3403 static bool
3404 undefined_operand_subword_p (const_rtx op, int i)
3405 {
3406 machine_mode innermode, innermostmode;
3407 int offset;
3408 if (GET_CODE (op) != SUBREG)
3409 return false;
3410 innermode = GET_MODE (op);
3411 innermostmode = GET_MODE (SUBREG_REG (op));
3412 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3413 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3414 memory, except for a paradoxical subreg where we define
3415 SUBREG_BYTE to be 0; undo this exception as in
3416 simplify_subreg. */
3417 if (SUBREG_BYTE (op) == 0
3418 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3419 {
3420 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3421 if (WORDS_BIG_ENDIAN)
3422 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3423 if (BYTES_BIG_ENDIAN)
3424 offset += difference % UNITS_PER_WORD;
3425 }
3426 if (offset >= GET_MODE_SIZE (innermostmode)
3427 || offset <= -GET_MODE_SIZE (word_mode))
3428 return true;
3429 return false;
3430 }
3431
3432 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3433 MODE is any multi-word or full-word mode that lacks a move_insn
3434 pattern. Note that you will get better code if you define such
3435 patterns, even if they must turn into multiple assembler instructions. */
3436
3437 static rtx_insn *
3438 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3439 {
3440 rtx_insn *last_insn = 0;
3441 rtx_insn *seq;
3442 rtx inner;
3443 bool need_clobber;
3444 int i;
3445
3446 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3447
3448 /* If X is a push on the stack, do the push now and replace
3449 X with a reference to the stack pointer. */
3450 if (push_operand (x, mode))
3451 x = emit_move_resolve_push (mode, x);
3452
3453 /* If we are in reload, see if either operand is a MEM whose address
3454 is scheduled for replacement. */
3455 if (reload_in_progress && MEM_P (x)
3456 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3457 x = replace_equiv_address_nv (x, inner);
3458 if (reload_in_progress && MEM_P (y)
3459 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3460 y = replace_equiv_address_nv (y, inner);
3461
3462 start_sequence ();
3463
3464 need_clobber = false;
3465 for (i = 0;
3466 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3467 i++)
3468 {
3469 rtx xpart = operand_subword (x, i, 1, mode);
3470 rtx ypart;
3471
3472 /* Do not generate code for a move if it would come entirely
3473 from the undefined bits of a paradoxical subreg. */
3474 if (undefined_operand_subword_p (y, i))
3475 continue;
3476
3477 ypart = operand_subword (y, i, 1, mode);
3478
3479 /* If we can't get a part of Y, put Y into memory if it is a
3480 constant. Otherwise, force it into a register. Then we must
3481 be able to get a part of Y. */
3482 if (ypart == 0 && CONSTANT_P (y))
3483 {
3484 y = use_anchored_address (force_const_mem (mode, y));
3485 ypart = operand_subword (y, i, 1, mode);
3486 }
3487 else if (ypart == 0)
3488 ypart = operand_subword_force (y, i, mode);
3489
3490 gcc_assert (xpart && ypart);
3491
3492 need_clobber |= (GET_CODE (xpart) == SUBREG);
3493
3494 last_insn = emit_move_insn (xpart, ypart);
3495 }
3496
3497 seq = get_insns ();
3498 end_sequence ();
3499
3500 /* Show the output dies here. This is necessary for SUBREGs
3501 of pseudos since we cannot track their lifetimes correctly;
3502 hard regs shouldn't appear here except as return values.
3503 We never want to emit such a clobber after reload. */
3504 if (x != y
3505 && ! (reload_in_progress || reload_completed)
3506 && need_clobber != 0)
3507 emit_clobber (x);
3508
3509 emit_insn (seq);
3510
3511 return last_insn;
3512 }
3513
3514 /* Low level part of emit_move_insn.
3515 Called just like emit_move_insn, but assumes X and Y
3516 are basically valid. */
3517
3518 rtx_insn *
3519 emit_move_insn_1 (rtx x, rtx y)
3520 {
3521 machine_mode mode = GET_MODE (x);
3522 enum insn_code code;
3523
3524 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3525
3526 code = optab_handler (mov_optab, mode);
3527 if (code != CODE_FOR_nothing)
3528 return emit_insn (GEN_FCN (code) (x, y));
3529
3530 /* Expand complex moves by moving real part and imag part. */
3531 if (COMPLEX_MODE_P (mode))
3532 return emit_move_complex (mode, x, y);
3533
3534 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3535 || ALL_FIXED_POINT_MODE_P (mode))
3536 {
3537 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3538
3539 /* If we can't find an integer mode, fall back to a multi-word move. */
3540 if (result)
3541 return result;
3542 else
3543 return emit_move_multi_word (mode, x, y);
3544 }
3545
3546 if (GET_MODE_CLASS (mode) == MODE_CC)
3547 return emit_move_ccmode (mode, x, y);
3548
3549 /* Try using a move pattern for the corresponding integer mode. This is
3550 only safe when simplify_subreg can convert MODE constants into integer
3551 constants. At present, it can only do this reliably if the value
3552 fits within a HOST_WIDE_INT. */
3553 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3554 {
3555 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3556
3557 if (ret)
3558 {
3559 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3560 return ret;
3561 }
3562 }
3563
3564 return emit_move_multi_word (mode, x, y);
3565 }
3566
3567 /* Generate code to copy Y into X.
3568 Both Y and X must have the same mode, except that
3569 Y can be a constant with VOIDmode.
3570 This mode cannot be BLKmode; use emit_block_move for that.
3571
3572 Return the last instruction emitted. */
3573
3574 rtx_insn *
3575 emit_move_insn (rtx x, rtx y)
3576 {
3577 machine_mode mode = GET_MODE (x);
3578 rtx y_cst = NULL_RTX;
3579 rtx_insn *last_insn;
3580 rtx set;
3581
3582 gcc_assert (mode != BLKmode
3583 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3584
3585 if (CONSTANT_P (y))
3586 {
3587 if (optimize
3588 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3589 && (last_insn = compress_float_constant (x, y)))
3590 return last_insn;
3591
3592 y_cst = y;
3593
3594 if (!targetm.legitimate_constant_p (mode, y))
3595 {
3596 y = force_const_mem (mode, y);
3597
3598 /* If the target's cannot_force_const_mem prevented the spill,
3599 assume that the target's move expanders will also take care
3600 of the non-legitimate constant. */
3601 if (!y)
3602 y = y_cst;
3603 else
3604 y = use_anchored_address (y);
3605 }
3606 }
3607
3608 /* If X or Y are memory references, verify that their addresses are valid
3609 for the machine. */
3610 if (MEM_P (x)
3611 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3612 MEM_ADDR_SPACE (x))
3613 && ! push_operand (x, GET_MODE (x))))
3614 x = validize_mem (x);
3615
3616 if (MEM_P (y)
3617 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3618 MEM_ADDR_SPACE (y)))
3619 y = validize_mem (y);
3620
3621 gcc_assert (mode != BLKmode);
3622
3623 last_insn = emit_move_insn_1 (x, y);
3624
3625 if (y_cst && REG_P (x)
3626 && (set = single_set (last_insn)) != NULL_RTX
3627 && SET_DEST (set) == x
3628 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3629 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3630
3631 return last_insn;
3632 }
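
/* Illustrative usage sketch only (not part of the original sources):
   a typical caller materializes a constant into a fresh pseudo with

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   If the constant is not legitimate for the mode, emit_move_insn spills
   it to the constant pool and may attach a REG_EQUAL note recording the
   original constant to the resulting move.  */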
3633
3634 /* Generate the body of an instruction to copy Y into X.
3635 It may be a list of insns, if one insn isn't enough. */
3636
3637 rtx
3638 gen_move_insn (rtx x, rtx y)
3639 {
3640 rtx_insn *seq;
3641
3642 start_sequence ();
3643 emit_move_insn_1 (x, y);
3644 seq = get_insns ();
3645 end_sequence ();
3646 return seq;
3647 }
3648
3649 /* If Y is representable exactly in a narrower mode, and the target can
3650 perform the extension directly from constant or memory, then emit the
3651 move as an extension. */
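
/* Worked example (illustrative only): moving the DFmode constant 0.5
   into a register.  0.5 is exactly representable in SFmode, so on a
   target that can extend directly from an SFmode constant or memory the
   move may be emitted roughly as

     (set (reg:DF 100) (float_extend:DF (mem/u:SF (symbol_ref ...))))

   provided the rtx costs say this is no more expensive than loading the
   DFmode constant directly.  */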
3652
3653 static rtx_insn *
3654 compress_float_constant (rtx x, rtx y)
3655 {
3656 machine_mode dstmode = GET_MODE (x);
3657 machine_mode orig_srcmode = GET_MODE (y);
3658 machine_mode srcmode;
3659 REAL_VALUE_TYPE r;
3660 int oldcost, newcost;
3661 bool speed = optimize_insn_for_speed_p ();
3662
3663 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3664
3665 if (targetm.legitimate_constant_p (dstmode, y))
3666 oldcost = set_src_cost (y, speed);
3667 else
3668 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3669
3670 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3671 srcmode != orig_srcmode;
3672 srcmode = GET_MODE_WIDER_MODE (srcmode))
3673 {
3674 enum insn_code ic;
3675 rtx trunc_y;
3676 rtx_insn *last_insn;
3677
3678 /* Skip if the target can't extend this way. */
3679 ic = can_extend_p (dstmode, srcmode, 0);
3680 if (ic == CODE_FOR_nothing)
3681 continue;
3682
3683 /* Skip if the narrowed value isn't exact. */
3684 if (! exact_real_truncate (srcmode, &r))
3685 continue;
3686
3687 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3688
3689 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3690 {
3691 /* Skip if the target needs extra instructions to perform
3692 the extension. */
3693 if (!insn_operand_matches (ic, 1, trunc_y))
3694 continue;
3695 /* This is valid, but may not be cheaper than the original. */
3696 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3697 speed);
3698 if (oldcost < newcost)
3699 continue;
3700 }
3701 else if (float_extend_from_mem[dstmode][srcmode])
3702 {
3703 trunc_y = force_const_mem (srcmode, trunc_y);
3704 /* This is valid, but may not be cheaper than the original. */
3705 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3706 speed);
3707 if (oldcost < newcost)
3708 continue;
3709 trunc_y = validize_mem (trunc_y);
3710 }
3711 else
3712 continue;
3713
3714 /* For CSE's benefit, force the compressed constant pool entry
3715 into a new pseudo. This constant may be used in different modes,
3716 and if not, combine will put things back together for us. */
3717 trunc_y = force_reg (srcmode, trunc_y);
3718
3719 /* If x is a hard register, perform the extension into a pseudo,
3720 so that e.g. stack realignment code is aware of it. */
3721 rtx target = x;
3722 if (REG_P (x) && HARD_REGISTER_P (x))
3723 target = gen_reg_rtx (dstmode);
3724
3725 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3726 last_insn = get_last_insn ();
3727
3728 if (REG_P (target))
3729 set_unique_reg_note (last_insn, REG_EQUAL, y);
3730
3731 if (target != x)
3732 return emit_move_insn (x, target);
3733 return last_insn;
3734 }
3735
3736 return NULL;
3737 }
3738 \f
3739 /* Pushing data onto the stack. */
3740
3741 /* Push a block of length SIZE (perhaps variable)
3742 and return an rtx to address the beginning of the block.
3743 The value may be virtual_outgoing_args_rtx.
3744
3745 EXTRA is the number of bytes of padding to push in addition to SIZE.
3746 BELOW nonzero means this padding comes at low addresses;
3747 otherwise, the padding comes at high addresses. */
3748
3749 rtx
3750 push_block (rtx size, int extra, int below)
3751 {
3752 rtx temp;
3753
3754 size = convert_modes (Pmode, ptr_mode, size, 1);
3755 if (CONSTANT_P (size))
3756 anti_adjust_stack (plus_constant (Pmode, size, extra));
3757 else if (REG_P (size) && extra == 0)
3758 anti_adjust_stack (size);
3759 else
3760 {
3761 temp = copy_to_mode_reg (Pmode, size);
3762 if (extra != 0)
3763 temp = expand_binop (Pmode, add_optab, temp,
3764 gen_int_mode (extra, Pmode),
3765 temp, 0, OPTAB_LIB_WIDEN);
3766 anti_adjust_stack (temp);
3767 }
3768
3769 #ifndef STACK_GROWS_DOWNWARD
3770 if (0)
3771 #else
3772 if (1)
3773 #endif
3774 {
3775 temp = virtual_outgoing_args_rtx;
3776 if (extra != 0 && below)
3777 temp = plus_constant (Pmode, temp, extra);
3778 }
3779 else
3780 {
3781 if (CONST_INT_P (size))
3782 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3783 -INTVAL (size) - (below ? 0 : extra));
3784 else if (extra != 0 && !below)
3785 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3786 negate_rtx (Pmode, plus_constant (Pmode, size,
3787 extra)));
3788 else
3789 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3790 negate_rtx (Pmode, size));
3791 }
3792
3793 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3794 }
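
/* Illustrative sketch only: on a target whose stack grows downward,

     rtx addr = push_block (GEN_INT (32), 0, 0);

   anti-adjusts the stack by 32 bytes and returns an address (based on
   virtual_outgoing_args_rtx) for the start of the newly allocated
   block.  */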
3795
3796 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3797
3798 static rtx
3799 mem_autoinc_base (rtx mem)
3800 {
3801 if (MEM_P (mem))
3802 {
3803 rtx addr = XEXP (mem, 0);
3804 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3805 return XEXP (addr, 0);
3806 }
3807 return NULL;
3808 }
3809
3810 /* A utility routine used here, in reload, and in try_split. The insns
3811 after PREV up to and including LAST are known to adjust the stack,
3812 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3813 placing notes as appropriate. PREV may be NULL, indicating the
3814 entire insn sequence prior to LAST should be scanned.
3815
3816 The set of allowed stack pointer modifications is small:
3817 (1) One or more auto-inc style memory references (aka pushes),
3818 (2) One or more addition/subtraction with the SP as destination,
3819 (3) A single move insn with the SP as destination,
3820 (4) A call_pop insn,
3821 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3822
3823 Insns in the sequence that do not modify the SP are ignored,
3824 except for noreturn calls.
3825
3826    The return value is the amount of adjustment that can be trivially
3827    verified, via immediate operand or auto-inc.  If the adjustment
3828    cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN.  */
3829
3830 HOST_WIDE_INT
3831 find_args_size_adjust (rtx_insn *insn)
3832 {
3833 rtx dest, set, pat;
3834 int i;
3835
3836 pat = PATTERN (insn);
3837 set = NULL;
3838
3839 /* Look for a call_pop pattern. */
3840 if (CALL_P (insn))
3841 {
3842 /* We have to allow non-call_pop patterns for the case
3843 of emit_single_push_insn of a TLS address. */
3844 if (GET_CODE (pat) != PARALLEL)
3845 return 0;
3846
3847 /* All call_pop have a stack pointer adjust in the parallel.
3848 The call itself is always first, and the stack adjust is
3849 usually last, so search from the end. */
3850 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3851 {
3852 set = XVECEXP (pat, 0, i);
3853 if (GET_CODE (set) != SET)
3854 continue;
3855 dest = SET_DEST (set);
3856 if (dest == stack_pointer_rtx)
3857 break;
3858 }
3859 /* We'd better have found the stack pointer adjust. */
3860 if (i == 0)
3861 return 0;
3862 /* Fall through to process the extracted SET and DEST
3863 	 as if it were a standalone insn.  */
3864 }
3865 else if (GET_CODE (pat) == SET)
3866 set = pat;
3867 else if ((set = single_set (insn)) != NULL)
3868 ;
3869 else if (GET_CODE (pat) == PARALLEL)
3870 {
3871 /* ??? Some older ports use a parallel with a stack adjust
3872 and a store for a PUSH_ROUNDING pattern, rather than a
3873 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3874 /* ??? See h8300 and m68k, pushqi1. */
3875 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3876 {
3877 set = XVECEXP (pat, 0, i);
3878 if (GET_CODE (set) != SET)
3879 continue;
3880 dest = SET_DEST (set);
3881 if (dest == stack_pointer_rtx)
3882 break;
3883
3884 /* We do not expect an auto-inc of the sp in the parallel. */
3885 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3886 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3887 != stack_pointer_rtx);
3888 }
3889 if (i < 0)
3890 return 0;
3891 }
3892 else
3893 return 0;
3894
3895 dest = SET_DEST (set);
3896
3897 /* Look for direct modifications of the stack pointer. */
3898 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3899 {
3900 /* Look for a trivial adjustment, otherwise assume nothing. */
3901 /* Note that the SPU restore_stack_block pattern refers to
3902 the stack pointer in V4SImode. Consider that non-trivial. */
3903 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3904 && GET_CODE (SET_SRC (set)) == PLUS
3905 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3906 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3907 return INTVAL (XEXP (SET_SRC (set), 1));
3908 /* ??? Reload can generate no-op moves, which will be cleaned
3909 up later. Recognize it and continue searching. */
3910 else if (rtx_equal_p (dest, SET_SRC (set)))
3911 return 0;
3912 else
3913 return HOST_WIDE_INT_MIN;
3914 }
3915 else
3916 {
3917 rtx mem, addr;
3918
3919 /* Otherwise only think about autoinc patterns. */
3920 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3921 {
3922 mem = dest;
3923 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3924 != stack_pointer_rtx);
3925 }
3926 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3927 mem = SET_SRC (set);
3928 else
3929 return 0;
3930
3931 addr = XEXP (mem, 0);
3932 switch (GET_CODE (addr))
3933 {
3934 case PRE_INC:
3935 case POST_INC:
3936 return GET_MODE_SIZE (GET_MODE (mem));
3937 case PRE_DEC:
3938 case POST_DEC:
3939 return -GET_MODE_SIZE (GET_MODE (mem));
3940 case PRE_MODIFY:
3941 case POST_MODIFY:
3942 addr = XEXP (addr, 1);
3943 gcc_assert (GET_CODE (addr) == PLUS);
3944 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3945 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3946 return INTVAL (XEXP (addr, 1));
3947 default:
3948 gcc_unreachable ();
3949 }
3950 }
3951 }
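
/* For example (illustrative only): for an insn whose pattern is
     (set (reg sp) (plus (reg sp) (const_int -16)))
   find_args_size_adjust returns -16, while for a push through
     (mem:SI (pre_dec (reg sp)))
   it returns -GET_MODE_SIZE (SImode).  */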
3952
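/* Walk the nondebug insns after PREV up to and including LAST, which are
   known to adjust the stack with a final value of END_ARGS_SIZE, and add
   a REG_ARGS_SIZE note to each insn that adjusts the stack (and to
   noreturn calls when !ACCUMULATE_OUTGOING_ARGS), recording the args_size
   in effect after it.  Return the args_size in effect before the
   sequence, or INT_MIN if some adjustment could not be determined.  */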
3953 int
3954 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3955 {
3956 int args_size = end_args_size;
3957 bool saw_unknown = false;
3958 rtx_insn *insn;
3959
3960 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3961 {
3962 HOST_WIDE_INT this_delta;
3963
3964 if (!NONDEBUG_INSN_P (insn))
3965 continue;
3966
3967 this_delta = find_args_size_adjust (insn);
3968 if (this_delta == 0)
3969 {
3970 if (!CALL_P (insn)
3971 || ACCUMULATE_OUTGOING_ARGS
3972 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3973 continue;
3974 }
3975
3976 gcc_assert (!saw_unknown);
3977 if (this_delta == HOST_WIDE_INT_MIN)
3978 saw_unknown = true;
3979
3980 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3981 #ifdef STACK_GROWS_DOWNWARD
3982 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3983 #endif
3984 args_size -= this_delta;
3985 }
3986
3987 return saw_unknown ? INT_MIN : args_size;
3988 }
3989
3990 #ifdef PUSH_ROUNDING
3991 /* Emit single push insn. */
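
/* When no push_optab pattern exists and no padding is needed, the store
   emitted below is typically (illustrative only) a plain move to an
   auto-modified stack address, e.g.

     (set (mem:SI (pre_dec (reg sp))) (reg:SI 100))

   on a target whose stack grows downward.  */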
3992
3993 static void
3994 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
3995 {
3996 rtx dest_addr;
3997 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3998 rtx dest;
3999 enum insn_code icode;
4000
4001 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4002   /* If there is a push pattern, use it.  Otherwise hand a MEM that
4003      represents the push operation to the move expander.  */
4004 icode = optab_handler (push_optab, mode);
4005 if (icode != CODE_FOR_nothing)
4006 {
4007 struct expand_operand ops[1];
4008
4009 create_input_operand (&ops[0], x, mode);
4010 if (maybe_expand_insn (icode, 1, ops))
4011 return;
4012 }
4013 if (GET_MODE_SIZE (mode) == rounded_size)
4014 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4015 /* If we are to pad downward, adjust the stack pointer first and
4016 then store X into the stack location using an offset. This is
4017 because emit_move_insn does not know how to pad; it does not have
4018 access to type. */
4019 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4020 {
4021 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4022 HOST_WIDE_INT offset;
4023
4024 emit_move_insn (stack_pointer_rtx,
4025 expand_binop (Pmode,
4026 #ifdef STACK_GROWS_DOWNWARD
4027 sub_optab,
4028 #else
4029 add_optab,
4030 #endif
4031 stack_pointer_rtx,
4032 gen_int_mode (rounded_size, Pmode),
4033 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4034
4035 offset = (HOST_WIDE_INT) padding_size;
4036 #ifdef STACK_GROWS_DOWNWARD
4037 if (STACK_PUSH_CODE == POST_DEC)
4038 /* We have already decremented the stack pointer, so get the
4039 previous value. */
4040 offset += (HOST_WIDE_INT) rounded_size;
4041 #else
4042 if (STACK_PUSH_CODE == POST_INC)
4043 /* We have already incremented the stack pointer, so get the
4044 previous value. */
4045 offset -= (HOST_WIDE_INT) rounded_size;
4046 #endif
4047 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4048 gen_int_mode (offset, Pmode));
4049 }
4050 else
4051 {
4052 #ifdef STACK_GROWS_DOWNWARD
4053 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4054 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4055 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4056 Pmode));
4057 #else
4058 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4059 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4060 gen_int_mode (rounded_size, Pmode));
4061 #endif
4062 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4063 }
4064
4065 dest = gen_rtx_MEM (mode, dest_addr);
4066
4067 if (type != 0)
4068 {
4069 set_mem_attributes (dest, type, 1);
4070
4071 if (cfun->tail_call_marked)
4072 /* Function incoming arguments may overlap with sibling call
4073 outgoing arguments and we cannot allow reordering of reads
4074 from function arguments with stores to outgoing arguments
4075 of sibling calls. */
4076 set_mem_alias_set (dest, 0);
4077 }
4078 emit_move_insn (dest, x);
4079 }
4080
4081 /* Emit and annotate a single push insn. */
4082
4083 static void
4084 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4085 {
4086 int delta, old_delta = stack_pointer_delta;
4087 rtx_insn *prev = get_last_insn ();
4088 rtx_insn *last;
4089
4090 emit_single_push_insn_1 (mode, x, type);
4091
4092 last = get_last_insn ();
4093
4094 /* Notice the common case where we emitted exactly one insn. */
4095 if (PREV_INSN (last) == prev)
4096 {
4097 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4098 return;
4099 }
4100
4101 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4102 gcc_assert (delta == INT_MIN || delta == old_delta);
4103 }
4104 #endif
4105
4106 /* Generate code to push X onto the stack, assuming it has mode MODE and
4107 type TYPE.
4108 MODE is redundant except when X is a CONST_INT (since they don't
4109 carry mode info).
4110 SIZE is an rtx for the size of data to be copied (in bytes),
4111 needed only if X is BLKmode.
4112
4113 ALIGN (in bits) is maximum alignment we can assume.
4114
4115 If PARTIAL and REG are both nonzero, then copy that many of the first
4116 bytes of X into registers starting with REG, and push the rest of X.
4117 The amount of space pushed is decreased by PARTIAL bytes.
4118 REG must be a hard register in this case.
4119    If REG is zero but PARTIAL is not, take all other actions for an
4120 argument partially in registers, but do not actually load any
4121 registers.
4122
4123 EXTRA is the amount in bytes of extra space to leave next to this arg.
4124 This is ignored if an argument block has already been allocated.
4125
4126 On a machine that lacks real push insns, ARGS_ADDR is the address of
4127 the bottom of the argument block for this call. We use indexing off there
4128 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
4129    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
4130
4131 ARGS_SO_FAR is the size of args previously pushed for this call.
4132
4133 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4134 for arguments passed in registers. If nonzero, it will be the number
4135 of bytes required. */
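
/* Illustrative sketch only (the values below are hypothetical): pushing
   a word-sized integer argument with no partial-register part and no
   preallocated argument block might look like

     emit_push_insn (val, SImode, integer_type_node, NULL_RTX,
		     PARM_BOUNDARY, 0, NULL_RTX, 0, NULL_RTX,
		     const0_rtx, 0, NULL_RTX);

   Real callers in calls.c derive each of these arguments from the
   target ABI's argument-passing information.  */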
4136
4137 void
4138 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4139 unsigned int align, int partial, rtx reg, int extra,
4140 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4141 rtx alignment_pad)
4142 {
4143 rtx xinner;
4144 enum direction stack_direction
4145 #ifdef STACK_GROWS_DOWNWARD
4146 = downward;
4147 #else
4148 = upward;
4149 #endif
4150
4151 /* Decide where to pad the argument: `downward' for below,
4152 `upward' for above, or `none' for don't pad it.
4153 Default is below for small data on big-endian machines; else above. */
4154 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4155
4156 /* Invert direction if stack is post-decrement.
4157 FIXME: why? */
4158 if (STACK_PUSH_CODE == POST_DEC)
4159 if (where_pad != none)
4160 where_pad = (where_pad == downward ? upward : downward);
4161
4162 xinner = x;
4163
4164 if (mode == BLKmode
4165 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4166 {
4167 /* Copy a block into the stack, entirely or partially. */
4168
4169 rtx temp;
4170 int used;
4171 int offset;
4172 int skip;
4173
4174 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4175 used = partial - offset;
4176
4177 if (mode != BLKmode)
4178 {
4179 /* A value is to be stored in an insufficiently aligned
4180 stack slot; copy via a suitably aligned slot if
4181 necessary. */
4182 size = GEN_INT (GET_MODE_SIZE (mode));
4183 if (!MEM_P (xinner))
4184 {
4185 temp = assign_temp (type, 1, 1);
4186 emit_move_insn (temp, xinner);
4187 xinner = temp;
4188 }
4189 }
4190
4191 gcc_assert (size);
4192
4193 /* USED is now the # of bytes we need not copy to the stack
4194 because registers will take care of them. */
4195
4196 if (partial != 0)
4197 xinner = adjust_address (xinner, BLKmode, used);
4198
4199 /* If the partial register-part of the arg counts in its stack size,
4200 skip the part of stack space corresponding to the registers.
4201 Otherwise, start copying to the beginning of the stack space,
4202 by setting SKIP to 0. */
4203 skip = (reg_parm_stack_space == 0) ? 0 : used;
4204
4205 #ifdef PUSH_ROUNDING
4206 /* Do it with several push insns if that doesn't take lots of insns
4207 and if there is no difficulty with push insns that skip bytes
4208 on the stack for alignment purposes. */
4209 if (args_addr == 0
4210 && PUSH_ARGS
4211 && CONST_INT_P (size)
4212 && skip == 0
4213 && MEM_ALIGN (xinner) >= align
4214 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4215 /* Here we avoid the case of a structure whose weak alignment
4216 forces many pushes of a small amount of data,
4217 and such small pushes do rounding that causes trouble. */
4218 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4219 || align >= BIGGEST_ALIGNMENT
4220 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4221 == (align / BITS_PER_UNIT)))
4222 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4223 {
4224 /* Push padding now if padding above and stack grows down,
4225 or if padding below and stack grows up.
4226 But if space already allocated, this has already been done. */
4227 if (extra && args_addr == 0
4228 && where_pad != none && where_pad != stack_direction)
4229 anti_adjust_stack (GEN_INT (extra));
4230
4231 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4232 }
4233 else
4234 #endif /* PUSH_ROUNDING */
4235 {
4236 rtx target;
4237
4238 /* Otherwise make space on the stack and copy the data
4239 to the address of that space. */
4240
4241 /* Deduct words put into registers from the size we must copy. */
4242 if (partial != 0)
4243 {
4244 if (CONST_INT_P (size))
4245 size = GEN_INT (INTVAL (size) - used);
4246 else
4247 size = expand_binop (GET_MODE (size), sub_optab, size,
4248 gen_int_mode (used, GET_MODE (size)),
4249 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4250 }
4251
4252 /* Get the address of the stack space.
4253 In this case, we do not deal with EXTRA separately.
4254 A single stack adjust will do. */
4255 if (! args_addr)
4256 {
4257 temp = push_block (size, extra, where_pad == downward);
4258 extra = 0;
4259 }
4260 else if (CONST_INT_P (args_so_far))
4261 temp = memory_address (BLKmode,
4262 plus_constant (Pmode, args_addr,
4263 skip + INTVAL (args_so_far)));
4264 else
4265 temp = memory_address (BLKmode,
4266 plus_constant (Pmode,
4267 gen_rtx_PLUS (Pmode,
4268 args_addr,
4269 args_so_far),
4270 skip));
4271
4272 if (!ACCUMULATE_OUTGOING_ARGS)
4273 {
4274 /* If the source is referenced relative to the stack pointer,
4275 copy it to another register to stabilize it. We do not need
4276 to do this if we know that we won't be changing sp. */
4277
4278 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4279 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4280 temp = copy_to_reg (temp);
4281 }
4282
4283 target = gen_rtx_MEM (BLKmode, temp);
4284
4285 /* We do *not* set_mem_attributes here, because incoming arguments
4286 may overlap with sibling call outgoing arguments and we cannot
4287 allow reordering of reads from function arguments with stores
4288 to outgoing arguments of sibling calls. We do, however, want
4289 to record the alignment of the stack slot. */
4290 	  /* ALIGN may well be stricter than the alignment of TYPE, e.g. due
4291 	     to PARM_BOUNDARY.  Assume the caller isn't lying.  */
4292 set_mem_align (target, align);
4293
4294 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4295 }
4296 }
4297 else if (partial > 0)
4298 {
4299 /* Scalar partly in registers. */
4300
4301 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4302 int i;
4303 int not_stack;
4304 /* # bytes of start of argument
4305 that we must make space for but need not store. */
4306 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4307 int args_offset = INTVAL (args_so_far);
4308 int skip;
4309
4310 /* Push padding now if padding above and stack grows down,
4311 or if padding below and stack grows up.
4312 But if space already allocated, this has already been done. */
4313 if (extra && args_addr == 0
4314 && where_pad != none && where_pad != stack_direction)
4315 anti_adjust_stack (GEN_INT (extra));
4316
4317 /* If we make space by pushing it, we might as well push
4318 the real data. Otherwise, we can leave OFFSET nonzero
4319 and leave the space uninitialized. */
4320 if (args_addr == 0)
4321 offset = 0;
4322
4323 /* Now NOT_STACK gets the number of words that we don't need to
4324 allocate on the stack. Convert OFFSET to words too. */
4325 not_stack = (partial - offset) / UNITS_PER_WORD;
4326 offset /= UNITS_PER_WORD;
4327
4328 /* If the partial register-part of the arg counts in its stack size,
4329 skip the part of stack space corresponding to the registers.
4330 Otherwise, start copying to the beginning of the stack space,
4331 by setting SKIP to 0. */
4332 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4333
4334 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4335 x = validize_mem (force_const_mem (mode, x));
4336
4337 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4338 SUBREGs of such registers are not allowed. */
4339 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4340 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4341 x = copy_to_reg (x);
4342
4343 /* Loop over all the words allocated on the stack for this arg. */
4344 /* We can do it by words, because any scalar bigger than a word
4345      has a size that is a multiple of a word.  */
4346 for (i = size - 1; i >= not_stack; i--)
4347 if (i >= not_stack + offset)
4348 emit_push_insn (operand_subword_force (x, i, mode),
4349 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4350 0, args_addr,
4351 GEN_INT (args_offset + ((i - not_stack + skip)
4352 * UNITS_PER_WORD)),
4353 reg_parm_stack_space, alignment_pad);
4354 }
4355 else
4356 {
4357 rtx addr;
4358 rtx dest;
4359
4360 /* Push padding now if padding above and stack grows down,
4361 or if padding below and stack grows up.
4362 But if space already allocated, this has already been done. */
4363 if (extra && args_addr == 0
4364 && where_pad != none && where_pad != stack_direction)
4365 anti_adjust_stack (GEN_INT (extra));
4366
4367 #ifdef PUSH_ROUNDING
4368 if (args_addr == 0 && PUSH_ARGS)
4369 emit_single_push_insn (mode, x, type);
4370 else
4371 #endif
4372 {
4373 if (CONST_INT_P (args_so_far))
4374 addr
4375 = memory_address (mode,
4376 plus_constant (Pmode, args_addr,
4377 INTVAL (args_so_far)));
4378 else
4379 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4380 args_so_far));
4381 dest = gen_rtx_MEM (mode, addr);
4382
4383 /* We do *not* set_mem_attributes here, because incoming arguments
4384 may overlap with sibling call outgoing arguments and we cannot
4385 allow reordering of reads from function arguments with stores
4386 to outgoing arguments of sibling calls. We do, however, want
4387 to record the alignment of the stack slot. */
4388 	 /* ALIGN may well be stricter than the alignment of TYPE, e.g. due
4389 	    to PARM_BOUNDARY.  Assume the caller isn't lying.  */
4390 set_mem_align (dest, align);
4391
4392 emit_move_insn (dest, x);
4393 }
4394 }
4395
4396 /* If part should go in registers, copy that part
4397 into the appropriate registers. Do this now, at the end,
4398 since mem-to-mem copies above may do function calls. */
4399 if (partial > 0 && reg != 0)
4400 {
4401 /* Handle calls that pass values in multiple non-contiguous locations.
4402 The Irix 6 ABI has examples of this. */
4403 if (GET_CODE (reg) == PARALLEL)
4404 emit_group_load (reg, x, type, -1);
4405 else
4406 {
4407 gcc_assert (partial % UNITS_PER_WORD == 0);
4408 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4409 }
4410 }
4411
4412 if (extra && args_addr == 0 && where_pad == stack_direction)
4413 anti_adjust_stack (GEN_INT (extra));
4414
4415 if (alignment_pad && args_addr == 0)
4416 anti_adjust_stack (alignment_pad);
4417 }
4418 \f
4419 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4420 operations. */
4421
4422 static rtx
4423 get_subtarget (rtx x)
4424 {
4425 return (optimize
4426 || x == 0
4427 /* Only registers can be subtargets. */
4428 || !REG_P (x)
4429 /* Don't use hard regs to avoid extending their life. */
4430 || REGNO (x) < FIRST_PSEUDO_REGISTER
4431 ? 0 : x);
4432 }
4433
4434 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4435 FIELD is a bitfield. Returns true if the optimization was successful,
4436 and there's nothing else to do. */
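
/* For instance (an illustrative sketch, not taken from the sources):
   given

     struct S { unsigned int lo : 28; unsigned int hi : 4; } s;
     s.hi += 1;

   on a typical layout where HI occupies the topmost bits of a 32-bit
   word, the increment can be done by adding 1 << 28 to the containing
   word with no extract/insert sequence; carries simply fall off the top.
   Single-bit fields are handled similarly using xor.  */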
4437
4438 static bool
4439 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4440 unsigned HOST_WIDE_INT bitpos,
4441 unsigned HOST_WIDE_INT bitregion_start,
4442 unsigned HOST_WIDE_INT bitregion_end,
4443 machine_mode mode1, rtx str_rtx,
4444 tree to, tree src)
4445 {
4446 machine_mode str_mode = GET_MODE (str_rtx);
4447 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4448 tree op0, op1;
4449 rtx value, result;
4450 optab binop;
4451 gimple srcstmt;
4452 enum tree_code code;
4453
4454 if (mode1 != VOIDmode
4455 || bitsize >= BITS_PER_WORD
4456 || str_bitsize > BITS_PER_WORD
4457 || TREE_SIDE_EFFECTS (to)
4458 || TREE_THIS_VOLATILE (to))
4459 return false;
4460
4461 STRIP_NOPS (src);
4462 if (TREE_CODE (src) != SSA_NAME)
4463 return false;
4464 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4465 return false;
4466
4467 srcstmt = get_gimple_for_ssa_name (src);
4468 if (!srcstmt
4469 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4470 return false;
4471
4472 code = gimple_assign_rhs_code (srcstmt);
4473
4474 op0 = gimple_assign_rhs1 (srcstmt);
4475
4476 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4477 to find its initialization. Hopefully the initialization will
4478 be from a bitfield load. */
4479 if (TREE_CODE (op0) == SSA_NAME)
4480 {
4481 gimple op0stmt = get_gimple_for_ssa_name (op0);
4482
4483 /* We want to eventually have OP0 be the same as TO, which
4484 should be a bitfield. */
4485 if (!op0stmt
4486 || !is_gimple_assign (op0stmt)
4487 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4488 return false;
4489 op0 = gimple_assign_rhs1 (op0stmt);
4490 }
4491
4492 op1 = gimple_assign_rhs2 (srcstmt);
4493
4494 if (!operand_equal_p (to, op0, 0))
4495 return false;
4496
4497 if (MEM_P (str_rtx))
4498 {
4499 unsigned HOST_WIDE_INT offset1;
4500
4501 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4502 str_mode = word_mode;
4503 str_mode = get_best_mode (bitsize, bitpos,
4504 bitregion_start, bitregion_end,
4505 MEM_ALIGN (str_rtx), str_mode, 0);
4506 if (str_mode == VOIDmode)
4507 return false;
4508 str_bitsize = GET_MODE_BITSIZE (str_mode);
4509
4510 offset1 = bitpos;
4511 bitpos %= str_bitsize;
4512 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4513 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4514 }
4515 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4516 return false;
4517
4518 /* If the bit field covers the whole REG/MEM, store_field
4519 will likely generate better code. */
4520 if (bitsize >= str_bitsize)
4521 return false;
4522
4523 /* We can't handle fields split across multiple entities. */
4524 if (bitpos + bitsize > str_bitsize)
4525 return false;
4526
4527 if (BYTES_BIG_ENDIAN)
4528 bitpos = str_bitsize - bitpos - bitsize;
4529
4530 switch (code)
4531 {
4532 case PLUS_EXPR:
4533 case MINUS_EXPR:
4534 /* For now, just optimize the case of the topmost bitfield
4535 where we don't need to do any masking and also
4536 1 bit bitfields where xor can be used.
4537 We might win by one instruction for the other bitfields
4538 too if insv/extv instructions aren't used, so that
4539 can be added later. */
4540 if (bitpos + bitsize != str_bitsize
4541 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4542 break;
4543
4544 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4545 value = convert_modes (str_mode,
4546 TYPE_MODE (TREE_TYPE (op1)), value,
4547 TYPE_UNSIGNED (TREE_TYPE (op1)));
4548
4549 /* We may be accessing data outside the field, which means
4550 we can alias adjacent data. */
4551 if (MEM_P (str_rtx))
4552 {
4553 str_rtx = shallow_copy_rtx (str_rtx);
4554 set_mem_alias_set (str_rtx, 0);
4555 set_mem_expr (str_rtx, 0);
4556 }
4557
4558 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4559 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4560 {
4561 value = expand_and (str_mode, value, const1_rtx, NULL);
4562 binop = xor_optab;
4563 }
4564 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4565 result = expand_binop (str_mode, binop, str_rtx,
4566 value, str_rtx, 1, OPTAB_WIDEN);
4567 if (result != str_rtx)
4568 emit_move_insn (str_rtx, result);
4569 return true;
4570
4571 case BIT_IOR_EXPR:
4572 case BIT_XOR_EXPR:
4573 if (TREE_CODE (op1) != INTEGER_CST)
4574 break;
4575 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4576 value = convert_modes (str_mode,
4577 TYPE_MODE (TREE_TYPE (op1)), value,
4578 TYPE_UNSIGNED (TREE_TYPE (op1)));
4579
4580 /* We may be accessing data outside the field, which means
4581 we can alias adjacent data. */
4582 if (MEM_P (str_rtx))
4583 {
4584 str_rtx = shallow_copy_rtx (str_rtx);
4585 set_mem_alias_set (str_rtx, 0);
4586 set_mem_expr (str_rtx, 0);
4587 }
4588
4589 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4590 if (bitpos + bitsize != str_bitsize)
4591 {
4592 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4593 str_mode);
4594 value = expand_and (str_mode, value, mask, NULL_RTX);
4595 }
4596 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4597 result = expand_binop (str_mode, binop, str_rtx,
4598 value, str_rtx, 1, OPTAB_WIDEN);
4599 if (result != str_rtx)
4600 emit_move_insn (str_rtx, result);
4601 return true;
4602
4603 default:
4604 break;
4605 }
4606
4607 return false;
4608 }
4609
4610 /* In the C++ memory model, consecutive bit fields in a structure are
4611 considered one memory location.
4612
4613 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4614 returns the bit range of consecutive bits in which this COMPONENT_REF
4615 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4616 and *OFFSET may be adjusted in the process.
4617
4618 If the access does not need to be restricted, 0 is returned in both
4619 *BITSTART and *BITEND. */
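
/* For example (illustrative only): in

     struct S { char c; int f1 : 3; int f2 : 5; short t; } s;

   F1 and F2 share one DECL_BIT_FIELD_REPRESENTATIVE, so a store to s.f1
   may touch bits of F2 but must not touch C or T; the range returned
   here covers exactly the bits of that representative.  */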
4620
4621 static void
4622 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4623 unsigned HOST_WIDE_INT *bitend,
4624 tree exp,
4625 HOST_WIDE_INT *bitpos,
4626 tree *offset)
4627 {
4628 HOST_WIDE_INT bitoffset;
4629 tree field, repr;
4630
4631 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4632
4633 field = TREE_OPERAND (exp, 1);
4634 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4635 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4636 need to limit the range we can access. */
4637 if (!repr)
4638 {
4639 *bitstart = *bitend = 0;
4640 return;
4641 }
4642
4643 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4644 part of a larger bit field, then the representative does not serve any
4645 useful purpose. This can occur in Ada. */
4646 if (handled_component_p (TREE_OPERAND (exp, 0)))
4647 {
4648 machine_mode rmode;
4649 HOST_WIDE_INT rbitsize, rbitpos;
4650 tree roffset;
4651 int unsignedp;
4652 int volatilep = 0;
4653 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4654 &roffset, &rmode, &unsignedp, &volatilep, false);
4655 if ((rbitpos % BITS_PER_UNIT) != 0)
4656 {
4657 *bitstart = *bitend = 0;
4658 return;
4659 }
4660 }
4661
4662 /* Compute the adjustment to bitpos from the offset of the field
4663 relative to the representative. DECL_FIELD_OFFSET of field and
4664 repr are the same by construction if they are not constants,
4665 see finish_bitfield_layout. */
4666 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4667 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4668 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4669 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4670 else
4671 bitoffset = 0;
4672 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4673 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4674
4675 /* If the adjustment is larger than bitpos, we would have a negative bit
4676 position for the lower bound and this may wreak havoc later. Adjust
4677 offset and bitpos to make the lower bound non-negative in that case. */
4678 if (bitoffset > *bitpos)
4679 {
4680 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4681 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4682
4683 *bitpos += adjust;
4684 if (*offset == NULL_TREE)
4685 *offset = size_int (-adjust / BITS_PER_UNIT);
4686 else
4687 *offset
4688 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4689 *bitstart = 0;
4690 }
4691 else
4692 *bitstart = *bitpos - bitoffset;
4693
4694 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4695 }
4696
4697 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4698 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4699 DECL_RTL was not set yet, return NORTL. */
4700
4701 static inline bool
4702 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4703 {
4704 if (TREE_CODE (addr) != ADDR_EXPR)
4705 return false;
4706
4707 tree base = TREE_OPERAND (addr, 0);
4708
4709 if (!DECL_P (base)
4710 || TREE_ADDRESSABLE (base)
4711 || DECL_MODE (base) == BLKmode)
4712 return false;
4713
4714 if (!DECL_RTL_SET_P (base))
4715 return nortl;
4716
4717 return (!MEM_P (DECL_RTL (base)));
4718 }
4719
4720 /* Returns true if the MEM_REF REF refers to an object that does not
4721 reside in memory and has non-BLKmode. */
4722
4723 static inline bool
4724 mem_ref_refers_to_non_mem_p (tree ref)
4725 {
4726 tree base = TREE_OPERAND (ref, 0);
4727 return addr_expr_of_non_mem_decl_p_1 (base, false);
4728 }
4729
4730 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4731 is true, try generating a nontemporal store. */
4732
4733 void
4734 expand_assignment (tree to, tree from, bool nontemporal)
4735 {
4736 rtx to_rtx = 0;
4737 rtx result;
4738 machine_mode mode;
4739 unsigned int align;
4740 enum insn_code icode;
4741
4742 /* Don't crash if the lhs of the assignment was erroneous. */
4743 if (TREE_CODE (to) == ERROR_MARK)
4744 {
4745 expand_normal (from);
4746 return;
4747 }
4748
4749 /* Optimize away no-op moves without side-effects. */
4750 if (operand_equal_p (to, from, 0))
4751 return;
4752
4753 /* Handle misaligned stores. */
4754 mode = TYPE_MODE (TREE_TYPE (to));
4755 if ((TREE_CODE (to) == MEM_REF
4756 || TREE_CODE (to) == TARGET_MEM_REF)
4757 && mode != BLKmode
4758 && !mem_ref_refers_to_non_mem_p (to)
4759 && ((align = get_object_alignment (to))
4760 < GET_MODE_ALIGNMENT (mode))
4761 && (((icode = optab_handler (movmisalign_optab, mode))
4762 != CODE_FOR_nothing)
4763 || SLOW_UNALIGNED_ACCESS (mode, align)))
4764 {
4765 rtx reg, mem;
4766
4767 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4768 reg = force_not_mem (reg);
4769 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4770
4771 if (icode != CODE_FOR_nothing)
4772 {
4773 struct expand_operand ops[2];
4774
4775 create_fixed_operand (&ops[0], mem);
4776 create_input_operand (&ops[1], reg, mode);
4777 /* The movmisalign<mode> pattern cannot fail, else the assignment
4778 would silently be omitted. */
4779 expand_insn (icode, 2, ops);
4780 }
4781 else
4782 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4783 return;
4784 }
4785
4786 /* Assignment of a structure component needs special treatment
4787 if the structure component's rtx is not simply a MEM.
4788 Assignment of an array element at a constant index, and assignment of
4789 an array element in an unaligned packed structure field, has the same
4790 problem. Same for (partially) storing into a non-memory object. */
4791 if (handled_component_p (to)
4792 || (TREE_CODE (to) == MEM_REF
4793 && mem_ref_refers_to_non_mem_p (to))
4794 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4795 {
4796 machine_mode mode1;
4797 HOST_WIDE_INT bitsize, bitpos;
4798 unsigned HOST_WIDE_INT bitregion_start = 0;
4799 unsigned HOST_WIDE_INT bitregion_end = 0;
4800 tree offset;
4801 int unsignedp;
4802 int volatilep = 0;
4803 tree tem;
4804
4805 push_temp_slots ();
4806 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4807 &unsignedp, &volatilep, true);
4808
4809 /* Make sure bitpos is not negative, it can wreak havoc later. */
4810 if (bitpos < 0)
4811 {
4812 gcc_assert (offset == NULL_TREE);
4813 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4814 ? 3 : exact_log2 (BITS_PER_UNIT)));
4815 bitpos &= BITS_PER_UNIT - 1;
4816 }
4817
4818 if (TREE_CODE (to) == COMPONENT_REF
4819 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4820 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4821 /* The C++ memory model naturally applies to byte-aligned fields.
4822 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4823 BITSIZE are not byte-aligned, there is no need to limit the range
4824 we can access. This can occur with packed structures in Ada. */
4825 else if (bitsize > 0
4826 && bitsize % BITS_PER_UNIT == 0
4827 && bitpos % BITS_PER_UNIT == 0)
4828 {
4829 bitregion_start = bitpos;
4830 bitregion_end = bitpos + bitsize - 1;
4831 }
4832
4833 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4834
4835 /* If the field has a mode, we want to access it in the
4836 field's mode, not the computed mode.
4837 If a MEM has VOIDmode (external with incomplete type),
4838 use BLKmode for it instead. */
4839 if (MEM_P (to_rtx))
4840 {
4841 if (mode1 != VOIDmode)
4842 to_rtx = adjust_address (to_rtx, mode1, 0);
4843 else if (GET_MODE (to_rtx) == VOIDmode)
4844 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4845 }
4846
4847 if (offset != 0)
4848 {
4849 machine_mode address_mode;
4850 rtx offset_rtx;
4851
4852 if (!MEM_P (to_rtx))
4853 {
4854 /* We can get constant negative offsets into arrays with broken
4855 user code. Translate this to a trap instead of ICEing. */
4856 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4857 expand_builtin_trap ();
4858 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4859 }
4860
4861 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4862 address_mode = get_address_mode (to_rtx);
4863 if (GET_MODE (offset_rtx) != address_mode)
4864 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4865
4866 /* If we have an expression in OFFSET_RTX and a non-zero
4867 byte offset in BITPOS, adding the byte offset before the
4868 OFFSET_RTX results in better intermediate code, which makes
4869 later rtl optimization passes perform better.
4870
4871 We prefer intermediate code like this:
4872
4873 r124:DI=r123:DI+0x18
4874 [r124:DI]=r121:DI
4875
4876 ... instead of ...
4877
4878 r124:DI=r123:DI+0x10
4879 [r124:DI+0x8]=r121:DI
4880
4881 This is only done for aligned data values, as these can
4882 be expected to result in single move instructions. */
4883 if (mode1 != VOIDmode
4884 && bitpos != 0
4885 && bitsize > 0
4886 && (bitpos % bitsize) == 0
4887 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4888 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4889 {
4890 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4891 bitregion_start = 0;
4892 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4893 bitregion_end -= bitpos;
4894 bitpos = 0;
4895 }
4896
4897 to_rtx = offset_address (to_rtx, offset_rtx,
4898 highest_pow2_factor_for_target (to,
4899 offset));
4900 }
4901
4902 /* No action is needed if the target is not a memory and the field
4903 lies completely outside that target. This can occur if the source
4904 code contains an out-of-bounds access to a small array. */
4905 if (!MEM_P (to_rtx)
4906 && GET_MODE (to_rtx) != BLKmode
4907 && (unsigned HOST_WIDE_INT) bitpos
4908 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4909 {
4910 expand_normal (from);
4911 result = NULL;
4912 }
4913 /* Handle expand_expr of a complex value returning a CONCAT. */
4914 else if (GET_CODE (to_rtx) == CONCAT)
4915 {
4916 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4917 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4918 && bitpos == 0
4919 && bitsize == mode_bitsize)
4920 result = store_expr (from, to_rtx, false, nontemporal);
4921 else if (bitsize == mode_bitsize / 2
4922 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4923 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4924 nontemporal);
4925 else if (bitpos + bitsize <= mode_bitsize / 2)
4926 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4927 bitregion_start, bitregion_end,
4928 mode1, from,
4929 get_alias_set (to), nontemporal);
4930 else if (bitpos >= mode_bitsize / 2)
4931 result = store_field (XEXP (to_rtx, 1), bitsize,
4932 bitpos - mode_bitsize / 2,
4933 bitregion_start, bitregion_end,
4934 mode1, from,
4935 get_alias_set (to), nontemporal);
4936 else if (bitpos == 0 && bitsize == mode_bitsize)
4937 {
4938 rtx from_rtx;
4939 result = expand_normal (from);
4940 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4941 TYPE_MODE (TREE_TYPE (from)), 0);
4942 emit_move_insn (XEXP (to_rtx, 0),
4943 read_complex_part (from_rtx, false));
4944 emit_move_insn (XEXP (to_rtx, 1),
4945 read_complex_part (from_rtx, true));
4946 }
4947 else
4948 {
4949 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4950 GET_MODE_SIZE (GET_MODE (to_rtx)));
4951 write_complex_part (temp, XEXP (to_rtx, 0), false);
4952 write_complex_part (temp, XEXP (to_rtx, 1), true);
4953 result = store_field (temp, bitsize, bitpos,
4954 bitregion_start, bitregion_end,
4955 mode1, from,
4956 get_alias_set (to), nontemporal);
4957 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4958 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4959 }
4960 }
4961 else
4962 {
4963 if (MEM_P (to_rtx))
4964 {
4965 /* If the field is at offset zero, we could have been given the
4966 DECL_RTX of the parent struct. Don't munge it. */
4967 to_rtx = shallow_copy_rtx (to_rtx);
4968 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4969 if (volatilep)
4970 MEM_VOLATILE_P (to_rtx) = 1;
4971 }
4972
4973 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4974 bitregion_start, bitregion_end,
4975 mode1,
4976 to_rtx, to, from))
4977 result = NULL;
4978 else
4979 result = store_field (to_rtx, bitsize, bitpos,
4980 bitregion_start, bitregion_end,
4981 mode1, from,
4982 get_alias_set (to), nontemporal);
4983 }
4984
4985 if (result)
4986 preserve_temp_slots (result);
4987 pop_temp_slots ();
4988 return;
4989 }
4990
4991 /* If the rhs is a function call and its value is not an aggregate,
4992 call the function before we start to compute the lhs.
4993 This is needed for correct code for cases such as
4994 val = setjmp (buf) on machines where reference to val
4995 requires loading up part of an address in a separate insn.
4996
4997 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4998 since it might be a promoted variable where the zero- or sign- extension
4999 needs to be done. Handling this in the normal way is safe because no
5000 computation is done before the call. The same is true for SSA names. */
5001 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5002 && COMPLETE_TYPE_P (TREE_TYPE (from))
5003 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5004 && ! (((TREE_CODE (to) == VAR_DECL
5005 || TREE_CODE (to) == PARM_DECL
5006 || TREE_CODE (to) == RESULT_DECL)
5007 && REG_P (DECL_RTL (to)))
5008 || TREE_CODE (to) == SSA_NAME))
5009 {
5010 rtx value;
5011 rtx bounds;
5012
5013 push_temp_slots ();
5014 value = expand_normal (from);
5015
5016 /* Split value and bounds to store them separately. */
5017 chkp_split_slot (value, &value, &bounds);
5018
5019 if (to_rtx == 0)
5020 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5021
5022 /* Handle calls that return values in multiple non-contiguous locations.
5023 The Irix 6 ABI has examples of this. */
5024 if (GET_CODE (to_rtx) == PARALLEL)
5025 {
5026 if (GET_CODE (value) == PARALLEL)
5027 emit_group_move (to_rtx, value);
5028 else
5029 emit_group_load (to_rtx, value, TREE_TYPE (from),
5030 int_size_in_bytes (TREE_TYPE (from)));
5031 }
5032 else if (GET_CODE (value) == PARALLEL)
5033 emit_group_store (to_rtx, value, TREE_TYPE (from),
5034 int_size_in_bytes (TREE_TYPE (from)));
5035 else if (GET_MODE (to_rtx) == BLKmode)
5036 {
5037 /* Handle calls that return BLKmode values in registers. */
5038 if (REG_P (value))
5039 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5040 else
5041 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5042 }
5043 else
5044 {
5045 if (POINTER_TYPE_P (TREE_TYPE (to)))
5046 value = convert_memory_address_addr_space
5047 (GET_MODE (to_rtx), value,
5048 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5049
5050 emit_move_insn (to_rtx, value);
5051 }
5052
5053 /* Store bounds if required. */
5054 if (bounds
5055 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5056 {
5057 gcc_assert (MEM_P (to_rtx));
5058 chkp_emit_bounds_store (bounds, value, to_rtx);
5059 }
5060
5061 preserve_temp_slots (to_rtx);
5062 pop_temp_slots ();
5063 return;
5064 }
5065
5066 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5067 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5068
5069 /* Don't move directly into a return register. */
5070 if (TREE_CODE (to) == RESULT_DECL
5071 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5072 {
5073 rtx temp;
5074
5075 push_temp_slots ();
5076
5077 /* If the source is itself a return value, it still is in a pseudo at
5078 this point so we can move it back to the return register directly. */
5079 if (REG_P (to_rtx)
5080 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5081 && TREE_CODE (from) != CALL_EXPR)
5082 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5083 else
5084 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5085
5086 /* Handle calls that return values in multiple non-contiguous locations.
5087 The Irix 6 ABI has examples of this. */
5088 if (GET_CODE (to_rtx) == PARALLEL)
5089 {
5090 if (GET_CODE (temp) == PARALLEL)
5091 emit_group_move (to_rtx, temp);
5092 else
5093 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5094 int_size_in_bytes (TREE_TYPE (from)));
5095 }
5096 else if (temp)
5097 emit_move_insn (to_rtx, temp);
5098
5099 preserve_temp_slots (to_rtx);
5100 pop_temp_slots ();
5101 return;
5102 }
5103
5104 /* In case we are returning the contents of an object which overlaps
5105 the place the value is being stored, use a safe function when copying
5106 a value through a pointer into a structure value return block. */
5107 if (TREE_CODE (to) == RESULT_DECL
5108 && TREE_CODE (from) == INDIRECT_REF
5109 && ADDR_SPACE_GENERIC_P
5110 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5111 && refs_may_alias_p (to, from)
5112 && cfun->returns_struct
5113 && !cfun->returns_pcc_struct)
5114 {
5115 rtx from_rtx, size;
5116
5117 push_temp_slots ();
5118 size = expr_size (from);
5119 from_rtx = expand_normal (from);
5120
5121 emit_library_call (memmove_libfunc, LCT_NORMAL,
5122 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5123 XEXP (from_rtx, 0), Pmode,
5124 convert_to_mode (TYPE_MODE (sizetype),
5125 size, TYPE_UNSIGNED (sizetype)),
5126 TYPE_MODE (sizetype));
5127
5128 preserve_temp_slots (to_rtx);
5129 pop_temp_slots ();
5130 return;
5131 }
5132
5133 /* Compute FROM and store the value in the rtx we got. */
5134
5135 push_temp_slots ();
5136 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, to);
5137 preserve_temp_slots (result);
5138 pop_temp_slots ();
5139 return;
5140 }
5141
5142 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5143 succeeded, false otherwise. */
5144
5145 bool
5146 emit_storent_insn (rtx to, rtx from)
5147 {
5148 struct expand_operand ops[2];
5149 machine_mode mode = GET_MODE (to);
5150 enum insn_code code = optab_handler (storent_optab, mode);
5151
5152 if (code == CODE_FOR_nothing)
5153 return false;
5154
5155 create_fixed_operand (&ops[0], to);
5156 create_input_operand (&ops[1], from, mode);
5157 return maybe_expand_insn (code, 2, ops);
5158 }
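
/* Illustrative sketch only: a caller that prefers a streaming store but
   can fall back to a normal move might do

     if (!emit_storent_insn (to_rtx, from_rtx))
       emit_move_insn (to_rtx, from_rtx);

   since the function returns false when the target provides no usable
   storent pattern for the mode.  */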
5159
5160 /* Generate code for computing expression EXP,
5161 and storing the value into TARGET.
5162
5163 If the mode is BLKmode then we may return TARGET itself.
5164    It turns out that in BLKmode it doesn't cause a problem,
5165 because C has no operators that could combine two different
5166 assignments into the same BLKmode object with different values
5167 with no sequence point. Will other languages need this to
5168 be more thorough?
5169
5170 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5171 stack, and block moves may need to be treated specially.
5172
5173 If NONTEMPORAL is true, try using a nontemporal store instruction.
5174
5175 If BTARGET is not NULL then computed bounds of EXP are
5176 associated with BTARGET. */
5177
5178 rtx
5179 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5180 bool nontemporal, tree btarget)
5181 {
5182 rtx temp;
5183 rtx alt_rtl = NULL_RTX;
5184 location_t loc = curr_insn_location ();
5185
5186 if (VOID_TYPE_P (TREE_TYPE (exp)))
5187 {
5188 /* C++ can generate ?: expressions with a throw expression in one
5189 branch and an rvalue in the other. Here, we resolve attempts to
5190 store the throw expression's nonexistent result. */
5191 gcc_assert (!call_param_p);
5192 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5193 return NULL_RTX;
5194 }
5195 if (TREE_CODE (exp) == COMPOUND_EXPR)
5196 {
5197 /* Perform first part of compound expression, then assign from second
5198 part. */
5199 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5200 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5201 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5202 call_param_p, nontemporal, btarget);
5203 }
5204 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5205 {
5206 /* For conditional expression, get safe form of the target. Then
5207 test the condition, doing the appropriate assignment on either
5208 side. This avoids the creation of unnecessary temporaries.
5209 For non-BLKmode, it is more efficient not to do this. */
5210
5211 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5212
5213 do_pending_stack_adjust ();
5214 NO_DEFER_POP;
5215 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5216 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5217 nontemporal, btarget);
5218 emit_jump_insn (gen_jump (lab2));
5219 emit_barrier ();
5220 emit_label (lab1);
5221 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5222 nontemporal, btarget);
5223 emit_label (lab2);
5224 OK_DEFER_POP;
5225
5226 return NULL_RTX;
5227 }
5228 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5229 /* If this is a scalar in a register that is stored in a wider mode
5230 than the declared mode, compute the result into its declared mode
5231 and then convert to the wider mode. Our value is the computed
5232 expression. */
5233 {
5234 rtx inner_target = 0;
5235
5236 /* We can do the conversion inside EXP, which will often result
5237 in some optimizations. Do the conversion in two steps: first
5238 change the signedness, if needed, then the extend. But don't
5239 do this if the type of EXP is a subtype of something else
5240 since then the conversion might involve more than just
5241 converting modes. */
5242 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5243 && TREE_TYPE (TREE_TYPE (exp)) == 0
5244 && GET_MODE_PRECISION (GET_MODE (target))
5245 == TYPE_PRECISION (TREE_TYPE (exp)))
5246 {
5247 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5248 TYPE_UNSIGNED (TREE_TYPE (exp))))
5249 {
5250 /* Some types, e.g. Fortran's logical*4, won't have a signed
5251 version, so use the mode instead. */
5252 tree ntype
5253 = (signed_or_unsigned_type_for
5254 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5255 if (ntype == NULL)
5256 ntype = lang_hooks.types.type_for_mode
5257 (TYPE_MODE (TREE_TYPE (exp)),
5258 SUBREG_PROMOTED_SIGN (target));
5259
5260 exp = fold_convert_loc (loc, ntype, exp);
5261 }
5262
5263 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5264 (GET_MODE (SUBREG_REG (target)),
5265 SUBREG_PROMOTED_SIGN (target)),
5266 exp);
5267
5268 inner_target = SUBREG_REG (target);
5269 }
5270
5271 temp = expand_expr (exp, inner_target, VOIDmode,
5272 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5273
5274 /* Handle bounds returned by call. */
5275 if (TREE_CODE (exp) == CALL_EXPR)
5276 {
5277 rtx bounds;
5278 chkp_split_slot (temp, &temp, &bounds);
5279 if (bounds && btarget)
5280 {
5281 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5282 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5283 chkp_set_rtl_bounds (btarget, tmp);
5284 }
5285 }
5286
5287 /* If TEMP is a VOIDmode constant, use convert_modes to make
5288 sure that we properly convert it. */
5289 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5290 {
5291 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5292 temp, SUBREG_PROMOTED_SIGN (target));
5293 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5294 GET_MODE (target), temp,
5295 SUBREG_PROMOTED_SIGN (target));
5296 }
5297
5298 convert_move (SUBREG_REG (target), temp,
5299 SUBREG_PROMOTED_SIGN (target));
5300
5301 return NULL_RTX;
5302 }
5303 else if ((TREE_CODE (exp) == STRING_CST
5304 || (TREE_CODE (exp) == MEM_REF
5305 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5306 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5307 == STRING_CST
5308 && integer_zerop (TREE_OPERAND (exp, 1))))
5309 && !nontemporal && !call_param_p
5310 && MEM_P (target))
5311 {
5312 /* Optimize initialization of an array with a STRING_CST. */
5313 HOST_WIDE_INT exp_len, str_copy_len;
5314 rtx dest_mem;
5315 tree str = TREE_CODE (exp) == STRING_CST
5316 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5317
5318 exp_len = int_expr_size (exp);
5319 if (exp_len <= 0)
5320 goto normal_expr;
5321
5322 if (TREE_STRING_LENGTH (str) <= 0)
5323 goto normal_expr;
5324
5325 str_copy_len = strlen (TREE_STRING_POINTER (str));
5326 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5327 goto normal_expr;
5328
5329 str_copy_len = TREE_STRING_LENGTH (str);
5330 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5331 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5332 {
5333 str_copy_len += STORE_MAX_PIECES - 1;
5334 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5335 }
5336 str_copy_len = MIN (str_copy_len, exp_len);
5337 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5338 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5339 MEM_ALIGN (target), false))
5340 goto normal_expr;
5341
5342 dest_mem = target;
5343
5344 dest_mem = store_by_pieces (dest_mem,
5345 str_copy_len, builtin_strncpy_read_str,
5346 CONST_CAST (char *,
5347 TREE_STRING_POINTER (str)),
5348 MEM_ALIGN (target), false,
5349 exp_len > str_copy_len ? 1 : 0);
5350 if (exp_len > str_copy_len)
5351 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5352 GEN_INT (exp_len - str_copy_len),
5353 BLOCK_OP_NORMAL);
5354 return NULL_RTX;
5355 }
5356 else
5357 {
5358 rtx tmp_target;
5359
5360 normal_expr:
5361 /* If we want to use a nontemporal store, force the value into a
5362 register first. */
5363 tmp_target = nontemporal ? NULL_RTX : target;
5364 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5365 (call_param_p
5366 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5367 &alt_rtl, false);
5368
5369 /* Handle bounds returned by call. */
5370 if (TREE_CODE (exp) == CALL_EXPR)
5371 {
5372 rtx bounds;
5373 chkp_split_slot (temp, &temp, &bounds);
5374 if (bounds && btarget)
5375 {
5376 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5377 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5378 chkp_set_rtl_bounds (btarget, tmp);
5379 }
5380 }
5381 }
5382
5383 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5384 the same as that of TARGET, adjust the constant. This is needed, for
5385 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5386 only a word-sized value. */
5387 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5388 && TREE_CODE (exp) != ERROR_MARK
5389 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5390 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5391 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5392
5393 /* If value was not generated in the target, store it there.
5394 Convert the value to TARGET's type first if necessary and emit the
5395 pending incrementations that have been queued when expanding EXP.
5396 Note that we cannot emit the whole queue blindly because this will
5397 effectively disable the POST_INC optimization later.
5398
5399 If TEMP and TARGET compare equal according to rtx_equal_p, but
5400 one or both of them are volatile memory refs, we have to distinguish
5401 two cases:
5402 - expand_expr has used TARGET. In this case, we must not generate
5403 another copy. This can be detected by TEMP being equal to TARGET
5404 according to == .
5405 - expand_expr has not used TARGET - that means that the source just
5406 happens to have the same RTX form. Since temp will have been created
5407 by expand_expr, it will compare unequal according to == .
5408 We must generate a copy in this case, to reach the correct number
5409 of volatile memory references. */
5410
5411 if ((! rtx_equal_p (temp, target)
5412 || (temp != target && (side_effects_p (temp)
5413 || side_effects_p (target))))
5414 && TREE_CODE (exp) != ERROR_MARK
5415 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5416 but TARGET is not valid memory reference, TEMP will differ
5417 from TARGET although it is really the same location. */
5418 && !(alt_rtl
5419 && rtx_equal_p (alt_rtl, target)
5420 && !side_effects_p (alt_rtl)
5421 && !side_effects_p (target))
5422 /* If there's nothing to copy, don't bother. Don't call
5423 expr_size unless necessary, because the expr_size hook of some
5424 front ends (e.g. C++) must not be given objects that are not
5425 supposed to be bit-copied or bit-initialized. */
5426 && expr_size (exp) != const0_rtx)
5427 {
5428 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5429 {
5430 if (GET_MODE (target) == BLKmode)
5431 {
5432 /* Handle calls that return BLKmode values in registers. */
5433 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5434 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5435 else
5436 store_bit_field (target,
5437 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5438 0, 0, 0, GET_MODE (temp), temp);
5439 }
5440 else
5441 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5442 }
5443
5444 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5445 {
5446 /* Handle copying a string constant into an array. The string
5447 constant may be shorter than the array. So copy just the string's
5448 actual length, and clear the rest. First get the size of the data
5449 type of the string, which is actually the size of the target. */
5450 rtx size = expr_size (exp);
5451
5452 if (CONST_INT_P (size)
5453 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5454 emit_block_move (target, temp, size,
5455 (call_param_p
5456 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5457 else
5458 {
5459 machine_mode pointer_mode
5460 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5461 machine_mode address_mode = get_address_mode (target);
5462
5463 /* Compute the size of the data to copy from the string. */
5464 tree copy_size
5465 = size_binop_loc (loc, MIN_EXPR,
5466 make_tree (sizetype, size),
5467 size_int (TREE_STRING_LENGTH (exp)));
5468 rtx copy_size_rtx
5469 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5470 (call_param_p
5471 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5472 rtx_code_label *label = 0;
5473
5474 /* Copy that much. */
5475 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5476 TYPE_UNSIGNED (sizetype));
5477 emit_block_move (target, temp, copy_size_rtx,
5478 (call_param_p
5479 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5480
5481 /* Figure out how much is left in TARGET that we have to clear.
5482 Do all calculations in pointer_mode. */
5483 if (CONST_INT_P (copy_size_rtx))
5484 {
5485 size = plus_constant (address_mode, size,
5486 -INTVAL (copy_size_rtx));
5487 target = adjust_address (target, BLKmode,
5488 INTVAL (copy_size_rtx));
5489 }
5490 else
5491 {
5492 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5493 copy_size_rtx, NULL_RTX, 0,
5494 OPTAB_LIB_WIDEN);
5495
5496 if (GET_MODE (copy_size_rtx) != address_mode)
5497 copy_size_rtx = convert_to_mode (address_mode,
5498 copy_size_rtx,
5499 TYPE_UNSIGNED (sizetype));
5500
5501 target = offset_address (target, copy_size_rtx,
5502 highest_pow2_factor (copy_size));
5503 label = gen_label_rtx ();
5504 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5505 GET_MODE (size), 0, label);
5506 }
5507
5508 if (size != const0_rtx)
5509 clear_storage (target, size, BLOCK_OP_NORMAL);
5510
5511 if (label)
5512 emit_label (label);
5513 }
5514 }
5515 /* Handle calls that return values in multiple non-contiguous locations.
5516 The Irix 6 ABI has examples of this. */
5517 else if (GET_CODE (target) == PARALLEL)
5518 {
5519 if (GET_CODE (temp) == PARALLEL)
5520 emit_group_move (target, temp);
5521 else
5522 emit_group_load (target, temp, TREE_TYPE (exp),
5523 int_size_in_bytes (TREE_TYPE (exp)));
5524 }
5525 else if (GET_CODE (temp) == PARALLEL)
5526 emit_group_store (target, temp, TREE_TYPE (exp),
5527 int_size_in_bytes (TREE_TYPE (exp)));
5528 else if (GET_MODE (temp) == BLKmode)
5529 emit_block_move (target, temp, expr_size (exp),
5530 (call_param_p
5531 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5532 /* If we emit a nontemporal store, there is nothing else to do. */
5533 else if (nontemporal && emit_storent_insn (target, temp))
5534 ;
5535 else
5536 {
5537 temp = force_operand (temp, target);
5538 if (temp != target)
5539 emit_move_insn (target, temp);
5540 }
5541 }
5542
5543 return NULL_RTX;
5544 }
5545
5546 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5547 rtx
5548 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5549 {
5550 return store_expr_with_bounds (exp, target, call_param_p, nontemporal, NULL);
5551 }
5552 \f
5553 /* Return true if field F of structure TYPE is a flexible array. */
5554
5555 static bool
5556 flexible_array_member_p (const_tree f, const_tree type)
5557 {
5558 const_tree tf;
5559
5560 tf = TREE_TYPE (f);
5561 return (DECL_CHAIN (f) == NULL
5562 && TREE_CODE (tf) == ARRAY_TYPE
5563 && TYPE_DOMAIN (tf)
5564 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5565 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5566 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5567 && int_size_in_bytes (type) >= 0);
5568 }
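/* As a rough illustration of the predicate above (the struct below is a
   hypothetical example, not taken from any particular front end):

       struct packet
       {
         unsigned int len;
         unsigned char payload[];   <-- flexible array member
       };

   PAYLOAD is the last field, its type is an ARRAY_TYPE whose domain has
   a zero lower bound and no upper bound, and the enclosing struct still
   has a known constant size, so flexible_array_member_p returns true
   for its FIELD_DECL.  A fixed-size trailing array such as
   "unsigned char payload[4]" has an upper bound and does not qualify.  */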
5569
5570 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5571 must have in order for it to completely initialize a value of type TYPE.
5572 Return -1 if the number isn't known.
5573
5574 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5575
5576 static HOST_WIDE_INT
5577 count_type_elements (const_tree type, bool for_ctor_p)
5578 {
5579 switch (TREE_CODE (type))
5580 {
5581 case ARRAY_TYPE:
5582 {
5583 tree nelts;
5584
5585 nelts = array_type_nelts (type);
5586 if (nelts && tree_fits_uhwi_p (nelts))
5587 {
5588 unsigned HOST_WIDE_INT n;
5589
5590 n = tree_to_uhwi (nelts) + 1;
5591 if (n == 0 || for_ctor_p)
5592 return n;
5593 else
5594 return n * count_type_elements (TREE_TYPE (type), false);
5595 }
5596 return for_ctor_p ? -1 : 1;
5597 }
5598
5599 case RECORD_TYPE:
5600 {
5601 unsigned HOST_WIDE_INT n;
5602 tree f;
5603
5604 n = 0;
5605 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5606 if (TREE_CODE (f) == FIELD_DECL)
5607 {
5608 if (!for_ctor_p)
5609 n += count_type_elements (TREE_TYPE (f), false);
5610 else if (!flexible_array_member_p (f, type))
5611 /* Don't count flexible arrays, which are not supposed
5612 to be initialized. */
5613 n += 1;
5614 }
5615
5616 return n;
5617 }
5618
5619 case UNION_TYPE:
5620 case QUAL_UNION_TYPE:
5621 {
5622 tree f;
5623 HOST_WIDE_INT n, m;
5624
5625 gcc_assert (!for_ctor_p);
5626 /* Estimate the number of scalars in each field and pick the
5627 maximum. Other estimates would do instead; the idea is simply
5628 to make sure that the estimate is not sensitive to the ordering
5629 of the fields. */
5630 n = 1;
5631 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5632 if (TREE_CODE (f) == FIELD_DECL)
5633 {
5634 m = count_type_elements (TREE_TYPE (f), false);
5635 /* If the field doesn't span the whole union, add an extra
5636 scalar for the rest. */
5637 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5638 TYPE_SIZE (type)) != 1)
5639 m++;
5640 if (n < m)
5641 n = m;
5642 }
5643 return n;
5644 }
5645
5646 case COMPLEX_TYPE:
5647 return 2;
5648
5649 case VECTOR_TYPE:
5650 return TYPE_VECTOR_SUBPARTS (type);
5651
5652 case INTEGER_TYPE:
5653 case REAL_TYPE:
5654 case FIXED_POINT_TYPE:
5655 case ENUMERAL_TYPE:
5656 case BOOLEAN_TYPE:
5657 case POINTER_TYPE:
5658 case OFFSET_TYPE:
5659 case REFERENCE_TYPE:
5660 case NULLPTR_TYPE:
5661 return 1;
5662
5663 case ERROR_MARK:
5664 return 0;
5665
5666 case VOID_TYPE:
5667 case METHOD_TYPE:
5668 case FUNCTION_TYPE:
5669 case LANG_TYPE:
5670 default:
5671 gcc_unreachable ();
5672 }
5673 }
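/* A small worked example for count_type_elements (hypothetical types):

       struct pt { double x, y; };
       struct pt path[3];

   With !FOR_CTOR_P, "struct pt" counts as 2 scalars, so the array type
   of PATH counts as 3 * 2 = 6 scalars.  With FOR_CTOR_P, the array type
   counts as 3, the number of top-level elements a constructor must
   supply to initialize PATH completely.  */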
5674
5675 /* Helper for categorize_ctor_elements. Identical interface. */
5676
5677 static bool
5678 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5679 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5680 {
5681 unsigned HOST_WIDE_INT idx;
5682 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5683 tree value, purpose, elt_type;
5684
5685 /* Whether CTOR is a valid constant initializer, in accordance with what
5686 initializer_constant_valid_p does. If inferred from the constructor
5687 elements, true until proven otherwise. */
5688 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5689 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5690
5691 nz_elts = 0;
5692 init_elts = 0;
5693 num_fields = 0;
5694 elt_type = NULL_TREE;
5695
5696 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5697 {
5698 HOST_WIDE_INT mult = 1;
5699
5700 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5701 {
5702 tree lo_index = TREE_OPERAND (purpose, 0);
5703 tree hi_index = TREE_OPERAND (purpose, 1);
5704
5705 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5706 mult = (tree_to_uhwi (hi_index)
5707 - tree_to_uhwi (lo_index) + 1);
5708 }
5709 num_fields += mult;
5710 elt_type = TREE_TYPE (value);
5711
5712 switch (TREE_CODE (value))
5713 {
5714 case CONSTRUCTOR:
5715 {
5716 HOST_WIDE_INT nz = 0, ic = 0;
5717
5718 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5719 p_complete);
5720
5721 nz_elts += mult * nz;
5722 init_elts += mult * ic;
5723
5724 if (const_from_elts_p && const_p)
5725 const_p = const_elt_p;
5726 }
5727 break;
5728
5729 case INTEGER_CST:
5730 case REAL_CST:
5731 case FIXED_CST:
5732 if (!initializer_zerop (value))
5733 nz_elts += mult;
5734 init_elts += mult;
5735 break;
5736
5737 case STRING_CST:
5738 nz_elts += mult * TREE_STRING_LENGTH (value);
5739 init_elts += mult * TREE_STRING_LENGTH (value);
5740 break;
5741
5742 case COMPLEX_CST:
5743 if (!initializer_zerop (TREE_REALPART (value)))
5744 nz_elts += mult;
5745 if (!initializer_zerop (TREE_IMAGPART (value)))
5746 nz_elts += mult;
5747 init_elts += mult;
5748 break;
5749
5750 case VECTOR_CST:
5751 {
5752 unsigned i;
5753 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5754 {
5755 tree v = VECTOR_CST_ELT (value, i);
5756 if (!initializer_zerop (v))
5757 nz_elts += mult;
5758 init_elts += mult;
5759 }
5760 }
5761 break;
5762
5763 default:
5764 {
5765 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5766 nz_elts += mult * tc;
5767 init_elts += mult * tc;
5768
5769 if (const_from_elts_p && const_p)
5770 const_p = initializer_constant_valid_p (value, elt_type)
5771 != NULL_TREE;
5772 }
5773 break;
5774 }
5775 }
5776
5777 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5778 num_fields, elt_type))
5779 *p_complete = false;
5780
5781 *p_nz_elts += nz_elts;
5782 *p_init_elts += init_elts;
5783
5784 return const_p;
5785 }
5786
5787 /* Examine CTOR to discover:
5788 * how many scalar fields are set to nonzero values,
5789 and place it in *P_NZ_ELTS;
5790 * how many scalar fields in total are in CTOR,
5791 and place it in *P_ELT_COUNT.
5792 * whether the constructor is complete -- in the sense that every
5793 meaningful byte is explicitly given a value --
5794 and place it in *P_COMPLETE.
5795
5796 Return whether or not CTOR is a valid static constant initializer, the same
5797 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5798
5799 bool
5800 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5801 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5802 {
5803 *p_nz_elts = 0;
5804 *p_init_elts = 0;
5805 *p_complete = true;
5806
5807 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5808 }
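/* For instance (a hypothetical CONSTRUCTOR, independent of any front
   end): a CONSTRUCTOR for "int[8]" holding the four INTEGER_CST
   elements 1, 2, 0 and 3 yields *P_NZ_ELTS == 3 and *P_INIT_ELTS == 4,
   and *P_COMPLETE becomes false because count_type_elements with
   FOR_CTOR_P requires 8 top-level elements for that array type.  */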
5809
5810 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5811 of which had type LAST_TYPE. Each element was itself a complete
5812 initializer, in the sense that every meaningful byte was explicitly
5813 given a value. Return true if the same is true for the constructor
5814 as a whole. */
5815
5816 bool
5817 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5818 const_tree last_type)
5819 {
5820 if (TREE_CODE (type) == UNION_TYPE
5821 || TREE_CODE (type) == QUAL_UNION_TYPE)
5822 {
5823 if (num_elts == 0)
5824 return false;
5825
5826 gcc_assert (num_elts == 1 && last_type);
5827
5828 /* ??? We could look at each element of the union and find the
5829 largest element, which would avoid comparing the size of the
5830 initialized element against any tail padding in the union.
5831 Doesn't seem worth the effort... */
5832 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5833 }
5834
5835 return count_type_elements (type, true) == num_elts;
5836 }
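/* As an illustration (hypothetical union; typical sizes with 32-bit int
   and 64-bit double assumed):

       union u { int i; double d; };

   a single-element constructor of type double is complete, because
   TYPE_SIZE (double) equals TYPE_SIZE (union u), whereas a
   single-element constructor of type int leaves tail padding uncovered
   and the function returns false.  */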
5837
5838 /* Return 1 if EXP contains mostly (3/4) zeros. */
5839
5840 static int
5841 mostly_zeros_p (const_tree exp)
5842 {
5843 if (TREE_CODE (exp) == CONSTRUCTOR)
5844 {
5845 HOST_WIDE_INT nz_elts, init_elts;
5846 bool complete_p;
5847
5848 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5849 return !complete_p || nz_elts < init_elts / 4;
5850 }
5851
5852 return initializer_zerop (exp);
5853 }
5854
5855 /* Return 1 if EXP contains all zeros. */
5856
5857 static int
5858 all_zeros_p (const_tree exp)
5859 {
5860 if (TREE_CODE (exp) == CONSTRUCTOR)
5861 {
5862 HOST_WIDE_INT nz_elts, init_elts;
5863 bool complete_p;
5864
5865 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5866 return nz_elts == 0;
5867 }
5868
5869 return initializer_zerop (exp);
5870 }
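/* Tying these two predicates to the categorization above: a CONSTRUCTOR
   for "int[8]" holding eight INTEGER_CST elements of which exactly one
   is nonzero gives nz_elts == 1 and init_elts == 8, so mostly_zeros_p
   returns 1 (1 < 8 / 4) while all_zeros_p returns 0.  An incomplete
   constructor is treated as mostly zeros regardless of its counts.  */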
5871 \f
5872 /* Helper function for store_constructor.
5873 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5874 CLEARED is as for store_constructor.
5875 ALIAS_SET is the alias set to use for any stores.
5876
5877 This provides a recursive shortcut back to store_constructor when it isn't
5878 necessary to go through store_field. This is so that we can pass through
5879 the cleared field to let store_constructor know that we may not have to
5880 clear a substructure if the outer structure has already been cleared. */
5881
5882 static void
5883 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5884 HOST_WIDE_INT bitpos, machine_mode mode,
5885 tree exp, int cleared, alias_set_type alias_set)
5886 {
5887 if (TREE_CODE (exp) == CONSTRUCTOR
5888 /* We can only call store_constructor recursively if the size and
5889 bit position are on a byte boundary. */
5890 && bitpos % BITS_PER_UNIT == 0
5891 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5892 /* If we have a nonzero bitpos for a register target, then we just
5893 let store_field do the bitfield handling. This is unlikely to
5894 generate unnecessary clear instructions anyways. */
5895 && (bitpos == 0 || MEM_P (target)))
5896 {
5897 if (MEM_P (target))
5898 target
5899 = adjust_address (target,
5900 GET_MODE (target) == BLKmode
5901 || 0 != (bitpos
5902 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5903 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5904
5905
5906 /* Update the alias set, if required. */
5907 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5908 && MEM_ALIAS_SET (target) != 0)
5909 {
5910 target = copy_rtx (target);
5911 set_mem_alias_set (target, alias_set);
5912 }
5913
5914 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5915 }
5916 else
5917 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5918 }
5919
5920
5921 /* Returns the number of FIELD_DECLs in TYPE. */
5922
5923 static int
5924 fields_length (const_tree type)
5925 {
5926 tree t = TYPE_FIELDS (type);
5927 int count = 0;
5928
5929 for (; t; t = DECL_CHAIN (t))
5930 if (TREE_CODE (t) == FIELD_DECL)
5931 ++count;
5932
5933 return count;
5934 }
5935
5936
5937 /* Store the value of constructor EXP into the rtx TARGET.
5938 TARGET is either a REG or a MEM; we know it cannot conflict, since
5939 safe_from_p has been called.
5940 CLEARED is true if TARGET is known to have been zero'd.
5941 SIZE is the number of bytes of TARGET we are allowed to modify: this
5942 may not be the same as the size of EXP if we are assigning to a field
5943 which has been packed to exclude padding bits. */
5944
5945 static void
5946 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5947 {
5948 tree type = TREE_TYPE (exp);
5949 #ifdef WORD_REGISTER_OPERATIONS
5950 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5951 #endif
5952
5953 switch (TREE_CODE (type))
5954 {
5955 case RECORD_TYPE:
5956 case UNION_TYPE:
5957 case QUAL_UNION_TYPE:
5958 {
5959 unsigned HOST_WIDE_INT idx;
5960 tree field, value;
5961
5962 /* If size is zero or the target is already cleared, do nothing. */
5963 if (size == 0 || cleared)
5964 cleared = 1;
5965 /* We either clear the aggregate or indicate the value is dead. */
5966 else if ((TREE_CODE (type) == UNION_TYPE
5967 || TREE_CODE (type) == QUAL_UNION_TYPE)
5968 && ! CONSTRUCTOR_ELTS (exp))
5969 /* If the constructor is empty, clear the union. */
5970 {
5971 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5972 cleared = 1;
5973 }
5974
5975 /* If we are building a static constructor into a register,
5976 set the initial value as zero so we can fold the value into
5977 a constant. But if more than one register is involved,
5978 this probably loses. */
5979 else if (REG_P (target) && TREE_STATIC (exp)
5980 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5981 {
5982 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5983 cleared = 1;
5984 }
5985
5986 /* If the constructor has fewer fields than the structure or
5987 if we are initializing the structure to mostly zeros, clear
5988 the whole structure first. Don't do this if TARGET is a
5989 register whose mode size isn't equal to SIZE since
5990 clear_storage can't handle this case. */
5991 else if (size > 0
5992 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5993 != fields_length (type))
5994 || mostly_zeros_p (exp))
5995 && (!REG_P (target)
5996 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5997 == size)))
5998 {
5999 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6000 cleared = 1;
6001 }
6002
6003 if (REG_P (target) && !cleared)
6004 emit_clobber (target);
6005
6006 /* Store each element of the constructor into the
6007 corresponding field of TARGET. */
6008 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6009 {
6010 machine_mode mode;
6011 HOST_WIDE_INT bitsize;
6012 HOST_WIDE_INT bitpos = 0;
6013 tree offset;
6014 rtx to_rtx = target;
6015
6016 /* Just ignore missing fields. We cleared the whole
6017 structure, above, if any fields are missing. */
6018 if (field == 0)
6019 continue;
6020
6021 if (cleared && initializer_zerop (value))
6022 continue;
6023
6024 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6025 bitsize = tree_to_uhwi (DECL_SIZE (field));
6026 else
6027 bitsize = -1;
6028
6029 mode = DECL_MODE (field);
6030 if (DECL_BIT_FIELD (field))
6031 mode = VOIDmode;
6032
6033 offset = DECL_FIELD_OFFSET (field);
6034 if (tree_fits_shwi_p (offset)
6035 && tree_fits_shwi_p (bit_position (field)))
6036 {
6037 bitpos = int_bit_position (field);
6038 offset = 0;
6039 }
6040 else
6041 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6042
6043 if (offset)
6044 {
6045 machine_mode address_mode;
6046 rtx offset_rtx;
6047
6048 offset
6049 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6050 make_tree (TREE_TYPE (exp),
6051 target));
6052
6053 offset_rtx = expand_normal (offset);
6054 gcc_assert (MEM_P (to_rtx));
6055
6056 address_mode = get_address_mode (to_rtx);
6057 if (GET_MODE (offset_rtx) != address_mode)
6058 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6059
6060 to_rtx = offset_address (to_rtx, offset_rtx,
6061 highest_pow2_factor (offset));
6062 }
6063
6064 #ifdef WORD_REGISTER_OPERATIONS
6065 /* If this initializes a field that is smaller than a
6066 word, at the start of a word, try to widen it to a full
6067 word. This special case allows us to output C++ member
6068 function initializations in a form that the optimizers
6069 can understand. */
6070 if (REG_P (target)
6071 && bitsize < BITS_PER_WORD
6072 && bitpos % BITS_PER_WORD == 0
6073 && GET_MODE_CLASS (mode) == MODE_INT
6074 && TREE_CODE (value) == INTEGER_CST
6075 && exp_size >= 0
6076 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6077 {
6078 tree type = TREE_TYPE (value);
6079
6080 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6081 {
6082 type = lang_hooks.types.type_for_mode
6083 (word_mode, TYPE_UNSIGNED (type));
6084 value = fold_convert (type, value);
6085 }
6086
6087 if (BYTES_BIG_ENDIAN)
6088 value
6089 = fold_build2 (LSHIFT_EXPR, type, value,
6090 build_int_cst (type,
6091 BITS_PER_WORD - bitsize));
6092 bitsize = BITS_PER_WORD;
6093 mode = word_mode;
6094 }
6095 #endif
6096
6097 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6098 && DECL_NONADDRESSABLE_P (field))
6099 {
6100 to_rtx = copy_rtx (to_rtx);
6101 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6102 }
6103
6104 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6105 value, cleared,
6106 get_alias_set (TREE_TYPE (field)));
6107 }
6108 break;
6109 }
6110 case ARRAY_TYPE:
6111 {
6112 tree value, index;
6113 unsigned HOST_WIDE_INT i;
6114 int need_to_clear;
6115 tree domain;
6116 tree elttype = TREE_TYPE (type);
6117 int const_bounds_p;
6118 HOST_WIDE_INT minelt = 0;
6119 HOST_WIDE_INT maxelt = 0;
6120
6121 domain = TYPE_DOMAIN (type);
6122 const_bounds_p = (TYPE_MIN_VALUE (domain)
6123 && TYPE_MAX_VALUE (domain)
6124 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6125 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6126
6127 /* If we have constant bounds for the range of the type, get them. */
6128 if (const_bounds_p)
6129 {
6130 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6131 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6132 }
6133
6134 /* If the constructor has fewer elements than the array, clear
6135 the whole array first. Similarly if this is a static
6136 constructor of a non-BLKmode object. */
6137 if (cleared)
6138 need_to_clear = 0;
6139 else if (REG_P (target) && TREE_STATIC (exp))
6140 need_to_clear = 1;
6141 else
6142 {
6143 unsigned HOST_WIDE_INT idx;
6144 tree index, value;
6145 HOST_WIDE_INT count = 0, zero_count = 0;
6146 need_to_clear = ! const_bounds_p;
6147
6148 /* This loop is a more accurate version of the loop in
6149 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6150 is also needed to check for missing elements. */
6151 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6152 {
6153 HOST_WIDE_INT this_node_count;
6154
6155 if (need_to_clear)
6156 break;
6157
6158 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6159 {
6160 tree lo_index = TREE_OPERAND (index, 0);
6161 tree hi_index = TREE_OPERAND (index, 1);
6162
6163 if (! tree_fits_uhwi_p (lo_index)
6164 || ! tree_fits_uhwi_p (hi_index))
6165 {
6166 need_to_clear = 1;
6167 break;
6168 }
6169
6170 this_node_count = (tree_to_uhwi (hi_index)
6171 - tree_to_uhwi (lo_index) + 1);
6172 }
6173 else
6174 this_node_count = 1;
6175
6176 count += this_node_count;
6177 if (mostly_zeros_p (value))
6178 zero_count += this_node_count;
6179 }
6180
6181 /* Clear the entire array first if there are any missing
6182 elements, or if the incidence of zero elements is >=
6183 75%. */
6184 if (! need_to_clear
6185 && (count < maxelt - minelt + 1
6186 || 4 * zero_count >= 3 * count))
6187 need_to_clear = 1;
6188 }
6189
6190 if (need_to_clear && size > 0)
6191 {
6192 if (REG_P (target))
6193 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6194 else
6195 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6196 cleared = 1;
6197 }
6198
6199 if (!cleared && REG_P (target))
6200 /* Inform later passes that the old value is dead. */
6201 emit_clobber (target);
6202
6203 /* Store each element of the constructor into the
6204 corresponding element of TARGET, determined by counting the
6205 elements. */
6206 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6207 {
6208 machine_mode mode;
6209 HOST_WIDE_INT bitsize;
6210 HOST_WIDE_INT bitpos;
6211 rtx xtarget = target;
6212
6213 if (cleared && initializer_zerop (value))
6214 continue;
6215
6216 mode = TYPE_MODE (elttype);
6217 if (mode == BLKmode)
6218 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6219 ? tree_to_uhwi (TYPE_SIZE (elttype))
6220 : -1);
6221 else
6222 bitsize = GET_MODE_BITSIZE (mode);
6223
6224 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6225 {
6226 tree lo_index = TREE_OPERAND (index, 0);
6227 tree hi_index = TREE_OPERAND (index, 1);
6228 rtx index_r, pos_rtx;
6229 HOST_WIDE_INT lo, hi, count;
6230 tree position;
6231
6232 /* If the range is constant and "small", unroll the loop. */
6233 if (const_bounds_p
6234 && tree_fits_shwi_p (lo_index)
6235 && tree_fits_shwi_p (hi_index)
6236 && (lo = tree_to_shwi (lo_index),
6237 hi = tree_to_shwi (hi_index),
6238 count = hi - lo + 1,
6239 (!MEM_P (target)
6240 || count <= 2
6241 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6242 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6243 <= 40 * 8)))))
6244 {
6245 lo -= minelt; hi -= minelt;
6246 for (; lo <= hi; lo++)
6247 {
6248 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6249
6250 if (MEM_P (target)
6251 && !MEM_KEEP_ALIAS_SET_P (target)
6252 && TREE_CODE (type) == ARRAY_TYPE
6253 && TYPE_NONALIASED_COMPONENT (type))
6254 {
6255 target = copy_rtx (target);
6256 MEM_KEEP_ALIAS_SET_P (target) = 1;
6257 }
6258
6259 store_constructor_field
6260 (target, bitsize, bitpos, mode, value, cleared,
6261 get_alias_set (elttype));
6262 }
6263 }
6264 else
6265 {
6266 rtx_code_label *loop_start = gen_label_rtx ();
6267 rtx_code_label *loop_end = gen_label_rtx ();
6268 tree exit_cond;
6269
6270 expand_normal (hi_index);
6271
6272 index = build_decl (EXPR_LOCATION (exp),
6273 VAR_DECL, NULL_TREE, domain);
6274 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6275 SET_DECL_RTL (index, index_r);
6276 store_expr (lo_index, index_r, 0, false);
6277
6278 /* Build the head of the loop. */
6279 do_pending_stack_adjust ();
6280 emit_label (loop_start);
6281
6282 /* Assign value to element index. */
6283 position =
6284 fold_convert (ssizetype,
6285 fold_build2 (MINUS_EXPR,
6286 TREE_TYPE (index),
6287 index,
6288 TYPE_MIN_VALUE (domain)));
6289
6290 position =
6291 size_binop (MULT_EXPR, position,
6292 fold_convert (ssizetype,
6293 TYPE_SIZE_UNIT (elttype)));
6294
6295 pos_rtx = expand_normal (position);
6296 xtarget = offset_address (target, pos_rtx,
6297 highest_pow2_factor (position));
6298 xtarget = adjust_address (xtarget, mode, 0);
6299 if (TREE_CODE (value) == CONSTRUCTOR)
6300 store_constructor (value, xtarget, cleared,
6301 bitsize / BITS_PER_UNIT);
6302 else
6303 store_expr (value, xtarget, 0, false);
6304
6305 /* Generate a conditional jump to exit the loop. */
6306 exit_cond = build2 (LT_EXPR, integer_type_node,
6307 index, hi_index);
6308 jumpif (exit_cond, loop_end, -1);
6309
6310 /* Update the loop counter, and jump to the head of
6311 the loop. */
6312 expand_assignment (index,
6313 build2 (PLUS_EXPR, TREE_TYPE (index),
6314 index, integer_one_node),
6315 false);
6316
6317 emit_jump (loop_start);
6318
6319 /* Build the end of the loop. */
6320 emit_label (loop_end);
6321 }
6322 }
6323 else if ((index != 0 && ! tree_fits_shwi_p (index))
6324 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6325 {
6326 tree position;
6327
6328 if (index == 0)
6329 index = ssize_int (1);
6330
6331 if (minelt)
6332 index = fold_convert (ssizetype,
6333 fold_build2 (MINUS_EXPR,
6334 TREE_TYPE (index),
6335 index,
6336 TYPE_MIN_VALUE (domain)));
6337
6338 position =
6339 size_binop (MULT_EXPR, index,
6340 fold_convert (ssizetype,
6341 TYPE_SIZE_UNIT (elttype)));
6342 xtarget = offset_address (target,
6343 expand_normal (position),
6344 highest_pow2_factor (position));
6345 xtarget = adjust_address (xtarget, mode, 0);
6346 store_expr (value, xtarget, 0, false);
6347 }
6348 else
6349 {
6350 if (index != 0)
6351 bitpos = ((tree_to_shwi (index) - minelt)
6352 * tree_to_uhwi (TYPE_SIZE (elttype)));
6353 else
6354 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6355
6356 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6357 && TREE_CODE (type) == ARRAY_TYPE
6358 && TYPE_NONALIASED_COMPONENT (type))
6359 {
6360 target = copy_rtx (target);
6361 MEM_KEEP_ALIAS_SET_P (target) = 1;
6362 }
6363 store_constructor_field (target, bitsize, bitpos, mode, value,
6364 cleared, get_alias_set (elttype));
6365 }
6366 }
6367 break;
6368 }
6369
6370 case VECTOR_TYPE:
6371 {
6372 unsigned HOST_WIDE_INT idx;
6373 constructor_elt *ce;
6374 int i;
6375 int need_to_clear;
6376 int icode = CODE_FOR_nothing;
6377 tree elttype = TREE_TYPE (type);
6378 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6379 machine_mode eltmode = TYPE_MODE (elttype);
6380 HOST_WIDE_INT bitsize;
6381 HOST_WIDE_INT bitpos;
6382 rtvec vector = NULL;
6383 unsigned n_elts;
6384 alias_set_type alias;
6385
6386 gcc_assert (eltmode != BLKmode);
6387
6388 n_elts = TYPE_VECTOR_SUBPARTS (type);
6389 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6390 {
6391 machine_mode mode = GET_MODE (target);
6392
6393 icode = (int) optab_handler (vec_init_optab, mode);
6394 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6395 if (icode != CODE_FOR_nothing)
6396 {
6397 tree value;
6398
6399 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6400 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6401 {
6402 icode = CODE_FOR_nothing;
6403 break;
6404 }
6405 }
6406 if (icode != CODE_FOR_nothing)
6407 {
6408 unsigned int i;
6409
6410 vector = rtvec_alloc (n_elts);
6411 for (i = 0; i < n_elts; i++)
6412 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6413 }
6414 }
6415
6416 /* If the constructor has fewer elements than the vector,
6417 clear the whole vector first. Similarly if this is a static
6418 constructor of a non-BLKmode object. */
6419 if (cleared)
6420 need_to_clear = 0;
6421 else if (REG_P (target) && TREE_STATIC (exp))
6422 need_to_clear = 1;
6423 else
6424 {
6425 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6426 tree value;
6427
6428 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6429 {
6430 int n_elts_here = tree_to_uhwi
6431 (int_const_binop (TRUNC_DIV_EXPR,
6432 TYPE_SIZE (TREE_TYPE (value)),
6433 TYPE_SIZE (elttype)));
6434
6435 count += n_elts_here;
6436 if (mostly_zeros_p (value))
6437 zero_count += n_elts_here;
6438 }
6439
6440 /* Clear the entire vector first if there are any missing elements,
6441 or if the incidence of zero elements is >= 75%. */
6442 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6443 }
6444
6445 if (need_to_clear && size > 0 && !vector)
6446 {
6447 if (REG_P (target))
6448 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6449 else
6450 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6451 cleared = 1;
6452 }
6453
6454 /* Inform later passes that the old value is dead. */
6455 if (!cleared && !vector && REG_P (target))
6456 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6457
6458 if (MEM_P (target))
6459 alias = MEM_ALIAS_SET (target);
6460 else
6461 alias = get_alias_set (elttype);
6462
6463 /* Store each element of the constructor into the corresponding
6464 element of TARGET, determined by counting the elements. */
6465 for (idx = 0, i = 0;
6466 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6467 idx++, i += bitsize / elt_size)
6468 {
6469 HOST_WIDE_INT eltpos;
6470 tree value = ce->value;
6471
6472 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6473 if (cleared && initializer_zerop (value))
6474 continue;
6475
6476 if (ce->index)
6477 eltpos = tree_to_uhwi (ce->index);
6478 else
6479 eltpos = i;
6480
6481 if (vector)
6482 {
6483 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6484 elements. */
6485 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6486 RTVEC_ELT (vector, eltpos)
6487 = expand_normal (value);
6488 }
6489 else
6490 {
6491 machine_mode value_mode =
6492 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6493 ? TYPE_MODE (TREE_TYPE (value))
6494 : eltmode;
6495 bitpos = eltpos * elt_size;
6496 store_constructor_field (target, bitsize, bitpos, value_mode,
6497 value, cleared, alias);
6498 }
6499 }
6500
6501 if (vector)
6502 emit_insn (GEN_FCN (icode)
6503 (target,
6504 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6505 break;
6506 }
6507
6508 default:
6509 gcc_unreachable ();
6510 }
6511 }
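/* A concrete example of the RANGE_EXPR handling in the ARRAY_TYPE case
   above, using a GNU C designated range initializer (the array is
   hypothetical; 32-bit int assumed):

       int a[100] = { [3 ... 9] = 7 };

   The range covers 7 elements, i.e. 7 * 32 = 224 bits, which is within
   the 40 * 8 bit unrolling limit, so the stores are emitted one by one
   through store_constructor_field; a much wider range is instead
   initialized by the generated loop between loop_start and loop_end.  */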
6512
6513 /* Store the value of EXP (an expression tree)
6514 into a subfield of TARGET which has mode MODE and occupies
6515 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6516 If MODE is VOIDmode, it means that we are storing into a bit-field.
6517
6518 BITREGION_START is bitpos of the first bitfield in this region.
6519 BITREGION_END is the bitpos of the ending bitfield in this region.
6520 These two fields are 0, if the C++ memory model does not apply,
6521 or we are not interested in keeping track of bitfield regions.
6522
6523 Always return const0_rtx unless we have something particular to
6524 return.
6525
6526 ALIAS_SET is the alias set for the destination. This value will
6527 (in general) be different from that for TARGET, since TARGET is a
6528 reference to the containing structure.
6529
6530 If NONTEMPORAL is true, try generating a nontemporal store. */
6531
6532 static rtx
6533 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6534 unsigned HOST_WIDE_INT bitregion_start,
6535 unsigned HOST_WIDE_INT bitregion_end,
6536 machine_mode mode, tree exp,
6537 alias_set_type alias_set, bool nontemporal)
6538 {
6539 if (TREE_CODE (exp) == ERROR_MARK)
6540 return const0_rtx;
6541
6542 /* If we have nothing to store, do nothing unless the expression has
6543 side-effects. */
6544 if (bitsize == 0)
6545 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6546
6547 if (GET_CODE (target) == CONCAT)
6548 {
6549 /* We're storing into a struct containing a single __complex. */
6550
6551 gcc_assert (!bitpos);
6552 return store_expr (exp, target, 0, nontemporal);
6553 }
6554
6555 /* If the structure is in a register or if the component
6556 is a bit field, we cannot use addressing to access it.
6557 Use bit-field techniques or SUBREG to store in it. */
6558
6559 if (mode == VOIDmode
6560 || (mode != BLKmode && ! direct_store[(int) mode]
6561 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6562 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6563 || REG_P (target)
6564 || GET_CODE (target) == SUBREG
6565 /* If the field isn't aligned enough to store as an ordinary memref,
6566 store it as a bit field. */
6567 || (mode != BLKmode
6568 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6569 || bitpos % GET_MODE_ALIGNMENT (mode))
6570 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6571 || (bitpos % BITS_PER_UNIT != 0)))
6572 || (bitsize >= 0 && mode != BLKmode
6573 && GET_MODE_BITSIZE (mode) > bitsize)
6574 /* If the RHS and field are a constant size and the size of the
6575 RHS isn't the same size as the bitfield, we must use bitfield
6576 operations. */
6577 || (bitsize >= 0
6578 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6579 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6580 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6581 decl we must use bitfield operations. */
6582 || (bitsize >= 0
6583 && TREE_CODE (exp) == MEM_REF
6584 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6585 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6586 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6587 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6588 {
6589 rtx temp;
6590 gimple nop_def;
6591
6592 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6593 implies a mask operation. If the precision is the same size as
6594 the field we're storing into, that mask is redundant. This is
6595 particularly common with bit field assignments generated by the
6596 C front end. */
6597 nop_def = get_def_for_expr (exp, NOP_EXPR);
6598 if (nop_def)
6599 {
6600 tree type = TREE_TYPE (exp);
6601 if (INTEGRAL_TYPE_P (type)
6602 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6603 && bitsize == TYPE_PRECISION (type))
6604 {
6605 tree op = gimple_assign_rhs1 (nop_def);
6606 type = TREE_TYPE (op);
6607 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6608 exp = op;
6609 }
6610 }
6611
6612 temp = expand_normal (exp);
6613
6614 /* If BITSIZE is narrower than the size of the type of EXP
6615 we will be narrowing TEMP. Normally, what's wanted are the
6616 low-order bits. However, if EXP's type is a record and this is a
6617 big-endian machine, we want the upper BITSIZE bits. */
6618 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6619 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6620 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6621 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6622 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6623 NULL_RTX, 1);
6624
6625 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6626 if (mode != VOIDmode && mode != BLKmode
6627 && mode != TYPE_MODE (TREE_TYPE (exp)))
6628 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6629
6630 /* If the modes of TEMP and TARGET are both BLKmode, both
6631 must be in memory and BITPOS must be aligned on a byte
6632 boundary. If so, we simply do a block copy. Likewise
6633 for a BLKmode-like TARGET. */
6634 if (GET_MODE (temp) == BLKmode
6635 && (GET_MODE (target) == BLKmode
6636 || (MEM_P (target)
6637 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6638 && (bitpos % BITS_PER_UNIT) == 0
6639 && (bitsize % BITS_PER_UNIT) == 0)))
6640 {
6641 gcc_assert (MEM_P (target) && MEM_P (temp)
6642 && (bitpos % BITS_PER_UNIT) == 0);
6643
6644 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6645 emit_block_move (target, temp,
6646 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6647 / BITS_PER_UNIT),
6648 BLOCK_OP_NORMAL);
6649
6650 return const0_rtx;
6651 }
6652
6653 /* Handle calls that return values in multiple non-contiguous locations.
6654 The Irix 6 ABI has examples of this. */
6655 if (GET_CODE (temp) == PARALLEL)
6656 {
6657 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6658 rtx temp_target;
6659 if (mode == BLKmode || mode == VOIDmode)
6660 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6661 temp_target = gen_reg_rtx (mode);
6662 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6663 temp = temp_target;
6664 }
6665 else if (mode == BLKmode)
6666 {
6667 /* Handle calls that return BLKmode values in registers. */
6668 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6669 {
6670 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6671 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6672 temp = temp_target;
6673 }
6674 else
6675 {
6676 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6677 rtx temp_target;
6678 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6679 temp_target = gen_reg_rtx (mode);
6680 temp_target
6681 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6682 temp_target, mode, mode);
6683 temp = temp_target;
6684 }
6685 }
6686
6687 /* Store the value in the bitfield. */
6688 store_bit_field (target, bitsize, bitpos,
6689 bitregion_start, bitregion_end,
6690 mode, temp);
6691
6692 return const0_rtx;
6693 }
6694 else
6695 {
6696 /* Now build a reference to just the desired component. */
6697 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6698
6699 if (to_rtx == target)
6700 to_rtx = copy_rtx (to_rtx);
6701
6702 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6703 set_mem_alias_set (to_rtx, alias_set);
6704
6705 return store_expr (exp, to_rtx, 0, nontemporal);
6706 }
6707 }
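/* For example (hypothetical C code):

       struct flags { unsigned a : 3; unsigned b : 5; } f;
       f.b = v;

   a store to the bit-field B typically reaches this function with
   MODE == VOIDmode, so it goes through the store_bit_field path above;
   a store to an ordinary aligned, byte-sized component instead takes
   the adjust_address / store_expr path at the end of the function.  */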
6708 \f
6709 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6710 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6711 codes and find the ultimate containing object, which we return.
6712
6713 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6714 bit position, and *PUNSIGNEDP to the signedness of the field.
6715 If the position of the field is variable, we store a tree
6716 giving the variable offset (in units) in *POFFSET.
6717 This offset is in addition to the bit position.
6718 If the position is not variable, we store 0 in *POFFSET.
6719
6720 If any of the extraction expressions is volatile,
6721 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6722
6723 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6724 Otherwise, it is a mode that can be used to access the field.
6725
6726 If the field describes a variable-sized object, *PMODE is set to
6727 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6728 this case, but the address of the object can be found.
6729
6730 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6731 look through nodes that serve as markers of a greater alignment than
6732 the one that can be deduced from the expression. These nodes make it
6733 possible for front-ends to prevent temporaries from being created by
6734 the middle-end on alignment considerations. For that purpose, the
6735 normal operating mode at high-level is to always pass FALSE so that
6736 the ultimate containing object is really returned; moreover, the
6737 associated predicate handled_component_p will always return TRUE
6738 on these nodes, thus indicating that they are essentially handled
6739 by get_inner_reference. TRUE should only be passed when the caller
6740 is scanning the expression in order to build another representation
6741 and specifically knows how to handle these nodes; as such, this is
6742 the normal operating mode in the RTL expanders. */
6743
6744 tree
6745 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6746 HOST_WIDE_INT *pbitpos, tree *poffset,
6747 machine_mode *pmode, int *punsignedp,
6748 int *pvolatilep, bool keep_aligning)
6749 {
6750 tree size_tree = 0;
6751 machine_mode mode = VOIDmode;
6752 bool blkmode_bitfield = false;
6753 tree offset = size_zero_node;
6754 offset_int bit_offset = 0;
6755
6756 /* First get the mode, signedness, and size. We do this from just the
6757 outermost expression. */
6758 *pbitsize = -1;
6759 if (TREE_CODE (exp) == COMPONENT_REF)
6760 {
6761 tree field = TREE_OPERAND (exp, 1);
6762 size_tree = DECL_SIZE (field);
6763 if (flag_strict_volatile_bitfields > 0
6764 && TREE_THIS_VOLATILE (exp)
6765 && DECL_BIT_FIELD_TYPE (field)
6766 && DECL_MODE (field) != BLKmode)
6767 /* Volatile bitfields should be accessed in the mode of the
6768 field's type, not the mode computed based on the bit
6769 size. */
6770 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6771 else if (!DECL_BIT_FIELD (field))
6772 mode = DECL_MODE (field);
6773 else if (DECL_MODE (field) == BLKmode)
6774 blkmode_bitfield = true;
6775
6776 *punsignedp = DECL_UNSIGNED (field);
6777 }
6778 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6779 {
6780 size_tree = TREE_OPERAND (exp, 1);
6781 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6782 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6783
6784 /* For vector types, with the correct size of access, use the mode of
6785 the inner type. */
6786 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6787 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6788 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6789 mode = TYPE_MODE (TREE_TYPE (exp));
6790 }
6791 else
6792 {
6793 mode = TYPE_MODE (TREE_TYPE (exp));
6794 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6795
6796 if (mode == BLKmode)
6797 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6798 else
6799 *pbitsize = GET_MODE_BITSIZE (mode);
6800 }
6801
6802 if (size_tree != 0)
6803 {
6804 if (! tree_fits_uhwi_p (size_tree))
6805 mode = BLKmode, *pbitsize = -1;
6806 else
6807 *pbitsize = tree_to_uhwi (size_tree);
6808 }
6809
6810 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6811 and find the ultimate containing object. */
6812 while (1)
6813 {
6814 switch (TREE_CODE (exp))
6815 {
6816 case BIT_FIELD_REF:
6817 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6818 break;
6819
6820 case COMPONENT_REF:
6821 {
6822 tree field = TREE_OPERAND (exp, 1);
6823 tree this_offset = component_ref_field_offset (exp);
6824
6825 /* If this field hasn't been filled in yet, don't go past it.
6826 This should only happen when folding expressions made during
6827 type construction. */
6828 if (this_offset == 0)
6829 break;
6830
6831 offset = size_binop (PLUS_EXPR, offset, this_offset);
6832 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6833
6834 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6835 }
6836 break;
6837
6838 case ARRAY_REF:
6839 case ARRAY_RANGE_REF:
6840 {
6841 tree index = TREE_OPERAND (exp, 1);
6842 tree low_bound = array_ref_low_bound (exp);
6843 tree unit_size = array_ref_element_size (exp);
6844
6845 /* We assume all arrays have sizes that are a multiple of a byte.
6846 First subtract the lower bound, if any, in the type of the
6847 index, then convert to sizetype and multiply by the size of
6848 the array element. */
6849 if (! integer_zerop (low_bound))
6850 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6851 index, low_bound);
6852
6853 offset = size_binop (PLUS_EXPR, offset,
6854 size_binop (MULT_EXPR,
6855 fold_convert (sizetype, index),
6856 unit_size));
6857 }
6858 break;
6859
6860 case REALPART_EXPR:
6861 break;
6862
6863 case IMAGPART_EXPR:
6864 bit_offset += *pbitsize;
6865 break;
6866
6867 case VIEW_CONVERT_EXPR:
6868 if (keep_aligning && STRICT_ALIGNMENT
6869 && (TYPE_ALIGN (TREE_TYPE (exp))
6870 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6871 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6872 < BIGGEST_ALIGNMENT)
6873 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6874 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6875 goto done;
6876 break;
6877
6878 case MEM_REF:
6879 /* Hand back the decl for MEM[&decl, off]. */
6880 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6881 {
6882 tree off = TREE_OPERAND (exp, 1);
6883 if (!integer_zerop (off))
6884 {
6885 offset_int boff, coff = mem_ref_offset (exp);
6886 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6887 bit_offset += boff;
6888 }
6889 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6890 }
6891 goto done;
6892
6893 default:
6894 goto done;
6895 }
6896
6897 /* If any reference in the chain is volatile, the effect is volatile. */
6898 if (TREE_THIS_VOLATILE (exp))
6899 *pvolatilep = 1;
6900
6901 exp = TREE_OPERAND (exp, 0);
6902 }
6903 done:
6904
6905 /* If OFFSET is constant, see if we can return the whole thing as a
6906 constant bit position. Make sure to handle overflow during
6907 this conversion. */
6908 if (TREE_CODE (offset) == INTEGER_CST)
6909 {
6910 offset_int tem = wi::sext (wi::to_offset (offset),
6911 TYPE_PRECISION (sizetype));
6912 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6913 tem += bit_offset;
6914 if (wi::fits_shwi_p (tem))
6915 {
6916 *pbitpos = tem.to_shwi ();
6917 *poffset = offset = NULL_TREE;
6918 }
6919 }
6920
6921 /* Otherwise, split it up. */
6922 if (offset)
6923 {
6924 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6925 if (wi::neg_p (bit_offset))
6926 {
6927 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6928 offset_int tem = bit_offset.and_not (mask);
6929 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6930 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6931 bit_offset -= tem;
6932 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6933 offset = size_binop (PLUS_EXPR, offset,
6934 wide_int_to_tree (sizetype, tem));
6935 }
6936
6937 *pbitpos = bit_offset.to_shwi ();
6938 *poffset = offset;
6939 }
6940
6941 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6942 if (mode == VOIDmode
6943 && blkmode_bitfield
6944 && (*pbitpos % BITS_PER_UNIT) == 0
6945 && (*pbitsize % BITS_PER_UNIT) == 0)
6946 *pmode = BLKmode;
6947 else
6948 *pmode = mode;
6949
6950 return exp;
6951 }
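/* A short example of the decomposition performed above (hypothetical
   declarations; natural layout with 32-bit int assumed):

       struct s { char c; int f; } x;

   For the reference x.f this returns the VAR_DECL for X with
   *PBITSIZE == 32, *PBITPOS == 32, *POFFSET == NULL_TREE and a
   non-BLKmode *PMODE.  For an access like y.a[i] with a variable index
   I, the variable part of the displacement ends up in *POFFSET (in
   units) and only the constant remainder in *PBITPOS.  */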
6952
6953 /* Return a tree of sizetype representing the size, in bytes, of the element
6954 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6955
6956 tree
6957 array_ref_element_size (tree exp)
6958 {
6959 tree aligned_size = TREE_OPERAND (exp, 3);
6960 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6961 location_t loc = EXPR_LOCATION (exp);
6962
6963 /* If a size was specified in the ARRAY_REF, it's the size measured
6964 in alignment units of the element type. So multiply by that value. */
6965 if (aligned_size)
6966 {
6967 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6968 sizetype from another type of the same width and signedness. */
6969 if (TREE_TYPE (aligned_size) != sizetype)
6970 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6971 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6972 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6973 }
6974
6975 /* Otherwise, take the size from that of the element type. Substitute
6976 any PLACEHOLDER_EXPR that we have. */
6977 else
6978 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6979 }
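/* For instance, if operand 3 of the ARRAY_REF is 3 and the element type
   has a 4-byte alignment, the element size computed here is 3 * 4 = 12
   bytes; without operand 3 the size comes straight from TYPE_SIZE_UNIT
   of the element type (with any PLACEHOLDER_EXPR substituted).  */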
6980
6981 /* Return a tree representing the lower bound of the array mentioned in
6982 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6983
6984 tree
6985 array_ref_low_bound (tree exp)
6986 {
6987 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6988
6989 /* If a lower bound is specified in EXP, use it. */
6990 if (TREE_OPERAND (exp, 2))
6991 return TREE_OPERAND (exp, 2);
6992
6993 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6994 substituting for a PLACEHOLDER_EXPR as needed. */
6995 if (domain_type && TYPE_MIN_VALUE (domain_type))
6996 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6997
6998 /* Otherwise, return a zero of the appropriate type. */
6999 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
7000 }
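/* For a C array such as "int a[10]" the lower bound returned here is 0;
   languages with arbitrary index ranges (e.g. an Ada array indexed
   5 .. 10) carry the nonzero lower bound in TYPE_MIN_VALUE of the
   domain, and that is the value get_inner_reference subtracts from the
   index before scaling by the element size.  */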
7001
7002 /* Returns true if REF is an array reference to an array at the end of
7003 a structure. If this is the case, the array may be allocated larger
7004 than its upper bound implies. */
7005
7006 bool
7007 array_at_struct_end_p (tree ref)
7008 {
7009 if (TREE_CODE (ref) != ARRAY_REF
7010 && TREE_CODE (ref) != ARRAY_RANGE_REF)
7011 return false;
7012
7013 while (handled_component_p (ref))
7014 {
7015 /* If the reference chain contains a component reference to a
7016 non-union type and another field follows it, the reference
7017 is not at the end of a structure. */
7018 if (TREE_CODE (ref) == COMPONENT_REF
7019 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
7020 {
7021 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
7022 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
7023 nextf = DECL_CHAIN (nextf);
7024 if (nextf)
7025 return false;
7026 }
7027
7028 ref = TREE_OPERAND (ref, 0);
7029 }
7030
7031 /* If the reference is based on a declared entity, the size of the array
7032 is constrained by its given domain. */
7033 if (DECL_P (ref))
7034 return false;
7035
7036 return true;
7037 }
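/* Example (hypothetical structs):

       struct log  { unsigned int len; char data[];  } *p;
       struct log2 { unsigned int len; char data[8]; } l;

   The reference p->data[i] is based on a MEM_REF rather than on a
   declared object, so the walk above stops there and the function
   returns true: the trailing array may have been allocated larger than
   its nominal bound.  The reference l.data[i] walks down to the
   VAR_DECL L and the function returns false, since the declared
   object's size constrains the array.  */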
7038
7039 /* Return a tree representing the upper bound of the array mentioned in
7040 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
7041
7042 tree
7043 array_ref_up_bound (tree exp)
7044 {
7045 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7046
7047 /* If there is a domain type and it has an upper bound, use it, substituting
7048 for a PLACEHOLDER_EXPR as needed. */
7049 if (domain_type && TYPE_MAX_VALUE (domain_type))
7050 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
7051
7052 /* Otherwise fail. */
7053 return NULL_TREE;
7054 }
7055
7056 /* Return a tree representing the offset, in bytes, of the field referenced
7057 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
7058
7059 tree
7060 component_ref_field_offset (tree exp)
7061 {
7062 tree aligned_offset = TREE_OPERAND (exp, 2);
7063 tree field = TREE_OPERAND (exp, 1);
7064 location_t loc = EXPR_LOCATION (exp);
7065
7066 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
7067 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
7068 value. */
7069 if (aligned_offset)
7070 {
7071 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
7072 sizetype from another type of the same width and signedness. */
7073 if (TREE_TYPE (aligned_offset) != sizetype)
7074 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
7075 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7076 size_int (DECL_OFFSET_ALIGN (field)
7077 / BITS_PER_UNIT));
7078 }
7079
7080 /* Otherwise, take the offset from that of the field. Substitute
7081 any PLACEHOLDER_EXPR that we have. */
7082 else
7083 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7084 }
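/* Sketch of the arithmetic above (values assumed for illustration):
   if operand 2 of the COMPONENT_REF is 3 and the field's
   DECL_OFFSET_ALIGN is 64 bits, the returned offset is
   3 * (64 / 8) = 24 bytes.  Without operand 2, DECL_FIELD_OFFSET of
   the field is used directly.  */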
7085
7086 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7087
7088 static unsigned HOST_WIDE_INT
7089 target_align (const_tree target)
7090 {
7091 /* We might have a chain of nested references with intermediate misaligning
7092 bit-field components, so we need to recurse to find out. */
7093
7094 unsigned HOST_WIDE_INT this_align, outer_align;
7095
7096 switch (TREE_CODE (target))
7097 {
7098 case BIT_FIELD_REF:
7099 return 1;
7100
7101 case COMPONENT_REF:
7102 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7103 outer_align = target_align (TREE_OPERAND (target, 0));
7104 return MIN (this_align, outer_align);
7105
7106 case ARRAY_REF:
7107 case ARRAY_RANGE_REF:
7108 this_align = TYPE_ALIGN (TREE_TYPE (target));
7109 outer_align = target_align (TREE_OPERAND (target, 0));
7110 return MIN (this_align, outer_align);
7111
7112 CASE_CONVERT:
7113 case NON_LVALUE_EXPR:
7114 case VIEW_CONVERT_EXPR:
7115 this_align = TYPE_ALIGN (TREE_TYPE (target));
7116 outer_align = target_align (TREE_OPERAND (target, 0));
7117 return MAX (this_align, outer_align);
7118
7119 default:
7120 return TYPE_ALIGN (TREE_TYPE (target));
7121 }
7122 }
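/* Example of the recursion above (hypothetical layout): a target that
   is a BIT_FIELD_REF yields 1, i.e. no usable alignment; for an
   ARRAY_REF such as "s.a[i]" the result is the minimum of the element
   type's alignment and the alignment computed for "s.a", which in turn
   is the minimum of DECL_ALIGN of field A and the alignment of S.  */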
7123
7124 \f
7125 /* Given an rtx VALUE that may contain additions and multiplications, return
7126 an equivalent value that just refers to a register, memory, or constant.
7127 This is done by generating instructions to perform the arithmetic and
7128 returning a pseudo-register containing the value.
7129
7130 The returned value may be a REG, SUBREG, MEM or constant. */
7131
7132 rtx
7133 force_operand (rtx value, rtx target)
7134 {
7135 rtx op1, op2;
7136 /* Use subtarget as the target for operand 0 of a binary operation. */
7137 rtx subtarget = get_subtarget (target);
7138 enum rtx_code code = GET_CODE (value);
7139
7140 /* Check for a subreg applied to an expression produced by the loop optimizer. */
7141 if (code == SUBREG
7142 && !REG_P (SUBREG_REG (value))
7143 && !MEM_P (SUBREG_REG (value)))
7144 {
7145 value
7146 = simplify_gen_subreg (GET_MODE (value),
7147 force_reg (GET_MODE (SUBREG_REG (value)),
7148 force_operand (SUBREG_REG (value),
7149 NULL_RTX)),
7150 GET_MODE (SUBREG_REG (value)),
7151 SUBREG_BYTE (value));
7152 code = GET_CODE (value);
7153 }
7154
7155 /* Check for a PIC address load. */
7156 if ((code == PLUS || code == MINUS)
7157 && XEXP (value, 0) == pic_offset_table_rtx
7158 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7159 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7160 || GET_CODE (XEXP (value, 1)) == CONST))
7161 {
7162 if (!subtarget)
7163 subtarget = gen_reg_rtx (GET_MODE (value));
7164 emit_move_insn (subtarget, value);
7165 return subtarget;
7166 }
7167
7168 if (ARITHMETIC_P (value))
7169 {
7170 op2 = XEXP (value, 1);
7171 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7172 subtarget = 0;
7173 if (code == MINUS && CONST_INT_P (op2))
7174 {
7175 code = PLUS;
7176 op2 = negate_rtx (GET_MODE (value), op2);
7177 }
7178
7179 /* Check for an addition with OP2 a constant integer and our first
7180 operand a PLUS of a virtual register and something else. In that
7181 case, we want to emit the sum of the virtual register and the
7182 constant first and then add the other value. This allows virtual
7183 register instantiation to simply modify the constant rather than
7184 creating another one around this addition. */
7185 if (code == PLUS && CONST_INT_P (op2)
7186 && GET_CODE (XEXP (value, 0)) == PLUS
7187 && REG_P (XEXP (XEXP (value, 0), 0))
7188 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7189 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7190 {
7191 rtx temp = expand_simple_binop (GET_MODE (value), code,
7192 XEXP (XEXP (value, 0), 0), op2,
7193 subtarget, 0, OPTAB_LIB_WIDEN);
7194 return expand_simple_binop (GET_MODE (value), code, temp,
7195 force_operand (XEXP (XEXP (value,
7196 0), 1), 0),
7197 target, 0, OPTAB_LIB_WIDEN);
7198 }
7199
7200 op1 = force_operand (XEXP (value, 0), subtarget);
7201 op2 = force_operand (op2, NULL_RTX);
7202 switch (code)
7203 {
7204 case MULT:
7205 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7206 case DIV:
7207 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7208 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7209 target, 1, OPTAB_LIB_WIDEN);
7210 else
7211 return expand_divmod (0,
7212 FLOAT_MODE_P (GET_MODE (value))
7213 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7214 GET_MODE (value), op1, op2, target, 0);
7215 case MOD:
7216 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7217 target, 0);
7218 case UDIV:
7219 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7220 target, 1);
7221 case UMOD:
7222 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7223 target, 1);
7224 case ASHIFTRT:
7225 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7226 target, 0, OPTAB_LIB_WIDEN);
7227 default:
7228 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7229 target, 1, OPTAB_LIB_WIDEN);
7230 }
7231 }
7232 if (UNARY_P (value))
7233 {
7234 if (!target)
7235 target = gen_reg_rtx (GET_MODE (value));
7236 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7237 switch (code)
7238 {
7239 case ZERO_EXTEND:
7240 case SIGN_EXTEND:
7241 case TRUNCATE:
7242 case FLOAT_EXTEND:
7243 case FLOAT_TRUNCATE:
7244 convert_move (target, op1, code == ZERO_EXTEND);
7245 return target;
7246
7247 case FIX:
7248 case UNSIGNED_FIX:
7249 expand_fix (target, op1, code == UNSIGNED_FIX);
7250 return target;
7251
7252 case FLOAT:
7253 case UNSIGNED_FLOAT:
7254 expand_float (target, op1, code == UNSIGNED_FLOAT);
7255 return target;
7256
7257 default:
7258 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7259 }
7260 }
7261
7262 #ifdef INSN_SCHEDULING
7263 /* On machines that have insn scheduling, we want all memory references to be
7264 explicit, so we need to deal with such paradoxical SUBREGs. */
7265 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7266 value
7267 = simplify_gen_subreg (GET_MODE (value),
7268 force_reg (GET_MODE (SUBREG_REG (value)),
7269 force_operand (SUBREG_REG (value),
7270 NULL_RTX)),
7271 GET_MODE (SUBREG_REG (value)),
7272 SUBREG_BYTE (value));
7273 #endif
7274
7275 return value;
7276 }
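/* Illustrative effect (register numbers invented): for the rtx
   (plus:SI (plus:SI (reg:SI virtual-stack-vars) (reg:SI 100))
            (const_int 16))
   the virtual-register path above first forms "r101 = virtual-stack-vars
   + 16" and then adds r100 to r101, so that virtual register
   instantiation later only has to adjust the constant 16.  */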
7277 \f
7278 /* Subroutine of expand_expr: return nonzero iff there is no way that
7279 EXP can reference X, which is being modified. TOP_P is nonzero if this
7280 call is going to be used to determine whether we need a temporary
7281 for EXP, as opposed to a recursive call to this function.
7282
7283 It is always safe for this routine to return zero since it merely
7284 searches for optimization opportunities. */
7285
7286 int
7287 safe_from_p (const_rtx x, tree exp, int top_p)
7288 {
7289 rtx exp_rtl = 0;
7290 int i, nops;
7291
7292 if (x == 0
7293 /* If EXP has varying size, we MUST use a target since we currently
7294 have no way of allocating temporaries of variable size
7295 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7296 So we assume here that something at a higher level has prevented a
7297 clash. This is somewhat bogus, but the best we can do. Only
7298 do this when X is BLKmode and when we are at the top level. */
7299 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7300 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7301 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7302 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7303 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7304 != INTEGER_CST)
7305 && GET_MODE (x) == BLKmode)
7306 /* If X is in the outgoing argument area, it is always safe. */
7307 || (MEM_P (x)
7308 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7309 || (GET_CODE (XEXP (x, 0)) == PLUS
7310 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7311 return 1;
7312
7313 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7314 find the underlying pseudo. */
7315 if (GET_CODE (x) == SUBREG)
7316 {
7317 x = SUBREG_REG (x);
7318 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7319 return 0;
7320 }
7321
7322 /* Now look at our tree code and possibly recurse. */
7323 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7324 {
7325 case tcc_declaration:
7326 exp_rtl = DECL_RTL_IF_SET (exp);
7327 break;
7328
7329 case tcc_constant:
7330 return 1;
7331
7332 case tcc_exceptional:
7333 if (TREE_CODE (exp) == TREE_LIST)
7334 {
7335 while (1)
7336 {
7337 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7338 return 0;
7339 exp = TREE_CHAIN (exp);
7340 if (!exp)
7341 return 1;
7342 if (TREE_CODE (exp) != TREE_LIST)
7343 return safe_from_p (x, exp, 0);
7344 }
7345 }
7346 else if (TREE_CODE (exp) == CONSTRUCTOR)
7347 {
7348 constructor_elt *ce;
7349 unsigned HOST_WIDE_INT idx;
7350
7351 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7352 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7353 || !safe_from_p (x, ce->value, 0))
7354 return 0;
7355 return 1;
7356 }
7357 else if (TREE_CODE (exp) == ERROR_MARK)
7358 return 1; /* An already-visited SAVE_EXPR? */
7359 else
7360 return 0;
7361
7362 case tcc_statement:
7363 /* The only case we look at here is the DECL_INITIAL inside a
7364 DECL_EXPR. */
7365 return (TREE_CODE (exp) != DECL_EXPR
7366 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7367 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7368 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7369
7370 case tcc_binary:
7371 case tcc_comparison:
7372 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7373 return 0;
7374 /* Fall through. */
7375
7376 case tcc_unary:
7377 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7378
7379 case tcc_expression:
7380 case tcc_reference:
7381 case tcc_vl_exp:
7382 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7383 the expression. If it is set, we conflict iff we are that rtx or
7384 both are in memory. Otherwise, we check all operands of the
7385 expression recursively. */
7386
7387 switch (TREE_CODE (exp))
7388 {
7389 case ADDR_EXPR:
7390 /* If the operand is static or we are static, we can't conflict.
7391 Likewise if we don't conflict with the operand at all. */
7392 if (staticp (TREE_OPERAND (exp, 0))
7393 || TREE_STATIC (exp)
7394 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7395 return 1;
7396
7397 /* Otherwise, the only way this can conflict is if we are taking
7398 the address of a DECL whose address is part of X, which is
7399 very rare. */
7400 exp = TREE_OPERAND (exp, 0);
7401 if (DECL_P (exp))
7402 {
7403 if (!DECL_RTL_SET_P (exp)
7404 || !MEM_P (DECL_RTL (exp)))
7405 return 0;
7406 else
7407 exp_rtl = XEXP (DECL_RTL (exp), 0);
7408 }
7409 break;
7410
7411 case MEM_REF:
7412 if (MEM_P (x)
7413 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7414 get_alias_set (exp)))
7415 return 0;
7416 break;
7417
7418 case CALL_EXPR:
7419 /* Assume that the call will clobber all hard registers and
7420 all of memory. */
7421 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7422 || MEM_P (x))
7423 return 0;
7424 break;
7425
7426 case WITH_CLEANUP_EXPR:
7427 case CLEANUP_POINT_EXPR:
7428 /* Lowered by gimplify.c. */
7429 gcc_unreachable ();
7430
7431 case SAVE_EXPR:
7432 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7433
7434 default:
7435 break;
7436 }
7437
7438 /* If we have an rtx, we do not need to scan our operands. */
7439 if (exp_rtl)
7440 break;
7441
7442 nops = TREE_OPERAND_LENGTH (exp);
7443 for (i = 0; i < nops; i++)
7444 if (TREE_OPERAND (exp, i) != 0
7445 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7446 return 0;
7447
7448 break;
7449
7450 case tcc_type:
7451 /* Should never get a type here. */
7452 gcc_unreachable ();
7453 }
7454
7455 /* If we have an rtl, find any enclosed object. Then see if we conflict
7456 with it. */
7457 if (exp_rtl)
7458 {
7459 if (GET_CODE (exp_rtl) == SUBREG)
7460 {
7461 exp_rtl = SUBREG_REG (exp_rtl);
7462 if (REG_P (exp_rtl)
7463 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7464 return 0;
7465 }
7466
7467 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7468 are memory and they conflict. */
7469 return ! (rtx_equal_p (x, exp_rtl)
7470 || (MEM_P (x) && MEM_P (exp_rtl)
7471 && true_dependence (exp_rtl, VOIDmode, x)));
7472 }
7473
7474 /* If we reach here, it is safe. */
7475 return 1;
7476 }
7477
7478 \f
7479 /* Return the highest power of two that EXP is known to be a multiple of.
7480 This is used in updating alignment of MEMs in array references. */
7481
7482 unsigned HOST_WIDE_INT
7483 highest_pow2_factor (const_tree exp)
7484 {
7485 unsigned HOST_WIDE_INT ret;
7486 int trailing_zeros = tree_ctz (exp);
7487 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7488 return BIGGEST_ALIGNMENT;
7489 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7490 if (ret > BIGGEST_ALIGNMENT)
7491 return BIGGEST_ALIGNMENT;
7492 return ret;
7493 }
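/* Worked example: for EXP equal to the constant 24 (binary 11000),
   tree_ctz returns 3 and the result is 1 << 3 = 8; for "i * 4" with
   unknown I it is 4.  The result is always clamped to
   BIGGEST_ALIGNMENT.  */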
7494
7495 /* Similar, except that the alignment requirements of TARGET are
7496 taken into account. Assume it is at least as aligned as its
7497 type, unless it is a COMPONENT_REF in which case the layout of
7498 the structure gives the alignment. */
7499
7500 static unsigned HOST_WIDE_INT
7501 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7502 {
7503 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7504 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7505
7506 return MAX (factor, talign);
7507 }
7508 \f
7509 #ifdef HAVE_conditional_move
7510 /* Convert the tree comparison code TCODE to the rtl one where the
7511 signedness is UNSIGNEDP. */
7512
7513 static enum rtx_code
7514 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7515 {
7516 enum rtx_code code;
7517 switch (tcode)
7518 {
7519 case EQ_EXPR:
7520 code = EQ;
7521 break;
7522 case NE_EXPR:
7523 code = NE;
7524 break;
7525 case LT_EXPR:
7526 code = unsignedp ? LTU : LT;
7527 break;
7528 case LE_EXPR:
7529 code = unsignedp ? LEU : LE;
7530 break;
7531 case GT_EXPR:
7532 code = unsignedp ? GTU : GT;
7533 break;
7534 case GE_EXPR:
7535 code = unsignedp ? GEU : GE;
7536 break;
7537 case UNORDERED_EXPR:
7538 code = UNORDERED;
7539 break;
7540 case ORDERED_EXPR:
7541 code = ORDERED;
7542 break;
7543 case UNLT_EXPR:
7544 code = UNLT;
7545 break;
7546 case UNLE_EXPR:
7547 code = UNLE;
7548 break;
7549 case UNGT_EXPR:
7550 code = UNGT;
7551 break;
7552 case UNGE_EXPR:
7553 code = UNGE;
7554 break;
7555 case UNEQ_EXPR:
7556 code = UNEQ;
7557 break;
7558 case LTGT_EXPR:
7559 code = LTGT;
7560 break;
7561
7562 default:
7563 gcc_unreachable ();
7564 }
7565 return code;
7566 }
7567 #endif
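/* For instance, LT_EXPR maps to LTU when UNSIGNEDP is nonzero and to LT
   otherwise, while the floating-point-only codes such as UNLT_EXPR have
   a single rtl counterpart regardless of signedness.  */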
7568
7569 /* Subroutine of expand_expr. Expand the two operands of a binary
7570 expression EXP0 and EXP1, placing the results in OP0 and OP1.
7571 The value may be stored in TARGET if TARGET is nonzero. The
7572 MODIFIER argument is as documented by expand_expr. */
7573
7574 static void
7575 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7576 enum expand_modifier modifier)
7577 {
7578 if (! safe_from_p (target, exp1, 1))
7579 target = 0;
7580 if (operand_equal_p (exp0, exp1, 0))
7581 {
7582 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7583 *op1 = copy_rtx (*op0);
7584 }
7585 else
7586 {
7587 /* If we need to preserve evaluation order, copy exp0 into its own
7588 temporary variable so that it can't be clobbered by exp1. */
7589 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7590 exp0 = save_expr (exp0);
7591 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7592 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7593 }
7594 }
7595
7596 \f
7597 /* Return a MEM that contains constant EXP. DEFER is as for
7598 output_constant_def and MODIFIER is as for expand_expr. */
7599
7600 static rtx
7601 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7602 {
7603 rtx mem;
7604
7605 mem = output_constant_def (exp, defer);
7606 if (modifier != EXPAND_INITIALIZER)
7607 mem = use_anchored_address (mem);
7608 return mem;
7609 }
7610
7611 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7612 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7613
7614 static rtx
7615 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7616 enum expand_modifier modifier, addr_space_t as)
7617 {
7618 rtx result, subtarget;
7619 tree inner, offset;
7620 HOST_WIDE_INT bitsize, bitpos;
7621 int volatilep, unsignedp;
7622 machine_mode mode1;
7623
7624 /* If we are taking the address of a constant and are at the top level,
7625 we have to use output_constant_def since we can't call force_const_mem
7626 at top level. */
7627 /* ??? This should be considered a front-end bug. We should not be
7628 generating ADDR_EXPR of something that isn't an LVALUE. The only
7629 exception here is STRING_CST. */
7630 if (CONSTANT_CLASS_P (exp))
7631 {
7632 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7633 if (modifier < EXPAND_SUM)
7634 result = force_operand (result, target);
7635 return result;
7636 }
7637
7638 /* Everything must be something allowed by is_gimple_addressable. */
7639 switch (TREE_CODE (exp))
7640 {
7641 case INDIRECT_REF:
7642 /* This case will happen via recursion for &a->b. */
7643 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7644
7645 case MEM_REF:
7646 {
7647 tree tem = TREE_OPERAND (exp, 0);
7648 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7649 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7650 return expand_expr (tem, target, tmode, modifier);
7651 }
7652
7653 case CONST_DECL:
7654 /* Expand the initializer like constants above. */
7655 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7656 0, modifier), 0);
7657 if (modifier < EXPAND_SUM)
7658 result = force_operand (result, target);
7659 return result;
7660
7661 case REALPART_EXPR:
7662 /* The real part of the complex number is always first, therefore
7663 the address is the same as the address of the parent object. */
7664 offset = 0;
7665 bitpos = 0;
7666 inner = TREE_OPERAND (exp, 0);
7667 break;
7668
7669 case IMAGPART_EXPR:
7670 /* The imaginary part of the complex number is always second.
7671 The expression is therefore always offset by the size of the
7672 scalar type. */
7673 offset = 0;
7674 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7675 inner = TREE_OPERAND (exp, 0);
7676 break;
7677
7678 case COMPOUND_LITERAL_EXPR:
7679 /* Allow COMPOUND_LITERAL_EXPR in initializers: if e.g.
7680 rtl_for_decl_init is called on DECL_INITIAL with
7681 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified. */
7682 if (modifier == EXPAND_INITIALIZER
7683 && COMPOUND_LITERAL_EXPR_DECL (exp))
7684 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7685 target, tmode, modifier, as);
7686 /* FALLTHRU */
7687 default:
7688 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7689 expand_expr, as that can have various side effects; LABEL_DECLs for
7690 example, may not have their DECL_RTL set yet. Expand the rtl of
7691 CONSTRUCTORs too, which should yield a memory reference for the
7692 constructor's contents. Assume language specific tree nodes can
7693 be expanded in some interesting way. */
7694 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7695 if (DECL_P (exp)
7696 || TREE_CODE (exp) == CONSTRUCTOR
7697 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7698 {
7699 result = expand_expr (exp, target, tmode,
7700 modifier == EXPAND_INITIALIZER
7701 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7702
7703 /* If the DECL isn't in memory, then the DECL wasn't properly
7704 marked TREE_ADDRESSABLE, which will be either a front-end
7705 or a tree optimizer bug. */
7706
7707 if (TREE_ADDRESSABLE (exp)
7708 && ! MEM_P (result)
7709 && ! targetm.calls.allocate_stack_slots_for_args ())
7710 {
7711 error ("local frame unavailable (naked function?)");
7712 return result;
7713 }
7714 else
7715 gcc_assert (MEM_P (result));
7716 result = XEXP (result, 0);
7717
7718 /* ??? Is this needed anymore? */
7719 if (DECL_P (exp))
7720 TREE_USED (exp) = 1;
7721
7722 if (modifier != EXPAND_INITIALIZER
7723 && modifier != EXPAND_CONST_ADDRESS
7724 && modifier != EXPAND_SUM)
7725 result = force_operand (result, target);
7726 return result;
7727 }
7728
7729 /* Pass FALSE as the last argument to get_inner_reference although
7730 we are expanding to RTL. The rationale is that we know how to
7731 handle "aligning nodes" here: we can just bypass them because
7732 they won't change the final object whose address will be returned
7733 (they actually exist only for that purpose). */
7734 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7735 &mode1, &unsignedp, &volatilep, false);
7736 break;
7737 }
7738
7739 /* We must have made progress. */
7740 gcc_assert (inner != exp);
7741
7742 subtarget = offset || bitpos ? NULL_RTX : target;
7743 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7744 inner alignment, force the inner to be sufficiently aligned. */
7745 if (CONSTANT_CLASS_P (inner)
7746 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7747 {
7748 inner = copy_node (inner);
7749 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7750 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7751 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7752 }
7753 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7754
7755 if (offset)
7756 {
7757 rtx tmp;
7758
7759 if (modifier != EXPAND_NORMAL)
7760 result = force_operand (result, NULL);
7761 tmp = expand_expr (offset, NULL_RTX, tmode,
7762 modifier == EXPAND_INITIALIZER
7763 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7764
7765 /* expand_expr is allowed to return an object in a mode other
7766 than TMODE. If it did, we need to convert. */
7767 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7768 tmp = convert_modes (tmode, GET_MODE (tmp),
7769 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7770 result = convert_memory_address_addr_space (tmode, result, as);
7771 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7772
7773 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7774 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7775 else
7776 {
7777 subtarget = bitpos ? NULL_RTX : target;
7778 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7779 1, OPTAB_LIB_WIDEN);
7780 }
7781 }
7782
7783 if (bitpos)
7784 {
7785 /* Someone beforehand should have rejected taking the address
7786 of such an object. */
7787 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7788
7789 result = convert_memory_address_addr_space (tmode, result, as);
7790 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7791 if (modifier < EXPAND_SUM)
7792 result = force_operand (result, target);
7793 }
7794
7795 return result;
7796 }
7797
7798 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7799 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7800
7801 static rtx
7802 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7803 enum expand_modifier modifier)
7804 {
7805 addr_space_t as = ADDR_SPACE_GENERIC;
7806 machine_mode address_mode = Pmode;
7807 machine_mode pointer_mode = ptr_mode;
7808 machine_mode rmode;
7809 rtx result;
7810
7811 /* Target mode of VOIDmode says "whatever's natural". */
7812 if (tmode == VOIDmode)
7813 tmode = TYPE_MODE (TREE_TYPE (exp));
7814
7815 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7816 {
7817 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7818 address_mode = targetm.addr_space.address_mode (as);
7819 pointer_mode = targetm.addr_space.pointer_mode (as);
7820 }
7821
7822 /* We can get called with some Weird Things if the user does silliness
7823 like "(short) &a". In that case, convert_memory_address won't do
7824 the right thing, so ignore the given target mode. */
7825 if (tmode != address_mode && tmode != pointer_mode)
7826 tmode = address_mode;
7827
7828 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7829 tmode, modifier, as);
7830
7831 /* Despite expand_expr's claims about ignoring TMODE when not
7832 strictly convenient, stuff breaks if we don't honor it. Note
7833 that combined with the above, we only do this for pointer modes. */
7834 rmode = GET_MODE (result);
7835 if (rmode == VOIDmode)
7836 rmode = tmode;
7837 if (rmode != tmode)
7838 result = convert_memory_address_addr_space (tmode, result, as);
7839
7840 return result;
7841 }
7842
7843 /* Generate code for computing CONSTRUCTOR EXP.
7844 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7845 is TRUE, NULL is returned instead of creating a temporary variable
7846 in memory, and the caller needs to handle it differently. */
7847
7848 static rtx
7849 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7850 bool avoid_temp_mem)
7851 {
7852 tree type = TREE_TYPE (exp);
7853 machine_mode mode = TYPE_MODE (type);
7854
7855 /* Try to avoid creating a temporary at all. This is possible
7856 if all of the initializer is zero.
7857 FIXME: try to handle all [0..255] initializers we can handle
7858 with memset. */
7859 if (TREE_STATIC (exp)
7860 && !TREE_ADDRESSABLE (exp)
7861 && target != 0 && mode == BLKmode
7862 && all_zeros_p (exp))
7863 {
7864 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7865 return target;
7866 }
7867
7868 /* All elts simple constants => refer to a constant in memory. But
7869 if this is a non-BLKmode mode, let it store a field at a time
7870 since that should make a CONST_INT, CONST_WIDE_INT or
7871 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7872 use, it is best to store directly into the target unless the type
7873 is large enough that memcpy will be used. If we are making an
7874 initializer and all operands are constant, put it in memory as
7875 well.
7876
7877 FIXME: Avoid trying to fill vector constructors piece-meal.
7878 Output them with output_constant_def below unless we're sure
7879 they're zeros. This should go away when vector initializers
7880 are treated like VECTOR_CST instead of arrays. */
7881 if ((TREE_STATIC (exp)
7882 && ((mode == BLKmode
7883 && ! (target != 0 && safe_from_p (target, exp, 1)))
7884 || TREE_ADDRESSABLE (exp)
7885 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7886 && (! can_move_by_pieces
7887 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7888 TYPE_ALIGN (type)))
7889 && ! mostly_zeros_p (exp))))
7890 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7891 && TREE_CONSTANT (exp)))
7892 {
7893 rtx constructor;
7894
7895 if (avoid_temp_mem)
7896 return NULL_RTX;
7897
7898 constructor = expand_expr_constant (exp, 1, modifier);
7899
7900 if (modifier != EXPAND_CONST_ADDRESS
7901 && modifier != EXPAND_INITIALIZER
7902 && modifier != EXPAND_SUM)
7903 constructor = validize_mem (constructor);
7904
7905 return constructor;
7906 }
7907
7908 /* Handle calls that pass values in multiple non-contiguous
7909 locations. The Irix 6 ABI has examples of this. */
7910 if (target == 0 || ! safe_from_p (target, exp, 1)
7911 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7912 {
7913 if (avoid_temp_mem)
7914 return NULL_RTX;
7915
7916 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7917 }
7918
7919 store_constructor (exp, target, 0, int_expr_size (exp));
7920 return target;
7921 }
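/* For illustration (hypothetical source): a CONSTRUCTOR whose elements
   are all constant zeros, say from "struct S s = { 0 };", takes the
   clear_storage shortcut above when a BLKmode target is available,
   whereas a constant but nonzero initializer in an EXPAND_INITIALIZER
   context is referenced from memory via expand_expr_constant rather
   than being stored field by field.  */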
7922
7923
7924 /* expand_expr: generate code for computing expression EXP.
7925 An rtx for the computed value is returned. The value is never null.
7926 In the case of a void EXP, const0_rtx is returned.
7927
7928 The value may be stored in TARGET if TARGET is nonzero.
7929 TARGET is just a suggestion; callers must assume that
7930 the rtx returned may not be the same as TARGET.
7931
7932 If TARGET is CONST0_RTX, it means that the value will be ignored.
7933
7934 If TMODE is not VOIDmode, it suggests generating the
7935 result in mode TMODE. But this is done only when convenient.
7936 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7937 TMODE is just a suggestion; callers must assume that
7938 the rtx returned may not have mode TMODE.
7939
7940 Note that TARGET may have neither TMODE nor MODE. In that case, it
7941 probably will not be used.
7942
7943 If MODIFIER is EXPAND_SUM then when EXP is an addition
7944 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7945 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7946 products as above, or REG or MEM, or constant.
7947 Ordinarily in such cases we would output mul or add instructions
7948 and then return a pseudo reg containing the sum.
7949
7950 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7951 it also marks a label as absolutely required (it can't be dead).
7952 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7953 This is used for outputting expressions used in initializers.
7954
7955 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7956 with a constant address even if that address is not normally legitimate.
7957 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7958
7959 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7960 a call parameter. Such targets require special care as we haven't yet
7961 marked TARGET so that it's safe from being trashed by libcalls. We
7962 don't want to use TARGET for anything but the final result;
7963 intermediate values must go elsewhere. Additionally, calls to
7964 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7965
7966 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7967 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7968 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7969 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7970 recursively.
7971
7972 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7973 In this case, we don't adjust a returned MEM rtx that wouldn't be
7974 sufficiently aligned for its mode; instead, it's up to the caller
7975 to deal with it afterwards. This is used to make sure that unaligned
7976 base objects for which out-of-bounds accesses are supported, for
7977 example record types with trailing arrays, aren't realigned behind
7978 the back of the caller.
7979 The normal operating mode is to pass FALSE for this parameter. */
7980
7981 rtx
7982 expand_expr_real (tree exp, rtx target, machine_mode tmode,
7983 enum expand_modifier modifier, rtx *alt_rtl,
7984 bool inner_reference_p)
7985 {
7986 rtx ret;
7987
7988 /* Handle ERROR_MARK before anybody tries to access its type. */
7989 if (TREE_CODE (exp) == ERROR_MARK
7990 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7991 {
7992 ret = CONST0_RTX (tmode);
7993 return ret ? ret : const0_rtx;
7994 }
7995
7996 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7997 inner_reference_p);
7998 return ret;
7999 }
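/* Sketch of the EXPAND_SUM behaviour documented above (register number
   and modes invented, assuming a 32-bit target): expanding "&a[i]" for
   an int array can yield an rtx such as
   (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (symbol_ref:SI "a"))
   instead of forcing the sum into a pseudo, which lets the caller fold
   the whole expression into a single addressing mode.  */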
8000
8001 /* Try to expand the conditional expression represented by
8002 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
8003 return the rtl reg which represents the result. Otherwise return
8004 NULL_RTX. */
8005
8006 static rtx
8007 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8008 tree treeop1 ATTRIBUTE_UNUSED,
8009 tree treeop2 ATTRIBUTE_UNUSED)
8010 {
8011 #ifdef HAVE_conditional_move
8012 rtx insn;
8013 rtx op00, op01, op1, op2;
8014 enum rtx_code comparison_code;
8015 machine_mode comparison_mode;
8016 gimple srcstmt;
8017 rtx temp;
8018 tree type = TREE_TYPE (treeop1);
8019 int unsignedp = TYPE_UNSIGNED (type);
8020 machine_mode mode = TYPE_MODE (type);
8021 machine_mode orig_mode = mode;
8022
8023 /* If we cannot do a conditional move on the mode, try doing it
8024 with the promoted mode. */
8025 if (!can_conditionally_move_p (mode))
8026 {
8027 mode = promote_mode (type, mode, &unsignedp);
8028 if (!can_conditionally_move_p (mode))
8029 return NULL_RTX;
8030 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
8031 }
8032 else
8033 temp = assign_temp (type, 0, 1);
8034
8035 start_sequence ();
8036 expand_operands (treeop1, treeop2,
8037 temp, &op1, &op2, EXPAND_NORMAL);
8038
8039 if (TREE_CODE (treeop0) == SSA_NAME
8040 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8041 {
8042 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8043 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8044 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8045 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8046 comparison_mode = TYPE_MODE (type);
8047 unsignedp = TYPE_UNSIGNED (type);
8048 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8049 }
8050 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
8051 {
8052 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8053 enum tree_code cmpcode = TREE_CODE (treeop0);
8054 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8055 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8056 unsignedp = TYPE_UNSIGNED (type);
8057 comparison_mode = TYPE_MODE (type);
8058 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8059 }
8060 else
8061 {
8062 op00 = expand_normal (treeop0);
8063 op01 = const0_rtx;
8064 comparison_code = NE;
8065 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8066 }
8067
8068 if (GET_MODE (op1) != mode)
8069 op1 = gen_lowpart (mode, op1);
8070
8071 if (GET_MODE (op2) != mode)
8072 op2 = gen_lowpart (mode, op2);
8073
8074 /* Try to emit the conditional move. */
8075 insn = emit_conditional_move (temp, comparison_code,
8076 op00, op01, comparison_mode,
8077 op1, op2, mode,
8078 unsignedp);
8079
8080 /* If we could do the conditional move, emit the sequence,
8081 and return. */
8082 if (insn)
8083 {
8084 rtx_insn *seq = get_insns ();
8085 end_sequence ();
8086 emit_insn (seq);
8087 return convert_modes (orig_mode, mode, temp, 0);
8088 }
8089
8090 /* Otherwise discard the sequence and fall back to code with
8091 branches. */
8092 end_sequence ();
8093 #endif
8094 return NULL_RTX;
8095 }
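/* For example (hypothetical GIMPLE): for "x_3 = a_1 < b_2 ? c_4 : d_5"
   the comparison is taken from the defining statement of the SSA
   predicate when there is one, both arms are expanded into registers,
   and emit_conditional_move is asked for a single conditional-move
   insn; if the target cannot provide one, the sequence is discarded
   and the branch-based expansion of COND_EXPR is used instead.  */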
8096
8097 rtx
8098 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8099 enum expand_modifier modifier)
8100 {
8101 rtx op0, op1, op2, temp;
8102 tree type;
8103 int unsignedp;
8104 machine_mode mode;
8105 enum tree_code code = ops->code;
8106 optab this_optab;
8107 rtx subtarget, original_target;
8108 int ignore;
8109 bool reduce_bit_field;
8110 location_t loc = ops->location;
8111 tree treeop0, treeop1, treeop2;
8112 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8113 ? reduce_to_bit_field_precision ((expr), \
8114 target, \
8115 type) \
8116 : (expr))
8117
8118 type = ops->type;
8119 mode = TYPE_MODE (type);
8120 unsignedp = TYPE_UNSIGNED (type);
8121
8122 treeop0 = ops->op0;
8123 treeop1 = ops->op1;
8124 treeop2 = ops->op2;
8125
8126 /* We should be called only on simple (binary or unary) expressions,
8127 exactly those that are valid in gimple expressions that aren't
8128 GIMPLE_SINGLE_RHS (or invalid). */
8129 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8130 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8131 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8132
8133 ignore = (target == const0_rtx
8134 || ((CONVERT_EXPR_CODE_P (code)
8135 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8136 && TREE_CODE (type) == VOID_TYPE));
8137
8138 /* We should be called only if we need the result. */
8139 gcc_assert (!ignore);
8140
8141 /* An operation in what may be a bit-field type needs the
8142 result to be reduced to the precision of the bit-field type,
8143 which is narrower than that of the type's mode. */
8144 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8145 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8146
8147 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8148 target = 0;
8149
8150 /* Use subtarget as the target for operand 0 of a binary operation. */
8151 subtarget = get_subtarget (target);
8152 original_target = target;
8153
8154 switch (code)
8155 {
8156 case NON_LVALUE_EXPR:
8157 case PAREN_EXPR:
8158 CASE_CONVERT:
8159 if (treeop0 == error_mark_node)
8160 return const0_rtx;
8161
8162 if (TREE_CODE (type) == UNION_TYPE)
8163 {
8164 tree valtype = TREE_TYPE (treeop0);
8165
8166 /* If both input and output are BLKmode, this conversion isn't doing
8167 anything except possibly changing the memory attributes. */
8168 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8169 {
8170 rtx result = expand_expr (treeop0, target, tmode,
8171 modifier);
8172
8173 result = copy_rtx (result);
8174 set_mem_attributes (result, type, 0);
8175 return result;
8176 }
8177
8178 if (target == 0)
8179 {
8180 if (TYPE_MODE (type) != BLKmode)
8181 target = gen_reg_rtx (TYPE_MODE (type));
8182 else
8183 target = assign_temp (type, 1, 1);
8184 }
8185
8186 if (MEM_P (target))
8187 /* Store data into beginning of memory target. */
8188 store_expr (treeop0,
8189 adjust_address (target, TYPE_MODE (valtype), 0),
8190 modifier == EXPAND_STACK_PARM,
8191 false);
8192
8193 else
8194 {
8195 gcc_assert (REG_P (target));
8196
8197 /* Store this field into a union of the proper type. */
8198 store_field (target,
8199 MIN ((int_size_in_bytes (TREE_TYPE
8200 (treeop0))
8201 * BITS_PER_UNIT),
8202 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8203 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8204 }
8205
8206 /* Return the entire union. */
8207 return target;
8208 }
8209
8210 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8211 {
8212 op0 = expand_expr (treeop0, target, VOIDmode,
8213 modifier);
8214
8215 /* If the signedness of the conversion differs and OP0 is
8216 a promoted SUBREG, clear that indication since we now
8217 have to do the proper extension. */
8218 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8219 && GET_CODE (op0) == SUBREG)
8220 SUBREG_PROMOTED_VAR_P (op0) = 0;
8221
8222 return REDUCE_BIT_FIELD (op0);
8223 }
8224
8225 op0 = expand_expr (treeop0, NULL_RTX, mode,
8226 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8227 if (GET_MODE (op0) == mode)
8228 ;
8229
8230 /* If OP0 is a constant, just convert it into the proper mode. */
8231 else if (CONSTANT_P (op0))
8232 {
8233 tree inner_type = TREE_TYPE (treeop0);
8234 machine_mode inner_mode = GET_MODE (op0);
8235
8236 if (inner_mode == VOIDmode)
8237 inner_mode = TYPE_MODE (inner_type);
8238
8239 if (modifier == EXPAND_INITIALIZER)
8240 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8241 subreg_lowpart_offset (mode,
8242 inner_mode));
8243 else
8244 op0 = convert_modes (mode, inner_mode, op0,
8245 TYPE_UNSIGNED (inner_type));
8246 }
8247
8248 else if (modifier == EXPAND_INITIALIZER)
8249 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8250
8251 else if (target == 0)
8252 op0 = convert_to_mode (mode, op0,
8253 TYPE_UNSIGNED (TREE_TYPE
8254 (treeop0)));
8255 else
8256 {
8257 convert_move (target, op0,
8258 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8259 op0 = target;
8260 }
8261
8262 return REDUCE_BIT_FIELD (op0);
8263
8264 case ADDR_SPACE_CONVERT_EXPR:
8265 {
8266 tree treeop0_type = TREE_TYPE (treeop0);
8267 addr_space_t as_to;
8268 addr_space_t as_from;
8269
8270 gcc_assert (POINTER_TYPE_P (type));
8271 gcc_assert (POINTER_TYPE_P (treeop0_type));
8272
8273 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8274 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8275
8276 /* Conversions between pointers to the same address space should
8277 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8278 gcc_assert (as_to != as_from);
8279
8280 /* Ask target code to handle conversion between pointers
8281 to overlapping address spaces. */
8282 if (targetm.addr_space.subset_p (as_to, as_from)
8283 || targetm.addr_space.subset_p (as_from, as_to))
8284 {
8285 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8286 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8287 gcc_assert (op0);
8288 return op0;
8289 }
8290
8291 /* For disjoint address spaces, converting anything but
8292 a null pointer invokes undefined behaviour. We simply
8293 always return a null pointer here. */
8294 return CONST0_RTX (mode);
8295 }
8296
8297 case POINTER_PLUS_EXPR:
8298 /* Even though the sizetype mode and the pointer's mode can be different,
8299 expand is able to handle this correctly and get the correct result out
8300 of the PLUS_EXPR code. */
8301 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8302 if sizetype precision is smaller than pointer precision. */
8303 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8304 treeop1 = fold_convert_loc (loc, type,
8305 fold_convert_loc (loc, ssizetype,
8306 treeop1));
8307 /* If sizetype precision is larger than pointer precision, truncate the
8308 offset to have matching modes. */
8309 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8310 treeop1 = fold_convert_loc (loc, type, treeop1);
8311
8312 case PLUS_EXPR:
8313 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8314 something else, make sure we add the register to the constant and
8315 then to the other thing. This case can occur during strength
8316 reduction and doing it this way will produce better code if the
8317 frame pointer or argument pointer is eliminated.
8318
8319 fold-const.c will ensure that the constant is always in the inner
8320 PLUS_EXPR, so the only case we need to do anything about is if
8321 sp, ap, or fp is our second argument, in which case we must swap
8322 the innermost first argument and our second argument. */
8323
8324 if (TREE_CODE (treeop0) == PLUS_EXPR
8325 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8326 && TREE_CODE (treeop1) == VAR_DECL
8327 && (DECL_RTL (treeop1) == frame_pointer_rtx
8328 || DECL_RTL (treeop1) == stack_pointer_rtx
8329 || DECL_RTL (treeop1) == arg_pointer_rtx))
8330 {
8331 gcc_unreachable ();
8332 }
8333
8334 /* If the result is to be ptr_mode and we are adding an integer to
8335 something, we might be forming a constant. So try to use
8336 plus_constant. If it produces a sum and we can't accept it,
8337 use force_operand. This allows P = &ARR[const] to generate
8338 efficient code on machines where a SYMBOL_REF is not a valid
8339 address.
8340
8341 If this is an EXPAND_SUM call, always return the sum. */
8342 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8343 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8344 {
8345 if (modifier == EXPAND_STACK_PARM)
8346 target = 0;
8347 if (TREE_CODE (treeop0) == INTEGER_CST
8348 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8349 && TREE_CONSTANT (treeop1))
8350 {
8351 rtx constant_part;
8352 HOST_WIDE_INT wc;
8353 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8354
8355 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8356 EXPAND_SUM);
8357 /* Use wi::shwi to ensure that the constant is
8358 truncated according to the mode of OP1, then sign extended
8359 to a HOST_WIDE_INT. Using the constant directly can result
8360 in non-canonical RTL in a 64x32 cross compile. */
8361 wc = TREE_INT_CST_LOW (treeop0);
8362 constant_part =
8363 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8364 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8365 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8366 op1 = force_operand (op1, target);
8367 return REDUCE_BIT_FIELD (op1);
8368 }
8369
8370 else if (TREE_CODE (treeop1) == INTEGER_CST
8371 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8372 && TREE_CONSTANT (treeop0))
8373 {
8374 rtx constant_part;
8375 HOST_WIDE_INT wc;
8376 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8377
8378 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8379 (modifier == EXPAND_INITIALIZER
8380 ? EXPAND_INITIALIZER : EXPAND_SUM));
8381 if (! CONSTANT_P (op0))
8382 {
8383 op1 = expand_expr (treeop1, NULL_RTX,
8384 VOIDmode, modifier);
8385 /* Return a PLUS if modifier says it's OK. */
8386 if (modifier == EXPAND_SUM
8387 || modifier == EXPAND_INITIALIZER)
8388 return simplify_gen_binary (PLUS, mode, op0, op1);
8389 goto binop2;
8390 }
8391 /* Use wi::shwi to ensure that the constant is
8392 truncated according to the mode of OP1, then sign extended
8393 to a HOST_WIDE_INT. Using the constant directly can result
8394 in non-canonical RTL in a 64x32 cross compile. */
8395 wc = TREE_INT_CST_LOW (treeop1);
8396 constant_part
8397 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8398 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8399 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8400 op0 = force_operand (op0, target);
8401 return REDUCE_BIT_FIELD (op0);
8402 }
8403 }
8404
8405 /* Use TER to expand pointer addition of a negated value
8406 as pointer subtraction. */
8407 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8408 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8409 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8410 && TREE_CODE (treeop1) == SSA_NAME
8411 && TYPE_MODE (TREE_TYPE (treeop0))
8412 == TYPE_MODE (TREE_TYPE (treeop1)))
8413 {
8414 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8415 if (def)
8416 {
8417 treeop1 = gimple_assign_rhs1 (def);
8418 code = MINUS_EXPR;
8419 goto do_minus;
8420 }
8421 }
8422
8423 /* No sense saving up arithmetic to be done
8424 if it's all in the wrong mode to form part of an address.
8425 And force_operand won't know whether to sign-extend or
8426 zero-extend. */
8427 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8428 || mode != ptr_mode)
8429 {
8430 expand_operands (treeop0, treeop1,
8431 subtarget, &op0, &op1, EXPAND_NORMAL);
8432 if (op0 == const0_rtx)
8433 return op1;
8434 if (op1 == const0_rtx)
8435 return op0;
8436 goto binop2;
8437 }
8438
8439 expand_operands (treeop0, treeop1,
8440 subtarget, &op0, &op1, modifier);
8441 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8442
8443 case MINUS_EXPR:
8444 do_minus:
8445 /* For initializers, we are allowed to return a MINUS of two
8446 symbolic constants. Here we handle all cases when both operands
8447 are constant. */
8448 /* Handle difference of two symbolic constants,
8449 for the sake of an initializer. */
8450 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8451 && really_constant_p (treeop0)
8452 && really_constant_p (treeop1))
8453 {
8454 expand_operands (treeop0, treeop1,
8455 NULL_RTX, &op0, &op1, modifier);
8456
8457 /* If the last operand is a CONST_INT, use plus_constant of
8458 the negated constant. Else make the MINUS. */
8459 if (CONST_INT_P (op1))
8460 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8461 -INTVAL (op1)));
8462 else
8463 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8464 }
8465
8466 /* No sense saving up arithmetic to be done
8467 if it's all in the wrong mode to form part of an address.
8468 And force_operand won't know whether to sign-extend or
8469 zero-extend. */
8470 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8471 || mode != ptr_mode)
8472 goto binop;
8473
8474 expand_operands (treeop0, treeop1,
8475 subtarget, &op0, &op1, modifier);
8476
8477 /* Convert A - const to A + (-const). */
8478 if (CONST_INT_P (op1))
8479 {
8480 op1 = negate_rtx (mode, op1);
8481 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8482 }
8483
8484 goto binop2;
8485
8486 case WIDEN_MULT_PLUS_EXPR:
8487 case WIDEN_MULT_MINUS_EXPR:
8488 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8489 op2 = expand_normal (treeop2);
8490 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8491 target, unsignedp);
8492 return target;
8493
8494 case WIDEN_MULT_EXPR:
8495 /* If first operand is constant, swap them.
8496 Thus the following special case checks need only
8497 check the second operand. */
8498 if (TREE_CODE (treeop0) == INTEGER_CST)
8499 {
8500 tree t1 = treeop0;
8501 treeop0 = treeop1;
8502 treeop1 = t1;
8503 }
8504
8505 /* First, check if we have a multiplication of one signed and one
8506 unsigned operand. */
8507 if (TREE_CODE (treeop1) != INTEGER_CST
8508 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8509 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8510 {
8511 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8512 this_optab = usmul_widen_optab;
8513 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8514 != CODE_FOR_nothing)
8515 {
8516 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8517 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8518 EXPAND_NORMAL);
8519 else
8520 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8521 EXPAND_NORMAL);
8522 /* op0 and op1 might still be constant, despite the above
8523 != INTEGER_CST check. Handle it. */
8524 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8525 {
8526 op0 = convert_modes (innermode, mode, op0, true);
8527 op1 = convert_modes (innermode, mode, op1, false);
8528 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8529 target, unsignedp));
8530 }
8531 goto binop3;
8532 }
8533 }
8534 /* Check for a multiplication with matching signedness. */
8535 else if ((TREE_CODE (treeop1) == INTEGER_CST
8536 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8537 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8538 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8539 {
8540 tree op0type = TREE_TYPE (treeop0);
8541 machine_mode innermode = TYPE_MODE (op0type);
8542 bool zextend_p = TYPE_UNSIGNED (op0type);
8543 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8544 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8545
8546 if (TREE_CODE (treeop0) != INTEGER_CST)
8547 {
8548 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8549 != CODE_FOR_nothing)
8550 {
8551 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8552 EXPAND_NORMAL);
8553 /* op0 and op1 might still be constant, despite the above
8554 != INTEGER_CST check. Handle it. */
8555 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8556 {
8557 widen_mult_const:
8558 op0 = convert_modes (innermode, mode, op0, zextend_p);
8559 op1
8560 = convert_modes (innermode, mode, op1,
8561 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8562 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8563 target,
8564 unsignedp));
8565 }
8566 temp = expand_widening_mult (mode, op0, op1, target,
8567 unsignedp, this_optab);
8568 return REDUCE_BIT_FIELD (temp);
8569 }
8570 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8571 != CODE_FOR_nothing
8572 && innermode == word_mode)
8573 {
8574 rtx htem, hipart;
8575 op0 = expand_normal (treeop0);
8576 if (TREE_CODE (treeop1) == INTEGER_CST)
8577 op1 = convert_modes (innermode, mode,
8578 expand_normal (treeop1),
8579 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8580 else
8581 op1 = expand_normal (treeop1);
8582 /* op0 and op1 might still be constant, despite the above
8583 != INTEGER_CST check. Handle it. */
8584 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8585 goto widen_mult_const;
8586 temp = expand_binop (mode, other_optab, op0, op1, target,
8587 unsignedp, OPTAB_LIB_WIDEN);
8588 hipart = gen_highpart (innermode, temp);
8589 htem = expand_mult_highpart_adjust (innermode, hipart,
8590 op0, op1, hipart,
8591 zextend_p);
8592 if (htem != hipart)
8593 emit_move_insn (hipart, htem);
8594 return REDUCE_BIT_FIELD (temp);
8595 }
8596 }
8597 }
8598 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8599 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8600 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8601 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8602
8603 case FMA_EXPR:
8604 {
8605 optab opt = fma_optab;
8606 gimple def0, def2;
8607
8608 /* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
8609 call. */
8610 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8611 {
8612 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8613 tree call_expr;
8614
8615 gcc_assert (fn != NULL_TREE);
8616 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8617 return expand_builtin (call_expr, target, subtarget, mode, false);
8618 }
8619
8620 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8621 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8622
8623 op0 = op2 = NULL;
8624
8625 if (def0 && def2
8626 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8627 {
8628 opt = fnms_optab;
8629 op0 = expand_normal (gimple_assign_rhs1 (def0));
8630 op2 = expand_normal (gimple_assign_rhs1 (def2));
8631 }
8632 else if (def0
8633 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8634 {
8635 opt = fnma_optab;
8636 op0 = expand_normal (gimple_assign_rhs1 (def0));
8637 }
8638 else if (def2
8639 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8640 {
8641 opt = fms_optab;
8642 op2 = expand_normal (gimple_assign_rhs1 (def2));
8643 }
8644
8645 if (op0 == NULL)
8646 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8647 if (op2 == NULL)
8648 op2 = expand_normal (treeop2);
8649 op1 = expand_normal (treeop1);
8650
8651 return expand_ternary_op (TYPE_MODE (type), opt,
8652 op0, op1, op2, target, 0);
8653 }
8654
8655 case MULT_EXPR:
8656 /* If this is a fixed-point operation, then we cannot use the code
8657 below because "expand_mult" doesn't support sat/no-sat fixed-point
8658 multiplications. */
8659 if (ALL_FIXED_POINT_MODE_P (mode))
8660 goto binop;
8661
8662 /* If first operand is constant, swap them.
8663 Thus the following special case checks need only
8664 check the second operand. */
8665 if (TREE_CODE (treeop0) == INTEGER_CST)
8666 {
8667 tree t1 = treeop0;
8668 treeop0 = treeop1;
8669 treeop1 = t1;
8670 }
8671
8672 /* Attempt to return something suitable for generating an
8673 indexed address, for machines that support that. */
8674
8675 if (modifier == EXPAND_SUM && mode == ptr_mode
8676 && tree_fits_shwi_p (treeop1))
8677 {
8678 tree exp1 = treeop1;
8679
8680 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8681 EXPAND_SUM);
8682
8683 if (!REG_P (op0))
8684 op0 = force_operand (op0, NULL_RTX);
8685 if (!REG_P (op0))
8686 op0 = copy_to_mode_reg (mode, op0);
8687
8688 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8689 gen_int_mode (tree_to_shwi (exp1),
8690 TYPE_MODE (TREE_TYPE (exp1)))));
8691 }
8692
8693 if (modifier == EXPAND_STACK_PARM)
8694 target = 0;
8695
8696 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8697 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8698
8699 case TRUNC_DIV_EXPR:
8700 case FLOOR_DIV_EXPR:
8701 case CEIL_DIV_EXPR:
8702 case ROUND_DIV_EXPR:
8703 case EXACT_DIV_EXPR:
8704 /* If this is a fixed-point operation, then we cannot use the code
8705 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8706 divisions. */
8707 if (ALL_FIXED_POINT_MODE_P (mode))
8708 goto binop;
8709
8710 if (modifier == EXPAND_STACK_PARM)
8711 target = 0;
8712 /* Possible optimization: compute the dividend with EXPAND_SUM;
8713 then, if the divisor is constant, we could optimize the case
8714 where some terms of the dividend have coefficients divisible by it. */
8715 expand_operands (treeop0, treeop1,
8716 subtarget, &op0, &op1, EXPAND_NORMAL);
8717 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8718
8719 case RDIV_EXPR:
8720 goto binop;
8721
8722 case MULT_HIGHPART_EXPR:
8723 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8724 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8725 gcc_assert (temp);
8726 return temp;
8727
8728 case TRUNC_MOD_EXPR:
8729 case FLOOR_MOD_EXPR:
8730 case CEIL_MOD_EXPR:
8731 case ROUND_MOD_EXPR:
8732 if (modifier == EXPAND_STACK_PARM)
8733 target = 0;
8734 expand_operands (treeop0, treeop1,
8735 subtarget, &op0, &op1, EXPAND_NORMAL);
8736 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8737
8738 case FIXED_CONVERT_EXPR:
8739 op0 = expand_normal (treeop0);
8740 if (target == 0 || modifier == EXPAND_STACK_PARM)
8741 target = gen_reg_rtx (mode);
8742
8743 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8744 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8745 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8746 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8747 else
8748 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8749 return target;
8750
8751 case FIX_TRUNC_EXPR:
8752 op0 = expand_normal (treeop0);
8753 if (target == 0 || modifier == EXPAND_STACK_PARM)
8754 target = gen_reg_rtx (mode);
8755 expand_fix (target, op0, unsignedp);
8756 return target;
8757
8758 case FLOAT_EXPR:
8759 op0 = expand_normal (treeop0);
8760 if (target == 0 || modifier == EXPAND_STACK_PARM)
8761 target = gen_reg_rtx (mode);
8762 /* expand_float can't figure out what to do if FROM has VOIDmode.
8763 So give it the correct mode. With -O, cse will optimize this. */
8764 if (GET_MODE (op0) == VOIDmode)
8765 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8766 op0);
8767 expand_float (target, op0,
8768 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8769 return target;
8770
8771 case NEGATE_EXPR:
8772 op0 = expand_expr (treeop0, subtarget,
8773 VOIDmode, EXPAND_NORMAL);
8774 if (modifier == EXPAND_STACK_PARM)
8775 target = 0;
8776 temp = expand_unop (mode,
8777 optab_for_tree_code (NEGATE_EXPR, type,
8778 optab_default),
8779 op0, target, 0);
8780 gcc_assert (temp);
8781 return REDUCE_BIT_FIELD (temp);
8782
8783 case ABS_EXPR:
8784 op0 = expand_expr (treeop0, subtarget,
8785 VOIDmode, EXPAND_NORMAL);
8786 if (modifier == EXPAND_STACK_PARM)
8787 target = 0;
8788
8789 /* ABS_EXPR is not valid for complex arguments. */
8790 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8791 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8792
8793 /* Unsigned abs is simply the operand. Testing here means we don't
8794 risk generating incorrect code below. */
8795 if (TYPE_UNSIGNED (type))
8796 return op0;
8797
8798 return expand_abs (mode, op0, target, unsignedp,
8799 safe_from_p (target, treeop0, 1));
8800
8801 case MAX_EXPR:
8802 case MIN_EXPR:
8803 target = original_target;
8804 if (target == 0
8805 || modifier == EXPAND_STACK_PARM
8806 || (MEM_P (target) && MEM_VOLATILE_P (target))
8807 || GET_MODE (target) != mode
8808 || (REG_P (target)
8809 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8810 target = gen_reg_rtx (mode);
8811 expand_operands (treeop0, treeop1,
8812 target, &op0, &op1, EXPAND_NORMAL);
8813
8814 /* First try to do it with a special MIN or MAX instruction.
8815 If that does not win, use a conditional jump to select the proper
8816 value. */
8817 this_optab = optab_for_tree_code (code, type, optab_default);
8818 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8819 OPTAB_WIDEN);
8820 if (temp != 0)
8821 return temp;
8822
8823 /* At this point, a MEM target is no longer useful; we will get better
8824 code without it. */
8825
8826 if (! REG_P (target))
8827 target = gen_reg_rtx (mode);
8828
8829 /* If op1 was placed in target, swap op0 and op1. */
8830 if (target != op0 && target == op1)
8831 {
8832 temp = op0;
8833 op0 = op1;
8834 op1 = temp;
8835 }
8836
8837 /* We generate better code and avoid problems with op1 mentioning
8838 target by forcing op1 into a pseudo if it isn't a constant. */
8839 if (! CONSTANT_P (op1))
8840 op1 = force_reg (mode, op1);
8841
8842 {
8843 enum rtx_code comparison_code;
8844 rtx cmpop1 = op1;
8845
8846 if (code == MAX_EXPR)
8847 comparison_code = unsignedp ? GEU : GE;
8848 else
8849 comparison_code = unsignedp ? LEU : LE;
8850
8851 /* Canonicalize to comparisons against 0. */
8852 if (op1 == const1_rtx)
8853 {
8854 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8855 or (a != 0 ? a : 1) for unsigned.
8856 For MIN we are safe converting (a <= 1 ? a : 1)
8857 into (a <= 0 ? a : 1) */
8858 cmpop1 = const0_rtx;
8859 if (code == MAX_EXPR)
8860 comparison_code = unsignedp ? NE : GT;
8861 }
8862 if (op1 == constm1_rtx && !unsignedp)
8863 {
8864 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8865 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8866 cmpop1 = const0_rtx;
8867 if (code == MIN_EXPR)
8868 comparison_code = LT;
8869 }
8870 #ifdef HAVE_conditional_move
8871 /* Use a conditional move if possible. */
8872 if (can_conditionally_move_p (mode))
8873 {
8874 rtx insn;
8875
8876 start_sequence ();
8877
8878 /* Try to emit the conditional move. */
8879 insn = emit_conditional_move (target, comparison_code,
8880 op0, cmpop1, mode,
8881 op0, op1, mode,
8882 unsignedp);
8883
8884 /* If we could do the conditional move, emit the sequence,
8885 and return. */
8886 if (insn)
8887 {
8888 rtx_insn *seq = get_insns ();
8889 end_sequence ();
8890 emit_insn (seq);
8891 return target;
8892 }
8893
8894 /* Otherwise discard the sequence and fall back to code with
8895 branches. */
8896 end_sequence ();
8897 }
8898 #endif
8899 if (target != op0)
8900 emit_move_insn (target, op0);
8901
8902 temp = gen_label_rtx ();
8903 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8904 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8905 -1);
8906 }
8907 emit_move_insn (target, op1);
8908 emit_label (temp);
8909 return target;
8910
8911 case BIT_NOT_EXPR:
8912 op0 = expand_expr (treeop0, subtarget,
8913 VOIDmode, EXPAND_NORMAL);
8914 if (modifier == EXPAND_STACK_PARM)
8915 target = 0;
8916 /* In case we have to reduce the result to bitfield precision
8917 for an unsigned bitfield, expand this as an XOR with a proper
8918 constant instead. */
8919 if (reduce_bit_field && TYPE_UNSIGNED (type))
8920 {
8921 wide_int mask = wi::mask (TYPE_PRECISION (type),
8922 false, GET_MODE_PRECISION (mode));
8923
8924 temp = expand_binop (mode, xor_optab, op0,
8925 immed_wide_int_const (mask, mode),
8926 target, 1, OPTAB_LIB_WIDEN);
8927 }
8928 else
8929 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8930 gcc_assert (temp);
8931 return temp;
8932
8933 /* ??? Can optimize bitwise operations with one arg constant.
8934 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8935 and (a bitwise1 b) bitwise2 b (etc)
8936 but that is probably not worth while. */
8937
8938 case BIT_AND_EXPR:
8939 case BIT_IOR_EXPR:
8940 case BIT_XOR_EXPR:
8941 goto binop;
8942
8943 case LROTATE_EXPR:
8944 case RROTATE_EXPR:
8945 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8946 || (GET_MODE_PRECISION (TYPE_MODE (type))
8947 == TYPE_PRECISION (type)));
8948 /* fall through */
8949
8950 case LSHIFT_EXPR:
8951 case RSHIFT_EXPR:
8952 /* If this is a fixed-point operation, then we cannot use the code
8953 below because "expand_shift" doesn't support sat/no-sat fixed-point
8954 shifts. */
8955 if (ALL_FIXED_POINT_MODE_P (mode))
8956 goto binop;
8957
8958 if (! safe_from_p (subtarget, treeop1, 1))
8959 subtarget = 0;
8960 if (modifier == EXPAND_STACK_PARM)
8961 target = 0;
8962 op0 = expand_expr (treeop0, subtarget,
8963 VOIDmode, EXPAND_NORMAL);
8964 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8965 unsignedp);
8966 if (code == LSHIFT_EXPR)
8967 temp = REDUCE_BIT_FIELD (temp);
8968 return temp;
8969
8970 /* Could determine the answer when only additive constants differ. Also,
8971 the addition of one can be handled by changing the condition. */
8972 case LT_EXPR:
8973 case LE_EXPR:
8974 case GT_EXPR:
8975 case GE_EXPR:
8976 case EQ_EXPR:
8977 case NE_EXPR:
8978 case UNORDERED_EXPR:
8979 case ORDERED_EXPR:
8980 case UNLT_EXPR:
8981 case UNLE_EXPR:
8982 case UNGT_EXPR:
8983 case UNGE_EXPR:
8984 case UNEQ_EXPR:
8985 case LTGT_EXPR:
8986 temp = do_store_flag (ops,
8987 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8988 tmode != VOIDmode ? tmode : mode);
8989 if (temp)
8990 return temp;
8991
8992 /* Use a compare and a jump for BLKmode comparisons, or for function
8993 type comparisons is HAVE_canonicalize_funcptr_for_compare. */
8994
8995 if ((target == 0
8996 || modifier == EXPAND_STACK_PARM
8997 || ! safe_from_p (target, treeop0, 1)
8998 || ! safe_from_p (target, treeop1, 1)
8999 /* Make sure we don't have a hard reg (such as function's return
9000 value) live across basic blocks, if not optimizing. */
9001 || (!optimize && REG_P (target)
9002 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9003 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9004
9005 emit_move_insn (target, const0_rtx);
9006
9007 op1 = gen_label_rtx ();
9008 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9009
9010 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9011 emit_move_insn (target, constm1_rtx);
9012 else
9013 emit_move_insn (target, const1_rtx);
9014
9015 emit_label (op1);
9016 return target;
9017
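/* Build a complex value from its real (op0) and imaginary (op1) parts,
taking care not to overwrite a part of TARGET that one of the operands
still reads. */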
9018 case COMPLEX_EXPR:
9019 /* Get the rtx for the operands. */
9020 op0 = expand_normal (treeop0);
9021 op1 = expand_normal (treeop1);
9022
9023 if (!target)
9024 target = gen_reg_rtx (TYPE_MODE (type));
9025 else
9026 /* If target overlaps with op1, then either we need to force
9027 op1 into a pseudo (if target also overlaps with op0),
9028 or write the complex parts in reverse order. */
9029 switch (GET_CODE (target))
9030 {
9031 case CONCAT:
9032 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9033 {
9034 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9035 {
9036 complex_expr_force_op1:
9037 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9038 emit_move_insn (temp, op1);
9039 op1 = temp;
9040 break;
9041 }
9042 complex_expr_swap_order:
9043 /* Move the imaginary (op1) and real (op0) parts to their
9044 location. */
9045 write_complex_part (target, op1, true);
9046 write_complex_part (target, op0, false);
9047
9048 return target;
9049 }
9050 break;
9051 case MEM:
9052 temp = adjust_address_nv (target,
9053 GET_MODE_INNER (GET_MODE (target)), 0);
9054 if (reg_overlap_mentioned_p (temp, op1))
9055 {
9056 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9057 temp = adjust_address_nv (target, imode,
9058 GET_MODE_SIZE (imode));
9059 if (reg_overlap_mentioned_p (temp, op0))
9060 goto complex_expr_force_op1;
9061 goto complex_expr_swap_order;
9062 }
9063 break;
9064 default:
9065 if (reg_overlap_mentioned_p (target, op1))
9066 {
9067 if (reg_overlap_mentioned_p (target, op0))
9068 goto complex_expr_force_op1;
9069 goto complex_expr_swap_order;
9070 }
9071 break;
9072 }
9073
9074 /* Move the real (op0) and imaginary (op1) parts to their location. */
9075 write_complex_part (target, op0, false);
9076 write_complex_part (target, op1, true);
9077
9078 return target;
9079
9080 case WIDEN_SUM_EXPR:
9081 {
9082 tree oprnd0 = treeop0;
9083 tree oprnd1 = treeop1;
9084
9085 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9086 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9087 target, unsignedp);
9088 return target;
9089 }
9090
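/* Reduce all elements of the vector operand to a single scalar result
using the requested operation. */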
9091 case REDUC_MAX_EXPR:
9092 case REDUC_MIN_EXPR:
9093 case REDUC_PLUS_EXPR:
9094 {
9095 op0 = expand_normal (treeop0);
9096 this_optab = optab_for_tree_code (code, type, optab_default);
9097 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9098
9099 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9100 {
9101 struct expand_operand ops[2];
9102 enum insn_code icode = optab_handler (this_optab, vec_mode);
9103
9104 create_output_operand (&ops[0], target, mode);
9105 create_input_operand (&ops[1], op0, vec_mode);
9106 if (maybe_expand_insn (icode, 2, ops))
9107 {
9108 target = ops[0].value;
9109 if (GET_MODE (target) != mode)
9110 return gen_lowpart (tmode, target);
9111 return target;
9112 }
9113 }
9114 /* Fall back to optab with vector result, and then extract scalar. */
9115 this_optab = scalar_reduc_to_vector (this_optab, type);
9116 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9117 gcc_assert (temp);
9118 /* The tree code produces a scalar result, but (somewhat by convention)
9119 the optab produces a vector with the result in element 0 if
9120 little-endian, or element N-1 if big-endian. So pull the scalar
9121 result out of that element. */
9122 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9123 int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9124 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9125 target, mode, mode);
9126 gcc_assert (temp);
9127 return temp;
9128 }
9129
9130 case VEC_RSHIFT_EXPR:
9131 {
9132 target = expand_vec_shift_expr (ops, target);
9133 return target;
9134 }
9135
9136 case VEC_UNPACK_HI_EXPR:
9137 case VEC_UNPACK_LO_EXPR:
9138 {
9139 op0 = expand_normal (treeop0);
9140 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9141 target, unsignedp);
9142 gcc_assert (temp);
9143 return temp;
9144 }
9145
9146 case VEC_UNPACK_FLOAT_HI_EXPR:
9147 case VEC_UNPACK_FLOAT_LO_EXPR:
9148 {
9149 op0 = expand_normal (treeop0);
9150 /* The signedness is determined from the input operand. */
9151 temp = expand_widen_pattern_expr
9152 (ops, op0, NULL_RTX, NULL_RTX,
9153 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9154
9155 gcc_assert (temp);
9156 return temp;
9157 }
9158
9159 case VEC_WIDEN_MULT_HI_EXPR:
9160 case VEC_WIDEN_MULT_LO_EXPR:
9161 case VEC_WIDEN_MULT_EVEN_EXPR:
9162 case VEC_WIDEN_MULT_ODD_EXPR:
9163 case VEC_WIDEN_LSHIFT_HI_EXPR:
9164 case VEC_WIDEN_LSHIFT_LO_EXPR:
9165 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9166 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9167 target, unsignedp);
9168 gcc_assert (target);
9169 return target;
9170
9171 case VEC_PACK_TRUNC_EXPR:
9172 case VEC_PACK_SAT_EXPR:
9173 case VEC_PACK_FIX_TRUNC_EXPR:
9174 mode = TYPE_MODE (TREE_TYPE (treeop0));
9175 goto binop;
9176
9177 case VEC_PERM_EXPR:
9178 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9179 op2 = expand_normal (treeop2);
9180
9181 /* Careful here: if the target doesn't support integral vector modes,
9182 a constant selection vector could wind up smooshed into a normal
9183 integral constant. */
9184 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9185 {
9186 tree sel_type = TREE_TYPE (treeop2);
9187 machine_mode vmode
9188 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9189 TYPE_VECTOR_SUBPARTS (sel_type));
9190 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9191 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9192 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9193 }
9194 else
9195 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9196
9197 temp = expand_vec_perm (mode, op0, op1, op2, target);
9198 gcc_assert (temp);
9199 return temp;
9200
9201 case DOT_PROD_EXPR:
9202 {
9203 tree oprnd0 = treeop0;
9204 tree oprnd1 = treeop1;
9205 tree oprnd2 = treeop2;
9206 rtx op2;
9207
9208 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9209 op2 = expand_normal (oprnd2);
9210 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9211 target, unsignedp);
9212 return target;
9213 }
9214
9215 case SAD_EXPR:
9216 {
9217 tree oprnd0 = treeop0;
9218 tree oprnd1 = treeop1;
9219 tree oprnd2 = treeop2;
9220 rtx op2;
9221
9222 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9223 op2 = expand_normal (oprnd2);
9224 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9225 target, unsignedp);
9226 return target;
9227 }
9228
9229 case REALIGN_LOAD_EXPR:
9230 {
9231 tree oprnd0 = treeop0;
9232 tree oprnd1 = treeop1;
9233 tree oprnd2 = treeop2;
9234 rtx op2;
9235
9236 this_optab = optab_for_tree_code (code, type, optab_default);
9237 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9238 op2 = expand_normal (oprnd2);
9239 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9240 target, unsignedp);
9241 gcc_assert (temp);
9242 return temp;
9243 }
9244
9245 case COND_EXPR:
9246 /* A COND_EXPR with its type being VOID_TYPE represents a
9247 conditional jump and is handled in
9248 expand_gimple_cond_expr. */
9249 gcc_assert (!VOID_TYPE_P (type));
9250
9251 /* Note that COND_EXPRs whose type is a structure or union
9252 are required to be constructed to contain assignments of
9253 a temporary variable, so that we can evaluate them here
9254 for side effect only. If type is void, we must do likewise. */
9255
9256 gcc_assert (!TREE_ADDRESSABLE (type)
9257 && !ignore
9258 && TREE_TYPE (treeop1) != void_type_node
9259 && TREE_TYPE (treeop2) != void_type_node);
9260
9261 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9262 if (temp)
9263 return temp;
9264
9265 /* If we are not to produce a result, we have no target. Otherwise,
9266 if a target was specified use it; it will not be used as an
9267 intermediate target unless it is safe. If no target, use a
9268 temporary. */
9269
9270 if (modifier != EXPAND_STACK_PARM
9271 && original_target
9272 && safe_from_p (original_target, treeop0, 1)
9273 && GET_MODE (original_target) == mode
9274 && !MEM_P (original_target))
9275 temp = original_target;
9276 else
9277 temp = assign_temp (type, 0, 1);
9278
9279 do_pending_stack_adjust ();
9280 NO_DEFER_POP;
9281 op0 = gen_label_rtx ();
9282 op1 = gen_label_rtx ();
9283 jumpifnot (treeop0, op0, -1);
9284 store_expr (treeop1, temp,
9285 modifier == EXPAND_STACK_PARM,
9286 false);
9287
9288 emit_jump_insn (gen_jump (op1));
9289 emit_barrier ();
9290 emit_label (op0);
9291 store_expr (treeop2, temp,
9292 modifier == EXPAND_STACK_PARM,
9293 false);
9294
9295 emit_label (op1);
9296 OK_DEFER_POP;
9297 return temp;
9298
9299 case VEC_COND_EXPR:
9300 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9301 return target;
9302
9303 default:
9304 gcc_unreachable ();
9305 }
9306
9307 /* Here to do an ordinary binary operator. */
9308 binop:
9309 expand_operands (treeop0, treeop1,
9310 subtarget, &op0, &op1, EXPAND_NORMAL);
9311 binop2:
9312 this_optab = optab_for_tree_code (code, type, optab_default);
9313 binop3:
9314 if (modifier == EXPAND_STACK_PARM)
9315 target = 0;
9316 temp = expand_binop (mode, this_optab, op0, op1, target,
9317 unsignedp, OPTAB_LIB_WIDEN);
9318 gcc_assert (temp);
9319 /* Bitwise operations do not need bitfield reduction as we expect their
9320 operands to be properly truncated. */
9321 if (code == BIT_XOR_EXPR
9322 || code == BIT_AND_EXPR
9323 || code == BIT_IOR_EXPR)
9324 return temp;
9325 return REDUCE_BIT_FIELD (temp);
9326 }
9327 #undef REDUCE_BIT_FIELD
9328
9329
9330 /* Return TRUE if expression STMT is suitable for replacement.
9331 Never consider memory loads as replaceable, because those don't ever lead
9332 into constant expressions. */
9333
9334 static bool
9335 stmt_is_replaceable_p (gimple stmt)
9336 {
9337 if (ssa_is_replaceable_p (stmt))
9338 {
9339 /* Don't move around loads. */
9340 if (!gimple_assign_single_p (stmt)
9341 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9342 return true;
9343 }
9344 return false;
9345 }
9346
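/* Subroutine of expand_expr_real. Expand constants, declarations and
reference-like trees. TARGET and TMODE are only suggestions for the
place and mode of the result, MODIFIER gives the expansion context,
*ALT_RTL may be set to an alternate rtl for the value, and
INNER_REFERENCE_P is true when EXP is expanded as the inner part of a
larger reference, in which case handling of misaligned memory is left
to the outer expansion. */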
9347 rtx
9348 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9349 enum expand_modifier modifier, rtx *alt_rtl,
9350 bool inner_reference_p)
9351 {
9352 rtx op0, op1, temp, decl_rtl;
9353 tree type;
9354 int unsignedp;
9355 machine_mode mode;
9356 enum tree_code code = TREE_CODE (exp);
9357 rtx subtarget, original_target;
9358 int ignore;
9359 tree context;
9360 bool reduce_bit_field;
9361 location_t loc = EXPR_LOCATION (exp);
9362 struct separate_ops ops;
9363 tree treeop0, treeop1, treeop2;
9364 tree ssa_name = NULL_TREE;
9365 gimple g;
9366
9367 type = TREE_TYPE (exp);
9368 mode = TYPE_MODE (type);
9369 unsignedp = TYPE_UNSIGNED (type);
9370
9371 treeop0 = treeop1 = treeop2 = NULL_TREE;
9372 if (!VL_EXP_CLASS_P (exp))
9373 switch (TREE_CODE_LENGTH (code))
9374 {
9375 default:
9376 case 3: treeop2 = TREE_OPERAND (exp, 2);
9377 case 2: treeop1 = TREE_OPERAND (exp, 1);
9378 case 1: treeop0 = TREE_OPERAND (exp, 0);
9379 case 0: break;
9380 }
9381 ops.code = code;
9382 ops.type = type;
9383 ops.op0 = treeop0;
9384 ops.op1 = treeop1;
9385 ops.op2 = treeop2;
9386 ops.location = loc;
9387
9388 ignore = (target == const0_rtx
9389 || ((CONVERT_EXPR_CODE_P (code)
9390 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9391 && TREE_CODE (type) == VOID_TYPE));
9392
9393 /* An operation in what may be a bit-field type needs the
9394 result to be reduced to the precision of the bit-field type,
9395 which is narrower than that of the type's mode. */
9396 reduce_bit_field = (!ignore
9397 && INTEGRAL_TYPE_P (type)
9398 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9399
9400 /* If we are going to ignore this result, we need only do something
9401 if there is a side-effect somewhere in the expression. If there
9402 is, short-circuit the most common cases here. Note that we must
9403 not call expand_expr with anything but const0_rtx in case this
9404 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9405
9406 if (ignore)
9407 {
9408 if (! TREE_SIDE_EFFECTS (exp))
9409 return const0_rtx;
9410
9411 /* Ensure we reference a volatile object even if value is ignored, but
9412 don't do this if all we are doing is taking its address. */
9413 if (TREE_THIS_VOLATILE (exp)
9414 && TREE_CODE (exp) != FUNCTION_DECL
9415 && mode != VOIDmode && mode != BLKmode
9416 && modifier != EXPAND_CONST_ADDRESS)
9417 {
9418 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9419 if (MEM_P (temp))
9420 copy_to_reg (temp);
9421 return const0_rtx;
9422 }
9423
9424 if (TREE_CODE_CLASS (code) == tcc_unary
9425 || code == BIT_FIELD_REF
9426 || code == COMPONENT_REF
9427 || code == INDIRECT_REF)
9428 return expand_expr (treeop0, const0_rtx, VOIDmode,
9429 modifier);
9430
9431 else if (TREE_CODE_CLASS (code) == tcc_binary
9432 || TREE_CODE_CLASS (code) == tcc_comparison
9433 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9434 {
9435 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9436 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9437 return const0_rtx;
9438 }
9439
9440 target = 0;
9441 }
9442
9443 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9444 target = 0;
9445
9446 /* Use subtarget as the target for operand 0 of a binary operation. */
9447 subtarget = get_subtarget (target);
9448 original_target = target;
9449
9450 switch (code)
9451 {
9452 case LABEL_DECL:
9453 {
9454 tree function = decl_function_context (exp);
9455
9456 temp = label_rtx (exp);
9457 temp = gen_rtx_LABEL_REF (Pmode, temp);
9458
9459 if (function != current_function_decl
9460 && function != 0)
9461 LABEL_REF_NONLOCAL_P (temp) = 1;
9462
9463 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9464 return temp;
9465 }
9466
9467 case SSA_NAME:
9468 /* ??? ivopts calls the expander without any preparation from
9469 out-of-ssa. So fake instructions as if this were an access to the
9470 base variable. This unnecessarily allocates a pseudo; see how we can
9471 reuse it if partition base vars have it set already. */
9472 if (!currently_expanding_to_rtl)
9473 {
9474 tree var = SSA_NAME_VAR (exp);
9475 if (var && DECL_RTL_SET_P (var))
9476 return DECL_RTL (var);
9477 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9478 LAST_VIRTUAL_REGISTER + 1);
9479 }
9480
9481 g = get_gimple_for_ssa_name (exp);
9482 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9483 if (g == NULL
9484 && modifier == EXPAND_INITIALIZER
9485 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9486 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9487 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9488 g = SSA_NAME_DEF_STMT (exp);
9489 if (g)
9490 {
9491 rtx r;
9492 ops.code = gimple_assign_rhs_code (g);
9493 switch (get_gimple_rhs_class (ops.code))
9494 {
9495 case GIMPLE_TERNARY_RHS:
9496 ops.op2 = gimple_assign_rhs3 (g);
9497 /* Fallthru */
9498 case GIMPLE_BINARY_RHS:
9499 ops.op1 = gimple_assign_rhs2 (g);
9500 /* Fallthru */
9501 case GIMPLE_UNARY_RHS:
9502 ops.op0 = gimple_assign_rhs1 (g);
9503 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9504 ops.location = gimple_location (g);
9505 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9506 break;
9507 case GIMPLE_SINGLE_RHS:
9508 {
9509 location_t saved_loc = curr_insn_location ();
9510 set_curr_insn_location (gimple_location (g));
9511 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9512 tmode, modifier, NULL, inner_reference_p);
9513 set_curr_insn_location (saved_loc);
9514 break;
9515 }
9516 default:
9517 gcc_unreachable ();
9518 }
9519 if (REG_P (r) && !REG_EXPR (r))
9520 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9521 return r;
9522 }
9523
9524 ssa_name = exp;
9525 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9526 exp = SSA_NAME_VAR (ssa_name);
9527 goto expand_decl_rtl;
9528
9529 case PARM_DECL:
9530 case VAR_DECL:
9531 /* If a static var's type was incomplete when the decl was written,
9532 but the type is complete now, lay out the decl now. */
9533 if (DECL_SIZE (exp) == 0
9534 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9535 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9536 layout_decl (exp, 0);
9537
9538 /* ... fall through ... */
9539
9540 case FUNCTION_DECL:
9541 case RESULT_DECL:
9542 decl_rtl = DECL_RTL (exp);
9543 expand_decl_rtl:
9544 gcc_assert (decl_rtl);
9545 decl_rtl = copy_rtx (decl_rtl);
9546 /* Record writes to register variables. */
9547 if (modifier == EXPAND_WRITE
9548 && REG_P (decl_rtl)
9549 && HARD_REGISTER_P (decl_rtl))
9550 add_to_hard_reg_set (&crtl->asm_clobbers,
9551 GET_MODE (decl_rtl), REGNO (decl_rtl));
9552
9553 /* Ensure the variable is marked as used even if it doesn't go through
9554 a parser. If it hasn't been used yet, write out an external
9555 definition. */
9556 TREE_USED (exp) = 1;
9557
9558 /* Show we haven't gotten RTL for this yet. */
9559 temp = 0;
9560
9561 /* Variables inherited from containing functions should have
9562 been lowered by this point. */
9563 context = decl_function_context (exp);
9564 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9565 || context == current_function_decl
9566 || TREE_STATIC (exp)
9567 || DECL_EXTERNAL (exp)
9568 /* ??? C++ creates functions that are not TREE_STATIC. */
9569 || TREE_CODE (exp) == FUNCTION_DECL);
9570
9571 /* This is the case of an array whose size is to be determined
9572 from its initializer, while the initializer is still being parsed.
9573 ??? We aren't parsing while expanding anymore. */
9574
9575 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9576 temp = validize_mem (decl_rtl);
9577
9578 /* If DECL_RTL is memory, we are in the normal case and the
9579 address is not valid, get the address into a register. */
9580
9581 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9582 {
9583 if (alt_rtl)
9584 *alt_rtl = decl_rtl;
9585 decl_rtl = use_anchored_address (decl_rtl);
9586 if (modifier != EXPAND_CONST_ADDRESS
9587 && modifier != EXPAND_SUM
9588 && !memory_address_addr_space_p (DECL_MODE (exp),
9589 XEXP (decl_rtl, 0),
9590 MEM_ADDR_SPACE (decl_rtl)))
9591 temp = replace_equiv_address (decl_rtl,
9592 copy_rtx (XEXP (decl_rtl, 0)));
9593 }
9594
9595 /* If we got something, return it. But first, set the alignment
9596 if the address is a register. */
9597 if (temp != 0)
9598 {
9599 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9600 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9601
9602 return temp;
9603 }
9604
9605 /* If the mode of DECL_RTL does not match that of the decl,
9606 there are two cases: we are dealing with a BLKmode value
9607 that is returned in a register, or we are dealing with
9608 a promoted value. In the latter case, return a SUBREG
9609 of the wanted mode, but mark it so that we know that it
9610 was already extended. */
9611 if (REG_P (decl_rtl)
9612 && DECL_MODE (exp) != BLKmode
9613 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9614 {
9615 machine_mode pmode;
9616
9617 /* Get the signedness to be used for this variable. Ensure we get
9618 the same mode we got when the variable was declared. */
9619 if (code == SSA_NAME
9620 && (g = SSA_NAME_DEF_STMT (ssa_name))
9621 && gimple_code (g) == GIMPLE_CALL
9622 && !gimple_call_internal_p (g))
9623 pmode = promote_function_mode (type, mode, &unsignedp,
9624 gimple_call_fntype (g),
9625 2);
9626 else
9627 pmode = promote_decl_mode (exp, &unsignedp);
9628 gcc_assert (GET_MODE (decl_rtl) == pmode);
9629
9630 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9631 SUBREG_PROMOTED_VAR_P (temp) = 1;
9632 SUBREG_PROMOTED_SET (temp, unsignedp);
9633 return temp;
9634 }
9635
9636 return decl_rtl;
9637
9638 case INTEGER_CST:
9639 /* Given that TYPE_PRECISION (type) is not always equal to
9640 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9641 the former to the latter according to the signedness of the
9642 type. */
9643 temp = immed_wide_int_const (wide_int::from
9644 (exp,
9645 GET_MODE_PRECISION (TYPE_MODE (type)),
9646 TYPE_SIGN (type)),
9647 TYPE_MODE (type));
9648 return temp;
9649
9650 case VECTOR_CST:
9651 {
9652 tree tmp = NULL_TREE;
9653 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9654 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9655 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9656 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9657 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9658 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9659 return const_vector_from_tree (exp);
9660 if (GET_MODE_CLASS (mode) == MODE_INT)
9661 {
9662 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9663 if (type_for_mode)
9664 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9665 }
9666 if (!tmp)
9667 {
9668 vec<constructor_elt, va_gc> *v;
9669 unsigned i;
9670 vec_alloc (v, VECTOR_CST_NELTS (exp));
9671 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9672 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9673 tmp = build_constructor (type, v);
9674 }
9675 return expand_expr (tmp, ignore ? const0_rtx : target,
9676 tmode, modifier);
9677 }
9678
9679 case CONST_DECL:
9680 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9681
9682 case REAL_CST:
9683 /* If optimized, generate immediate CONST_DOUBLE
9684 which will be turned into memory by reload if necessary.
9685
9686 We used to force a register so that loop.c could see it. But
9687 this does not allow gen_* patterns to perform optimizations with
9688 the constants. It also produces two insns in cases like "x = 1.0;".
9689 On most machines, floating-point constants are not permitted in
9690 many insns, so we'd end up copying it to a register in any case.
9691
9692 Now, we do the copying in expand_binop, if appropriate. */
9693 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9694 TYPE_MODE (TREE_TYPE (exp)));
9695
9696 case FIXED_CST:
9697 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9698 TYPE_MODE (TREE_TYPE (exp)));
9699
9700 case COMPLEX_CST:
9701 /* Handle evaluating a complex constant in a CONCAT target. */
9702 if (original_target && GET_CODE (original_target) == CONCAT)
9703 {
9704 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9705 rtx rtarg, itarg;
9706
9707 rtarg = XEXP (original_target, 0);
9708 itarg = XEXP (original_target, 1);
9709
9710 /* Move the real and imaginary parts separately. */
9711 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9712 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9713
9714 if (op0 != rtarg)
9715 emit_move_insn (rtarg, op0);
9716 if (op1 != itarg)
9717 emit_move_insn (itarg, op1);
9718
9719 return original_target;
9720 }
9721
9722 /* ... fall through ... */
9723
9724 case STRING_CST:
9725 temp = expand_expr_constant (exp, 1, modifier);
9726
9727 /* temp contains a constant address.
9728 On RISC machines where a constant address isn't valid,
9729 make some insns to get that address into a register. */
9730 if (modifier != EXPAND_CONST_ADDRESS
9731 && modifier != EXPAND_INITIALIZER
9732 && modifier != EXPAND_SUM
9733 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9734 MEM_ADDR_SPACE (temp)))
9735 return replace_equiv_address (temp,
9736 copy_rtx (XEXP (temp, 0)));
9737 return temp;
9738
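/* The value of a SAVE_EXPR is computed on its first expansion and
recorded on an artificial VAR_DECL, so that later references reuse the
same rtl. */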
9739 case SAVE_EXPR:
9740 {
9741 tree val = treeop0;
9742 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9743 inner_reference_p);
9744
9745 if (!SAVE_EXPR_RESOLVED_P (exp))
9746 {
9747 /* We can indeed still hit this case, typically via builtin
9748 expanders calling save_expr immediately before expanding
9749 something. Assume this means that we only have to deal
9750 with non-BLKmode values. */
9751 gcc_assert (GET_MODE (ret) != BLKmode);
9752
9753 val = build_decl (curr_insn_location (),
9754 VAR_DECL, NULL, TREE_TYPE (exp));
9755 DECL_ARTIFICIAL (val) = 1;
9756 DECL_IGNORED_P (val) = 1;
9757 treeop0 = val;
9758 TREE_OPERAND (exp, 0) = treeop0;
9759 SAVE_EXPR_RESOLVED_P (exp) = 1;
9760
9761 if (!CONSTANT_P (ret))
9762 ret = copy_to_reg (ret);
9763 SET_DECL_RTL (val, ret);
9764 }
9765
9766 return ret;
9767 }
9768
9769
9770 case CONSTRUCTOR:
9771 /* If we don't need the result, just ensure we evaluate any
9772 subexpressions. */
9773 if (ignore)
9774 {
9775 unsigned HOST_WIDE_INT idx;
9776 tree value;
9777
9778 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9779 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9780
9781 return const0_rtx;
9782 }
9783
9784 return expand_constructor (exp, target, modifier, false);
9785
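/* A memory reference whose address is already in a form matching the
target's addressing modes. Compute the address, build the MEM, and use
a movmisalign pattern for under-aligned non-BLKmode loads when the
target provides one. */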
9786 case TARGET_MEM_REF:
9787 {
9788 addr_space_t as
9789 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9790 enum insn_code icode;
9791 unsigned int align;
9792
9793 op0 = addr_for_mem_ref (exp, as, true);
9794 op0 = memory_address_addr_space (mode, op0, as);
9795 temp = gen_rtx_MEM (mode, op0);
9796 set_mem_attributes (temp, exp, 0);
9797 set_mem_addr_space (temp, as);
9798 align = get_object_alignment (exp);
9799 if (modifier != EXPAND_WRITE
9800 && modifier != EXPAND_MEMORY
9801 && mode != BLKmode
9802 && align < GET_MODE_ALIGNMENT (mode)
9803 /* If the target does not have special handling for unaligned
9804 loads of this mode, it can use regular moves for them. */
9805 && ((icode = optab_handler (movmisalign_optab, mode))
9806 != CODE_FOR_nothing))
9807 {
9808 struct expand_operand ops[2];
9809
9810 /* We've already validated the memory, and we're creating a
9811 new pseudo destination. The predicates really can't fail,
9812 nor can the generator. */
9813 create_output_operand (&ops[0], NULL_RTX, mode);
9814 create_fixed_operand (&ops[1], temp);
9815 expand_insn (icode, 2, ops);
9816 temp = ops[0].value;
9817 }
9818 return temp;
9819 }
9820
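/* A dereference of a pointer, possibly with a constant offset. If the
base object does not actually live in memory, access it through a
VIEW_CONVERT_EXPR, a BIT_FIELD_REF or a stack temporary; otherwise form
the address, apply the offset, and build the MEM, coping with
volatility and under-aligned accesses. */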
9821 case MEM_REF:
9822 {
9823 addr_space_t as
9824 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9825 machine_mode address_mode;
9826 tree base = TREE_OPERAND (exp, 0);
9827 gimple def_stmt;
9828 enum insn_code icode;
9829 unsigned align;
9830 /* Handle expansion of non-aliased memory with non-BLKmode. That
9831 might end up in a register. */
9832 if (mem_ref_refers_to_non_mem_p (exp))
9833 {
9834 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9835 base = TREE_OPERAND (base, 0);
9836 if (offset == 0
9837 && tree_fits_uhwi_p (TYPE_SIZE (type))
9838 && (GET_MODE_BITSIZE (DECL_MODE (base))
9839 == tree_to_uhwi (TYPE_SIZE (type))))
9840 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9841 target, tmode, modifier);
9842 if (TYPE_MODE (type) == BLKmode)
9843 {
9844 temp = assign_stack_temp (DECL_MODE (base),
9845 GET_MODE_SIZE (DECL_MODE (base)));
9846 store_expr (base, temp, 0, false);
9847 temp = adjust_address (temp, BLKmode, offset);
9848 set_mem_size (temp, int_size_in_bytes (type));
9849 return temp;
9850 }
9851 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9852 bitsize_int (offset * BITS_PER_UNIT));
9853 return expand_expr (exp, target, tmode, modifier);
9854 }
9855 address_mode = targetm.addr_space.address_mode (as);
9856 base = TREE_OPERAND (exp, 0);
9857 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9858 {
9859 tree mask = gimple_assign_rhs2 (def_stmt);
9860 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9861 gimple_assign_rhs1 (def_stmt), mask);
9862 TREE_OPERAND (exp, 0) = base;
9863 }
9864 align = get_object_alignment (exp);
9865 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9866 op0 = memory_address_addr_space (mode, op0, as);
9867 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9868 {
9869 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9870 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9871 op0 = memory_address_addr_space (mode, op0, as);
9872 }
9873 temp = gen_rtx_MEM (mode, op0);
9874 set_mem_attributes (temp, exp, 0);
9875 set_mem_addr_space (temp, as);
9876 if (TREE_THIS_VOLATILE (exp))
9877 MEM_VOLATILE_P (temp) = 1;
9878 if (modifier != EXPAND_WRITE
9879 && modifier != EXPAND_MEMORY
9880 && !inner_reference_p
9881 && mode != BLKmode
9882 && align < GET_MODE_ALIGNMENT (mode))
9883 {
9884 if ((icode = optab_handler (movmisalign_optab, mode))
9885 != CODE_FOR_nothing)
9886 {
9887 struct expand_operand ops[2];
9888
9889 /* We've already validated the memory, and we're creating a
9890 new pseudo destination. The predicates really can't fail,
9891 nor can the generator. */
9892 create_output_operand (&ops[0], NULL_RTX, mode);
9893 create_fixed_operand (&ops[1], temp);
9894 expand_insn (icode, 2, ops);
9895 temp = ops[0].value;
9896 }
9897 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9898 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9899 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9900 (modifier == EXPAND_STACK_PARM
9901 ? NULL_RTX : target),
9902 mode, mode);
9903 }
9904 return temp;
9905 }
9906
9907 case ARRAY_REF:
9908
9909 {
9910 tree array = treeop0;
9911 tree index = treeop1;
9912 tree init;
9913
9914 /* Fold an expression like: "foo"[2].
9915 This is not done in fold so it won't happen inside &.
9916 Don't fold if this is for wide characters since it's too
9917 difficult to do correctly and this is a very rare case. */
9918
9919 if (modifier != EXPAND_CONST_ADDRESS
9920 && modifier != EXPAND_INITIALIZER
9921 && modifier != EXPAND_MEMORY)
9922 {
9923 tree t = fold_read_from_constant_string (exp);
9924
9925 if (t)
9926 return expand_expr (t, target, tmode, modifier);
9927 }
9928
9929 /* If this is a constant index into a constant array,
9930 just get the value from the array. Handle both the cases when
9931 we have an explicit constructor and when our operand is a variable
9932 that was declared const. */
9933
9934 if (modifier != EXPAND_CONST_ADDRESS
9935 && modifier != EXPAND_INITIALIZER
9936 && modifier != EXPAND_MEMORY
9937 && TREE_CODE (array) == CONSTRUCTOR
9938 && ! TREE_SIDE_EFFECTS (array)
9939 && TREE_CODE (index) == INTEGER_CST)
9940 {
9941 unsigned HOST_WIDE_INT ix;
9942 tree field, value;
9943
9944 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9945 field, value)
9946 if (tree_int_cst_equal (field, index))
9947 {
9948 if (!TREE_SIDE_EFFECTS (value))
9949 return expand_expr (fold (value), target, tmode, modifier);
9950 break;
9951 }
9952 }
9953
9954 else if (optimize >= 1
9955 && modifier != EXPAND_CONST_ADDRESS
9956 && modifier != EXPAND_INITIALIZER
9957 && modifier != EXPAND_MEMORY
9958 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9959 && TREE_CODE (index) == INTEGER_CST
9960 && (TREE_CODE (array) == VAR_DECL
9961 || TREE_CODE (array) == CONST_DECL)
9962 && (init = ctor_for_folding (array)) != error_mark_node)
9963 {
9964 if (init == NULL_TREE)
9965 {
9966 tree value = build_zero_cst (type);
9967 if (TREE_CODE (value) == CONSTRUCTOR)
9968 {
9969 /* If VALUE is a CONSTRUCTOR, this optimization is only
9970 useful if this doesn't store the CONSTRUCTOR into
9971 memory. If it does, it is more efficient to just
9972 load the data from the array directly. */
9973 rtx ret = expand_constructor (value, target,
9974 modifier, true);
9975 if (ret == NULL_RTX)
9976 value = NULL_TREE;
9977 }
9978
9979 if (value)
9980 return expand_expr (value, target, tmode, modifier);
9981 }
9982 else if (TREE_CODE (init) == CONSTRUCTOR)
9983 {
9984 unsigned HOST_WIDE_INT ix;
9985 tree field, value;
9986
9987 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9988 field, value)
9989 if (tree_int_cst_equal (field, index))
9990 {
9991 if (TREE_SIDE_EFFECTS (value))
9992 break;
9993
9994 if (TREE_CODE (value) == CONSTRUCTOR)
9995 {
9996 /* If VALUE is a CONSTRUCTOR, this
9997 optimization is only useful if
9998 this doesn't store the CONSTRUCTOR
9999 into memory. If it does, it is more
10000 efficient to just load the data from
10001 the array directly. */
10002 rtx ret = expand_constructor (value, target,
10003 modifier, true);
10004 if (ret == NULL_RTX)
10005 break;
10006 }
10007
10008 return
10009 expand_expr (fold (value), target, tmode, modifier);
10010 }
10011 }
10012 else if (TREE_CODE (init) == STRING_CST)
10013 {
10014 tree low_bound = array_ref_low_bound (exp);
10015 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10016
10017 /* Optimize the special case of a zero lower bound.
10018
10019 We convert the lower bound to sizetype to avoid problems
10020 with constant folding. E.g. suppose the lower bound is
10021 1 and its mode is QI. Without the conversion
10022 (ARRAY + (INDEX - (unsigned char)1))
10023 becomes
10024 (ARRAY + (-(unsigned char)1) + INDEX)
10025 which becomes
10026 (ARRAY + 255 + INDEX). Oops! */
10027 if (!integer_zerop (low_bound))
10028 index1 = size_diffop_loc (loc, index1,
10029 fold_convert_loc (loc, sizetype,
10030 low_bound));
10031
10032 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10033 {
10034 tree type = TREE_TYPE (TREE_TYPE (init));
10035 machine_mode mode = TYPE_MODE (type);
10036
10037 if (GET_MODE_CLASS (mode) == MODE_INT
10038 && GET_MODE_SIZE (mode) == 1)
10039 return gen_int_mode (TREE_STRING_POINTER (init)
10040 [TREE_INT_CST_LOW (index1)],
10041 mode);
10042 }
10043 }
10044 }
10045 }
10046 goto normal_inner_ref;
10047
10048 case COMPONENT_REF:
10049 /* If the operand is a CONSTRUCTOR, we can just extract the
10050 appropriate field if it is present. */
10051 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10052 {
10053 unsigned HOST_WIDE_INT idx;
10054 tree field, value;
10055
10056 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10057 idx, field, value)
10058 if (field == treeop1
10059 /* We can normally use the value of the field in the
10060 CONSTRUCTOR. However, if this is a bitfield in
10061 an integral mode that we can fit in a HOST_WIDE_INT,
10062 we must mask only the number of bits in the bitfield,
10063 since this is done implicitly by the constructor. If
10064 the bitfield does not meet either of those conditions,
10065 we can't do this optimization. */
10066 && (! DECL_BIT_FIELD (field)
10067 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10068 && (GET_MODE_PRECISION (DECL_MODE (field))
10069 <= HOST_BITS_PER_WIDE_INT))))
10070 {
10071 if (DECL_BIT_FIELD (field)
10072 && modifier == EXPAND_STACK_PARM)
10073 target = 0;
10074 op0 = expand_expr (value, target, tmode, modifier);
10075 if (DECL_BIT_FIELD (field))
10076 {
10077 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10078 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10079
10080 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10081 {
10082 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10083 imode);
10084 op0 = expand_and (imode, op0, op1, target);
10085 }
10086 else
10087 {
10088 int count = GET_MODE_PRECISION (imode) - bitsize;
10089
10090 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10091 target, 0);
10092 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10093 target, 0);
10094 }
10095 }
10096
10097 return op0;
10098 }
10099 }
10100 goto normal_inner_ref;
10101
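/* All remaining component references are decomposed into a base object
plus bit position, size and offset; the base is expanded and the piece
is then either addressed directly or extracted as a bit-field. */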
10102 case BIT_FIELD_REF:
10103 case ARRAY_RANGE_REF:
10104 normal_inner_ref:
10105 {
10106 machine_mode mode1, mode2;
10107 HOST_WIDE_INT bitsize, bitpos;
10108 tree offset;
10109 int volatilep = 0, must_force_mem;
10110 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10111 &mode1, &unsignedp, &volatilep, true);
10112 rtx orig_op0, memloc;
10113 bool mem_attrs_from_type = false;
10114
10115 /* If we got back the original object, something is wrong. Perhaps
10116 we are evaluating an expression too early. In any event, don't
10117 infinitely recurse. */
10118 gcc_assert (tem != exp);
10119
10120 /* If TEM's type is a union of variable size, pass TARGET to the inner
10121 computation, since it will need a temporary and TARGET is known
10122 to suffice. This occurs in unchecked conversion in Ada. */
10123 orig_op0 = op0
10124 = expand_expr_real (tem,
10125 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10126 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10127 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10128 != INTEGER_CST)
10129 && modifier != EXPAND_STACK_PARM
10130 ? target : NULL_RTX),
10131 VOIDmode,
10132 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10133 NULL, true);
10134
10135 /* If the field has a mode, we want to access it in the
10136 field's mode, not the computed mode.
10137 If a MEM has VOIDmode (external with incomplete type),
10138 use BLKmode for it instead. */
10139 if (MEM_P (op0))
10140 {
10141 if (mode1 != VOIDmode)
10142 op0 = adjust_address (op0, mode1, 0);
10143 else if (GET_MODE (op0) == VOIDmode)
10144 op0 = adjust_address (op0, BLKmode, 0);
10145 }
10146
10147 mode2
10148 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10149
10150 /* If we have either an offset, a BLKmode result, or a reference
10151 outside the underlying object, we must force it to memory.
10152 Such a case can occur in Ada if we have unchecked conversion
10153 of an expression from a scalar type to an aggregate type or
10154 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10155 passed a partially uninitialized object or a view-conversion
10156 to a larger size. */
10157 must_force_mem = (offset
10158 || mode1 == BLKmode
10159 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10160
10161 /* Handle CONCAT first. */
10162 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10163 {
10164 if (bitpos == 0
10165 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10166 return op0;
10167 if (bitpos == 0
10168 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10169 && bitsize)
10170 {
10171 op0 = XEXP (op0, 0);
10172 mode2 = GET_MODE (op0);
10173 }
10174 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10175 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10176 && bitpos
10177 && bitsize)
10178 {
10179 op0 = XEXP (op0, 1);
10180 bitpos = 0;
10181 mode2 = GET_MODE (op0);
10182 }
10183 else
10184 /* Otherwise force into memory. */
10185 must_force_mem = 1;
10186 }
10187
10188 /* If this is a constant, put it in a register if it is a legitimate
10189 constant and we don't need a memory reference. */
10190 if (CONSTANT_P (op0)
10191 && mode2 != BLKmode
10192 && targetm.legitimate_constant_p (mode2, op0)
10193 && !must_force_mem)
10194 op0 = force_reg (mode2, op0);
10195
10196 /* Otherwise, if this is a constant, try to force it to the constant
10197 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10198 is a legitimate constant. */
10199 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10200 op0 = validize_mem (memloc);
10201
10202 /* Otherwise, if this is a constant or the object is not in memory
10203 and needs to be, put it there. */
10204 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10205 {
10206 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10207 emit_move_insn (memloc, op0);
10208 op0 = memloc;
10209 mem_attrs_from_type = true;
10210 }
10211
10212 if (offset)
10213 {
10214 machine_mode address_mode;
10215 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10216 EXPAND_SUM);
10217
10218 gcc_assert (MEM_P (op0));
10219
10220 address_mode = get_address_mode (op0);
10221 if (GET_MODE (offset_rtx) != address_mode)
10222 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10223
10224 /* See the comment in expand_assignment for the rationale. */
10225 if (mode1 != VOIDmode
10226 && bitpos != 0
10227 && bitsize > 0
10228 && (bitpos % bitsize) == 0
10229 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10230 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10231 {
10232 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10233 bitpos = 0;
10234 }
10235
10236 op0 = offset_address (op0, offset_rtx,
10237 highest_pow2_factor (offset));
10238 }
10239
10240 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10241 record its alignment as BIGGEST_ALIGNMENT. */
10242 if (MEM_P (op0) && bitpos == 0 && offset != 0
10243 && is_aligning_offset (offset, tem))
10244 set_mem_align (op0, BIGGEST_ALIGNMENT);
10245
10246 /* Don't forget about volatility even if this is a bitfield. */
10247 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10248 {
10249 if (op0 == orig_op0)
10250 op0 = copy_rtx (op0);
10251
10252 MEM_VOLATILE_P (op0) = 1;
10253 }
10254
10255 /* In cases where an aligned union has an unaligned object
10256 as a field, we might be extracting a BLKmode value from
10257 an integer-mode (e.g., SImode) object. Handle this case
10258 by doing the extract into an object as wide as the field
10259 (which we know to be the width of a basic mode), then
10260 storing into memory, and changing the mode to BLKmode. */
10261 if (mode1 == VOIDmode
10262 || REG_P (op0) || GET_CODE (op0) == SUBREG
10263 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10264 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10265 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10266 && modifier != EXPAND_CONST_ADDRESS
10267 && modifier != EXPAND_INITIALIZER
10268 && modifier != EXPAND_MEMORY)
10269 /* If the bitfield is volatile and the bitsize
10270 is narrower than the access size of the bitfield,
10271 we need to extract bitfields from the access. */
10272 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10273 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10274 && mode1 != BLKmode
10275 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10276 /* If the field isn't aligned enough to fetch as a memref,
10277 fetch it as a bit field. */
10278 || (mode1 != BLKmode
10279 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10280 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10281 || (MEM_P (op0)
10282 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10283 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10284 && modifier != EXPAND_MEMORY
10285 && ((modifier == EXPAND_CONST_ADDRESS
10286 || modifier == EXPAND_INITIALIZER)
10287 ? STRICT_ALIGNMENT
10288 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10289 || (bitpos % BITS_PER_UNIT != 0)))
10290 /* If the type and the field are a constant size and the
10291 size of the type isn't the same size as the bitfield,
10292 we must use bitfield operations. */
10293 || (bitsize >= 0
10294 && TYPE_SIZE (TREE_TYPE (exp))
10295 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10296 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10297 bitsize)))
10298 {
10299 machine_mode ext_mode = mode;
10300
10301 if (ext_mode == BLKmode
10302 && ! (target != 0 && MEM_P (op0)
10303 && MEM_P (target)
10304 && bitpos % BITS_PER_UNIT == 0))
10305 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10306
10307 if (ext_mode == BLKmode)
10308 {
10309 if (target == 0)
10310 target = assign_temp (type, 1, 1);
10311
10312 /* ??? Unlike the similar test a few lines below, this one is
10313 very likely obsolete. */
10314 if (bitsize == 0)
10315 return target;
10316
10317 /* In this case, BITPOS must start at a byte boundary and
10318 TARGET, if specified, must be a MEM. */
10319 gcc_assert (MEM_P (op0)
10320 && (!target || MEM_P (target))
10321 && !(bitpos % BITS_PER_UNIT));
10322
10323 emit_block_move (target,
10324 adjust_address (op0, VOIDmode,
10325 bitpos / BITS_PER_UNIT),
10326 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10327 / BITS_PER_UNIT),
10328 (modifier == EXPAND_STACK_PARM
10329 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10330
10331 return target;
10332 }
10333
10334 /* If we have nothing to extract, the result will be 0 for targets
10335 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10336 return 0 for the sake of consistency, as reading a zero-sized
10337 bitfield is valid in Ada and the value is fully specified. */
10338 if (bitsize == 0)
10339 return const0_rtx;
10340
10341 op0 = validize_mem (op0);
10342
10343 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10344 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10345
10346 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10347 (modifier == EXPAND_STACK_PARM
10348 ? NULL_RTX : target),
10349 ext_mode, ext_mode);
10350
10351 /* If the result is a record type and BITSIZE is narrower than
10352 the mode of OP0, an integral mode, and this is a big endian
10353 machine, we must put the field into the high-order bits. */
10354 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10355 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10356 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10357 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10358 GET_MODE_BITSIZE (GET_MODE (op0))
10359 - bitsize, op0, 1);
10360
10361 /* If the result type is BLKmode, store the data into a temporary
10362 of the appropriate type, but with the mode corresponding to the
10363 mode for the data we have (op0's mode). */
10364 if (mode == BLKmode)
10365 {
10366 rtx new_rtx
10367 = assign_stack_temp_for_type (ext_mode,
10368 GET_MODE_BITSIZE (ext_mode),
10369 type);
10370 emit_move_insn (new_rtx, op0);
10371 op0 = copy_rtx (new_rtx);
10372 PUT_MODE (op0, BLKmode);
10373 }
10374
10375 return op0;
10376 }
10377
10378 /* If the result is BLKmode, use that to access the object
10379 now as well. */
10380 if (mode == BLKmode)
10381 mode1 = BLKmode;
10382
10383 /* Get a reference to just this component. */
10384 if (modifier == EXPAND_CONST_ADDRESS
10385 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10386 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10387 else
10388 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10389
10390 if (op0 == orig_op0)
10391 op0 = copy_rtx (op0);
10392
10393 /* If op0 is a temporary because of forcing to memory, pass only the
10394 type to set_mem_attributes so that the original expression is never
10395 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10396 if (mem_attrs_from_type)
10397 set_mem_attributes (op0, type, 0);
10398 else
10399 set_mem_attributes (op0, exp, 0);
10400
10401 if (REG_P (XEXP (op0, 0)))
10402 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10403
10404 MEM_VOLATILE_P (op0) |= volatilep;
10405 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10406 || modifier == EXPAND_CONST_ADDRESS
10407 || modifier == EXPAND_INITIALIZER)
10408 return op0;
10409
10410 if (target == 0)
10411 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10412
10413 convert_move (target, op0, unsignedp);
10414 return target;
10415 }
10416
10417 case OBJ_TYPE_REF:
10418 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10419
10420 case CALL_EXPR:
10421 /* All valid uses of __builtin_va_arg_pack () are removed during
10422 inlining. */
10423 if (CALL_EXPR_VA_ARG_PACK (exp))
10424 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10425 {
10426 tree fndecl = get_callee_fndecl (exp), attr;
10427
10428 if (fndecl
10429 && (attr = lookup_attribute ("error",
10430 DECL_ATTRIBUTES (fndecl))) != NULL)
10431 error ("%Kcall to %qs declared with attribute error: %s",
10432 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10433 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10434 if (fndecl
10435 && (attr = lookup_attribute ("warning",
10436 DECL_ATTRIBUTES (fndecl))) != NULL)
10437 warning_at (tree_nonartificial_location (exp),
10438 0, "%Kcall to %qs declared with attribute warning: %s",
10439 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10440 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10441
10442 /* Check for a built-in function. */
10443 if (fndecl && DECL_BUILT_IN (fndecl))
10444 {
10445 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10446 return expand_builtin (exp, target, subtarget, tmode, ignore);
10447 }
10448 }
10449 return expand_call (exp, target, ignore);
10450
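/* Reinterpret the operand as a value of TYPE. Reuse the rtl when the
modes agree, try gen_lowpart, convert_modes or a bit-field extraction
where possible, and otherwise go through memory. */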
10451 case VIEW_CONVERT_EXPR:
10452 op0 = NULL_RTX;
10453
10454 /* If we are converting to BLKmode, try to avoid an intermediate
10455 temporary by fetching an inner memory reference. */
10456 if (mode == BLKmode
10457 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10458 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10459 && handled_component_p (treeop0))
10460 {
10461 machine_mode mode1;
10462 HOST_WIDE_INT bitsize, bitpos;
10463 tree offset;
10464 int unsignedp;
10465 int volatilep = 0;
10466 tree tem
10467 = get_inner_reference (treeop0, &bitsize, &bitpos,
10468 &offset, &mode1, &unsignedp, &volatilep,
10469 true);
10470 rtx orig_op0;
10471
10472 /* ??? We should work harder and deal with non-zero offsets. */
10473 if (!offset
10474 && (bitpos % BITS_PER_UNIT) == 0
10475 && bitsize >= 0
10476 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10477 {
10478 /* See the normal_inner_ref case for the rationale. */
10479 orig_op0
10480 = expand_expr_real (tem,
10481 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10482 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10483 != INTEGER_CST)
10484 && modifier != EXPAND_STACK_PARM
10485 ? target : NULL_RTX),
10486 VOIDmode,
10487 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10488 NULL, true);
10489
10490 if (MEM_P (orig_op0))
10491 {
10492 op0 = orig_op0;
10493
10494 /* Get a reference to just this component. */
10495 if (modifier == EXPAND_CONST_ADDRESS
10496 || modifier == EXPAND_SUM
10497 || modifier == EXPAND_INITIALIZER)
10498 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10499 else
10500 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10501
10502 if (op0 == orig_op0)
10503 op0 = copy_rtx (op0);
10504
10505 set_mem_attributes (op0, treeop0, 0);
10506 if (REG_P (XEXP (op0, 0)))
10507 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10508
10509 MEM_VOLATILE_P (op0) |= volatilep;
10510 }
10511 }
10512 }
10513
10514 if (!op0)
10515 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10516 NULL, inner_reference_p);
10517
10518 /* If the input and output modes are both the same, we are done. */
10519 if (mode == GET_MODE (op0))
10520 ;
10521 /* If neither mode is BLKmode, and both modes are the same size
10522 then we can use gen_lowpart. */
10523 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10524 && (GET_MODE_PRECISION (mode)
10525 == GET_MODE_PRECISION (GET_MODE (op0)))
10526 && !COMPLEX_MODE_P (GET_MODE (op0)))
10527 {
10528 if (GET_CODE (op0) == SUBREG)
10529 op0 = force_reg (GET_MODE (op0), op0);
10530 temp = gen_lowpart_common (mode, op0);
10531 if (temp)
10532 op0 = temp;
10533 else
10534 {
10535 if (!REG_P (op0) && !MEM_P (op0))
10536 op0 = force_reg (GET_MODE (op0), op0);
10537 op0 = gen_lowpart (mode, op0);
10538 }
10539 }
10540 /* If both types are integral, convert from one mode to the other. */
10541 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10542 op0 = convert_modes (mode, GET_MODE (op0), op0,
10543 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10544 /* If the output type is a bit-field type, do an extraction. */
10545 else if (reduce_bit_field)
10546 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10547 TYPE_UNSIGNED (type), NULL_RTX,
10548 mode, mode);
10549 /* As a last resort, spill op0 to memory, and reload it in a
10550 different mode. */
10551 else if (!MEM_P (op0))
10552 {
10553 /* If the operand is not a MEM, force it into memory. Since we
10554 are going to be changing the mode of the MEM, don't call
10555 force_const_mem for constants because we don't allow pool
10556 constants to change mode. */
10557 tree inner_type = TREE_TYPE (treeop0);
10558
10559 gcc_assert (!TREE_ADDRESSABLE (exp));
10560
10561 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10562 target
10563 = assign_stack_temp_for_type
10564 (TYPE_MODE (inner_type),
10565 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10566
10567 emit_move_insn (target, op0);
10568 op0 = target;
10569 }
10570
10571 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10572 output type is such that the operand is known to be aligned, indicate
10573 that it is. Otherwise, we need only be concerned about alignment for
10574 non-BLKmode results. */
10575 if (MEM_P (op0))
10576 {
10577 enum insn_code icode;
10578
10579 if (TYPE_ALIGN_OK (type))
10580 {
10581 /* ??? Copying the MEM without substantially changing it might
10582 run afoul of the code handling volatile memory references in
10583 store_expr, which assumes that TARGET is returned unmodified
10584 if it has been used. */
10585 op0 = copy_rtx (op0);
10586 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10587 }
10588 else if (modifier != EXPAND_WRITE
10589 && modifier != EXPAND_MEMORY
10590 && !inner_reference_p
10591 && mode != BLKmode
10592 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10593 {
10594 	      /* If the target has special handling for unaligned
10595 		 loads of this mode, then use them.  */
10596 if ((icode = optab_handler (movmisalign_optab, mode))
10597 != CODE_FOR_nothing)
10598 {
10599 rtx reg, insn;
10600
10601 op0 = adjust_address (op0, mode, 0);
10602 /* We've already validated the memory, and we're creating a
10603 new pseudo destination. The predicates really can't
10604 fail. */
10605 reg = gen_reg_rtx (mode);
10606
10607 /* Nor can the insn generator. */
10608 insn = GEN_FCN (icode) (reg, op0);
10609 emit_insn (insn);
10610 return reg;
10611 }
10612 else if (STRICT_ALIGNMENT)
10613 {
10614 tree inner_type = TREE_TYPE (treeop0);
10615 HOST_WIDE_INT temp_size
10616 = MAX (int_size_in_bytes (inner_type),
10617 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10618 rtx new_rtx
10619 = assign_stack_temp_for_type (mode, temp_size, type);
10620 rtx new_with_op0_mode
10621 = adjust_address (new_rtx, GET_MODE (op0), 0);
10622
10623 gcc_assert (!TREE_ADDRESSABLE (exp));
10624
10625 if (GET_MODE (op0) == BLKmode)
10626 emit_block_move (new_with_op0_mode, op0,
10627 GEN_INT (GET_MODE_SIZE (mode)),
10628 (modifier == EXPAND_STACK_PARM
10629 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10630 else
10631 emit_move_insn (new_with_op0_mode, op0);
10632
10633 op0 = new_rtx;
10634 }
10635 }
10636
10637 op0 = adjust_address (op0, mode, 0);
10638 }
10639
10640 return op0;
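      /* For illustration (a sketch, assuming sizeof (float) == sizeof
	 (unsigned int)): a VIEW_CONVERT_EXPR reinterprets the bits of its
	 operand in another type, much like the C-level idiom

	     float x = 1.0f;
	     unsigned int u;
	     __builtin_memcpy (&u, &x, sizeof u);

	 The code above picks the cheapest way to realize that
	 reinterpretation: reuse OP0 when the modes already agree, take a
	 lowpart when both modes have the same precision, convert between
	 integral modes, and only as a last resort spill OP0 to a stack
	 temporary and reload it in the new mode.  */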
10641
10642 case MODIFY_EXPR:
10643 {
10644 tree lhs = treeop0;
10645 tree rhs = treeop1;
10646 gcc_assert (ignore);
10647
10648 /* Check for |= or &= of a bitfield of size one into another bitfield
10649 	 of size one.  In this case, (unless we need the result of the
10650 assignment) we can do this more efficiently with a
10651 test followed by an assignment, if necessary.
10652
10653 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10654 things change so we do, this code should be enhanced to
10655 support it. */
10656 if (TREE_CODE (lhs) == COMPONENT_REF
10657 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10658 || TREE_CODE (rhs) == BIT_AND_EXPR)
10659 && TREE_OPERAND (rhs, 0) == lhs
10660 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10661 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10662 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10663 {
10664 rtx_code_label *label = gen_label_rtx ();
10665 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10666 do_jump (TREE_OPERAND (rhs, 1),
10667 value ? label : 0,
10668 value ? 0 : label, -1);
10669 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10670 false);
10671 do_pending_stack_adjust ();
10672 emit_label (label);
10673 return const0_rtx;
10674 }
10675
10676 expand_assignment (lhs, rhs, false);
10677 return const0_rtx;
10678 }
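      /* For illustration (hypothetical struct): the single-bit special case
	 above turns

	     struct s { unsigned a : 1, b : 1; } x;
	     x.a |= x.b;

	 into the equivalent of "if (x.b) x.a = 1;" (and "x.a &= x.b;" into
	 "if (!x.b) x.a = 0;"), replacing a read-modify-write of the
	 destination bitfield with a conditional jump around a constant
	 store.  */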
10679
10680 case ADDR_EXPR:
10681 return expand_expr_addr_expr (exp, target, tmode, modifier);
10682
10683 case REALPART_EXPR:
10684 op0 = expand_normal (treeop0);
10685 return read_complex_part (op0, false);
10686
10687 case IMAGPART_EXPR:
10688 op0 = expand_normal (treeop0);
10689 return read_complex_part (op0, true);
10690
10691 case RETURN_EXPR:
10692 case LABEL_EXPR:
10693 case GOTO_EXPR:
10694 case SWITCH_EXPR:
10695 case ASM_EXPR:
10696 /* Expanded in cfgexpand.c. */
10697 gcc_unreachable ();
10698
10699 case TRY_CATCH_EXPR:
10700 case CATCH_EXPR:
10701 case EH_FILTER_EXPR:
10702 case TRY_FINALLY_EXPR:
10703 /* Lowered by tree-eh.c. */
10704 gcc_unreachable ();
10705
10706 case WITH_CLEANUP_EXPR:
10707 case CLEANUP_POINT_EXPR:
10708 case TARGET_EXPR:
10709 case CASE_LABEL_EXPR:
10710 case VA_ARG_EXPR:
10711 case BIND_EXPR:
10712 case INIT_EXPR:
10713 case CONJ_EXPR:
10714 case COMPOUND_EXPR:
10715 case PREINCREMENT_EXPR:
10716 case PREDECREMENT_EXPR:
10717 case POSTINCREMENT_EXPR:
10718 case POSTDECREMENT_EXPR:
10719 case LOOP_EXPR:
10720 case EXIT_EXPR:
10721 case COMPOUND_LITERAL_EXPR:
10722 /* Lowered by gimplify.c. */
10723 gcc_unreachable ();
10724
10725 case FDESC_EXPR:
10726       /* Function descriptors are not valid except as
10727 initialization constants, and should not be expanded. */
10728 gcc_unreachable ();
10729
10730 case WITH_SIZE_EXPR:
10731 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10732 have pulled out the size to use in whatever context it needed. */
10733 return expand_expr_real (treeop0, original_target, tmode,
10734 modifier, alt_rtl, inner_reference_p);
10735
10736 default:
10737 return expand_expr_real_2 (&ops, target, tmode, modifier);
10738 }
10739 }
10740 \f
10741 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10742 signedness of TYPE), possibly returning the result in TARGET. */
10743 static rtx
10744 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10745 {
10746 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10747 if (target && GET_MODE (target) != GET_MODE (exp))
10748 target = 0;
10749 /* For constant values, reduce using build_int_cst_type. */
10750 if (CONST_INT_P (exp))
10751 {
10752 HOST_WIDE_INT value = INTVAL (exp);
10753 tree t = build_int_cst_type (type, value);
10754 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10755 }
10756 else if (TYPE_UNSIGNED (type))
10757 {
10758 machine_mode mode = GET_MODE (exp);
10759 rtx mask = immed_wide_int_const
10760 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10761 return expand_and (mode, exp, mask, target);
10762 }
10763 else
10764 {
10765 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10766 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10767 exp, count, target, 0);
10768 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10769 exp, count, target, 0);
10770 }
10771 }
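/* For illustration (a sketch in C terms): reducing a 32-bit value X to a
   5-bit field in reduce_to_bit_field_precision amounts to

       unsigned u5 = x & 0x1f;		    for an unsigned TYPE
       int s5 = (int) (x << 27) >> 27;	    for a signed TYPE

   i.e. an AND with the mask (1 << prec) - 1, or a left shift followed by an
   arithmetic right shift that sign-extends from the field's top bit.  */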
10772 \f
10773 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
10774    when applied to the address of EXP, produces an address known to be
10775    aligned to more than BIGGEST_ALIGNMENT.  */
10776
10777 static int
10778 is_aligning_offset (const_tree offset, const_tree exp)
10779 {
10780 /* Strip off any conversions. */
10781 while (CONVERT_EXPR_P (offset))
10782 offset = TREE_OPERAND (offset, 0);
10783
10784 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10785    a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
10786 if (TREE_CODE (offset) != BIT_AND_EXPR
10787 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10788 || compare_tree_int (TREE_OPERAND (offset, 1),
10789 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10790 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10791 return 0;
10792
10793 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10794 It must be NEGATE_EXPR. Then strip any more conversions. */
10795 offset = TREE_OPERAND (offset, 0);
10796 while (CONVERT_EXPR_P (offset))
10797 offset = TREE_OPERAND (offset, 0);
10798
10799 if (TREE_CODE (offset) != NEGATE_EXPR)
10800 return 0;
10801
10802 offset = TREE_OPERAND (offset, 0);
10803 while (CONVERT_EXPR_P (offset))
10804 offset = TREE_OPERAND (offset, 0);
10805
10806 /* This must now be the address of EXP. */
10807 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10808 }
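/* For illustration (hypothetical buffer BUF): the pattern accepted above is
   the usual round-up-to-alignment idiom applied to EXP's own address, e.g.

       buf[(-(uintptr_t) buf) & (ALIGN - 1)]

   where ALIGN is a power of two larger than BIGGEST_ALIGNMENT; adding such
   an offset to &buf yields an address aligned to at least ALIGN.  */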
10809 \f
10810 /* Return the tree node if ARG corresponds to a string constant or zero
10811 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10812 in bytes within the string that ARG is accessing. The type of the
10813 offset will be `sizetype'. */
10814
10815 tree
10816 string_constant (tree arg, tree *ptr_offset)
10817 {
10818 tree array, offset, lower_bound;
10819 STRIP_NOPS (arg);
10820
10821 if (TREE_CODE (arg) == ADDR_EXPR)
10822 {
10823 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10824 {
10825 *ptr_offset = size_zero_node;
10826 return TREE_OPERAND (arg, 0);
10827 }
10828 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10829 {
10830 array = TREE_OPERAND (arg, 0);
10831 offset = size_zero_node;
10832 }
10833 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10834 {
10835 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10836 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10837 if (TREE_CODE (array) != STRING_CST
10838 && TREE_CODE (array) != VAR_DECL)
10839 return 0;
10840
10841 /* Check if the array has a nonzero lower bound. */
10842 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10843 if (!integer_zerop (lower_bound))
10844 {
10845 /* If the offset and base aren't both constants, return 0. */
10846 if (TREE_CODE (lower_bound) != INTEGER_CST)
10847 return 0;
10848 if (TREE_CODE (offset) != INTEGER_CST)
10849 return 0;
10850 /* Adjust offset by the lower bound. */
10851 offset = size_diffop (fold_convert (sizetype, offset),
10852 fold_convert (sizetype, lower_bound));
10853 }
10854 }
10855 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10856 {
10857 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10858 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10859 if (TREE_CODE (array) != ADDR_EXPR)
10860 return 0;
10861 array = TREE_OPERAND (array, 0);
10862 if (TREE_CODE (array) != STRING_CST
10863 && TREE_CODE (array) != VAR_DECL)
10864 return 0;
10865 }
10866 else
10867 return 0;
10868 }
10869 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10870 {
10871 tree arg0 = TREE_OPERAND (arg, 0);
10872 tree arg1 = TREE_OPERAND (arg, 1);
10873
10874 STRIP_NOPS (arg0);
10875 STRIP_NOPS (arg1);
10876
10877 if (TREE_CODE (arg0) == ADDR_EXPR
10878 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10879 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10880 {
10881 array = TREE_OPERAND (arg0, 0);
10882 offset = arg1;
10883 }
10884 else if (TREE_CODE (arg1) == ADDR_EXPR
10885 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10886 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10887 {
10888 array = TREE_OPERAND (arg1, 0);
10889 offset = arg0;
10890 }
10891 else
10892 return 0;
10893 }
10894 else
10895 return 0;
10896
10897 if (TREE_CODE (array) == STRING_CST)
10898 {
10899 *ptr_offset = fold_convert (sizetype, offset);
10900 return array;
10901 }
10902 else if (TREE_CODE (array) == VAR_DECL
10903 || TREE_CODE (array) == CONST_DECL)
10904 {
10905 int length;
10906 tree init = ctor_for_folding (array);
10907
10908 /* Variables initialized to string literals can be handled too. */
10909 if (init == error_mark_node
10910 || !init
10911 || TREE_CODE (init) != STRING_CST)
10912 return 0;
10913
10914 /* Avoid const char foo[4] = "abcde"; */
10915 if (DECL_SIZE_UNIT (array) == NULL_TREE
10916 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10917 || (length = TREE_STRING_LENGTH (init)) <= 0
10918 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10919 return 0;
10920
10921       /* If the variable is bigger than the string literal, OFFSET must be
10922 	 constant and inside the bounds of the string literal.  */
10923 offset = fold_convert (sizetype, offset);
10924 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10925 && (! tree_fits_uhwi_p (offset)
10926 || compare_tree_int (offset, length) >= 0))
10927 return 0;
10928
10929 *ptr_offset = offset;
10930 return init;
10931 }
10932
10933 return 0;
10934 }
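/* For illustration (hypothetical declaration): given

       const char msg[] = "hello world";

   string_constant on the argument "msg + 6" returns the STRING_CST
   "hello world" with *PTR_OFFSET set to 6, which is what lets callers such
   as the builtin string expanders fold the access at compile time; for a
   VAR_DECL it only succeeds when the initializer is itself a STRING_CST
   that fits within the declared size.  */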
10935 \f
10936 /* Generate code to calculate OPS, an exploded expression,
10937 using a store-flag instruction and return an rtx for the result.
10938 OPS reflects a comparison.
10939
10940 If TARGET is nonzero, store the result there if convenient.
10941
10942 Return zero if there is no suitable set-flag instruction
10943 available on this machine.
10944
10945 Once expand_expr has been called on the arguments of the comparison,
10946 we are committed to doing the store flag, since it is not safe to
10947 re-evaluate the expression. We emit the store-flag insn by calling
10948 emit_store_flag, but only expand the arguments if we have a reason
10949 to believe that emit_store_flag will be successful. If we think that
10950 it will, but it isn't, we have to simulate the store-flag with a
10951 set/jump/set sequence. */
10952
10953 static rtx
10954 do_store_flag (sepops ops, rtx target, machine_mode mode)
10955 {
10956 enum rtx_code code;
10957 tree arg0, arg1, type;
10958 tree tem;
10959 machine_mode operand_mode;
10960 int unsignedp;
10961 rtx op0, op1;
10962 rtx subtarget = target;
10963 location_t loc = ops->location;
10964
10965 arg0 = ops->op0;
10966 arg1 = ops->op1;
10967
10968 /* Don't crash if the comparison was erroneous. */
10969 if (arg0 == error_mark_node || arg1 == error_mark_node)
10970 return const0_rtx;
10971
10972 type = TREE_TYPE (arg0);
10973 operand_mode = TYPE_MODE (type);
10974 unsignedp = TYPE_UNSIGNED (type);
10975
10976 /* We won't bother with BLKmode store-flag operations because it would mean
10977 passing a lot of information to emit_store_flag. */
10978 if (operand_mode == BLKmode)
10979 return 0;
10980
10981 /* We won't bother with store-flag operations involving function pointers
10982 when function pointers must be canonicalized before comparisons. */
10983 #ifdef HAVE_canonicalize_funcptr_for_compare
10984 if (HAVE_canonicalize_funcptr_for_compare
10985 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10986 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10987 == FUNCTION_TYPE))
10988 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10989 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10990 == FUNCTION_TYPE))))
10991 return 0;
10992 #endif
10993
10994 STRIP_NOPS (arg0);
10995 STRIP_NOPS (arg1);
10996
10997 /* For vector typed comparisons emit code to generate the desired
10998 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10999 expander for this. */
11000 if (TREE_CODE (ops->type) == VECTOR_TYPE)
11001 {
11002 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11003 tree if_true = constant_boolean_node (true, ops->type);
11004 tree if_false = constant_boolean_node (false, ops->type);
11005 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
11006 }
11007
11008 /* Get the rtx comparison code to use. We know that EXP is a comparison
11009 operation of some type. Some comparisons against 1 and -1 can be
11010 converted to comparisons with zero. Do so here so that the tests
11011 below will be aware that we have a comparison with zero. These
11012 tests will not catch constants in the first operand, but constants
11013 are rarely passed as the first operand. */
11014
11015 switch (ops->code)
11016 {
11017 case EQ_EXPR:
11018 code = EQ;
11019 break;
11020 case NE_EXPR:
11021 code = NE;
11022 break;
11023 case LT_EXPR:
11024 if (integer_onep (arg1))
11025 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11026 else
11027 code = unsignedp ? LTU : LT;
11028 break;
11029 case LE_EXPR:
11030 if (! unsignedp && integer_all_onesp (arg1))
11031 arg1 = integer_zero_node, code = LT;
11032 else
11033 code = unsignedp ? LEU : LE;
11034 break;
11035 case GT_EXPR:
11036 if (! unsignedp && integer_all_onesp (arg1))
11037 arg1 = integer_zero_node, code = GE;
11038 else
11039 code = unsignedp ? GTU : GT;
11040 break;
11041 case GE_EXPR:
11042 if (integer_onep (arg1))
11043 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11044 else
11045 code = unsignedp ? GEU : GE;
11046 break;
11047
11048 case UNORDERED_EXPR:
11049 code = UNORDERED;
11050 break;
11051 case ORDERED_EXPR:
11052 code = ORDERED;
11053 break;
11054 case UNLT_EXPR:
11055 code = UNLT;
11056 break;
11057 case UNLE_EXPR:
11058 code = UNLE;
11059 break;
11060 case UNGT_EXPR:
11061 code = UNGT;
11062 break;
11063 case UNGE_EXPR:
11064 code = UNGE;
11065 break;
11066 case UNEQ_EXPR:
11067 code = UNEQ;
11068 break;
11069 case LTGT_EXPR:
11070 code = LTGT;
11071 break;
11072
11073 default:
11074 gcc_unreachable ();
11075 }
11076
11077 /* Put a constant second. */
11078 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11079 || TREE_CODE (arg0) == FIXED_CST)
11080 {
11081 tem = arg0; arg0 = arg1; arg1 = tem;
11082 code = swap_condition (code);
11083 }
11084
11085 /* If this is an equality or inequality test of a single bit, we can
11086 do this by shifting the bit being tested to the low-order bit and
11087 masking the result with the constant 1. If the condition was EQ,
11088 we xor it with 1. This does not require an scc insn and is faster
11089 than an scc insn even if we have it.
11090
11091 The code to make this transformation was moved into fold_single_bit_test,
11092 so we just call into the folder and expand its result. */
11093
11094 if ((code == NE || code == EQ)
11095 && integer_zerop (arg1)
11096 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11097 {
11098 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11099 if (srcstmt
11100 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11101 {
11102 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11103 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11104 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11105 gimple_assign_rhs1 (srcstmt),
11106 gimple_assign_rhs2 (srcstmt));
11107 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11108 if (temp)
11109 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11110 }
11111 }
11112
11113 if (! get_subtarget (target)
11114 || GET_MODE (subtarget) != operand_mode)
11115 subtarget = 0;
11116
11117 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11118
11119 if (target == 0)
11120 target = gen_reg_rtx (mode);
11121
11122 /* Try a cstore if possible. */
11123 return emit_store_flag_force (target, code, op0, op1,
11124 operand_mode, unsignedp,
11125 (TYPE_PRECISION (ops->type) == 1
11126 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11127 }
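/* For illustration: with the single-bit shortcut above, a comparison such as

       (x & 0x10) != 0

   is rewritten via fold_single_bit_test into roughly "(x >> 4) & 1" and
   expanded directly, while a general comparison falls through to the
   emit_store_flag_force call at the end of the function.  */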
11128 \f
11129
11130 /* Stubs in case we haven't got a casesi insn. */
11131 #ifndef HAVE_casesi
11132 # define HAVE_casesi 0
11133 # define gen_casesi(a, b, c, d, e) (0)
11134 # define CODE_FOR_casesi CODE_FOR_nothing
11135 #endif
11136
11137 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11138 0 otherwise (i.e. if there is no casesi instruction).
11139
11140 DEFAULT_PROBABILITY is the probability of jumping to the default
11141 label. */
11142 int
11143 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11144 rtx table_label, rtx default_label, rtx fallback_label,
11145 int default_probability)
11146 {
11147 struct expand_operand ops[5];
11148 machine_mode index_mode = SImode;
11149 rtx op1, op2, index;
11150
11151 if (! HAVE_casesi)
11152 return 0;
11153
11154 /* Convert the index to SImode. */
11155 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11156 {
11157 machine_mode omode = TYPE_MODE (index_type);
11158 rtx rangertx = expand_normal (range);
11159
11160 /* We must handle the endpoints in the original mode. */
11161 index_expr = build2 (MINUS_EXPR, index_type,
11162 index_expr, minval);
11163 minval = integer_zero_node;
11164 index = expand_normal (index_expr);
11165 if (default_label)
11166 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11167 omode, 1, default_label,
11168 default_probability);
11169 /* Now we can safely truncate. */
11170 index = convert_to_mode (index_mode, index, 0);
11171 }
11172 else
11173 {
11174 if (TYPE_MODE (index_type) != index_mode)
11175 {
11176 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11177 index_expr = fold_convert (index_type, index_expr);
11178 }
11179
11180 index = expand_normal (index_expr);
11181 }
11182
11183 do_pending_stack_adjust ();
11184
11185 op1 = expand_normal (minval);
11186 op2 = expand_normal (range);
11187
11188 create_input_operand (&ops[0], index, index_mode);
11189 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11190 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11191 create_fixed_operand (&ops[3], table_label);
11192 create_fixed_operand (&ops[4], (default_label
11193 ? default_label
11194 : fallback_label));
11195 expand_jump_insn (CODE_FOR_casesi, 5, ops);
11196 return 1;
11197 }
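/* For illustration (hypothetical switch): for "switch (c)" over the case
   range 'a'..'z', try_casesi passes the casesi pattern the SImode index C,
   op1 = 'a', op2 = 'z' - 'a', the table label and the default label; when
   the index type is wider than SImode the range check is done first in the
   original mode, so the truncation to SImode cannot change the outcome.  */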
11198
11199 /* Attempt to generate a tablejump instruction; same concept as casesi above.  */
11200 #ifndef HAVE_tablejump
11201 #define HAVE_tablejump 0
11202 #define gen_tablejump(x, y) (0)
11203 #endif
11204
11205 /* Subroutine of the next function.
11206
11207 INDEX is the value being switched on, with the lowest value
11208 in the table already subtracted.
11209 MODE is its expected mode (needed if INDEX is constant).
11210 RANGE is the length of the jump table.
11211 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11212
11213 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11214 index value is out of range.
11215 DEFAULT_PROBABILITY is the probability of jumping to
11216 the default label. */
11217
11218 static void
11219 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11220 rtx default_label, int default_probability)
11221 {
11222 rtx temp, vector;
11223
11224 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11225 cfun->cfg->max_jumptable_ents = INTVAL (range);
11226
11227 /* Do an unsigned comparison (in the proper mode) between the index
11228 expression and the value which represents the length of the range.
11229 Since we just finished subtracting the lower bound of the range
11230 from the index expression, this comparison allows us to simultaneously
11231 check that the original index expression value is both greater than
11232 or equal to the minimum value of the range and less than or equal to
11233 the maximum value of the range. */
11234
11235 if (default_label)
11236 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11237 default_label, default_probability);
11238
11239
11240 /* If index is in range, it must fit in Pmode.
11241 Convert to Pmode so we can index with it. */
11242 if (mode != Pmode)
11243 index = convert_to_mode (Pmode, index, 1);
11244
11245 /* Don't let a MEM slip through, because then INDEX that comes
11246 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11247 and break_out_memory_refs will go to work on it and mess it up. */
11248 #ifdef PIC_CASE_VECTOR_ADDRESS
11249 if (flag_pic && !REG_P (index))
11250 index = copy_to_mode_reg (Pmode, index);
11251 #endif
11252
11253 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11254 GET_MODE_SIZE, because this indicates how large insns are. The other
11255 uses should all be Pmode, because they are addresses. This code
11256 could fail if addresses and insns are not the same size. */
11257 index = simplify_gen_binary (MULT, Pmode, index,
11258 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11259 Pmode));
11260 index = simplify_gen_binary (PLUS, Pmode, index,
11261 gen_rtx_LABEL_REF (Pmode, table_label));
11262
11263 #ifdef PIC_CASE_VECTOR_ADDRESS
11264 if (flag_pic)
11265 index = PIC_CASE_VECTOR_ADDRESS (index);
11266 else
11267 #endif
11268 index = memory_address (CASE_VECTOR_MODE, index);
11269 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11270 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11271 convert_move (temp, vector, 0);
11272
11273 emit_jump_insn (gen_tablejump (temp, table_label));
11274
11275 /* If we are generating PIC code or if the table is PC-relative, the
11276 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11277 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11278 emit_barrier ();
11279 }
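/* For illustration: after the bounds check, do_tablejump forms the dispatch
   address essentially as

       table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

   then loads the CASE_VECTOR_MODE entry found there into a register and
   emits a tablejump through it; on PIC targets PIC_CASE_VECTOR_ADDRESS
   rewrites the address, since such jump tables hold relative rather than
   absolute entries.  */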
11280
11281 int
11282 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11283 rtx table_label, rtx default_label, int default_probability)
11284 {
11285 rtx index;
11286
11287 if (! HAVE_tablejump)
11288 return 0;
11289
11290 index_expr = fold_build2 (MINUS_EXPR, index_type,
11291 fold_convert (index_type, index_expr),
11292 fold_convert (index_type, minval));
11293 index = expand_normal (index_expr);
11294 do_pending_stack_adjust ();
11295
11296 do_tablejump (index, TYPE_MODE (index_type),
11297 convert_modes (TYPE_MODE (index_type),
11298 TYPE_MODE (TREE_TYPE (range)),
11299 expand_normal (range),
11300 TYPE_UNSIGNED (TREE_TYPE (range))),
11301 table_label, default_label, default_probability);
11302 return 1;
11303 }
11304
11305 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11306 static rtx
11307 const_vector_from_tree (tree exp)
11308 {
11309 rtvec v;
11310 unsigned i;
11311 int units;
11312 tree elt;
11313 machine_mode inner, mode;
11314
11315 mode = TYPE_MODE (TREE_TYPE (exp));
11316
11317 if (initializer_zerop (exp))
11318 return CONST0_RTX (mode);
11319
11320 units = GET_MODE_NUNITS (mode);
11321 inner = GET_MODE_INNER (mode);
11322
11323 v = rtvec_alloc (units);
11324
11325 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11326 {
11327 elt = VECTOR_CST_ELT (exp, i);
11328
11329 if (TREE_CODE (elt) == REAL_CST)
11330 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11331 inner);
11332 else if (TREE_CODE (elt) == FIXED_CST)
11333 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11334 inner);
11335 else
11336 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11337 }
11338
11339 return gen_rtx_CONST_VECTOR (mode, v);
11340 }
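/* For illustration: a V4SImode VECTOR_CST such as {1, 2, 3, 4} becomes
   (const_vector:V4SI [1 2 3 4]), while an all-zeros constructor
   short-circuits to CONST0_RTX (V4SImode).  */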
11341
11342 /* Build a decl for a personality function given a language prefix. */
11343
11344 tree
11345 build_personality_function (const char *lang)
11346 {
11347 const char *unwind_and_version;
11348 tree decl, type;
11349 char *name;
11350
11351 switch (targetm_common.except_unwind_info (&global_options))
11352 {
11353 case UI_NONE:
11354 return NULL;
11355 case UI_SJLJ:
11356 unwind_and_version = "_sj0";
11357 break;
11358 case UI_DWARF2:
11359 case UI_TARGET:
11360 unwind_and_version = "_v0";
11361 break;
11362 case UI_SEH:
11363 unwind_and_version = "_seh0";
11364 break;
11365 default:
11366 gcc_unreachable ();
11367 }
11368
11369 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11370
11371 type = build_function_type_list (integer_type_node, integer_type_node,
11372 long_long_unsigned_type_node,
11373 ptr_type_node, ptr_type_node, NULL_TREE);
11374 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11375 get_identifier (name), type);
11376 DECL_ARTIFICIAL (decl) = 1;
11377 DECL_EXTERNAL (decl) = 1;
11378 TREE_PUBLIC (decl) = 1;
11379
11380 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11381 are the flags assigned by targetm.encode_section_info. */
11382 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11383
11384 return decl;
11385 }
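/* For illustration: with DWARF-2 unwind info, build_personality_function
   ("gxx") declares __gxx_personality_v0 (the C++ EH personality routine);
   SJLJ and SEH configurations yield __gxx_personality_sj0 and
   __gxx_personality_seh0, matching the name assembled from the language
   prefix and the unwind_and_version suffix above.  */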
11386
11387 /* Extracts the personality function of DECL and returns the corresponding
11388 libfunc. */
11389
11390 rtx
11391 get_personality_function (tree decl)
11392 {
11393 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11394 enum eh_personality_kind pk;
11395
11396 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11397 if (pk == eh_personality_none)
11398 return NULL;
11399
11400 if (!personality
11401 && pk == eh_personality_any)
11402 personality = lang_hooks.eh_personality ();
11403
11404 if (pk == eh_personality_lang)
11405 gcc_assert (personality != NULL_TREE);
11406
11407 return XEXP (DECL_RTL (personality), 0);
11408 }
11409
11410 #include "gt-expr.h"