1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "predict.h"
29 #include "tm_p.h"
30 #include "ssa.h"
31 #include "expmed.h"
32 #include "optabs.h"
33 #include "regs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "cgraph.h"
37 #include "diagnostic.h"
38 #include "alias.h"
39 #include "fold-const.h"
40 #include "stor-layout.h"
41 #include "attribs.h"
42 #include "varasm.h"
43 #include "except.h"
44 #include "insn-attr.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "stmt.h"
49 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
50 #include "expr.h"
51 #include "optabs-tree.h"
52 #include "libfuncs.h"
53 #include "reload.h"
54 #include "langhooks.h"
55 #include "common/common-target.h"
56 #include "tree-ssa-live.h"
57 #include "tree-outof-ssa.h"
58 #include "tree-ssa-address.h"
59 #include "builtins.h"
60 #include "tree-chkp.h"
61 #include "rtl-chkp.h"
62 #include "ccmp.h"
63
64
65 /* If this is nonzero, we do not bother generating VOLATILE
66 around volatile memory references, and we are willing to
67 output indirect addresses. If cse is to follow, we reject
68 indirect addresses so a useful potential cse is generated;
69 if it is used only once, instruction combination will produce
70 the same indirect address eventually. */
71 int cse_not_expected;
72
73 /* This structure is used by move_by_pieces to describe the move to
74 be performed. */
75 struct move_by_pieces_d
76 {
77 rtx to;
78 rtx to_addr;
79 int autinc_to;
80 int explicit_inc_to;
81 rtx from;
82 rtx from_addr;
83 int autinc_from;
84 int explicit_inc_from;
85 unsigned HOST_WIDE_INT len;
86 HOST_WIDE_INT offset;
87 int reverse;
88 };
89
90 /* This structure is used by store_by_pieces to describe the clear to
91 be performed. */
92
93 struct store_by_pieces_d
94 {
95 rtx to;
96 rtx to_addr;
97 int autinc_to;
98 int explicit_inc_to;
99 unsigned HOST_WIDE_INT len;
100 HOST_WIDE_INT offset;
101 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
102 void *constfundata;
103 int reverse;
104 };
105
106 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
107 struct move_by_pieces_d *);
108 static bool block_move_libcall_safe_for_call_parm (void);
109 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
110 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
111 unsigned HOST_WIDE_INT);
112 static tree emit_block_move_libcall_fn (int);
113 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
114 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
115 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
116 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
117 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
118 struct store_by_pieces_d *);
119 static tree clear_storage_libcall_fn (int);
120 static rtx_insn *compress_float_constant (rtx, rtx);
121 static rtx get_subtarget (rtx);
122 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
123 HOST_WIDE_INT, machine_mode,
124 tree, int, alias_set_type, bool);
125 static void store_constructor (tree, rtx, int, HOST_WIDE_INT, bool);
126 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
127 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
128 machine_mode, tree, alias_set_type, bool, bool);
129
130 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
131
132 static int is_aligning_offset (const_tree, const_tree);
133 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
134 static rtx do_store_flag (sepops, rtx, machine_mode);
135 #ifdef PUSH_ROUNDING
136 static void emit_single_push_insn (machine_mode, rtx, tree);
137 #endif
138 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
139 static rtx const_vector_from_tree (tree);
140 static tree tree_expr_size (const_tree);
141 static HOST_WIDE_INT int_expr_size (tree);
142
143 \f
144 /* This is run to set up which modes can be used
145 directly in memory and to initialize the block move optab. It is run
146 at the beginning of compilation and when the target is reinitialized. */
147
148 void
149 init_expr_target (void)
150 {
151 rtx insn, pat;
152 machine_mode mode;
153 int num_clobbers;
154 rtx mem, mem1;
155 rtx reg;
156
157 /* Try indexing by frame ptr and try by stack ptr.
158 It is known that on the Convex the stack ptr isn't a valid index.
159 With luck, one or the other is valid on any machine. */
160 mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
161 mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
162
163 /* A scratch register we can modify in-place below to avoid
164 useless RTL allocations. */
165 reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
166
167 insn = rtx_alloc (INSN);
168 pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
169 PATTERN (insn) = pat;
170
171 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
172 mode = (machine_mode) ((int) mode + 1))
173 {
174 int regno;
175
176 direct_load[(int) mode] = direct_store[(int) mode] = 0;
177 PUT_MODE (mem, mode);
178 PUT_MODE (mem1, mode);
179
180 /* See if there is some register that can be used in this mode and
181 directly loaded or stored from memory. */
182
183 if (mode != VOIDmode && mode != BLKmode)
184 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
185 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
186 regno++)
187 {
188 if (! HARD_REGNO_MODE_OK (regno, mode))
189 continue;
190
191 set_mode_and_regno (reg, mode, regno);
192
193 SET_SRC (pat) = mem;
194 SET_DEST (pat) = reg;
195 if (recog (pat, insn, &num_clobbers) >= 0)
196 direct_load[(int) mode] = 1;
197
198 SET_SRC (pat) = mem1;
199 SET_DEST (pat) = reg;
200 if (recog (pat, insn, &num_clobbers) >= 0)
201 direct_load[(int) mode] = 1;
202
203 SET_SRC (pat) = reg;
204 SET_DEST (pat) = mem;
205 if (recog (pat, insn, &num_clobbers) >= 0)
206 direct_store[(int) mode] = 1;
207
208 SET_SRC (pat) = reg;
209 SET_DEST (pat) = mem1;
210 if (recog (pat, insn, &num_clobbers) >= 0)
211 direct_store[(int) mode] = 1;
212 }
213 }
214
215 mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
216
217 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
218 mode = GET_MODE_WIDER_MODE (mode))
219 {
220 machine_mode srcmode;
221 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
222 srcmode = GET_MODE_WIDER_MODE (srcmode))
223 {
224 enum insn_code ic;
225
226 ic = can_extend_p (mode, srcmode, 0);
227 if (ic == CODE_FOR_nothing)
228 continue;
229
230 PUT_MODE (mem, srcmode);
231
232 if (insn_operand_matches (ic, 1, mem))
233 float_extend_from_mem[mode][srcmode] = true;
234 }
235 }
236 }
237
238 /* This is run at the start of compiling a function. */
239
240 void
241 init_expr (void)
242 {
243 memset (&crtl->expr, 0, sizeof (crtl->expr));
244 }
245 \f
246 /* Copy data from FROM to TO, where the machine modes are not the same.
247 Both modes may be integer, or both may be floating, or both may be
248 fixed-point.
249 UNSIGNEDP should be nonzero if FROM is an unsigned type.
250 This causes zero-extension instead of sign-extension. */
251
252 void
253 convert_move (rtx to, rtx from, int unsignedp)
254 {
255 machine_mode to_mode = GET_MODE (to);
256 machine_mode from_mode = GET_MODE (from);
257 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
258 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
259 enum insn_code code;
260 rtx libcall;
261
262 /* rtx code for making an equivalent value. */
263 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
264 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
265
266
267 gcc_assert (to_real == from_real);
268 gcc_assert (to_mode != BLKmode);
269 gcc_assert (from_mode != BLKmode);
270
271 /* If the source and destination are already the same, then there's
272 nothing to do. */
273 if (to == from)
274 return;
275
276 /* If FROM is a SUBREG that indicates that we have already done at least
277 the required extension, strip it. We don't handle such SUBREGs as
278 TO here. */
279
280 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
281 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
282 >= GET_MODE_PRECISION (to_mode))
283 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
284 from = gen_lowpart (to_mode, from), from_mode = to_mode;
285
286 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
287
288 if (to_mode == from_mode
289 || (from_mode == VOIDmode && CONSTANT_P (from)))
290 {
291 emit_move_insn (to, from);
292 return;
293 }
294
295 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
296 {
297 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
298
299 if (VECTOR_MODE_P (to_mode))
300 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
301 else
302 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
303
304 emit_move_insn (to, from);
305 return;
306 }
307
308 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
309 {
310 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
311 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
312 return;
313 }
314
315 if (to_real)
316 {
317 rtx value;
318 rtx_insn *insns;
319 convert_optab tab;
320
321 gcc_assert ((GET_MODE_PRECISION (from_mode)
322 != GET_MODE_PRECISION (to_mode))
323 || (DECIMAL_FLOAT_MODE_P (from_mode)
324 != DECIMAL_FLOAT_MODE_P (to_mode)));
325
326 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
327 /* Conversion between decimal float and binary float, same size. */
328 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
329 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
330 tab = sext_optab;
331 else
332 tab = trunc_optab;
333
334 /* Try converting directly if the insn is supported. */
335
336 code = convert_optab_handler (tab, to_mode, from_mode);
337 if (code != CODE_FOR_nothing)
338 {
339 emit_unop_insn (code, to, from,
340 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
341 return;
342 }
343
344 /* Otherwise use a libcall. */
345 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
346
347 /* Is this conversion implemented yet? */
348 gcc_assert (libcall);
349
350 start_sequence ();
351 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
352 1, from, from_mode);
353 insns = get_insns ();
354 end_sequence ();
355 emit_libcall_block (insns, to, value,
356 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
357 from)
358 : gen_rtx_FLOAT_EXTEND (to_mode, from));
359 return;
360 }
361
362 /* Handle pointer conversion. */ /* SPEE 900220. */
363 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
364 {
365 convert_optab ctab;
366
367 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
368 ctab = trunc_optab;
369 else if (unsignedp)
370 ctab = zext_optab;
371 else
372 ctab = sext_optab;
373
374 if (convert_optab_handler (ctab, to_mode, from_mode)
375 != CODE_FOR_nothing)
376 {
377 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
378 to, from, UNKNOWN);
379 return;
380 }
381 }
382
383 /* Targets are expected to provide conversion insns between PxImode and
384 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
385 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
386 {
387 machine_mode full_mode
388 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
389
390 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
391 != CODE_FOR_nothing);
392
393 if (full_mode != from_mode)
394 from = convert_to_mode (full_mode, from, unsignedp);
395 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
396 to, from, UNKNOWN);
397 return;
398 }
399 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
400 {
401 rtx new_from;
402 machine_mode full_mode
403 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
404 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
405 enum insn_code icode;
406
407 icode = convert_optab_handler (ctab, full_mode, from_mode);
408 gcc_assert (icode != CODE_FOR_nothing);
409
410 if (to_mode == full_mode)
411 {
412 emit_unop_insn (icode, to, from, UNKNOWN);
413 return;
414 }
415
416 new_from = gen_reg_rtx (full_mode);
417 emit_unop_insn (icode, new_from, from, UNKNOWN);
418
419 /* else proceed to integer conversions below. */
420 from_mode = full_mode;
421 from = new_from;
422 }
423
424 /* Make sure both are fixed-point modes or both are not. */
425 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
426 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
427 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
428 {
429 /* If we widen from_mode to to_mode and they are in the same class,
430 we won't saturate the result.
431 Otherwise, always saturate the result to play safe. */
432 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
433 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
434 expand_fixed_convert (to, from, 0, 0);
435 else
436 expand_fixed_convert (to, from, 0, 1);
437 return;
438 }
439
440 /* Now both modes are integers. */
441
442 /* Handle expanding beyond a word. */
443 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
444 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
445 {
446 rtx_insn *insns;
447 rtx lowpart;
448 rtx fill_value;
449 rtx lowfrom;
450 int i;
451 machine_mode lowpart_mode;
452 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
453
454 /* Try converting directly if the insn is supported. */
455 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
456 != CODE_FOR_nothing)
457 {
458 /* If FROM is a SUBREG, put it into a register. Do this
459 so that we always generate the same set of insns for
460 better cse'ing; if an intermediate assignment occurred,
461 we won't be doing the operation directly on the SUBREG. */
462 if (optimize > 0 && GET_CODE (from) == SUBREG)
463 from = force_reg (from_mode, from);
464 emit_unop_insn (code, to, from, equiv_code);
465 return;
466 }
467 /* Next, try converting via full word. */
468 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
469 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
470 != CODE_FOR_nothing))
471 {
472 rtx word_to = gen_reg_rtx (word_mode);
473 if (REG_P (to))
474 {
475 if (reg_overlap_mentioned_p (to, from))
476 from = force_reg (from_mode, from);
477 emit_clobber (to);
478 }
479 convert_move (word_to, from, unsignedp);
480 emit_unop_insn (code, to, word_to, equiv_code);
481 return;
482 }
483
484 /* No special multiword conversion insn; do it by hand. */
485 start_sequence ();
486
487 	  /* Since we will turn this into a no conflict block, we must ensure that
488 	     the source does not overlap the target, so force it into an isolated
489 	     register when it might.  Likewise for any MEM input, since the
490 conversion sequence might require several references to it and we
491 must ensure we're getting the same value every time. */
492
493 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
494 from = force_reg (from_mode, from);
495
496 /* Get a copy of FROM widened to a word, if necessary. */
497 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
498 lowpart_mode = word_mode;
499 else
500 lowpart_mode = from_mode;
501
502 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
503
504 lowpart = gen_lowpart (lowpart_mode, to);
505 emit_move_insn (lowpart, lowfrom);
506
507 /* Compute the value to put in each remaining word. */
508 if (unsignedp)
509 fill_value = const0_rtx;
510 else
511 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
512 LT, lowfrom, const0_rtx,
513 lowpart_mode, 0, -1);
514
515 /* Fill the remaining words. */
516 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
517 {
518 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
519 rtx subword = operand_subword (to, index, 1, to_mode);
520
521 gcc_assert (subword);
522
523 if (fill_value != subword)
524 emit_move_insn (subword, fill_value);
525 }
526
527 insns = get_insns ();
528 end_sequence ();
529
530 emit_insn (insns);
531 return;
532 }
533
534 /* Truncating multi-word to a word or less. */
535 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
536 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
537 {
538 if (!((MEM_P (from)
539 && ! MEM_VOLATILE_P (from)
540 && direct_load[(int) to_mode]
541 && ! mode_dependent_address_p (XEXP (from, 0),
542 MEM_ADDR_SPACE (from)))
543 || REG_P (from)
544 || GET_CODE (from) == SUBREG))
545 from = force_reg (from_mode, from);
546 convert_move (to, gen_lowpart (word_mode, from), 0);
547 return;
548 }
549
550 /* Now follow all the conversions between integers
551 no more than a word long. */
552
553 /* For truncation, usually we can just refer to FROM in a narrower mode. */
554 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
555 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
556 {
557 if (!((MEM_P (from)
558 && ! MEM_VOLATILE_P (from)
559 && direct_load[(int) to_mode]
560 && ! mode_dependent_address_p (XEXP (from, 0),
561 MEM_ADDR_SPACE (from)))
562 || REG_P (from)
563 || GET_CODE (from) == SUBREG))
564 from = force_reg (from_mode, from);
565 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
566 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
567 from = copy_to_reg (from);
568 emit_move_insn (to, gen_lowpart (to_mode, from));
569 return;
570 }
571
572 /* Handle extension. */
573 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
574 {
575 /* Convert directly if that works. */
576 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
577 != CODE_FOR_nothing)
578 {
579 emit_unop_insn (code, to, from, equiv_code);
580 return;
581 }
582 else
583 {
584 machine_mode intermediate;
585 rtx tmp;
586 int shift_amount;
587
588 /* Search for a mode to convert via. */
589 for (intermediate = from_mode; intermediate != VOIDmode;
590 intermediate = GET_MODE_WIDER_MODE (intermediate))
591 if (((can_extend_p (to_mode, intermediate, unsignedp)
592 != CODE_FOR_nothing)
593 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
594 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
595 && (can_extend_p (intermediate, from_mode, unsignedp)
596 != CODE_FOR_nothing))
597 {
598 convert_move (to, convert_to_mode (intermediate, from,
599 unsignedp), unsignedp);
600 return;
601 }
602
603 /* No suitable intermediate mode.
604 Generate what we need with shifts. */
605 shift_amount = (GET_MODE_PRECISION (to_mode)
606 - GET_MODE_PRECISION (from_mode));
607 from = gen_lowpart (to_mode, force_reg (from_mode, from));
608 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
609 to, unsignedp);
610 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
611 to, unsignedp);
612 if (tmp != to)
613 emit_move_insn (to, tmp);
614 return;
615 }
616 }
617
618 /* Support special truncate insns for certain modes. */
619 if (convert_optab_handler (trunc_optab, to_mode,
620 from_mode) != CODE_FOR_nothing)
621 {
622 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
623 to, from, UNKNOWN);
624 return;
625 }
626
627 /* Handle truncation of volatile memrefs, and so on;
628 the things that couldn't be truncated directly,
629 and for which there was no special instruction.
630
631 ??? Code above formerly short-circuited this, for most integer
632 mode pairs, with a force_reg in from_mode followed by a recursive
633 call to this routine. Appears always to have been wrong. */
634 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
635 {
636 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
637 emit_move_insn (to, temp);
638 return;
639 }
640
641 /* Mode combination is not recognized. */
642 gcc_unreachable ();
643 }
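
/* Illustrative sketch, not part of the original file; the helper below is
   hypothetical and never called, but shows the typical way convert_move is
   used during expansion.  Widening a SImode value into a fresh DImode
   pseudo is a single call; UNSIGNEDP selects zero- versus sign-extension,
   and convert_move picks a direct extend insn, an intermediate mode,
   shifts or a libcall as needed.  */

static void
example_widen_si_to_di (rtx src_si)
{
  rtx dst_di = gen_reg_rtx (DImode);

  /* Zero-extend SRC_SI into DST_DI; pass 0 to sign-extend instead.  */
  convert_move (dst_di, src_si, 1);
}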
644
645 /* Return an rtx for a value that would result
646 from converting X to mode MODE.
647 Both X and MODE may be floating, or both integer.
648 UNSIGNEDP is nonzero if X is an unsigned value.
649 This can be done by referring to a part of X in place
650 or by copying to a new temporary with conversion. */
651
652 rtx
653 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
654 {
655 return convert_modes (mode, VOIDmode, x, unsignedp);
656 }
657
658 /* Return an rtx for a value that would result
659 from converting X from mode OLDMODE to mode MODE.
660 Both modes may be floating, or both integer.
661 UNSIGNEDP is nonzero if X is an unsigned value.
662
663 This can be done by referring to a part of X in place
664 or by copying to a new temporary with conversion.
665
666 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
667
668 rtx
669 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
670 {
671 rtx temp;
672
673   /* If X is a SUBREG that indicates that we have already done at least
674      the required extension, strip it.  */
675
676 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
677 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
678 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
679 x = gen_lowpart (mode, SUBREG_REG (x));
680
681 if (GET_MODE (x) != VOIDmode)
682 oldmode = GET_MODE (x);
683
684 if (mode == oldmode)
685 return x;
686
687 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
688 {
689 /* If the caller did not tell us the old mode, then there is not
690 much to do with respect to canonicalization. We have to
691 assume that all the bits are significant. */
692 if (GET_MODE_CLASS (oldmode) != MODE_INT)
693 oldmode = MAX_MODE_INT;
694 wide_int w = wide_int::from (std::make_pair (x, oldmode),
695 GET_MODE_PRECISION (mode),
696 unsignedp ? UNSIGNED : SIGNED);
697 return immed_wide_int_const (w, mode);
698 }
699
700 /* We can do this with a gen_lowpart if both desired and current modes
701 are integer, and this is either a constant integer, a register, or a
702 non-volatile MEM. */
703 if (GET_MODE_CLASS (mode) == MODE_INT
704 && GET_MODE_CLASS (oldmode) == MODE_INT
705 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
706 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
707 || (REG_P (x)
708 && (!HARD_REGISTER_P (x)
709 || HARD_REGNO_MODE_OK (REGNO (x), mode))
710 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
711
712 return gen_lowpart (mode, x);
713
714   /* Converting an integer constant into a vector mode is always equivalent
715      to a subreg operation.  */
716 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
717 {
718 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
719 return simplify_gen_subreg (mode, x, oldmode, 0);
720 }
721
722 temp = gen_reg_rtx (mode);
723 convert_move (temp, x, unsignedp);
724 return temp;
725 }
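
/* Illustrative sketch, not part of the original file; the helper is
   hypothetical.  convert_modes is the value-returning counterpart of
   convert_move: for a CONST_INT it folds the conversion at expand time via
   the wide_int path above, while for a register or memory operand it
   either takes a lowpart in place or emits a conversion into a fresh
   pseudo.  */

static rtx
example_truncate_to_qi (rtx x, machine_mode oldmode)
{
  /* OLDMODE may be VOIDmode when X is known to carry a nonvoid mode.  */
  return convert_modes (QImode, oldmode, x, /* unsignedp */ 1);
}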
726 \f
727 /* Return the largest alignment we can use for doing a move (or store)
728 of MAX_PIECES. ALIGN is the largest alignment we could use. */
729
730 static unsigned int
731 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
732 {
733 machine_mode tmode;
734
735 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
736 if (align >= GET_MODE_ALIGNMENT (tmode))
737 align = GET_MODE_ALIGNMENT (tmode);
738 else
739 {
740 machine_mode tmode, xmode;
741
742 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
743 tmode != VOIDmode;
744 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
745 if (GET_MODE_SIZE (tmode) > max_pieces
746 || SLOW_UNALIGNED_ACCESS (tmode, align))
747 break;
748
749 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
750 }
751
752 return align;
753 }
754
755 /* Return the widest integer mode that is narrower than SIZE bytes.  If no
756    such mode can be found, return VOIDmode.  */
757
758 static machine_mode
759 widest_int_mode_for_size (unsigned int size)
760 {
761 machine_mode tmode, mode = VOIDmode;
762
763 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
764 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
765 if (GET_MODE_SIZE (tmode) < size)
766 mode = tmode;
767
768 return mode;
769 }
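
/* Illustrative example, assuming a 64-bit target with MOVE_MAX_PIECES == 8:
   move_by_pieces starts with max_size == MOVE_MAX_PIECES + 1 == 9, so the
   first call here returns DImode (size 8); once the DImode copies are
   emitted max_size drops to 8 and subsequent calls return SImode, HImode
   and finally QImode.  */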
770
771 /* Determine whether the LEN bytes can be moved by using several move
772 instructions. Return nonzero if a call to move_by_pieces should
773 succeed. */
774
775 int
776 can_move_by_pieces (unsigned HOST_WIDE_INT len,
777 unsigned int align)
778 {
779 return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
780 optimize_insn_for_speed_p ());
781 }
782
783 /* Generate several move instructions to copy LEN bytes from block FROM to
784 block TO. (These are MEM rtx's with BLKmode).
785
786 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
787 used to push FROM to the stack.
788
789 ALIGN is maximum stack alignment we can assume.
790
791    If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
792    mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
793    stpcpy.  */
794
795 rtx
796 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
797 unsigned int align, int endp)
798 {
799 struct move_by_pieces_d data;
800 machine_mode to_addr_mode;
801 machine_mode from_addr_mode = get_address_mode (from);
802 rtx to_addr, from_addr = XEXP (from, 0);
803 unsigned int max_size = MOVE_MAX_PIECES + 1;
804 enum insn_code icode;
805
806 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
807
808 data.offset = 0;
809 data.from_addr = from_addr;
810 if (to)
811 {
812 to_addr_mode = get_address_mode (to);
813 to_addr = XEXP (to, 0);
814 data.to = to;
815 data.autinc_to
816 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
817 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
818 data.reverse
819 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
820 }
821 else
822 {
823 to_addr_mode = VOIDmode;
824 to_addr = NULL_RTX;
825 data.to = NULL_RTX;
826 data.autinc_to = 1;
827 if (STACK_GROWS_DOWNWARD)
828 data.reverse = 1;
829 else
830 data.reverse = 0;
831 }
832 data.to_addr = to_addr;
833 data.from = from;
834 data.autinc_from
835 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
836 || GET_CODE (from_addr) == POST_INC
837 || GET_CODE (from_addr) == POST_DEC);
838
839 data.explicit_inc_from = 0;
840 data.explicit_inc_to = 0;
841 if (data.reverse) data.offset = len;
842 data.len = len;
843
844 /* If copying requires more than two move insns,
845 copy addresses to registers (to make displacements shorter)
846 and use post-increment if available. */
847 if (!(data.autinc_from && data.autinc_to)
848 && move_by_pieces_ninsns (len, align, max_size) > 2)
849 {
850 /* Find the mode of the largest move...
851 MODE might not be used depending on the definitions of the
852 USE_* macros below. */
853 machine_mode mode ATTRIBUTE_UNUSED
854 = widest_int_mode_for_size (max_size);
855
856 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
857 {
858 data.from_addr = copy_to_mode_reg (from_addr_mode,
859 plus_constant (from_addr_mode,
860 from_addr, len));
861 data.autinc_from = 1;
862 data.explicit_inc_from = -1;
863 }
864 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
865 {
866 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
867 data.autinc_from = 1;
868 data.explicit_inc_from = 1;
869 }
870 if (!data.autinc_from && CONSTANT_P (from_addr))
871 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
872 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
873 {
874 data.to_addr = copy_to_mode_reg (to_addr_mode,
875 plus_constant (to_addr_mode,
876 to_addr, len));
877 data.autinc_to = 1;
878 data.explicit_inc_to = -1;
879 }
880 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
881 {
882 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
883 data.autinc_to = 1;
884 data.explicit_inc_to = 1;
885 }
886 if (!data.autinc_to && CONSTANT_P (to_addr))
887 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
888 }
889
890 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
891
892 /* First move what we can in the largest integer mode, then go to
893 successively smaller modes. */
894
895 while (max_size > 1 && data.len > 0)
896 {
897 machine_mode mode = widest_int_mode_for_size (max_size);
898
899 if (mode == VOIDmode)
900 break;
901
902 icode = optab_handler (mov_optab, mode);
903 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
904 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
905
906 max_size = GET_MODE_SIZE (mode);
907 }
908
909 /* The code above should have handled everything. */
910 gcc_assert (!data.len);
911
912 if (endp)
913 {
914 rtx to1;
915
916 gcc_assert (!data.reverse);
917 if (data.autinc_to)
918 {
919 if (endp == 2)
920 {
921 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
922 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
923 else
924 data.to_addr = copy_to_mode_reg (to_addr_mode,
925 plus_constant (to_addr_mode,
926 data.to_addr,
927 -1));
928 }
929 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
930 data.offset);
931 }
932 else
933 {
934 if (endp == 2)
935 --data.offset;
936 to1 = adjust_address (data.to, QImode, data.offset);
937 }
938 return to1;
939 }
940 else
941 return data.to;
942 }
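
/* Illustrative sketch, not part of the original file; the helper is
   hypothetical.  Copying a small constant-sized block is a direct call:
   both operands are BLKmode MEMs, LEN is in bytes, ALIGN is in bits and
   ENDP selects the memcpy/mempcpy/stpcpy-style return value described
   above.  Callers normally check can_move_by_pieces first, as
   emit_block_move_hints does below.  */

static rtx
example_copy_16_bytes (rtx dst_blk, rtx src_blk)
{
  unsigned int align = MIN (MEM_ALIGN (dst_blk), MEM_ALIGN (src_blk));

  /* ENDP == 0 simply returns DST_BLK.  */
  return move_by_pieces (dst_blk, src_blk, 16, align, 0);
}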
943
944 /* Return number of insns required to move L bytes by pieces.
945 ALIGN (in bits) is maximum alignment we can assume. */
946
947 unsigned HOST_WIDE_INT
948 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
949 unsigned int max_size)
950 {
951 unsigned HOST_WIDE_INT n_insns = 0;
952
953 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
954
955 while (max_size > 1 && l > 0)
956 {
957 machine_mode mode;
958 enum insn_code icode;
959
960 mode = widest_int_mode_for_size (max_size);
961
962 if (mode == VOIDmode)
963 break;
964
965 icode = optab_handler (mov_optab, mode);
966 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
967 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
968
969 max_size = GET_MODE_SIZE (mode);
970 }
971
972 gcc_assert (!l);
973 return n_insns;
974 }
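
/* Illustrative example, assuming a 64-bit target with MOVE_MAX_PIECES == 8
   and fully aligned operands: L == 15 decomposes into one DImode move, one
   SImode move, one HImode move and one QImode move, so the function
   returns 4.  */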
975
976 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
977 with move instructions for mode MODE. GENFUN is the gen_... function
978 to make a move insn for that mode. DATA has all the other info. */
979
980 static void
981 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
982 struct move_by_pieces_d *data)
983 {
984 unsigned int size = GET_MODE_SIZE (mode);
985 rtx to1 = NULL_RTX, from1;
986
987 while (data->len >= size)
988 {
989 if (data->reverse)
990 data->offset -= size;
991
992 if (data->to)
993 {
994 if (data->autinc_to)
995 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
996 data->offset);
997 else
998 to1 = adjust_address (data->to, mode, data->offset);
999 }
1000
1001 if (data->autinc_from)
1002 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1003 data->offset);
1004 else
1005 from1 = adjust_address (data->from, mode, data->offset);
1006
1007 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1008 emit_insn (gen_add2_insn (data->to_addr,
1009 gen_int_mode (-(HOST_WIDE_INT) size,
1010 GET_MODE (data->to_addr))));
1011 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1012 emit_insn (gen_add2_insn (data->from_addr,
1013 gen_int_mode (-(HOST_WIDE_INT) size,
1014 GET_MODE (data->from_addr))));
1015
1016 if (data->to)
1017 emit_insn ((*genfun) (to1, from1));
1018 else
1019 {
1020 #ifdef PUSH_ROUNDING
1021 emit_single_push_insn (mode, from1, NULL);
1022 #else
1023 gcc_unreachable ();
1024 #endif
1025 }
1026
1027 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1028 emit_insn (gen_add2_insn (data->to_addr,
1029 gen_int_mode (size,
1030 GET_MODE (data->to_addr))));
1031 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1032 emit_insn (gen_add2_insn (data->from_addr,
1033 gen_int_mode (size,
1034 GET_MODE (data->from_addr))));
1035
1036 if (! data->reverse)
1037 data->offset += size;
1038
1039 data->len -= size;
1040 }
1041 }
1042 \f
1043 /* Emit code to move a block Y to a block X. This may be done with
1044 string-move instructions, with multiple scalar move instructions,
1045 or with a library call.
1046
1047 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1048 SIZE is an rtx that says how long they are.
1049 ALIGN is the maximum alignment we can assume they have.
1050 METHOD describes what kind of copy this is, and what mechanisms may be used.
1051    MIN_SIZE is the minimal size of the block to move.
1052    MAX_SIZE is the maximal size of the block to move; if it cannot be
1053    represented in unsigned HOST_WIDE_INT, it is a mask of all ones.
1054
1055 Return the address of the new block, if memcpy is called and returns it,
1056 0 otherwise. */
1057
1058 rtx
1059 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1060 unsigned int expected_align, HOST_WIDE_INT expected_size,
1061 unsigned HOST_WIDE_INT min_size,
1062 unsigned HOST_WIDE_INT max_size,
1063 unsigned HOST_WIDE_INT probable_max_size)
1064 {
1065 bool may_use_call;
1066 rtx retval = 0;
1067 unsigned int align;
1068
1069 gcc_assert (size);
1070 if (CONST_INT_P (size)
1071 && INTVAL (size) == 0)
1072 return 0;
1073
1074 switch (method)
1075 {
1076 case BLOCK_OP_NORMAL:
1077 case BLOCK_OP_TAILCALL:
1078 may_use_call = true;
1079 break;
1080
1081 case BLOCK_OP_CALL_PARM:
1082 may_use_call = block_move_libcall_safe_for_call_parm ();
1083
1084 /* Make inhibit_defer_pop nonzero around the library call
1085 to force it to pop the arguments right away. */
1086 NO_DEFER_POP;
1087 break;
1088
1089 case BLOCK_OP_NO_LIBCALL:
1090 may_use_call = false;
1091 break;
1092
1093 default:
1094 gcc_unreachable ();
1095 }
1096
1097 gcc_assert (MEM_P (x) && MEM_P (y));
1098 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1099 gcc_assert (align >= BITS_PER_UNIT);
1100
1101 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1102 block copy is more efficient for other large modes, e.g. DCmode. */
1103 x = adjust_address (x, BLKmode, 0);
1104 y = adjust_address (y, BLKmode, 0);
1105
1106 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1107 can be incorrect is coming from __builtin_memcpy. */
1108 if (CONST_INT_P (size))
1109 {
1110 x = shallow_copy_rtx (x);
1111 y = shallow_copy_rtx (y);
1112 set_mem_size (x, INTVAL (size));
1113 set_mem_size (y, INTVAL (size));
1114 }
1115
1116 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1117 move_by_pieces (x, y, INTVAL (size), align, 0);
1118 else if (emit_block_move_via_movmem (x, y, size, align,
1119 expected_align, expected_size,
1120 min_size, max_size, probable_max_size))
1121 ;
1122 else if (may_use_call
1123 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1124 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1125 {
1126 /* Since x and y are passed to a libcall, mark the corresponding
1127 tree EXPR as addressable. */
1128 tree y_expr = MEM_EXPR (y);
1129 tree x_expr = MEM_EXPR (x);
1130 if (y_expr)
1131 mark_addressable (y_expr);
1132 if (x_expr)
1133 mark_addressable (x_expr);
1134 retval = emit_block_move_via_libcall (x, y, size,
1135 method == BLOCK_OP_TAILCALL);
1136 }
1137
1138 else
1139 emit_block_move_via_loop (x, y, size, align);
1140
1141 if (method == BLOCK_OP_CALL_PARM)
1142 OK_DEFER_POP;
1143
1144 return retval;
1145 }
1146
1147 rtx
1148 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1149 {
1150 unsigned HOST_WIDE_INT max, min = 0;
1151 if (GET_CODE (size) == CONST_INT)
1152 min = max = UINTVAL (size);
1153 else
1154 max = GET_MODE_MASK (GET_MODE (size));
1155 return emit_block_move_hints (x, y, size, method, 0, -1,
1156 min, max, max);
1157 }
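
/* Illustrative sketch, not part of the original file; the helper is
   hypothetical.  A plain copy of NBYTES bytes between two BLKmode MEMs
   goes through emit_block_move, which chooses between move_by_pieces, a
   movmem pattern and a memcpy libcall on its own.  */

static void
example_block_copy (rtx dst_blk, rtx src_blk, HOST_WIDE_INT nbytes)
{
  emit_block_move (dst_blk, src_blk, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}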
1158
1159 /* A subroutine of emit_block_move. Returns true if calling the
1160 block move libcall will not clobber any parameters which may have
1161 already been placed on the stack. */
1162
1163 static bool
1164 block_move_libcall_safe_for_call_parm (void)
1165 {
1166 #if defined (REG_PARM_STACK_SPACE)
1167 tree fn;
1168 #endif
1169
1170 /* If arguments are pushed on the stack, then they're safe. */
1171 if (PUSH_ARGS)
1172 return true;
1173
1174 /* If registers go on the stack anyway, any argument is sure to clobber
1175 an outgoing argument. */
1176 #if defined (REG_PARM_STACK_SPACE)
1177 fn = emit_block_move_libcall_fn (false);
1178 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1179 depend on its argument. */
1180 (void) fn;
1181 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1182 && REG_PARM_STACK_SPACE (fn) != 0)
1183 return false;
1184 #endif
1185
1186 /* If any argument goes in memory, then it might clobber an outgoing
1187 argument. */
1188 {
1189 CUMULATIVE_ARGS args_so_far_v;
1190 cumulative_args_t args_so_far;
1191 tree fn, arg;
1192
1193 fn = emit_block_move_libcall_fn (false);
1194 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1195 args_so_far = pack_cumulative_args (&args_so_far_v);
1196
1197 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1198 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1199 {
1200 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1201 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1202 NULL_TREE, true);
1203 if (!tmp || !REG_P (tmp))
1204 return false;
1205 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1206 return false;
1207 targetm.calls.function_arg_advance (args_so_far, mode,
1208 NULL_TREE, true);
1209 }
1210 }
1211 return true;
1212 }
1213
1214 /* A subroutine of emit_block_move. Expand a movmem pattern;
1215 return true if successful. */
1216
1217 static bool
1218 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1219 unsigned int expected_align, HOST_WIDE_INT expected_size,
1220 unsigned HOST_WIDE_INT min_size,
1221 unsigned HOST_WIDE_INT max_size,
1222 unsigned HOST_WIDE_INT probable_max_size)
1223 {
1224 int save_volatile_ok = volatile_ok;
1225 machine_mode mode;
1226
1227 if (expected_align < align)
1228 expected_align = align;
1229 if (expected_size != -1)
1230 {
1231 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1232 expected_size = probable_max_size;
1233 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1234 expected_size = min_size;
1235 }
1236
1237 /* Since this is a move insn, we don't care about volatility. */
1238 volatile_ok = 1;
1239
1240 /* Try the most limited insn first, because there's no point
1241 including more than one in the machine description unless
1242 the more limited one has some advantage. */
1243
1244 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1245 mode = GET_MODE_WIDER_MODE (mode))
1246 {
1247 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1248
1249 if (code != CODE_FOR_nothing
1250 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1251 here because if SIZE is less than the mode mask, as it is
1252 returned by the macro, it will definitely be less than the
1253 actual mode mask. Since SIZE is within the Pmode address
1254 space, we limit MODE to Pmode. */
1255 && ((CONST_INT_P (size)
1256 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1257 <= (GET_MODE_MASK (mode) >> 1)))
1258 || max_size <= (GET_MODE_MASK (mode) >> 1)
1259 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1260 {
1261 struct expand_operand ops[9];
1262 unsigned int nops;
1263
1264 /* ??? When called via emit_block_move_for_call, it'd be
1265 nice if there were some way to inform the backend, so
1266 that it doesn't fail the expansion because it thinks
1267 emitting the libcall would be more efficient. */
1268 nops = insn_data[(int) code].n_generator_args;
1269 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1270
1271 create_fixed_operand (&ops[0], x);
1272 create_fixed_operand (&ops[1], y);
1273 /* The check above guarantees that this size conversion is valid. */
1274 create_convert_operand_to (&ops[2], size, mode, true);
1275 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1276 if (nops >= 6)
1277 {
1278 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1279 create_integer_operand (&ops[5], expected_size);
1280 }
1281 if (nops >= 8)
1282 {
1283 create_integer_operand (&ops[6], min_size);
1284 	      /* If we cannot represent the maximal size,
1285 		 make the parameter NULL.  */
1286 if ((HOST_WIDE_INT) max_size != -1)
1287 create_integer_operand (&ops[7], max_size);
1288 else
1289 create_fixed_operand (&ops[7], NULL);
1290 }
1291 if (nops == 9)
1292 {
1293 	      /* If we cannot represent the maximal size,
1294 		 make the parameter NULL.  */
1295 if ((HOST_WIDE_INT) probable_max_size != -1)
1296 create_integer_operand (&ops[8], probable_max_size);
1297 else
1298 create_fixed_operand (&ops[8], NULL);
1299 }
1300 if (maybe_expand_insn (code, nops, ops))
1301 {
1302 volatile_ok = save_volatile_ok;
1303 return true;
1304 }
1305 }
1306 }
1307
1308 volatile_ok = save_volatile_ok;
1309 return false;
1310 }
1311
1312 /* A subroutine of emit_block_move. Expand a call to memcpy.
1313 Return the return value from memcpy, 0 otherwise. */
1314
1315 rtx
1316 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1317 {
1318 rtx dst_addr, src_addr;
1319 tree call_expr, fn, src_tree, dst_tree, size_tree;
1320 machine_mode size_mode;
1321 rtx retval;
1322
1323 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1324 pseudos. We can then place those new pseudos into a VAR_DECL and
1325 use them later. */
1326
1327 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1328 src_addr = copy_addr_to_reg (XEXP (src, 0));
1329
1330 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1331 src_addr = convert_memory_address (ptr_mode, src_addr);
1332
1333 dst_tree = make_tree (ptr_type_node, dst_addr);
1334 src_tree = make_tree (ptr_type_node, src_addr);
1335
1336 size_mode = TYPE_MODE (sizetype);
1337
1338 size = convert_to_mode (size_mode, size, 1);
1339 size = copy_to_mode_reg (size_mode, size);
1340
1341 /* It is incorrect to use the libcall calling conventions to call
1342 memcpy in this context. This could be a user call to memcpy and
1343 the user may wish to examine the return value from memcpy. For
1344 targets where libcalls and normal calls have different conventions
1345 for returning pointers, we could end up generating incorrect code. */
1346
1347 size_tree = make_tree (sizetype, size);
1348
1349 fn = emit_block_move_libcall_fn (true);
1350 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1351 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1352
1353 retval = expand_normal (call_expr);
1354
1355 return retval;
1356 }
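
/* Illustrative example: for a copy of N bytes the code above expands the
   equivalent of

     (void) memcpy (dst_addr, src_addr, n);

   built as a GENERIC CALL_EXPR, so the normal call expansion machinery and
   its return-value conventions are used rather than emit_library_call.  */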
1357
1358 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1359 for the function we use for block copies. */
1360
1361 static GTY(()) tree block_move_fn;
1362
1363 void
1364 init_block_move_fn (const char *asmspec)
1365 {
1366 if (!block_move_fn)
1367 {
1368 tree args, fn, attrs, attr_args;
1369
1370 fn = get_identifier ("memcpy");
1371 args = build_function_type_list (ptr_type_node, ptr_type_node,
1372 const_ptr_type_node, sizetype,
1373 NULL_TREE);
1374
1375 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1376 DECL_EXTERNAL (fn) = 1;
1377 TREE_PUBLIC (fn) = 1;
1378 DECL_ARTIFICIAL (fn) = 1;
1379 TREE_NOTHROW (fn) = 1;
1380 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1381 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1382
1383 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1384 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1385
1386 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1387
1388 block_move_fn = fn;
1389 }
1390
1391 if (asmspec)
1392 set_user_assembler_name (block_move_fn, asmspec);
1393 }
1394
1395 static tree
1396 emit_block_move_libcall_fn (int for_call)
1397 {
1398 static bool emitted_extern;
1399
1400 if (!block_move_fn)
1401 init_block_move_fn (NULL);
1402
1403 if (for_call && !emitted_extern)
1404 {
1405 emitted_extern = true;
1406 make_decl_rtl (block_move_fn);
1407 }
1408
1409 return block_move_fn;
1410 }
1411
1412 /* A subroutine of emit_block_move. Copy the data via an explicit
1413 loop. This is used only when libcalls are forbidden. */
1414 /* ??? It'd be nice to copy in hunks larger than QImode. */
1415
1416 static void
1417 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1418 unsigned int align ATTRIBUTE_UNUSED)
1419 {
1420 rtx_code_label *cmp_label, *top_label;
1421 rtx iter, x_addr, y_addr, tmp;
1422 machine_mode x_addr_mode = get_address_mode (x);
1423 machine_mode y_addr_mode = get_address_mode (y);
1424 machine_mode iter_mode;
1425
1426 iter_mode = GET_MODE (size);
1427 if (iter_mode == VOIDmode)
1428 iter_mode = word_mode;
1429
1430 top_label = gen_label_rtx ();
1431 cmp_label = gen_label_rtx ();
1432 iter = gen_reg_rtx (iter_mode);
1433
1434 emit_move_insn (iter, const0_rtx);
1435
1436 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1437 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1438 do_pending_stack_adjust ();
1439
1440 emit_jump (cmp_label);
1441 emit_label (top_label);
1442
1443 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1444 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1445
1446 if (x_addr_mode != y_addr_mode)
1447 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1448 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1449
1450 x = change_address (x, QImode, x_addr);
1451 y = change_address (y, QImode, y_addr);
1452
1453 emit_move_insn (x, y);
1454
1455 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1456 true, OPTAB_LIB_WIDEN);
1457 if (tmp != iter)
1458 emit_move_insn (iter, tmp);
1459
1460 emit_label (cmp_label);
1461
1462 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1463 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1464 }
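
/* Illustrative sketch: the RTL emitted above corresponds roughly to

     iter = 0;
     goto cmp;
   top:
     ((unsigned char *) x)[iter] = ((unsigned char *) y)[iter];
     iter++;
   cmp:
     if (iter < size)
       goto top;

   i.e. a byte-at-a-time copy whose guard also handles a zero SIZE.  */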
1465 \f
1466 /* Copy all or part of a value X into registers starting at REGNO.
1467 The number of registers to be filled is NREGS. */
1468
1469 void
1470 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1471 {
1472 if (nregs == 0)
1473 return;
1474
1475 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1476 x = validize_mem (force_const_mem (mode, x));
1477
1478 /* See if the machine can do this with a load multiple insn. */
1479 if (targetm.have_load_multiple ())
1480 {
1481 rtx_insn *last = get_last_insn ();
1482 rtx first = gen_rtx_REG (word_mode, regno);
1483 if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
1484 GEN_INT (nregs)))
1485 {
1486 emit_insn (pat);
1487 return;
1488 }
1489 else
1490 delete_insns_since (last);
1491 }
1492
1493 for (int i = 0; i < nregs; i++)
1494 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1495 operand_subword_force (x, i, mode));
1496 }
1497
1498 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1499 The number of registers to be filled is NREGS. */
1500
1501 void
1502 move_block_from_reg (int regno, rtx x, int nregs)
1503 {
1504 if (nregs == 0)
1505 return;
1506
1507 /* See if the machine can do this with a store multiple insn. */
1508 if (targetm.have_store_multiple ())
1509 {
1510 rtx_insn *last = get_last_insn ();
1511 rtx first = gen_rtx_REG (word_mode, regno);
1512 if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
1513 GEN_INT (nregs)))
1514 {
1515 emit_insn (pat);
1516 return;
1517 }
1518 else
1519 delete_insns_since (last);
1520 }
1521
1522 for (int i = 0; i < nregs; i++)
1523 {
1524 rtx tem = operand_subword (x, i, 1, BLKmode);
1525
1526 gcc_assert (tem);
1527
1528 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1529 }
1530 }
1531
1532 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1533 ORIG, where ORIG is a non-consecutive group of registers represented by
1534 a PARALLEL. The clone is identical to the original except in that the
1535 original set of registers is replaced by a new set of pseudo registers.
1536 The new set has the same modes as the original set. */
1537
1538 rtx
1539 gen_group_rtx (rtx orig)
1540 {
1541 int i, length;
1542 rtx *tmps;
1543
1544 gcc_assert (GET_CODE (orig) == PARALLEL);
1545
1546 length = XVECLEN (orig, 0);
1547 tmps = XALLOCAVEC (rtx, length);
1548
1549 /* Skip a NULL entry in first slot. */
1550 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1551
1552 if (i)
1553 tmps[0] = 0;
1554
1555 for (; i < length; i++)
1556 {
1557 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1558 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1559
1560 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1561 }
1562
1563 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1564 }
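
/* Illustrative example (the register numbers are hypothetical): a return
   group such as

     (parallel [(expr_list (reg:DI 0) (const_int 0))
                (expr_list (reg:DI 1) (const_int 8))])

   is cloned into a PARALLEL of the same shape whose entries hold fresh
   DImode pseudos in place of the hard registers, with the byte offsets
   preserved.  */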
1565
1566 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1567 except that values are placed in TMPS[i], and must later be moved
1568    into the corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
1569
1570 static void
1571 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1572 {
1573 rtx src;
1574 int start, i;
1575 machine_mode m = GET_MODE (orig_src);
1576
1577 gcc_assert (GET_CODE (dst) == PARALLEL);
1578
1579 if (m != VOIDmode
1580 && !SCALAR_INT_MODE_P (m)
1581 && !MEM_P (orig_src)
1582 && GET_CODE (orig_src) != CONCAT)
1583 {
1584 machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1585 if (imode == BLKmode)
1586 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1587 else
1588 src = gen_reg_rtx (imode);
1589 if (imode != BLKmode)
1590 src = gen_lowpart (GET_MODE (orig_src), src);
1591 emit_move_insn (src, orig_src);
1592 /* ...and back again. */
1593 if (imode != BLKmode)
1594 src = gen_lowpart (imode, src);
1595 emit_group_load_1 (tmps, dst, src, type, ssize);
1596 return;
1597 }
1598
1599 /* Check for a NULL entry, used to indicate that the parameter goes
1600 both on the stack and in registers. */
1601 if (XEXP (XVECEXP (dst, 0, 0), 0))
1602 start = 0;
1603 else
1604 start = 1;
1605
1606 /* Process the pieces. */
1607 for (i = start; i < XVECLEN (dst, 0); i++)
1608 {
1609 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1610 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1611 unsigned int bytelen = GET_MODE_SIZE (mode);
1612 int shift = 0;
1613
1614 /* Handle trailing fragments that run over the size of the struct. */
1615 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1616 {
1617 /* Arrange to shift the fragment to where it belongs.
1618 extract_bit_field loads to the lsb of the reg. */
1619 if (
1620 #ifdef BLOCK_REG_PADDING
1621 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1622 == (BYTES_BIG_ENDIAN ? upward : downward)
1623 #else
1624 BYTES_BIG_ENDIAN
1625 #endif
1626 )
1627 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1628 bytelen = ssize - bytepos;
1629 gcc_assert (bytelen > 0);
1630 }
1631
1632 /* If we won't be loading directly from memory, protect the real source
1633 from strange tricks we might play; but make sure that the source can
1634 be loaded directly into the destination. */
1635 src = orig_src;
1636 if (!MEM_P (orig_src)
1637 && (!CONSTANT_P (orig_src)
1638 || (GET_MODE (orig_src) != mode
1639 && GET_MODE (orig_src) != VOIDmode)))
1640 {
1641 if (GET_MODE (orig_src) == VOIDmode)
1642 src = gen_reg_rtx (mode);
1643 else
1644 src = gen_reg_rtx (GET_MODE (orig_src));
1645
1646 emit_move_insn (src, orig_src);
1647 }
1648
1649 /* Optimize the access just a bit. */
1650 if (MEM_P (src)
1651 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1652 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1653 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1654 && bytelen == GET_MODE_SIZE (mode))
1655 {
1656 tmps[i] = gen_reg_rtx (mode);
1657 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1658 }
1659 else if (COMPLEX_MODE_P (mode)
1660 && GET_MODE (src) == mode
1661 && bytelen == GET_MODE_SIZE (mode))
1662 /* Let emit_move_complex do the bulk of the work. */
1663 tmps[i] = src;
1664 else if (GET_CODE (src) == CONCAT)
1665 {
1666 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1667 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1668
1669 if ((bytepos == 0 && bytelen == slen0)
1670 || (bytepos != 0 && bytepos + bytelen <= slen))
1671 {
1672 /* The following assumes that the concatenated objects all
1673 have the same size. In this case, a simple calculation
1674 can be used to determine the object and the bit field
1675 to be extracted. */
1676 tmps[i] = XEXP (src, bytepos / slen0);
1677 if (! CONSTANT_P (tmps[i])
1678 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1679 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1680 (bytepos % slen0) * BITS_PER_UNIT,
1681 1, NULL_RTX, mode, mode, false);
1682 }
1683 else
1684 {
1685 rtx mem;
1686
1687 gcc_assert (!bytepos);
1688 mem = assign_stack_temp (GET_MODE (src), slen);
1689 emit_move_insn (mem, src);
1690 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1691 0, 1, NULL_RTX, mode, mode, false);
1692 }
1693 }
1694 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1695 	 SIMD register, which is currently broken.  Until we get GCC
1696 to emit proper RTL for these cases, let's dump to memory. */
1697 else if (VECTOR_MODE_P (GET_MODE (dst))
1698 && REG_P (src))
1699 {
1700 int slen = GET_MODE_SIZE (GET_MODE (src));
1701 rtx mem;
1702
1703 mem = assign_stack_temp (GET_MODE (src), slen);
1704 emit_move_insn (mem, src);
1705 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1706 }
1707 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1708 && XVECLEN (dst, 0) > 1)
1709 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1710 else if (CONSTANT_P (src))
1711 {
1712 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1713
1714 if (len == ssize)
1715 tmps[i] = src;
1716 else
1717 {
1718 rtx first, second;
1719
1720 /* TODO: const_wide_int can have sizes other than this... */
1721 gcc_assert (2 * len == ssize);
1722 split_double (src, &first, &second);
1723 if (i)
1724 tmps[i] = second;
1725 else
1726 tmps[i] = first;
1727 }
1728 }
1729 else if (REG_P (src) && GET_MODE (src) == mode)
1730 tmps[i] = src;
1731 else
1732 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1733 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1734 mode, mode, false);
1735
1736 if (shift)
1737 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1738 shift, tmps[i], 0);
1739 }
1740 }
1741
1742 /* Emit code to move a block SRC of type TYPE to a block DST,
1743 where DST is non-consecutive registers represented by a PARALLEL.
1744    SSIZE represents the total size of block SRC in bytes, or -1
1745 if not known. */
1746
1747 void
1748 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1749 {
1750 rtx *tmps;
1751 int i;
1752
1753 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1754 emit_group_load_1 (tmps, dst, src, type, ssize);
1755
1756 /* Copy the extracted pieces into the proper (probable) hard regs. */
1757 for (i = 0; i < XVECLEN (dst, 0); i++)
1758 {
1759 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1760 if (d == NULL)
1761 continue;
1762 emit_move_insn (d, tmps[i]);
1763 }
1764 }
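
/* Illustrative sketch, not part of the original file; the helper is
   hypothetical.  A typical caller loads an aggregate that lives in memory
   into the registers described by a PARALLEL obtained from the target's
   calling-convention hooks.  */

static void
example_load_group (rtx parallel_dst, rtx src_mem, tree type)
{
  emit_group_load (parallel_dst, src_mem, type, int_size_in_bytes (type));
}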
1765
1766 /* Similar, but load SRC into new pseudos in a format that looks like
1767 PARALLEL. This can later be fed to emit_group_move to get things
1768 in the right place. */
1769
1770 rtx
1771 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1772 {
1773 rtvec vec;
1774 int i;
1775
1776 vec = rtvec_alloc (XVECLEN (parallel, 0));
1777 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1778
1779 /* Convert the vector to look just like the original PARALLEL, except
1780 with the computed values. */
1781 for (i = 0; i < XVECLEN (parallel, 0); i++)
1782 {
1783 rtx e = XVECEXP (parallel, 0, i);
1784 rtx d = XEXP (e, 0);
1785
1786 if (d)
1787 {
1788 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1789 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1790 }
1791 RTVEC_ELT (vec, i) = e;
1792 }
1793
1794 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1795 }
1796
1797 /* Emit code to move a block SRC to block DST, where SRC and DST are
1798 non-consecutive groups of registers, each represented by a PARALLEL. */
1799
1800 void
1801 emit_group_move (rtx dst, rtx src)
1802 {
1803 int i;
1804
1805 gcc_assert (GET_CODE (src) == PARALLEL
1806 && GET_CODE (dst) == PARALLEL
1807 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1808
1809 /* Skip first entry if NULL. */
1810 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1811 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1812 XEXP (XVECEXP (src, 0, i), 0));
1813 }
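
/* Sketch of the two-step pattern (illustrative only): first load the
   value into fresh pseudos shaped like the PARALLEL, then move those
   pseudos into the (probable) hard registers once it is safe to do so.
   The helper name is hypothetical.  */
#if 0
static void
emit_group_move_usage_example (rtx parallel, rtx src, tree type, int ssize)
{
  rtx temps = emit_group_load_into_temps (parallel, src, type, ssize);

  /* ... other insns that must not clobber the hard registers ...  */

  emit_group_move (parallel, temps);
}
#endif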
1814
1815 /* Move a group of registers represented by a PARALLEL into pseudos. */
1816
1817 rtx
1818 emit_group_move_into_temps (rtx src)
1819 {
1820 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1821 int i;
1822
1823 for (i = 0; i < XVECLEN (src, 0); i++)
1824 {
1825 rtx e = XVECEXP (src, 0, i);
1826 rtx d = XEXP (e, 0);
1827
1828 if (d)
1829 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1830 RTVEC_ELT (vec, i) = e;
1831 }
1832
1833 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1834 }
1835
1836 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1837 where SRC is non-consecutive registers represented by a PARALLEL.
1838 SSIZE represents the total size of block ORIG_DST, or -1 if not
1839 known. */
1840
1841 void
1842 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1843 {
1844 rtx *tmps, dst;
1845 int start, finish, i;
1846 machine_mode m = GET_MODE (orig_dst);
1847
1848 gcc_assert (GET_CODE (src) == PARALLEL);
1849
1850 if (!SCALAR_INT_MODE_P (m)
1851 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1852 {
1853 machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1854 if (imode == BLKmode)
1855 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1856 else
1857 dst = gen_reg_rtx (imode);
1858 emit_group_store (dst, src, type, ssize);
1859 if (imode != BLKmode)
1860 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1861 emit_move_insn (orig_dst, dst);
1862 return;
1863 }
1864
1865 /* Check for a NULL entry, used to indicate that the parameter goes
1866 both on the stack and in registers. */
1867 if (XEXP (XVECEXP (src, 0, 0), 0))
1868 start = 0;
1869 else
1870 start = 1;
1871 finish = XVECLEN (src, 0);
1872
1873 tmps = XALLOCAVEC (rtx, finish);
1874
1875 /* Copy the (probable) hard regs into pseudos. */
1876 for (i = start; i < finish; i++)
1877 {
1878 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1879 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1880 {
1881 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1882 emit_move_insn (tmps[i], reg);
1883 }
1884 else
1885 tmps[i] = reg;
1886 }
1887
1888 /* If we won't be storing directly into memory, protect the real destination
1889 from strange tricks we might play. */
1890 dst = orig_dst;
1891 if (GET_CODE (dst) == PARALLEL)
1892 {
1893 rtx temp;
1894
1895 /* We can get a PARALLEL dst if there is a conditional expression in
1896 a return statement. In that case, the dst and src are the same,
1897 so no action is necessary. */
1898 if (rtx_equal_p (dst, src))
1899 return;
1900
1901 /* It is unclear if we can ever reach here, but we may as well handle
1902 it. Allocate a temporary, and split this into a store/load to/from
1903 the temporary. */
1904 temp = assign_stack_temp (GET_MODE (dst), ssize);
1905 emit_group_store (temp, src, type, ssize);
1906 emit_group_load (dst, temp, type, ssize);
1907 return;
1908 }
1909 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1910 {
1911 machine_mode outer = GET_MODE (dst);
1912 machine_mode inner;
1913 HOST_WIDE_INT bytepos;
1914 bool done = false;
1915 rtx temp;
1916
1917 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1918 dst = gen_reg_rtx (outer);
1919
1920 /* Make life a bit easier for combine. */
1921 /* If the first element of the vector is the low part
1922 of the destination mode, use a paradoxical subreg to
1923 initialize the destination. */
1924 if (start < finish)
1925 {
1926 inner = GET_MODE (tmps[start]);
1927 bytepos = subreg_lowpart_offset (inner, outer);
1928 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1929 {
1930 temp = simplify_gen_subreg (outer, tmps[start],
1931 inner, 0);
1932 if (temp)
1933 {
1934 emit_move_insn (dst, temp);
1935 done = true;
1936 start++;
1937 }
1938 }
1939 }
1940
1941 /* If the first element wasn't the low part, try the last. */
1942 if (!done
1943 && start < finish - 1)
1944 {
1945 inner = GET_MODE (tmps[finish - 1]);
1946 bytepos = subreg_lowpart_offset (inner, outer);
1947 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1948 {
1949 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1950 inner, 0);
1951 if (temp)
1952 {
1953 emit_move_insn (dst, temp);
1954 done = true;
1955 finish--;
1956 }
1957 }
1958 }
1959
1960 /* Otherwise, simply initialize the result to zero. */
1961 if (!done)
1962 emit_move_insn (dst, CONST0_RTX (outer));
1963 }
1964
1965 /* Process the pieces. */
1966 for (i = start; i < finish; i++)
1967 {
1968 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1969 machine_mode mode = GET_MODE (tmps[i]);
1970 unsigned int bytelen = GET_MODE_SIZE (mode);
1971 unsigned int adj_bytelen;
1972 rtx dest = dst;
1973
1974 /* Handle trailing fragments that run over the size of the struct. */
1975 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1976 adj_bytelen = ssize - bytepos;
1977 else
1978 adj_bytelen = bytelen;
1979
1980 if (GET_CODE (dst) == CONCAT)
1981 {
1982 if (bytepos + adj_bytelen
1983 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1984 dest = XEXP (dst, 0);
1985 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1986 {
1987 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1988 dest = XEXP (dst, 1);
1989 }
1990 else
1991 {
1992 machine_mode dest_mode = GET_MODE (dest);
1993 machine_mode tmp_mode = GET_MODE (tmps[i]);
1994
1995 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
1996
1997 if (GET_MODE_ALIGNMENT (dest_mode)
1998 >= GET_MODE_ALIGNMENT (tmp_mode))
1999 {
2000 dest = assign_stack_temp (dest_mode,
2001 GET_MODE_SIZE (dest_mode));
2002 emit_move_insn (adjust_address (dest,
2003 tmp_mode,
2004 bytepos),
2005 tmps[i]);
2006 dst = dest;
2007 }
2008 else
2009 {
2010 dest = assign_stack_temp (tmp_mode,
2011 GET_MODE_SIZE (tmp_mode));
2012 emit_move_insn (dest, tmps[i]);
2013 dst = adjust_address (dest, dest_mode, bytepos);
2014 }
2015 break;
2016 }
2017 }
2018
2019 /* Handle trailing fragments that run over the size of the struct. */
2020 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2021 {
2022 /* store_bit_field always takes its value from the lsb.
2023 Move the fragment to the lsb if it's not already there. */
2024 if (
2025 #ifdef BLOCK_REG_PADDING
2026 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2027 == (BYTES_BIG_ENDIAN ? upward : downward)
2028 #else
2029 BYTES_BIG_ENDIAN
2030 #endif
2031 )
2032 {
2033 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2034 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2035 shift, tmps[i], 0);
2036 }
2037
2038 /* Make sure not to write past the end of the struct. */
2039 store_bit_field (dest,
2040 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2041 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2042 VOIDmode, tmps[i], false);
2043 }
2044
2045 /* Optimize the access just a bit. */
2046 else if (MEM_P (dest)
2047 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2048 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2049 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2050 && bytelen == GET_MODE_SIZE (mode))
2051 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2052
2053 else
2054 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2055 0, 0, mode, tmps[i], false);
2056 }
2057
2058 /* Copy from the pseudo into the (probable) hard reg. */
2059 if (orig_dst != dst)
2060 emit_move_insn (orig_dst, dst);
2061 }
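
/* A minimal usage sketch for the store direction (illustrative only):
   spill a value that arrives in registers, described by a PARALLEL such
   as one built by the target's function-value hook, into a stack
   temporary of the same size.  The helper name is hypothetical.  */
#if 0
static rtx
emit_group_store_usage_example (rtx parallel_src, tree type)
{
  int size = int_size_in_bytes (type);
  rtx slot = assign_stack_temp (BLKmode, size);

  emit_group_store (slot, parallel_src, type, size);
  return slot;
}
#endif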
2062
2063 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2064 of the value stored in X. */
2065
2066 rtx
2067 maybe_emit_group_store (rtx x, tree type)
2068 {
2069 machine_mode mode = TYPE_MODE (type);
2070 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2071 if (GET_CODE (x) == PARALLEL)
2072 {
2073 rtx result = gen_reg_rtx (mode);
2074 emit_group_store (result, x, type, int_size_in_bytes (type));
2075 return result;
2076 }
2077 return x;
2078 }
2079
2080 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2081
2082 This is used on targets that return BLKmode values in registers. */
2083
2084 void
2085 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2086 {
2087 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2088 rtx src = NULL, dst = NULL;
2089 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2090 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2091 machine_mode mode = GET_MODE (srcreg);
2092 machine_mode tmode = GET_MODE (target);
2093 machine_mode copy_mode;
2094
2095 /* BLKmode registers created in the back-end shouldn't have survived. */
2096 gcc_assert (mode != BLKmode);
2097
2098 /* If the structure doesn't take up a whole number of words, see whether
2099 SRCREG is padded on the left or on the right. If it's on the left,
2100 set PADDING_CORRECTION to the number of bits to skip.
2101
2102 In most ABIs, the structure will be returned at the least significant end of
2103 the register, which translates to right padding on little-endian
2104 targets and left padding on big-endian targets. The opposite
2105 holds if the structure is returned at the most significant
2106 end of the register. */
2107 if (bytes % UNITS_PER_WORD != 0
2108 && (targetm.calls.return_in_msb (type)
2109 ? !BYTES_BIG_ENDIAN
2110 : BYTES_BIG_ENDIAN))
2111 padding_correction
2112 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
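
/* For example, with BITS_PER_WORD == 32 and left padding (e.g. a
   big-endian target returning the value at the least significant end),
   a 3-byte structure gives padding_correction = 32 - 3 * 8 = 8 bits
   to skip.  */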
2113
2114 /* We can use a single move if we have an exact mode for the size. */
2115 else if (MEM_P (target)
2116 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2117 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2118 && bytes == GET_MODE_SIZE (mode))
2119 {
2120 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2121 return;
2122 }
2123
2124 /* And if we additionally have the same mode for a register. */
2125 else if (REG_P (target)
2126 && GET_MODE (target) == mode
2127 && bytes == GET_MODE_SIZE (mode))
2128 {
2129 emit_move_insn (target, srcreg);
2130 return;
2131 }
2132
2133 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2134 into a new pseudo which is a full word. */
2135 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2136 {
2137 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2138 mode = word_mode;
2139 }
2140
2141 /* Copy the structure BITSIZE bits at a time. If the target lives in
2142 memory, take care of not reading/writing past its end by selecting
2143 a copy mode suited to BITSIZE. This should always be possible given
2144 how it is computed.
2145
2146 If the target lives in a register, make sure not to select a copy mode
2147 larger than the mode of the register.
2148
2149 We could probably emit more efficient code for machines which do not use
2150 strict alignment, but it doesn't seem worth the effort at the current
2151 time. */
2152
2153 copy_mode = word_mode;
2154 if (MEM_P (target))
2155 {
2156 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2157 if (mem_mode != BLKmode)
2158 copy_mode = mem_mode;
2159 }
2160 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2161 copy_mode = tmode;
2162
2163 for (bitpos = 0, xbitpos = padding_correction;
2164 bitpos < bytes * BITS_PER_UNIT;
2165 bitpos += bitsize, xbitpos += bitsize)
2166 {
2167 /* We need a new source operand each time xbitpos is on a
2168 word boundary and when xbitpos == padding_correction
2169 (the first time through). */
2170 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2171 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2172
2173 /* We need a new destination operand each time bitpos is on
2174 a word boundary. */
2175 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2176 dst = target;
2177 else if (bitpos % BITS_PER_WORD == 0)
2178 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2179
2180 /* Use xbitpos for the source extraction (right justified) and
2181 bitpos for the destination store (left justified). */
2182 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2183 extract_bit_field (src, bitsize,
2184 xbitpos % BITS_PER_WORD, 1,
2185 NULL_RTX, copy_mode, copy_mode,
2186 false),
2187 false);
2188 }
2189 }
2190
2191 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2192 register if it contains any data, otherwise return null.
2193
2194 This is used on targets that return BLKmode values in registers. */
2195
2196 rtx
2197 copy_blkmode_to_reg (machine_mode mode, tree src)
2198 {
2199 int i, n_regs;
2200 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2201 unsigned int bitsize;
2202 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2203 machine_mode dst_mode;
2204
2205 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2206
2207 x = expand_normal (src);
2208
2209 bytes = int_size_in_bytes (TREE_TYPE (src));
2210 if (bytes == 0)
2211 return NULL_RTX;
2212
2213 /* If the structure doesn't take up a whole number of words, see
2214 whether the register value should be padded on the left or on
2215 the right. Set PADDING_CORRECTION to the number of padding
2216 bits needed on the left side.
2217
2218 In most ABIs, the structure will be returned at the least significant end of
2219 the register, which translates to right padding on little-endian
2220 targets and left padding on big-endian targets. The opposite
2221 holds if the structure is returned at the most significant
2222 end of the register. */
2223 if (bytes % UNITS_PER_WORD != 0
2224 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2225 ? !BYTES_BIG_ENDIAN
2226 : BYTES_BIG_ENDIAN))
2227 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2228 * BITS_PER_UNIT));
2229
2230 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2231 dst_words = XALLOCAVEC (rtx, n_regs);
2232 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2233
2234 /* Copy the structure BITSIZE bits at a time. */
2235 for (bitpos = 0, xbitpos = padding_correction;
2236 bitpos < bytes * BITS_PER_UNIT;
2237 bitpos += bitsize, xbitpos += bitsize)
2238 {
2239 /* We need a new destination pseudo each time xbitpos is
2240 on a word boundary and when xbitpos == padding_correction
2241 (the first time through). */
2242 if (xbitpos % BITS_PER_WORD == 0
2243 || xbitpos == padding_correction)
2244 {
2245 /* Generate an appropriate register. */
2246 dst_word = gen_reg_rtx (word_mode);
2247 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2248
2249 /* Clear the destination before we move anything into it. */
2250 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2251 }
2252
2253 /* We need a new source operand each time bitpos is on a word
2254 boundary. */
2255 if (bitpos % BITS_PER_WORD == 0)
2256 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2257
2258 /* Use bitpos for the source extraction (left justified) and
2259 xbitpos for the destination store (right justified). */
2260 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2261 0, 0, word_mode,
2262 extract_bit_field (src_word, bitsize,
2263 bitpos % BITS_PER_WORD, 1,
2264 NULL_RTX, word_mode, word_mode,
2265 false),
2266 false);
2267 }
2268
2269 if (mode == BLKmode)
2270 {
2271 /* Find the smallest integer mode large enough to hold the
2272 entire structure. */
2273 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2274 mode != VOIDmode;
2275 mode = GET_MODE_WIDER_MODE (mode))
2276 /* Have we found a large enough mode? */
2277 if (GET_MODE_SIZE (mode) >= bytes)
2278 break;
2279
2280 /* A suitable mode should have been found. */
2281 gcc_assert (mode != VOIDmode);
2282 }
2283
2284 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2285 dst_mode = word_mode;
2286 else
2287 dst_mode = mode;
2288 dst = gen_reg_rtx (dst_mode);
2289
2290 for (i = 0; i < n_regs; i++)
2291 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2292
2293 if (mode != dst_mode)
2294 dst = gen_lowpart (mode, dst);
2295
2296 return dst;
2297 }
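
/* A minimal usage sketch (illustrative only): build the register image
   of a BLKmode return value.  MODE would normally come from the
   target's return-value conventions; the helper name is hypothetical.  */
#if 0
static rtx
copy_blkmode_to_reg_usage_example (machine_mode mode, tree retval)
{
  gcc_assert (TYPE_MODE (TREE_TYPE (retval)) == BLKmode);
  return copy_blkmode_to_reg (mode, retval);
}
#endif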
2298
2299 /* Add a USE expression for REG to the (possibly empty) list pointed
2300 to by CALL_FUSAGE. REG must denote a hard register. */
2301
2302 void
2303 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2304 {
2305 gcc_assert (REG_P (reg));
2306
2307 if (!HARD_REGISTER_P (reg))
2308 return;
2309
2310 *call_fusage
2311 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2312 }
2313
2314 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2315 to by CALL_FUSAGE. REG must denote a hard register. */
2316
2317 void
2318 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2319 {
2320 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2321
2322 *call_fusage
2323 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2324 }
2325
2326 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2327 starting at REGNO. All of these registers must be hard registers. */
2328
2329 void
2330 use_regs (rtx *call_fusage, int regno, int nregs)
2331 {
2332 int i;
2333
2334 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2335
2336 for (i = 0; i < nregs; i++)
2337 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2338 }
2339
2340 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2341 PARALLEL REGS. This is for calls that pass values in multiple
2342 non-contiguous locations. The Irix 6 ABI has examples of this. */
2343
2344 void
2345 use_group_regs (rtx *call_fusage, rtx regs)
2346 {
2347 int i;
2348
2349 for (i = 0; i < XVECLEN (regs, 0); i++)
2350 {
2351 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2352
2353 /* A NULL entry means the parameter goes both on the stack and in
2354 registers. This can also be a MEM for targets that pass values
2355 partially on the stack and partially in registers. */
2356 if (reg != 0 && REG_P (reg))
2357 use_reg (call_fusage, reg);
2358 }
2359 }
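
/* A minimal sketch (illustrative only) of building the function-usage
   list for a call: record one plain hard register plus every register
   mentioned in a PARALLEL argument.  The register number and helper
   name are hypothetical; real callers attach the result to the call
   insn's CALL_INSN_FUNCTION_USAGE.  */
#if 0
static rtx
call_fusage_usage_example (rtx parallel_arg)
{
  rtx call_fusage = NULL_RTX;

  use_reg (&call_fusage, gen_rtx_REG (word_mode, 0));
  use_group_regs (&call_fusage, parallel_arg);
  return call_fusage;
}
#endif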
2360
2361 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2362 assignment and the code of the expression on the RHS is CODE. Return
2363 NULL otherwise. */
2364
2365 static gimple *
2366 get_def_for_expr (tree name, enum tree_code code)
2367 {
2368 gimple *def_stmt;
2369
2370 if (TREE_CODE (name) != SSA_NAME)
2371 return NULL;
2372
2373 def_stmt = get_gimple_for_ssa_name (name);
2374 if (!def_stmt
2375 || gimple_assign_rhs_code (def_stmt) != code)
2376 return NULL;
2377
2378 return def_stmt;
2379 }
2380
2381 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2382 assignment and the class of the expression on the RHS is CLASS. Return
2383 NULL otherwise. */
2384
2385 static gimple *
2386 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2387 {
2388 gimple *def_stmt;
2389
2390 if (TREE_CODE (name) != SSA_NAME)
2391 return NULL;
2392
2393 def_stmt = get_gimple_for_ssa_name (name);
2394 if (!def_stmt
2395 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2396 return NULL;
2397
2398 return def_stmt;
2399 }
2400 \f
2401
2402 /* Determine whether the LEN bytes generated by CONSTFUN can be
2403 stored to memory using several move instructions. CONSTFUNDATA is
2404 a pointer which will be passed as argument in every CONSTFUN call.
2405 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2406 a memset operation and false if it's a copy of a constant string.
2407 Return nonzero if a call to store_by_pieces should succeed. */
2408
2409 int
2410 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2411 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2412 void *constfundata, unsigned int align, bool memsetp)
2413 {
2414 unsigned HOST_WIDE_INT l;
2415 unsigned int max_size;
2416 HOST_WIDE_INT offset = 0;
2417 machine_mode mode;
2418 enum insn_code icode;
2419 int reverse;
2420 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2421 rtx cst ATTRIBUTE_UNUSED;
2422
2423 if (len == 0)
2424 return 1;
2425
2426 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2427 memsetp
2428 ? SET_BY_PIECES
2429 : STORE_BY_PIECES,
2430 optimize_insn_for_speed_p ()))
2431 return 0;
2432
2433 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2434
2435 /* We would first store what we can in the largest integer mode, then go to
2436 successively smaller modes. */
2437
2438 for (reverse = 0;
2439 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2440 reverse++)
2441 {
2442 l = len;
2443 max_size = STORE_MAX_PIECES + 1;
2444 while (max_size > 1 && l > 0)
2445 {
2446 mode = widest_int_mode_for_size (max_size);
2447
2448 if (mode == VOIDmode)
2449 break;
2450
2451 icode = optab_handler (mov_optab, mode);
2452 if (icode != CODE_FOR_nothing
2453 && align >= GET_MODE_ALIGNMENT (mode))
2454 {
2455 unsigned int size = GET_MODE_SIZE (mode);
2456
2457 while (l >= size)
2458 {
2459 if (reverse)
2460 offset -= size;
2461
2462 cst = (*constfun) (constfundata, offset, mode);
2463 if (!targetm.legitimate_constant_p (mode, cst))
2464 return 0;
2465
2466 if (!reverse)
2467 offset += size;
2468
2469 l -= size;
2470 }
2471 }
2472
2473 max_size = GET_MODE_SIZE (mode);
2474 }
2475
2476 /* The code above should have handled everything. */
2477 gcc_assert (!l);
2478 }
2479
2480 return 1;
2481 }
2482
2483 /* Generate several move instructions to store LEN bytes generated by
2484 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2485 pointer which will be passed as argument in every CONSTFUN call.
2486 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2487 a memset operation and false if it's a copy of a constant string.
2488 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2489 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2490 stpcpy. */
2491
2492 rtx
2493 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2494 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2495 void *constfundata, unsigned int align, bool memsetp, int endp)
2496 {
2497 machine_mode to_addr_mode = get_address_mode (to);
2498 struct store_by_pieces_d data;
2499
2500 if (len == 0)
2501 {
2502 gcc_assert (endp != 2);
2503 return to;
2504 }
2505
2506 gcc_assert (targetm.use_by_pieces_infrastructure_p
2507 (len, align,
2508 memsetp
2509 ? SET_BY_PIECES
2510 : STORE_BY_PIECES,
2511 optimize_insn_for_speed_p ()));
2512
2513 data.constfun = constfun;
2514 data.constfundata = constfundata;
2515 data.len = len;
2516 data.to = to;
2517 store_by_pieces_1 (&data, align);
2518 if (endp)
2519 {
2520 rtx to1;
2521
2522 gcc_assert (!data.reverse);
2523 if (data.autinc_to)
2524 {
2525 if (endp == 2)
2526 {
2527 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2528 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2529 else
2530 data.to_addr = copy_to_mode_reg (to_addr_mode,
2531 plus_constant (to_addr_mode,
2532 data.to_addr,
2533 -1));
2534 }
2535 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2536 data.offset);
2537 }
2538 else
2539 {
2540 if (endp == 2)
2541 --data.offset;
2542 to1 = adjust_address (data.to, QImode, data.offset);
2543 }
2544 return to1;
2545 }
2546 else
2547 return data.to;
2548 }
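
/* A minimal sketch (illustrative only) of the CONSTFUN contract: the
   callback returns, as an rtx in MODE, the constant that belongs at
   byte OFFSET of the block.  This one produces zero bytes, which is
   essentially what clear_by_pieces does below; a constant-string copy
   would instead read its CONSTFUNDATA at OFFSET.  Helper names are
   hypothetical.  */
#if 0
static rtx
zero_constfun (void *data ATTRIBUTE_UNUSED,
               HOST_WIDE_INT offset ATTRIBUTE_UNUSED, machine_mode mode)
{
  return CONST0_RTX (mode);
}

static void
store_zeros_by_pieces_example (rtx blk_mem, unsigned HOST_WIDE_INT len)
{
  unsigned int align = MEM_ALIGN (blk_mem);

  if (can_store_by_pieces (len, zero_constfun, NULL, align, true))
    /* ENDP == 0: the return value is just BLK_MEM itself.  */
    store_by_pieces (blk_mem, len, zero_constfun, NULL, align, true, 0);
}
#endif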
2549
2550 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2551 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2552
2553 static void
2554 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2555 {
2556 struct store_by_pieces_d data;
2557
2558 if (len == 0)
2559 return;
2560
2561 data.constfun = clear_by_pieces_1;
2562 data.constfundata = NULL;
2563 data.len = len;
2564 data.to = to;
2565 store_by_pieces_1 (&data, align);
2566 }
2567
2568 /* Callback routine for clear_by_pieces.
2569 Return const0_rtx unconditionally. */
2570
2571 static rtx
2572 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2573 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2574 machine_mode mode ATTRIBUTE_UNUSED)
2575 {
2576 return const0_rtx;
2577 }
2578
2579 /* Subroutine of clear_by_pieces and store_by_pieces.
2580 Generate several move instructions to store LEN bytes of block TO. (A MEM
2581 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2582
2583 static void
2584 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2585 unsigned int align ATTRIBUTE_UNUSED)
2586 {
2587 machine_mode to_addr_mode = get_address_mode (data->to);
2588 rtx to_addr = XEXP (data->to, 0);
2589 unsigned int max_size = STORE_MAX_PIECES + 1;
2590 enum insn_code icode;
2591
2592 data->offset = 0;
2593 data->to_addr = to_addr;
2594 data->autinc_to
2595 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2596 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2597
2598 data->explicit_inc_to = 0;
2599 data->reverse
2600 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2601 if (data->reverse)
2602 data->offset = data->len;
2603
2604 /* If storing requires more than two move insns,
2605 copy addresses to registers (to make displacements shorter)
2606 and use post-increment if available. */
2607 if (!data->autinc_to
2608 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2609 {
2610 /* Determine the main mode we'll be using.
2611 MODE might not be used depending on the definitions of the
2612 USE_* macros below. */
2613 machine_mode mode ATTRIBUTE_UNUSED
2614 = widest_int_mode_for_size (max_size);
2615
2616 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2617 {
2618 data->to_addr = copy_to_mode_reg (to_addr_mode,
2619 plus_constant (to_addr_mode,
2620 to_addr,
2621 data->len));
2622 data->autinc_to = 1;
2623 data->explicit_inc_to = -1;
2624 }
2625
2626 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2627 && ! data->autinc_to)
2628 {
2629 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2630 data->autinc_to = 1;
2631 data->explicit_inc_to = 1;
2632 }
2633
2634 if (!data->autinc_to && CONSTANT_P (to_addr))
2635 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2636 }
2637
2638 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2639
2640 /* First store what we can in the largest integer mode, then go to
2641 successively smaller modes. */
2642
2643 while (max_size > 1 && data->len > 0)
2644 {
2645 machine_mode mode = widest_int_mode_for_size (max_size);
2646
2647 if (mode == VOIDmode)
2648 break;
2649
2650 icode = optab_handler (mov_optab, mode);
2651 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2652 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2653
2654 max_size = GET_MODE_SIZE (mode);
2655 }
2656
2657 /* The code above should have handled everything. */
2658 gcc_assert (!data->len);
2659 }
2660
2661 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2662 with move instructions for mode MODE. GENFUN is the gen_... function
2663 to make a move insn for that mode. DATA has all the other info. */
2664
2665 static void
2666 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2667 struct store_by_pieces_d *data)
2668 {
2669 unsigned int size = GET_MODE_SIZE (mode);
2670 rtx to1, cst;
2671
2672 while (data->len >= size)
2673 {
2674 if (data->reverse)
2675 data->offset -= size;
2676
2677 if (data->autinc_to)
2678 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2679 data->offset);
2680 else
2681 to1 = adjust_address (data->to, mode, data->offset);
2682
2683 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2684 emit_insn (gen_add2_insn (data->to_addr,
2685 gen_int_mode (-(HOST_WIDE_INT) size,
2686 GET_MODE (data->to_addr))));
2687
2688 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2689 emit_insn ((*genfun) (to1, cst));
2690
2691 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2692 emit_insn (gen_add2_insn (data->to_addr,
2693 gen_int_mode (size,
2694 GET_MODE (data->to_addr))));
2695
2696 if (! data->reverse)
2697 data->offset += size;
2698
2699 data->len -= size;
2700 }
2701 }
2702 \f
2703 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2704 its length in bytes. */
2705
2706 rtx
2707 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2708 unsigned int expected_align, HOST_WIDE_INT expected_size,
2709 unsigned HOST_WIDE_INT min_size,
2710 unsigned HOST_WIDE_INT max_size,
2711 unsigned HOST_WIDE_INT probable_max_size)
2712 {
2713 machine_mode mode = GET_MODE (object);
2714 unsigned int align;
2715
2716 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2717
2718 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2719 just move a zero. Otherwise, do this a piece at a time. */
2720 if (mode != BLKmode
2721 && CONST_INT_P (size)
2722 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2723 {
2724 rtx zero = CONST0_RTX (mode);
2725 if (zero != NULL)
2726 {
2727 emit_move_insn (object, zero);
2728 return NULL;
2729 }
2730
2731 if (COMPLEX_MODE_P (mode))
2732 {
2733 zero = CONST0_RTX (GET_MODE_INNER (mode));
2734 if (zero != NULL)
2735 {
2736 write_complex_part (object, zero, 0);
2737 write_complex_part (object, zero, 1);
2738 return NULL;
2739 }
2740 }
2741 }
2742
2743 if (size == const0_rtx)
2744 return NULL;
2745
2746 align = MEM_ALIGN (object);
2747
2748 if (CONST_INT_P (size)
2749 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2750 CLEAR_BY_PIECES,
2751 optimize_insn_for_speed_p ()))
2752 clear_by_pieces (object, INTVAL (size), align);
2753 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2754 expected_align, expected_size,
2755 min_size, max_size, probable_max_size))
2756 ;
2757 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2758 return set_storage_via_libcall (object, size, const0_rtx,
2759 method == BLOCK_OP_TAILCALL);
2760 else
2761 gcc_unreachable ();
2762
2763 return NULL;
2764 }
2765
2766 rtx
2767 clear_storage (rtx object, rtx size, enum block_op_methods method)
2768 {
2769 unsigned HOST_WIDE_INT max, min = 0;
2770 if (GET_CODE (size) == CONST_INT)
2771 min = max = UINTVAL (size);
2772 else
2773 max = GET_MODE_MASK (GET_MODE (size));
2774 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2775 }
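
/* A minimal usage sketch (illustrative only): zero LEN bytes of a
   BLKmode MEM.  With a compile-time constant length the call normally
   expands to clear_by_pieces or a setmem pattern rather than a libcall.
   The helper name is hypothetical.  */
#if 0
static void
clear_storage_usage_example (rtx blk_mem, HOST_WIDE_INT len)
{
  clear_storage (blk_mem, GEN_INT (len), BLOCK_OP_NORMAL);
}
#endif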
2776
2777
2778 /* A subroutine of clear_storage. Expand a call to memset.
2779 Return the return value of memset, 0 otherwise. */
2780
2781 rtx
2782 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2783 {
2784 tree call_expr, fn, object_tree, size_tree, val_tree;
2785 machine_mode size_mode;
2786 rtx retval;
2787
2788 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2789 place those new pseudos into a VAR_DECL and use them later. */
2790
2791 object = copy_addr_to_reg (XEXP (object, 0));
2792
2793 size_mode = TYPE_MODE (sizetype);
2794 size = convert_to_mode (size_mode, size, 1);
2795 size = copy_to_mode_reg (size_mode, size);
2796
2797 /* It is incorrect to use the libcall calling conventions to call
2798 memset in this context. This could be a user call to memset and
2799 the user may wish to examine the return value from memset. For
2800 targets where libcalls and normal calls have different conventions
2801 for returning pointers, we could end up generating incorrect code. */
2802
2803 object_tree = make_tree (ptr_type_node, object);
2804 if (!CONST_INT_P (val))
2805 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2806 size_tree = make_tree (sizetype, size);
2807 val_tree = make_tree (integer_type_node, val);
2808
2809 fn = clear_storage_libcall_fn (true);
2810 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2811 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2812
2813 retval = expand_normal (call_expr);
2814
2815 return retval;
2816 }
2817
2818 /* A subroutine of set_storage_via_libcall. Create the tree node
2819 for the function we use for block clears. */
2820
2821 tree block_clear_fn;
2822
2823 void
2824 init_block_clear_fn (const char *asmspec)
2825 {
2826 if (!block_clear_fn)
2827 {
2828 tree fn, args;
2829
2830 fn = get_identifier ("memset");
2831 args = build_function_type_list (ptr_type_node, ptr_type_node,
2832 integer_type_node, sizetype,
2833 NULL_TREE);
2834
2835 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2836 DECL_EXTERNAL (fn) = 1;
2837 TREE_PUBLIC (fn) = 1;
2838 DECL_ARTIFICIAL (fn) = 1;
2839 TREE_NOTHROW (fn) = 1;
2840 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2841 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2842
2843 block_clear_fn = fn;
2844 }
2845
2846 if (asmspec)
2847 set_user_assembler_name (block_clear_fn, asmspec);
2848 }
2849
2850 static tree
2851 clear_storage_libcall_fn (int for_call)
2852 {
2853 static bool emitted_extern;
2854
2855 if (!block_clear_fn)
2856 init_block_clear_fn (NULL);
2857
2858 if (for_call && !emitted_extern)
2859 {
2860 emitted_extern = true;
2861 make_decl_rtl (block_clear_fn);
2862 }
2863
2864 return block_clear_fn;
2865 }
2866 \f
2867 /* Expand a setmem pattern; return true if successful. */
2868
2869 bool
2870 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2871 unsigned int expected_align, HOST_WIDE_INT expected_size,
2872 unsigned HOST_WIDE_INT min_size,
2873 unsigned HOST_WIDE_INT max_size,
2874 unsigned HOST_WIDE_INT probable_max_size)
2875 {
2876 /* Try the most limited insn first, because there's no point
2877 including more than one in the machine description unless
2878 the more limited one has some advantage. */
2879
2880 machine_mode mode;
2881
2882 if (expected_align < align)
2883 expected_align = align;
2884 if (expected_size != -1)
2885 {
2886 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2887 expected_size = max_size;
2888 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2889 expected_size = min_size;
2890 }
2891
2892 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2893 mode = GET_MODE_WIDER_MODE (mode))
2894 {
2895 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2896
2897 if (code != CODE_FOR_nothing
2898 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2899 here because if SIZE is less than the mode mask, as it is
2900 returned by the macro, it will definitely be less than the
2901 actual mode mask. Since SIZE is within the Pmode address
2902 space, we limit MODE to Pmode. */
2903 && ((CONST_INT_P (size)
2904 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2905 <= (GET_MODE_MASK (mode) >> 1)))
2906 || max_size <= (GET_MODE_MASK (mode) >> 1)
2907 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2908 {
2909 struct expand_operand ops[9];
2910 unsigned int nops;
2911
2912 nops = insn_data[(int) code].n_generator_args;
2913 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2914
2915 create_fixed_operand (&ops[0], object);
2916 /* The check above guarantees that this size conversion is valid. */
2917 create_convert_operand_to (&ops[1], size, mode, true);
2918 create_convert_operand_from (&ops[2], val, byte_mode, true);
2919 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2920 if (nops >= 6)
2921 {
2922 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2923 create_integer_operand (&ops[5], expected_size);
2924 }
2925 if (nops >= 8)
2926 {
2927 create_integer_operand (&ops[6], min_size);
2928 /* If we cannot represent the maximal size,
2929 make the parameter NULL. */
2930 if ((HOST_WIDE_INT) max_size != -1)
2931 create_integer_operand (&ops[7], max_size);
2932 else
2933 create_fixed_operand (&ops[7], NULL);
2934 }
2935 if (nops == 9)
2936 {
2937 /* If we cannot represent the maximal size,
2938 make the parameter NULL. */
2939 if ((HOST_WIDE_INT) probable_max_size != -1)
2940 create_integer_operand (&ops[8], probable_max_size);
2941 else
2942 create_fixed_operand (&ops[8], NULL);
2943 }
2944 if (maybe_expand_insn (code, nops, ops))
2945 return true;
2946 }
2947 }
2948
2949 return false;
2950 }
2951
2952 \f
2953 /* Write to one of the components of the complex value CPLX. Write VAL to
2954 the real part if IMAG_P is false, and the imaginary part if it's true. */
2955
2956 void
2957 write_complex_part (rtx cplx, rtx val, bool imag_p)
2958 {
2959 machine_mode cmode;
2960 machine_mode imode;
2961 unsigned ibitsize;
2962
2963 if (GET_CODE (cplx) == CONCAT)
2964 {
2965 emit_move_insn (XEXP (cplx, imag_p), val);
2966 return;
2967 }
2968
2969 cmode = GET_MODE (cplx);
2970 imode = GET_MODE_INNER (cmode);
2971 ibitsize = GET_MODE_BITSIZE (imode);
2972
2973 /* For MEMs simplify_gen_subreg may generate an invalid new address
2974 because, e.g., the original address is considered mode-dependent
2975 by the target, which restricts simplify_subreg from invoking
2976 adjust_address_nv. Instead of preparing fallback support for an
2977 invalid address, we call adjust_address_nv directly. */
2978 if (MEM_P (cplx))
2979 {
2980 emit_move_insn (adjust_address_nv (cplx, imode,
2981 imag_p ? GET_MODE_SIZE (imode) : 0),
2982 val);
2983 return;
2984 }
2985
2986 /* If the sub-object is at least word sized, then we know that subregging
2987 will work. This special case is important, since store_bit_field
2988 wants to operate on integer modes, and there's rarely an OImode to
2989 correspond to TCmode. */
2990 if (ibitsize >= BITS_PER_WORD
2991 /* For hard regs we have exact predicates. Assume we can split
2992 the original object if it spans an even number of hard regs.
2993 This special case is important for SCmode on 64-bit platforms
2994 where the natural size of floating-point regs is 32-bit. */
2995 || (REG_P (cplx)
2996 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2997 && REG_NREGS (cplx) % 2 == 0))
2998 {
2999 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3000 imag_p ? GET_MODE_SIZE (imode) : 0);
3001 if (part)
3002 {
3003 emit_move_insn (part, val);
3004 return;
3005 }
3006 else
3007 /* simplify_gen_subreg may fail for sub-word MEMs. */
3008 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3009 }
3010
3011 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
3012 false);
3013 }
3014
3015 /* Extract one of the components of the complex value CPLX. Extract the
3016 real part if IMAG_P is false, and the imaginary part if it's true. */
3017
3018 rtx
3019 read_complex_part (rtx cplx, bool imag_p)
3020 {
3021 machine_mode cmode, imode;
3022 unsigned ibitsize;
3023
3024 if (GET_CODE (cplx) == CONCAT)
3025 return XEXP (cplx, imag_p);
3026
3027 cmode = GET_MODE (cplx);
3028 imode = GET_MODE_INNER (cmode);
3029 ibitsize = GET_MODE_BITSIZE (imode);
3030
3031 /* Special case reads from complex constants that got spilled to memory. */
3032 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3033 {
3034 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3035 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3036 {
3037 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3038 if (CONSTANT_CLASS_P (part))
3039 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3040 }
3041 }
3042
3043 /* For MEMs simplify_gen_subreg may generate an invalid new address
3044 because, e.g., the original address is considered mode-dependent
3045 by the target, which restricts simplify_subreg from invoking
3046 adjust_address_nv. Instead of preparing fallback support for an
3047 invalid address, we call adjust_address_nv directly. */
3048 if (MEM_P (cplx))
3049 return adjust_address_nv (cplx, imode,
3050 imag_p ? GET_MODE_SIZE (imode) : 0);
3051
3052 /* If the sub-object is at least word sized, then we know that subregging
3053 will work. This special case is important, since extract_bit_field
3054 wants to operate on integer modes, and there's rarely an OImode to
3055 correspond to TCmode. */
3056 if (ibitsize >= BITS_PER_WORD
3057 /* For hard regs we have exact predicates. Assume we can split
3058 the original object if it spans an even number of hard regs.
3059 This special case is important for SCmode on 64-bit platforms
3060 where the natural size of floating-point regs is 32-bit. */
3061 || (REG_P (cplx)
3062 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3063 && REG_NREGS (cplx) % 2 == 0))
3064 {
3065 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3066 imag_p ? GET_MODE_SIZE (imode) : 0);
3067 if (ret)
3068 return ret;
3069 else
3070 /* simplify_gen_subreg may fail for sub-word MEMs. */
3071 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3072 }
3073
3074 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3075 true, NULL_RTX, imode, imode, false);
3076 }
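
/* A minimal usage sketch (illustrative only): swap the real and
   imaginary parts of a complex value.  The parts are copied into fresh
   pseudos first so that the second write does not see the first one's
   result.  The helper name is hypothetical.  */
#if 0
static rtx
swap_complex_parts_example (rtx cplx)
{
  machine_mode imode = GET_MODE_INNER (GET_MODE (cplx));
  rtx re = force_reg (imode, read_complex_part (cplx, false));
  rtx im = force_reg (imode, read_complex_part (cplx, true));

  write_complex_part (cplx, im, false);
  write_complex_part (cplx, re, true);
  return cplx;
}
#endif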
3077 \f
3078 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3079 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3080 represented in NEW_MODE. If FORCE is true, this will never happen, as
3081 we'll force-create a SUBREG if needed. */
3082
3083 static rtx
3084 emit_move_change_mode (machine_mode new_mode,
3085 machine_mode old_mode, rtx x, bool force)
3086 {
3087 rtx ret;
3088
3089 if (push_operand (x, GET_MODE (x)))
3090 {
3091 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3092 MEM_COPY_ATTRIBUTES (ret, x);
3093 }
3094 else if (MEM_P (x))
3095 {
3096 /* We don't have to worry about changing the address since the
3097 size in bytes is supposed to be the same. */
3098 if (reload_in_progress)
3099 {
3100 /* Copy the MEM to change the mode and move any
3101 substitutions from the old MEM to the new one. */
3102 ret = adjust_address_nv (x, new_mode, 0);
3103 copy_replacements (x, ret);
3104 }
3105 else
3106 ret = adjust_address (x, new_mode, 0);
3107 }
3108 else
3109 {
3110 /* Note that we do want simplify_subreg's behavior of validating
3111 that the new mode is ok for a hard register. If we were to use
3112 simplify_gen_subreg, we would create the subreg, but would
3113 probably run into the target not being able to implement it. */
3114 /* Except, of course, when FORCE is true, when this is exactly what
3115 we want. Which is needed for CCmodes on some targets. */
3116 if (force)
3117 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3118 else
3119 ret = simplify_subreg (new_mode, x, old_mode, 0);
3120 }
3121
3122 return ret;
3123 }
3124
3125 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3126 an integer mode of the same size as MODE. Returns the instruction
3127 emitted, or NULL if such a move could not be generated. */
3128
3129 static rtx_insn *
3130 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3131 {
3132 machine_mode imode;
3133 enum insn_code code;
3134
3135 /* There must exist a mode of the exact size we require. */
3136 imode = int_mode_for_mode (mode);
3137 if (imode == BLKmode)
3138 return NULL;
3139
3140 /* The target must support moves in this mode. */
3141 code = optab_handler (mov_optab, imode);
3142 if (code == CODE_FOR_nothing)
3143 return NULL;
3144
3145 x = emit_move_change_mode (imode, mode, x, force);
3146 if (x == NULL_RTX)
3147 return NULL;
3148 y = emit_move_change_mode (imode, mode, y, force);
3149 if (y == NULL_RTX)
3150 return NULL;
3151 return emit_insn (GEN_FCN (code) (x, y));
3152 }
3153
3154 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3155 Return an equivalent MEM that does not use an auto-increment. */
3156
3157 rtx
3158 emit_move_resolve_push (machine_mode mode, rtx x)
3159 {
3160 enum rtx_code code = GET_CODE (XEXP (x, 0));
3161 HOST_WIDE_INT adjust;
3162 rtx temp;
3163
3164 adjust = GET_MODE_SIZE (mode);
3165 #ifdef PUSH_ROUNDING
3166 adjust = PUSH_ROUNDING (adjust);
3167 #endif
3168 if (code == PRE_DEC || code == POST_DEC)
3169 adjust = -adjust;
3170 else if (code == PRE_MODIFY || code == POST_MODIFY)
3171 {
3172 rtx expr = XEXP (XEXP (x, 0), 1);
3173 HOST_WIDE_INT val;
3174
3175 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3176 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3177 val = INTVAL (XEXP (expr, 1));
3178 if (GET_CODE (expr) == MINUS)
3179 val = -val;
3180 gcc_assert (adjust == val || adjust == -val);
3181 adjust = val;
3182 }
3183
3184 /* Do not use anti_adjust_stack, since we don't want to update
3185 stack_pointer_delta. */
3186 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3187 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3188 0, OPTAB_LIB_WIDEN);
3189 if (temp != stack_pointer_rtx)
3190 emit_move_insn (stack_pointer_rtx, temp);
3191
3192 switch (code)
3193 {
3194 case PRE_INC:
3195 case PRE_DEC:
3196 case PRE_MODIFY:
3197 temp = stack_pointer_rtx;
3198 break;
3199 case POST_INC:
3200 case POST_DEC:
3201 case POST_MODIFY:
3202 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3203 break;
3204 default:
3205 gcc_unreachable ();
3206 }
3207
3208 return replace_equiv_address (x, temp);
3209 }
3210
3211 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3212 X is known to satisfy push_operand, and MODE is known to be complex.
3213 Returns the last instruction emitted. */
3214
3215 rtx_insn *
3216 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3217 {
3218 machine_mode submode = GET_MODE_INNER (mode);
3219 bool imag_first;
3220
3221 #ifdef PUSH_ROUNDING
3222 unsigned int submodesize = GET_MODE_SIZE (submode);
3223
3224 /* In case we output to the stack, but the size is smaller than what
3225 the machine can push exactly, we need to use move instructions. */
3226 if (PUSH_ROUNDING (submodesize) != submodesize)
3227 {
3228 x = emit_move_resolve_push (mode, x);
3229 return emit_move_insn (x, y);
3230 }
3231 #endif
3232
3233 /* Note that the real part always precedes the imag part in memory
3234 regardless of the machine's endianness. */
3235 switch (GET_CODE (XEXP (x, 0)))
3236 {
3237 case PRE_DEC:
3238 case POST_DEC:
3239 imag_first = true;
3240 break;
3241 case PRE_INC:
3242 case POST_INC:
3243 imag_first = false;
3244 break;
3245 default:
3246 gcc_unreachable ();
3247 }
3248
3249 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3250 read_complex_part (y, imag_first));
3251 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3252 read_complex_part (y, !imag_first));
3253 }
3254
3255 /* A subroutine of emit_move_complex. Perform the move from Y to X
3256 via two moves of the parts. Returns the last instruction emitted. */
3257
3258 rtx_insn *
3259 emit_move_complex_parts (rtx x, rtx y)
3260 {
3261 /* Show the output dies here. This is necessary for SUBREGs
3262 of pseudos since we cannot track their lifetimes correctly;
3263 hard regs shouldn't appear here except as return values. */
3264 if (!reload_completed && !reload_in_progress
3265 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3266 emit_clobber (x);
3267
3268 write_complex_part (x, read_complex_part (y, false), false);
3269 write_complex_part (x, read_complex_part (y, true), true);
3270
3271 return get_last_insn ();
3272 }
3273
3274 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3275 MODE is known to be complex. Returns the last instruction emitted. */
3276
3277 static rtx_insn *
3278 emit_move_complex (machine_mode mode, rtx x, rtx y)
3279 {
3280 bool try_int;
3281
3282 /* Need to take special care for pushes, to maintain proper ordering
3283 of the data, and possibly extra padding. */
3284 if (push_operand (x, mode))
3285 return emit_move_complex_push (mode, x, y);
3286
3287 /* See if we can coerce the target into moving both values at once, except
3288 for floating point where we favor moving as parts if this is easy. */
3289 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3290 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3291 && !(REG_P (x)
3292 && HARD_REGISTER_P (x)
3293 && REG_NREGS (x) == 1)
3294 && !(REG_P (y)
3295 && HARD_REGISTER_P (y)
3296 && REG_NREGS (y) == 1))
3297 try_int = false;
3298 /* Not possible if the values are inherently not adjacent. */
3299 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3300 try_int = false;
3301 /* Is possible if both are registers (or subregs of registers). */
3302 else if (register_operand (x, mode) && register_operand (y, mode))
3303 try_int = true;
3304 /* If one of the operands is a memory, and alignment constraints
3305 are friendly enough, we may be able to do combined memory operations.
3306 We do not attempt this if Y is a constant because that combination is
3307 usually better with the by-parts thing below. */
3308 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3309 && (!STRICT_ALIGNMENT
3310 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3311 try_int = true;
3312 else
3313 try_int = false;
3314
3315 if (try_int)
3316 {
3317 rtx_insn *ret;
3318
3319 /* For memory to memory moves, optimal behavior can be had with the
3320 existing block move logic. */
3321 if (MEM_P (x) && MEM_P (y))
3322 {
3323 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3324 BLOCK_OP_NO_LIBCALL);
3325 return get_last_insn ();
3326 }
3327
3328 ret = emit_move_via_integer (mode, x, y, true);
3329 if (ret)
3330 return ret;
3331 }
3332
3333 return emit_move_complex_parts (x, y);
3334 }
3335
3336 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3337 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3338
3339 static rtx_insn *
3340 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3341 {
3342 rtx_insn *ret;
3343
3344 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3345 if (mode != CCmode)
3346 {
3347 enum insn_code code = optab_handler (mov_optab, CCmode);
3348 if (code != CODE_FOR_nothing)
3349 {
3350 x = emit_move_change_mode (CCmode, mode, x, true);
3351 y = emit_move_change_mode (CCmode, mode, y, true);
3352 return emit_insn (GEN_FCN (code) (x, y));
3353 }
3354 }
3355
3356 /* Otherwise, find the MODE_INT mode of the same width. */
3357 ret = emit_move_via_integer (mode, x, y, false);
3358 gcc_assert (ret != NULL);
3359 return ret;
3360 }
3361
3362 /* Return true if word I of OP lies entirely in the
3363 undefined bits of a paradoxical subreg. */
3364
3365 static bool
3366 undefined_operand_subword_p (const_rtx op, int i)
3367 {
3368 machine_mode innermode, innermostmode;
3369 int offset;
3370 if (GET_CODE (op) != SUBREG)
3371 return false;
3372 innermode = GET_MODE (op);
3373 innermostmode = GET_MODE (SUBREG_REG (op));
3374 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3375 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3376 memory, except for a paradoxical subreg where we define
3377 SUBREG_BYTE to be 0; undo this exception as in
3378 simplify_subreg. */
3379 if (SUBREG_BYTE (op) == 0
3380 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3381 {
3382 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3383 if (WORDS_BIG_ENDIAN)
3384 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3385 if (BYTES_BIG_ENDIAN)
3386 offset += difference % UNITS_PER_WORD;
3387 }
3388 if (offset >= GET_MODE_SIZE (innermostmode)
3389 || offset <= -GET_MODE_SIZE (word_mode))
3390 return true;
3391 return false;
3392 }
3393
3394 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3395 MODE is any multi-word or full-word mode that lacks a move_insn
3396 pattern. Note that you will get better code if you define such
3397 patterns, even if they must turn into multiple assembler instructions. */
3398
3399 static rtx_insn *
3400 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3401 {
3402 rtx_insn *last_insn = 0;
3403 rtx_insn *seq;
3404 rtx inner;
3405 bool need_clobber;
3406 int i;
3407
3408 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3409
3410 /* If X is a push on the stack, do the push now and replace
3411 X with a reference to the stack pointer. */
3412 if (push_operand (x, mode))
3413 x = emit_move_resolve_push (mode, x);
3414
3415 /* If we are in reload, see if either operand is a MEM whose address
3416 is scheduled for replacement. */
3417 if (reload_in_progress && MEM_P (x)
3418 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3419 x = replace_equiv_address_nv (x, inner);
3420 if (reload_in_progress && MEM_P (y)
3421 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3422 y = replace_equiv_address_nv (y, inner);
3423
3424 start_sequence ();
3425
3426 need_clobber = false;
3427 for (i = 0;
3428 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3429 i++)
3430 {
3431 rtx xpart = operand_subword (x, i, 1, mode);
3432 rtx ypart;
3433
3434 /* Do not generate code for a move if it would come entirely
3435 from the undefined bits of a paradoxical subreg. */
3436 if (undefined_operand_subword_p (y, i))
3437 continue;
3438
3439 ypart = operand_subword (y, i, 1, mode);
3440
3441 /* If we can't get a part of Y, put Y into memory if it is a
3442 constant. Otherwise, force it into a register. Then we must
3443 be able to get a part of Y. */
3444 if (ypart == 0 && CONSTANT_P (y))
3445 {
3446 y = use_anchored_address (force_const_mem (mode, y));
3447 ypart = operand_subword (y, i, 1, mode);
3448 }
3449 else if (ypart == 0)
3450 ypart = operand_subword_force (y, i, mode);
3451
3452 gcc_assert (xpart && ypart);
3453
3454 need_clobber |= (GET_CODE (xpart) == SUBREG);
3455
3456 last_insn = emit_move_insn (xpart, ypart);
3457 }
3458
3459 seq = get_insns ();
3460 end_sequence ();
3461
3462 /* Show the output dies here. This is necessary for SUBREGs
3463 of pseudos since we cannot track their lifetimes correctly;
3464 hard regs shouldn't appear here except as return values.
3465 We never want to emit such a clobber after reload. */
3466 if (x != y
3467 && ! (reload_in_progress || reload_completed)
3468 && need_clobber != 0)
3469 emit_clobber (x);
3470
3471 emit_insn (seq);
3472
3473 return last_insn;
3474 }
3475
3476 /* Low level part of emit_move_insn.
3477 Called just like emit_move_insn, but assumes X and Y
3478 are basically valid. */
3479
3480 rtx_insn *
3481 emit_move_insn_1 (rtx x, rtx y)
3482 {
3483 machine_mode mode = GET_MODE (x);
3484 enum insn_code code;
3485
3486 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3487
3488 code = optab_handler (mov_optab, mode);
3489 if (code != CODE_FOR_nothing)
3490 return emit_insn (GEN_FCN (code) (x, y));
3491
3492 /* Expand complex moves by moving real part and imag part. */
3493 if (COMPLEX_MODE_P (mode))
3494 return emit_move_complex (mode, x, y);
3495
3496 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3497 || ALL_FIXED_POINT_MODE_P (mode))
3498 {
3499 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3500
3501 /* If we can't find an integer mode, fall back to a multi-word move. */
3502 if (result)
3503 return result;
3504 else
3505 return emit_move_multi_word (mode, x, y);
3506 }
3507
3508 if (GET_MODE_CLASS (mode) == MODE_CC)
3509 return emit_move_ccmode (mode, x, y);
3510
3511 /* Try using a move pattern for the corresponding integer mode. This is
3512 only safe when simplify_subreg can convert MODE constants into integer
3513 constants. At present, it can only do this reliably if the value
3514 fits within a HOST_WIDE_INT. */
3515 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3516 {
3517 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3518
3519 if (ret)
3520 {
3521 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3522 return ret;
3523 }
3524 }
3525
3526 return emit_move_multi_word (mode, x, y);
3527 }
3528
3529 /* Generate code to copy Y into X.
3530 Both Y and X must have the same mode, except that
3531 Y can be a constant with VOIDmode.
3532 This mode cannot be BLKmode; use emit_block_move for that.
3533
3534 Return the last instruction emitted. */
3535
3536 rtx_insn *
3537 emit_move_insn (rtx x, rtx y)
3538 {
3539 machine_mode mode = GET_MODE (x);
3540 rtx y_cst = NULL_RTX;
3541 rtx_insn *last_insn;
3542 rtx set;
3543
3544 gcc_assert (mode != BLKmode
3545 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3546
3547 if (CONSTANT_P (y))
3548 {
3549 if (optimize
3550 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3551 && (last_insn = compress_float_constant (x, y)))
3552 return last_insn;
3553
3554 y_cst = y;
3555
3556 if (!targetm.legitimate_constant_p (mode, y))
3557 {
3558 y = force_const_mem (mode, y);
3559
3560 /* If the target's cannot_force_const_mem prevented the spill,
3561 assume that the target's move expanders will also take care
3562 of the non-legitimate constant. */
3563 if (!y)
3564 y = y_cst;
3565 else
3566 y = use_anchored_address (y);
3567 }
3568 }
3569
3570 /* If X or Y are memory references, verify that their addresses are valid
3571 for the machine. */
3572 if (MEM_P (x)
3573 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3574 MEM_ADDR_SPACE (x))
3575 && ! push_operand (x, GET_MODE (x))))
3576 x = validize_mem (x);
3577
3578 if (MEM_P (y)
3579 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3580 MEM_ADDR_SPACE (y)))
3581 y = validize_mem (y);
3582
3583 gcc_assert (mode != BLKmode);
3584
3585 last_insn = emit_move_insn_1 (x, y);
3586
3587 if (y_cst && REG_P (x)
3588 && (set = single_set (last_insn)) != NULL_RTX
3589 && SET_DEST (set) == x
3590 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3591 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3592
3593 return last_insn;
3594 }
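
/* As a sketch of the REG_EQUAL bookkeeping above: if Y is a constant the
   target cannot use directly, it is spilled to the constant pool and the
   emitted move might look roughly like

       (set (reg:DF 100) (mem/u:DF (symbol_ref ".LC0")))

   Because the SET_SRC no longer equals the original constant, a REG_EQUAL
   note recording that constant is attached so later passes (CSE, combine)
   still know the register's value.  */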
3595
3596 /* Generate the body of an instruction to copy Y into X.
3597 It may be a list of insns, if one insn isn't enough. */
3598
3599 rtx_insn *
3600 gen_move_insn (rtx x, rtx y)
3601 {
3602 rtx_insn *seq;
3603
3604 start_sequence ();
3605 emit_move_insn_1 (x, y);
3606 seq = get_insns ();
3607 end_sequence ();
3608 return seq;
3609 }
3610
3611 /* If Y is representable exactly in a narrower mode, and the target can
3612 perform the extension directly from constant or memory, then emit the
3613 move as an extension. */
3614
3615 static rtx_insn *
3616 compress_float_constant (rtx x, rtx y)
3617 {
3618 machine_mode dstmode = GET_MODE (x);
3619 machine_mode orig_srcmode = GET_MODE (y);
3620 machine_mode srcmode;
3621 const REAL_VALUE_TYPE *r;
3622 int oldcost, newcost;
3623 bool speed = optimize_insn_for_speed_p ();
3624
3625 r = CONST_DOUBLE_REAL_VALUE (y);
3626
3627 if (targetm.legitimate_constant_p (dstmode, y))
3628 oldcost = set_src_cost (y, orig_srcmode, speed);
3629 else
3630 oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);
3631
3632 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3633 srcmode != orig_srcmode;
3634 srcmode = GET_MODE_WIDER_MODE (srcmode))
3635 {
3636 enum insn_code ic;
3637 rtx trunc_y;
3638 rtx_insn *last_insn;
3639
3640 /* Skip if the target can't extend this way. */
3641 ic = can_extend_p (dstmode, srcmode, 0);
3642 if (ic == CODE_FOR_nothing)
3643 continue;
3644
3645 /* Skip if the narrowed value isn't exact. */
3646 if (! exact_real_truncate (srcmode, r))
3647 continue;
3648
3649 trunc_y = const_double_from_real_value (*r, srcmode);
3650
3651 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3652 {
3653 /* Skip if the target needs extra instructions to perform
3654 the extension. */
3655 if (!insn_operand_matches (ic, 1, trunc_y))
3656 continue;
3657 /* This is valid, but may not be cheaper than the original. */
3658 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3659 dstmode, speed);
3660 if (oldcost < newcost)
3661 continue;
3662 }
3663 else if (float_extend_from_mem[dstmode][srcmode])
3664 {
3665 trunc_y = force_const_mem (srcmode, trunc_y);
3666 /* This is valid, but may not be cheaper than the original. */
3667 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3668 dstmode, speed);
3669 if (oldcost < newcost)
3670 continue;
3671 trunc_y = validize_mem (trunc_y);
3672 }
3673 else
3674 continue;
3675
3676 /* For CSE's benefit, force the compressed constant pool entry
3677 into a new pseudo. This constant may be used in different modes,
3678 and if not, combine will put things back together for us. */
3679 trunc_y = force_reg (srcmode, trunc_y);
3680
3681 /* If x is a hard register, perform the extension into a pseudo,
3682 so that e.g. stack realignment code is aware of it. */
3683 rtx target = x;
3684 if (REG_P (x) && HARD_REGISTER_P (x))
3685 target = gen_reg_rtx (dstmode);
3686
3687 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3688 last_insn = get_last_insn ();
3689
3690 if (REG_P (target))
3691 set_unique_reg_note (last_insn, REG_EQUAL, y);
3692
3693 if (target != x)
3694 return emit_move_insn (x, target);
3695 return last_insn;
3696 }
3697
3698 return NULL;
3699 }
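
/* Illustrative case for the loop above, assuming a target where SFmode
   constants are legitimate and the SF->DF extension is a single cheap
   insn: the DFmode constant 1.5 is exactly representable in SFmode, so
   instead of loading the wide constant from memory we can emit roughly

       (set (reg:SF 101) (const_double:SF 1.5e+0))
       (set (reg:DF 100) (float_extend:DF (reg:SF 101)))

   provided the rtx cost of the extension does not exceed that of the
   original constant load.  */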
3700 \f
3701 /* Pushing data onto the stack. */
3702
3703 /* Push a block of length SIZE (perhaps variable)
3704 and return an rtx to address the beginning of the block.
3705 The value may be virtual_outgoing_args_rtx.
3706
3707 EXTRA is the number of bytes of padding to push in addition to SIZE.
3708 BELOW nonzero means this padding comes at low addresses;
3709 otherwise, the padding comes at high addresses. */
3710
3711 rtx
3712 push_block (rtx size, int extra, int below)
3713 {
3714 rtx temp;
3715
3716 size = convert_modes (Pmode, ptr_mode, size, 1);
3717 if (CONSTANT_P (size))
3718 anti_adjust_stack (plus_constant (Pmode, size, extra));
3719 else if (REG_P (size) && extra == 0)
3720 anti_adjust_stack (size);
3721 else
3722 {
3723 temp = copy_to_mode_reg (Pmode, size);
3724 if (extra != 0)
3725 temp = expand_binop (Pmode, add_optab, temp,
3726 gen_int_mode (extra, Pmode),
3727 temp, 0, OPTAB_LIB_WIDEN);
3728 anti_adjust_stack (temp);
3729 }
3730
3731 if (STACK_GROWS_DOWNWARD)
3732 {
3733 temp = virtual_outgoing_args_rtx;
3734 if (extra != 0 && below)
3735 temp = plus_constant (Pmode, temp, extra);
3736 }
3737 else
3738 {
3739 if (CONST_INT_P (size))
3740 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3741 -INTVAL (size) - (below ? 0 : extra));
3742 else if (extra != 0 && !below)
3743 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3744 negate_rtx (Pmode, plus_constant (Pmode, size,
3745 extra)));
3746 else
3747 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3748 negate_rtx (Pmode, size));
3749 }
3750
3751 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3752 }
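
/* Example (a sketch; the exact rtl is target-dependent): on a
   downward-growing stack, push_block (GEN_INT (32), 0, 0) emits an
   anti_adjust_stack of 32 bytes and returns an address based on
   virtual_outgoing_args_rtx, which the caller can then use to store the
   32-byte block it is pushing.  */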
3753
3754 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3755
3756 static rtx
3757 mem_autoinc_base (rtx mem)
3758 {
3759 if (MEM_P (mem))
3760 {
3761 rtx addr = XEXP (mem, 0);
3762 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3763 return XEXP (addr, 0);
3764 }
3765 return NULL;
3766 }
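
/* E.g. for a push destination such as (mem:SI (pre_dec (reg sp))) this
   returns the stack pointer register, while for a plain (mem:SI (reg sp))
   it returns NULL because the address contains no auto-increment.  */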
3767
3768 /* A utility routine used here, in reload, and in try_split. The insns
3769 after PREV up to and including LAST are known to adjust the stack,
3770 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3771 placing notes as appropriate. PREV may be NULL, indicating the
3772 entire insn sequence prior to LAST should be scanned.
3773
3774 The set of allowed stack pointer modifications is small:
3775 (1) One or more auto-inc style memory references (aka pushes),
3776 (2) One or more addition/subtraction with the SP as destination,
3777 (3) A single move insn with the SP as destination,
3778 (4) A call_pop insn,
3779 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3780
3781 Insns in the sequence that do not modify the SP are ignored,
3782 except for noreturn calls.
3783
3784 The return value is the amount of adjustment that can be trivially
3785 verified, via immediate operand or auto-inc. If the adjustment
3786 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
3787
3788 HOST_WIDE_INT
3789 find_args_size_adjust (rtx_insn *insn)
3790 {
3791 rtx dest, set, pat;
3792 int i;
3793
3794 pat = PATTERN (insn);
3795 set = NULL;
3796
3797 /* Look for a call_pop pattern. */
3798 if (CALL_P (insn))
3799 {
3800 /* We have to allow non-call_pop patterns for the case
3801 of emit_single_push_insn of a TLS address. */
3802 if (GET_CODE (pat) != PARALLEL)
3803 return 0;
3804
3805 /* All call_pop patterns have a stack pointer adjust in the parallel.
3806 The call itself is always first, and the stack adjust is
3807 usually last, so search from the end. */
3808 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3809 {
3810 set = XVECEXP (pat, 0, i);
3811 if (GET_CODE (set) != SET)
3812 continue;
3813 dest = SET_DEST (set);
3814 if (dest == stack_pointer_rtx)
3815 break;
3816 }
3817 /* We'd better have found the stack pointer adjust. */
3818 if (i == 0)
3819 return 0;
3820 /* Fall through to process the extracted SET and DEST
3821 as if it were a standalone insn. */
3822 }
3823 else if (GET_CODE (pat) == SET)
3824 set = pat;
3825 else if ((set = single_set (insn)) != NULL)
3826 ;
3827 else if (GET_CODE (pat) == PARALLEL)
3828 {
3829 /* ??? Some older ports use a parallel with a stack adjust
3830 and a store for a PUSH_ROUNDING pattern, rather than a
3831 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3832 /* ??? See h8300 and m68k, pushqi1. */
3833 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3834 {
3835 set = XVECEXP (pat, 0, i);
3836 if (GET_CODE (set) != SET)
3837 continue;
3838 dest = SET_DEST (set);
3839 if (dest == stack_pointer_rtx)
3840 break;
3841
3842 /* We do not expect an auto-inc of the sp in the parallel. */
3843 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3844 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3845 != stack_pointer_rtx);
3846 }
3847 if (i < 0)
3848 return 0;
3849 }
3850 else
3851 return 0;
3852
3853 dest = SET_DEST (set);
3854
3855 /* Look for direct modifications of the stack pointer. */
3856 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3857 {
3858 /* Look for a trivial adjustment, otherwise assume nothing. */
3859 /* Note that the SPU restore_stack_block pattern refers to
3860 the stack pointer in V4SImode. Consider that non-trivial. */
3861 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3862 && GET_CODE (SET_SRC (set)) == PLUS
3863 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3864 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3865 return INTVAL (XEXP (SET_SRC (set), 1));
3866 /* ??? Reload can generate no-op moves, which will be cleaned
3867 up later. Recognize it and continue searching. */
3868 else if (rtx_equal_p (dest, SET_SRC (set)))
3869 return 0;
3870 else
3871 return HOST_WIDE_INT_MIN;
3872 }
3873 else
3874 {
3875 rtx mem, addr;
3876
3877 /* Otherwise only think about autoinc patterns. */
3878 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3879 {
3880 mem = dest;
3881 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3882 != stack_pointer_rtx);
3883 }
3884 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3885 mem = SET_SRC (set);
3886 else
3887 return 0;
3888
3889 addr = XEXP (mem, 0);
3890 switch (GET_CODE (addr))
3891 {
3892 case PRE_INC:
3893 case POST_INC:
3894 return GET_MODE_SIZE (GET_MODE (mem));
3895 case PRE_DEC:
3896 case POST_DEC:
3897 return -GET_MODE_SIZE (GET_MODE (mem));
3898 case PRE_MODIFY:
3899 case POST_MODIFY:
3900 addr = XEXP (addr, 1);
3901 gcc_assert (GET_CODE (addr) == PLUS);
3902 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3903 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3904 return INTVAL (XEXP (addr, 1));
3905 default:
3906 gcc_unreachable ();
3907 }
3908 }
3909 }
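
/* Worked examples of the cases above, assuming SImode is 4 bytes wide:

     (set (reg sp) (plus (reg sp) (const_int -16)))  -> returns -16
     (set (mem:SI (pre_dec (reg sp))) (reg:SI 100))  -> returns -4
     (set (reg sp) (reg 100))                        -> HOST_WIDE_INT_MIN

   The last form is a stack pointer update whose size cannot be read off
   the pattern, so the caller must treat the adjustment as unknown.  */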
3910
3911 int
3912 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3913 {
3914 int args_size = end_args_size;
3915 bool saw_unknown = false;
3916 rtx_insn *insn;
3917
3918 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3919 {
3920 HOST_WIDE_INT this_delta;
3921
3922 if (!NONDEBUG_INSN_P (insn))
3923 continue;
3924
3925 this_delta = find_args_size_adjust (insn);
3926 if (this_delta == 0)
3927 {
3928 if (!CALL_P (insn)
3929 || ACCUMULATE_OUTGOING_ARGS
3930 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3931 continue;
3932 }
3933
3934 gcc_assert (!saw_unknown);
3935 if (this_delta == HOST_WIDE_INT_MIN)
3936 saw_unknown = true;
3937
3938 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3939 if (STACK_GROWS_DOWNWARD)
3940 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3941
3942 args_size -= this_delta;
3943 }
3944
3945 return saw_unknown ? INT_MIN : args_size;
3946 }
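
/* For instance, if the insns between PREV and LAST push two 4-byte
   arguments on a downward-growing stack and END_ARGS_SIZE is 8, the loop
   above walks backwards attaching REG_ARGS_SIZE notes of 8 and then 4 and
   returns 0, the args size in effect just before the sequence.  (A
   sketch; the note values depend on STACK_GROWS_DOWNWARD and on which
   insns actually modify the stack pointer.)  */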
3947
3948 #ifdef PUSH_ROUNDING
3949 /* Emit single push insn. */
3950
3951 static void
3952 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
3953 {
3954 rtx dest_addr;
3955 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3956 rtx dest;
3957 enum insn_code icode;
3958
3959 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3960 /* If there is a push pattern, use it. Otherwise fall back to the old way of
3961 handing a MEM representing the push operation to the move expander. */
3962 icode = optab_handler (push_optab, mode);
3963 if (icode != CODE_FOR_nothing)
3964 {
3965 struct expand_operand ops[1];
3966
3967 create_input_operand (&ops[0], x, mode);
3968 if (maybe_expand_insn (icode, 1, ops))
3969 return;
3970 }
3971 if (GET_MODE_SIZE (mode) == rounded_size)
3972 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3973 /* If we are to pad downward, adjust the stack pointer first and
3974 then store X into the stack location using an offset. This is
3975 because emit_move_insn does not know how to pad; it does not have
3976 access to type. */
3977 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3978 {
3979 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3980 HOST_WIDE_INT offset;
3981
3982 emit_move_insn (stack_pointer_rtx,
3983 expand_binop (Pmode,
3984 STACK_GROWS_DOWNWARD ? sub_optab
3985 : add_optab,
3986 stack_pointer_rtx,
3987 gen_int_mode (rounded_size, Pmode),
3988 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3989
3990 offset = (HOST_WIDE_INT) padding_size;
3991 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
3992 /* We have already decremented the stack pointer, so get the
3993 previous value. */
3994 offset += (HOST_WIDE_INT) rounded_size;
3995
3996 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
3997 /* We have already incremented the stack pointer, so get the
3998 previous value. */
3999 offset -= (HOST_WIDE_INT) rounded_size;
4000
4001 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4002 gen_int_mode (offset, Pmode));
4003 }
4004 else
4005 {
4006 if (STACK_GROWS_DOWNWARD)
4007 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4008 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4009 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4010 Pmode));
4011 else
4012 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4013 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4014 gen_int_mode (rounded_size, Pmode));
4015
4016 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4017 }
4018
4019 dest = gen_rtx_MEM (mode, dest_addr);
4020
4021 if (type != 0)
4022 {
4023 set_mem_attributes (dest, type, 1);
4024
4025 if (cfun->tail_call_marked)
4026 /* Function incoming arguments may overlap with sibling call
4027 outgoing arguments and we cannot allow reordering of reads
4028 from function arguments with stores to outgoing arguments
4029 of sibling calls. */
4030 set_mem_alias_set (dest, 0);
4031 }
4032 emit_move_insn (dest, x);
4033 }
4034
4035 /* Emit and annotate a single push insn. */
4036
4037 static void
4038 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4039 {
4040 int delta, old_delta = stack_pointer_delta;
4041 rtx_insn *prev = get_last_insn ();
4042 rtx_insn *last;
4043
4044 emit_single_push_insn_1 (mode, x, type);
4045
4046 last = get_last_insn ();
4047
4048 /* Notice the common case where we emitted exactly one insn. */
4049 if (PREV_INSN (last) == prev)
4050 {
4051 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4052 return;
4053 }
4054
4055 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4056 gcc_assert (delta == INT_MIN || delta == old_delta);
4057 }
4058 #endif
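
/* Illustration of the addresses built by emit_single_push_insn_1 for a
   4-byte push on a downward-growing stack with PRE_DEC pushes (a sketch;
   the exact rtl is target-dependent):

     mode size equals rounded size:
         (mem:SI (pre_dec (reg sp)))
     argument padded downward:
         sp := sp - rounded_size, then
         (mem:SI (plus (reg sp) (const_int padding_size)))

   In either case emit_single_push_insn then attaches a REG_ARGS_SIZE note
   so later passes can track the outgoing-argument size.  */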
4059
4060 /* If reading SIZE bytes from X will end up reading from
4061 Y, return the number of bytes that overlap. Return -1
4062 if there is no overlap, or -2 if we cannot determine whether they overlap
4063 (for example, when X and Y have different base registers). */
4064
4065 static int
4066 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4067 {
4068 rtx tmp = plus_constant (Pmode, x, size);
4069 rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4070
4071 if (!CONST_INT_P (sub))
4072 return -2;
4073
4074 HOST_WIDE_INT val = INTVAL (sub);
4075
4076 return IN_RANGE (val, 1, size) ? val : -1;
4077 }
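
/* Numeric example: with X = sp, Y = sp + 8 and SIZE = 16, the bytes
   [sp, sp + 16) read from X include Y's start, and the function returns
   (sp + 16) - (sp + 8) = 8, the number of overlapping bytes.  If Y were
   sp + 32 the difference would be -16, outside [1, 16], so -1 (no
   overlap) is returned; if X and Y used different base registers the
   difference would not simplify to a constant and -2 (unknown) is
   returned.  */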
4078
4079 /* Generate code to push X onto the stack, assuming it has mode MODE and
4080 type TYPE.
4081 MODE is redundant except when X is a CONST_INT (since they don't
4082 carry mode info).
4083 SIZE is an rtx for the size of data to be copied (in bytes),
4084 needed only if X is BLKmode.
4085 Return true if successful. May return false if asked to push a
4086 partial argument during a sibcall optimization (as specified by
4087 SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4088 to not overlap.
4089
4090 ALIGN (in bits) is maximum alignment we can assume.
4091
4092 If PARTIAL and REG are both nonzero, then copy that many of the first
4093 bytes of X into registers starting with REG, and push the rest of X.
4094 The amount of space pushed is decreased by PARTIAL bytes.
4095 REG must be a hard register in this case.
4096 If REG is zero but PARTIAL is not, take all other actions for an
4097 argument partially in registers, but do not actually load any
4098 registers.
4099
4100 EXTRA is the amount in bytes of extra space to leave next to this arg.
4101 This is ignored if an argument block has already been allocated.
4102
4103 On a machine that lacks real push insns, ARGS_ADDR is the address of
4104 the bottom of the argument block for this call. We use indexing off there
4105 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4106 argument block has not been preallocated.
4107
4108 ARGS_SO_FAR is the size of args previously pushed for this call.
4109
4110 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4111 for arguments passed in registers. If nonzero, it will be the number
4112 of bytes required. */
4113
4114 bool
4115 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4116 unsigned int align, int partial, rtx reg, int extra,
4117 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4118 rtx alignment_pad, bool sibcall_p)
4119 {
4120 rtx xinner;
4121 enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
4122
4123 /* Decide where to pad the argument: `downward' for below,
4124 `upward' for above, or `none' for don't pad it.
4125 Default is below for small data on big-endian machines; else above. */
4126 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4127
4128 /* Invert direction if stack is post-decrement.
4129 FIXME: why? */
4130 if (STACK_PUSH_CODE == POST_DEC)
4131 if (where_pad != none)
4132 where_pad = (where_pad == downward ? upward : downward);
4133
4134 xinner = x;
4135
4136 int nregs = partial / UNITS_PER_WORD;
4137 rtx *tmp_regs = NULL;
4138 int overlapping = 0;
4139
4140 if (mode == BLKmode
4141 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4142 {
4143 /* Copy a block into the stack, entirely or partially. */
4144
4145 rtx temp;
4146 int used;
4147 int offset;
4148 int skip;
4149
4150 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4151 used = partial - offset;
4152
4153 if (mode != BLKmode)
4154 {
4155 /* A value is to be stored in an insufficiently aligned
4156 stack slot; copy via a suitably aligned slot if
4157 necessary. */
4158 size = GEN_INT (GET_MODE_SIZE (mode));
4159 if (!MEM_P (xinner))
4160 {
4161 temp = assign_temp (type, 1, 1);
4162 emit_move_insn (temp, xinner);
4163 xinner = temp;
4164 }
4165 }
4166
4167 gcc_assert (size);
4168
4169 /* USED is now the # of bytes we need not copy to the stack
4170 because registers will take care of them. */
4171
4172 if (partial != 0)
4173 xinner = adjust_address (xinner, BLKmode, used);
4174
4175 /* If the partial register-part of the arg counts in its stack size,
4176 skip the part of stack space corresponding to the registers.
4177 Otherwise, start copying to the beginning of the stack space,
4178 by setting SKIP to 0. */
4179 skip = (reg_parm_stack_space == 0) ? 0 : used;
4180
4181 #ifdef PUSH_ROUNDING
4182 /* Do it with several push insns if that doesn't take lots of insns
4183 and if there is no difficulty with push insns that skip bytes
4184 on the stack for alignment purposes. */
4185 if (args_addr == 0
4186 && PUSH_ARGS
4187 && CONST_INT_P (size)
4188 && skip == 0
4189 && MEM_ALIGN (xinner) >= align
4190 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4191 /* Here we avoid the case of a structure whose weak alignment
4192 forces many pushes of a small amount of data,
4193 and such small pushes do rounding that causes trouble. */
4194 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4195 || align >= BIGGEST_ALIGNMENT
4196 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4197 == (align / BITS_PER_UNIT)))
4198 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4199 {
4200 /* Push padding now if padding above and stack grows down,
4201 or if padding below and stack grows up.
4202 But if space already allocated, this has already been done. */
4203 if (extra && args_addr == 0
4204 && where_pad != none && where_pad != stack_direction)
4205 anti_adjust_stack (GEN_INT (extra));
4206
4207 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4208 }
4209 else
4210 #endif /* PUSH_ROUNDING */
4211 {
4212 rtx target;
4213
4214 /* Otherwise make space on the stack and copy the data
4215 to the address of that space. */
4216
4217 /* Deduct words put into registers from the size we must copy. */
4218 if (partial != 0)
4219 {
4220 if (CONST_INT_P (size))
4221 size = GEN_INT (INTVAL (size) - used);
4222 else
4223 size = expand_binop (GET_MODE (size), sub_optab, size,
4224 gen_int_mode (used, GET_MODE (size)),
4225 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4226 }
4227
4228 /* Get the address of the stack space.
4229 In this case, we do not deal with EXTRA separately.
4230 A single stack adjust will do. */
4231 if (! args_addr)
4232 {
4233 temp = push_block (size, extra, where_pad == downward);
4234 extra = 0;
4235 }
4236 else if (CONST_INT_P (args_so_far))
4237 temp = memory_address (BLKmode,
4238 plus_constant (Pmode, args_addr,
4239 skip + INTVAL (args_so_far)));
4240 else
4241 temp = memory_address (BLKmode,
4242 plus_constant (Pmode,
4243 gen_rtx_PLUS (Pmode,
4244 args_addr,
4245 args_so_far),
4246 skip));
4247
4248 if (!ACCUMULATE_OUTGOING_ARGS)
4249 {
4250 /* If the source is referenced relative to the stack pointer,
4251 copy it to another register to stabilize it. We do not need
4252 to do this if we know that we won't be changing sp. */
4253
4254 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4255 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4256 temp = copy_to_reg (temp);
4257 }
4258
4259 target = gen_rtx_MEM (BLKmode, temp);
4260
4261 /* We do *not* set_mem_attributes here, because incoming arguments
4262 may overlap with sibling call outgoing arguments and we cannot
4263 allow reordering of reads from function arguments with stores
4264 to outgoing arguments of sibling calls. We do, however, want
4265 to record the alignment of the stack slot. */
4266 /* ALIGN may well be better aligned than TYPE, e.g. due to
4267 PARM_BOUNDARY. Assume the caller isn't lying. */
4268 set_mem_align (target, align);
4269
4270 /* If part should go in registers and pushing to that part would
4271 overwrite some of the values that need to go into regs, load the
4272 overlapping values into temporary pseudos to be moved into the hard
4273 regs at the end after the stack pushing has completed.
4274 We cannot load them directly into the hard regs here because
4275 they can be clobbered by the block move expansions.
4276 See PR 65358. */
4277
4278 if (partial > 0 && reg != 0 && mode == BLKmode
4279 && GET_CODE (reg) != PARALLEL)
4280 {
4281 overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4282 if (overlapping > 0)
4283 {
4284 gcc_assert (overlapping % UNITS_PER_WORD == 0);
4285 overlapping /= UNITS_PER_WORD;
4286
4287 tmp_regs = XALLOCAVEC (rtx, overlapping);
4288
4289 for (int i = 0; i < overlapping; i++)
4290 tmp_regs[i] = gen_reg_rtx (word_mode);
4291
4292 for (int i = 0; i < overlapping; i++)
4293 emit_move_insn (tmp_regs[i],
4294 operand_subword_force (target, i, mode));
4295 }
4296 else if (overlapping == -1)
4297 overlapping = 0;
4298 /* Could not determine whether there is overlap.
4299 Fail the sibcall. */
4300 else
4301 {
4302 overlapping = 0;
4303 if (sibcall_p)
4304 return false;
4305 }
4306 }
4307 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4308 }
4309 }
4310 else if (partial > 0)
4311 {
4312 /* Scalar partly in registers. */
4313
4314 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4315 int i;
4316 int not_stack;
4317 /* # bytes of start of argument
4318 that we must make space for but need not store. */
4319 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4320 int args_offset = INTVAL (args_so_far);
4321 int skip;
4322
4323 /* Push padding now if padding above and stack grows down,
4324 or if padding below and stack grows up.
4325 But if space already allocated, this has already been done. */
4326 if (extra && args_addr == 0
4327 && where_pad != none && where_pad != stack_direction)
4328 anti_adjust_stack (GEN_INT (extra));
4329
4330 /* If we make space by pushing it, we might as well push
4331 the real data. Otherwise, we can leave OFFSET nonzero
4332 and leave the space uninitialized. */
4333 if (args_addr == 0)
4334 offset = 0;
4335
4336 /* Now NOT_STACK gets the number of words that we don't need to
4337 allocate on the stack. Convert OFFSET to words too. */
4338 not_stack = (partial - offset) / UNITS_PER_WORD;
4339 offset /= UNITS_PER_WORD;
4340
4341 /* If the partial register-part of the arg counts in its stack size,
4342 skip the part of stack space corresponding to the registers.
4343 Otherwise, start copying to the beginning of the stack space,
4344 by setting SKIP to 0. */
4345 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4346
4347 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4348 x = validize_mem (force_const_mem (mode, x));
4349
4350 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4351 SUBREGs of such registers are not allowed. */
4352 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4353 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4354 x = copy_to_reg (x);
4355
4356 /* Loop over all the words allocated on the stack for this arg. */
4357 /* We can do it by words, because any scalar bigger than a word
4358 has a size that is a multiple of a word. */
4359 for (i = size - 1; i >= not_stack; i--)
4360 if (i >= not_stack + offset)
4361 if (!emit_push_insn (operand_subword_force (x, i, mode),
4362 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4363 0, args_addr,
4364 GEN_INT (args_offset + ((i - not_stack + skip)
4365 * UNITS_PER_WORD)),
4366 reg_parm_stack_space, alignment_pad, sibcall_p))
4367 return false;
4368 }
4369 else
4370 {
4371 rtx addr;
4372 rtx dest;
4373
4374 /* Push padding now if padding above and stack grows down,
4375 or if padding below and stack grows up.
4376 But if space already allocated, this has already been done. */
4377 if (extra && args_addr == 0
4378 && where_pad != none && where_pad != stack_direction)
4379 anti_adjust_stack (GEN_INT (extra));
4380
4381 #ifdef PUSH_ROUNDING
4382 if (args_addr == 0 && PUSH_ARGS)
4383 emit_single_push_insn (mode, x, type);
4384 else
4385 #endif
4386 {
4387 if (CONST_INT_P (args_so_far))
4388 addr
4389 = memory_address (mode,
4390 plus_constant (Pmode, args_addr,
4391 INTVAL (args_so_far)));
4392 else
4393 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4394 args_so_far));
4395 dest = gen_rtx_MEM (mode, addr);
4396
4397 /* We do *not* set_mem_attributes here, because incoming arguments
4398 may overlap with sibling call outgoing arguments and we cannot
4399 allow reordering of reads from function arguments with stores
4400 to outgoing arguments of sibling calls. We do, however, want
4401 to record the alignment of the stack slot. */
4402 /* ALIGN may well be better aligned than TYPE, e.g. due to
4403 PARM_BOUNDARY. Assume the caller isn't lying. */
4404 set_mem_align (dest, align);
4405
4406 emit_move_insn (dest, x);
4407 }
4408 }
4409
4410 /* Move the partial arguments into the registers and any overlapping
4411 values that we moved into the pseudos in tmp_regs. */
4412 if (partial > 0 && reg != 0)
4413 {
4414 /* Handle calls that pass values in multiple non-contiguous locations.
4415 The Irix 6 ABI has examples of this. */
4416 if (GET_CODE (reg) == PARALLEL)
4417 emit_group_load (reg, x, type, -1);
4418 else
4419 {
4420 gcc_assert (partial % UNITS_PER_WORD == 0);
4421 move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4422
4423 for (int i = 0; i < overlapping; i++)
4424 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4425 + nregs - overlapping + i),
4426 tmp_regs[i]);
4427
4428 }
4429 }
4430
4431 if (extra && args_addr == 0 && where_pad == stack_direction)
4432 anti_adjust_stack (GEN_INT (extra));
4433
4434 if (alignment_pad && args_addr == 0)
4435 anti_adjust_stack (alignment_pad);
4436
4437 return true;
4438 }
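
/* A sketch of the PARTIAL handling above, with made-up numbers: for a
   16-byte BLKmode argument with PARTIAL == 8 on a 4-byte-word target, the
   first two words go in registers starting at REG and only the last 8
   bytes are pushed.  If the block copy doing the pushing could read from
   the stack area those two register words come from, they are first
   loaded into temporary pseudos and only moved into the hard registers
   after the pushing is complete (see PR 65358 above).  */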
4439 \f
4440 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4441 operations. */
4442
4443 static rtx
4444 get_subtarget (rtx x)
4445 {
4446 return (optimize
4447 || x == 0
4448 /* Only registers can be subtargets. */
4449 || !REG_P (x)
4450 /* Don't use hard regs to avoid extending their life. */
4451 || REGNO (x) < FIRST_PSEUDO_REGISTER
4452 ? 0 : x);
4453 }
4454
4455 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4456 FIELD is a bitfield. Returns true if the optimization was successful,
4457 and there's nothing else to do. */
4458
4459 static bool
4460 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4461 unsigned HOST_WIDE_INT bitpos,
4462 unsigned HOST_WIDE_INT bitregion_start,
4463 unsigned HOST_WIDE_INT bitregion_end,
4464 machine_mode mode1, rtx str_rtx,
4465 tree to, tree src, bool reverse)
4466 {
4467 machine_mode str_mode = GET_MODE (str_rtx);
4468 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4469 tree op0, op1;
4470 rtx value, result;
4471 optab binop;
4472 gimple *srcstmt;
4473 enum tree_code code;
4474
4475 if (mode1 != VOIDmode
4476 || bitsize >= BITS_PER_WORD
4477 || str_bitsize > BITS_PER_WORD
4478 || TREE_SIDE_EFFECTS (to)
4479 || TREE_THIS_VOLATILE (to))
4480 return false;
4481
4482 STRIP_NOPS (src);
4483 if (TREE_CODE (src) != SSA_NAME)
4484 return false;
4485 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4486 return false;
4487
4488 srcstmt = get_gimple_for_ssa_name (src);
4489 if (!srcstmt
4490 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4491 return false;
4492
4493 code = gimple_assign_rhs_code (srcstmt);
4494
4495 op0 = gimple_assign_rhs1 (srcstmt);
4496
4497 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4498 to find its initialization. Hopefully the initialization will
4499 be from a bitfield load. */
4500 if (TREE_CODE (op0) == SSA_NAME)
4501 {
4502 gimple *op0stmt = get_gimple_for_ssa_name (op0);
4503
4504 /* We want to eventually have OP0 be the same as TO, which
4505 should be a bitfield. */
4506 if (!op0stmt
4507 || !is_gimple_assign (op0stmt)
4508 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4509 return false;
4510 op0 = gimple_assign_rhs1 (op0stmt);
4511 }
4512
4513 op1 = gimple_assign_rhs2 (srcstmt);
4514
4515 if (!operand_equal_p (to, op0, 0))
4516 return false;
4517
4518 if (MEM_P (str_rtx))
4519 {
4520 unsigned HOST_WIDE_INT offset1;
4521
4522 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4523 str_mode = word_mode;
4524 str_mode = get_best_mode (bitsize, bitpos,
4525 bitregion_start, bitregion_end,
4526 MEM_ALIGN (str_rtx), str_mode, 0);
4527 if (str_mode == VOIDmode)
4528 return false;
4529 str_bitsize = GET_MODE_BITSIZE (str_mode);
4530
4531 offset1 = bitpos;
4532 bitpos %= str_bitsize;
4533 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4534 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4535 }
4536 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4537 return false;
4538 else
4539 gcc_assert (!reverse);
4540
4541 /* If the bit field covers the whole REG/MEM, store_field
4542 will likely generate better code. */
4543 if (bitsize >= str_bitsize)
4544 return false;
4545
4546 /* We can't handle fields split across multiple entities. */
4547 if (bitpos + bitsize > str_bitsize)
4548 return false;
4549
4550 if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4551 bitpos = str_bitsize - bitpos - bitsize;
4552
4553 switch (code)
4554 {
4555 case PLUS_EXPR:
4556 case MINUS_EXPR:
4557 /* For now, just optimize the case of the topmost bitfield
4558 where we don't need to do any masking and also
4559 1 bit bitfields where xor can be used.
4560 We might win by one instruction for the other bitfields
4561 too if insv/extv instructions aren't used, so that
4562 can be added later. */
4563 if ((reverse || bitpos + bitsize != str_bitsize)
4564 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4565 break;
4566
4567 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4568 value = convert_modes (str_mode,
4569 TYPE_MODE (TREE_TYPE (op1)), value,
4570 TYPE_UNSIGNED (TREE_TYPE (op1)));
4571
4572 /* We may be accessing data outside the field, which means
4573 we can alias adjacent data. */
4574 if (MEM_P (str_rtx))
4575 {
4576 str_rtx = shallow_copy_rtx (str_rtx);
4577 set_mem_alias_set (str_rtx, 0);
4578 set_mem_expr (str_rtx, 0);
4579 }
4580
4581 if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
4582 {
4583 value = expand_and (str_mode, value, const1_rtx, NULL);
4584 binop = xor_optab;
4585 }
4586 else
4587 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4588
4589 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4590 if (reverse)
4591 value = flip_storage_order (str_mode, value);
4592 result = expand_binop (str_mode, binop, str_rtx,
4593 value, str_rtx, 1, OPTAB_WIDEN);
4594 if (result != str_rtx)
4595 emit_move_insn (str_rtx, result);
4596 return true;
4597
4598 case BIT_IOR_EXPR:
4599 case BIT_XOR_EXPR:
4600 if (TREE_CODE (op1) != INTEGER_CST)
4601 break;
4602 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4603 value = convert_modes (str_mode,
4604 TYPE_MODE (TREE_TYPE (op1)), value,
4605 TYPE_UNSIGNED (TREE_TYPE (op1)));
4606
4607 /* We may be accessing data outside the field, which means
4608 we can alias adjacent data. */
4609 if (MEM_P (str_rtx))
4610 {
4611 str_rtx = shallow_copy_rtx (str_rtx);
4612 set_mem_alias_set (str_rtx, 0);
4613 set_mem_expr (str_rtx, 0);
4614 }
4615
4616 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4617 if (bitpos + bitsize != str_bitsize)
4618 {
4619 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4620 str_mode);
4621 value = expand_and (str_mode, value, mask, NULL_RTX);
4622 }
4623 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4624 if (reverse)
4625 value = flip_storage_order (str_mode, value);
4626 result = expand_binop (str_mode, binop, str_rtx,
4627 value, str_rtx, 1, OPTAB_WIDEN);
4628 if (result != str_rtx)
4629 emit_move_insn (str_rtx, result);
4630 return true;
4631
4632 default:
4633 break;
4634 }
4635
4636 return false;
4637 }
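
/* Example of the transformation above (a sketch, assuming a 32-bit
   underlying word and a little-endian layout that places B in the topmost
   bits): for

       struct S { unsigned a : 28; unsigned b : 4; } s;
       s.b += 1;

   no masking is needed because B is the topmost bitfield, so the whole
   read-modify-write collapses to adding (1 << 28) to the word containing
   the bitfield instead of an extract/add/insert sequence.  */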
4638
4639 /* In the C++ memory model, consecutive bit fields in a structure are
4640 considered one memory location.
4641
4642 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4643 returns the bit range of consecutive bits in which this COMPONENT_REF
4644 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4645 and *OFFSET may be adjusted in the process.
4646
4647 If the access does not need to be restricted, 0 is returned in both
4648 *BITSTART and *BITEND. */
4649
4650 static void
4651 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4652 unsigned HOST_WIDE_INT *bitend,
4653 tree exp,
4654 HOST_WIDE_INT *bitpos,
4655 tree *offset)
4656 {
4657 HOST_WIDE_INT bitoffset;
4658 tree field, repr;
4659
4660 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4661
4662 field = TREE_OPERAND (exp, 1);
4663 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4664 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4665 need to limit the range we can access. */
4666 if (!repr)
4667 {
4668 *bitstart = *bitend = 0;
4669 return;
4670 }
4671
4672 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4673 part of a larger bit field, then the representative does not serve any
4674 useful purpose. This can occur in Ada. */
4675 if (handled_component_p (TREE_OPERAND (exp, 0)))
4676 {
4677 machine_mode rmode;
4678 HOST_WIDE_INT rbitsize, rbitpos;
4679 tree roffset;
4680 int unsignedp, reversep, volatilep = 0;
4681 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4682 &roffset, &rmode, &unsignedp, &reversep,
4683 &volatilep, false);
4684 if ((rbitpos % BITS_PER_UNIT) != 0)
4685 {
4686 *bitstart = *bitend = 0;
4687 return;
4688 }
4689 }
4690
4691 /* Compute the adjustment to bitpos from the offset of the field
4692 relative to the representative. DECL_FIELD_OFFSET of field and
4693 repr are the same by construction if they are not constants,
4694 see finish_bitfield_layout. */
4695 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4696 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4697 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4698 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4699 else
4700 bitoffset = 0;
4701 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4702 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4703
4704 /* If the adjustment is larger than bitpos, we would have a negative bit
4705 position for the lower bound and this may wreak havoc later. Adjust
4706 offset and bitpos to make the lower bound non-negative in that case. */
4707 if (bitoffset > *bitpos)
4708 {
4709 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4710 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4711
4712 *bitpos += adjust;
4713 if (*offset == NULL_TREE)
4714 *offset = size_int (-adjust / BITS_PER_UNIT);
4715 else
4716 *offset
4717 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4718 *bitstart = 0;
4719 }
4720 else
4721 *bitstart = *bitpos - bitoffset;
4722
4723 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4724 }
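
/* Example (with a made-up struct): for

       struct S { char a; int b : 7; int c : 9; char d; };

   the bitfields B and C share one DECL_BIT_FIELD_REPRESENTATIVE, so a
   store to s.c gets a *BITSTART/*BITEND range covering both B and C but
   stopping before A and D, which are separate memory locations that the
   store must not touch under the C++ memory model.  */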
4725
4726 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4727 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4728 DECL_RTL was not set yet, return NORTL. */
4729
4730 static inline bool
4731 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4732 {
4733 if (TREE_CODE (addr) != ADDR_EXPR)
4734 return false;
4735
4736 tree base = TREE_OPERAND (addr, 0);
4737
4738 if (!DECL_P (base)
4739 || TREE_ADDRESSABLE (base)
4740 || DECL_MODE (base) == BLKmode)
4741 return false;
4742
4743 if (!DECL_RTL_SET_P (base))
4744 return nortl;
4745
4746 return (!MEM_P (DECL_RTL (base)));
4747 }
4748
4749 /* Returns true if the MEM_REF REF refers to an object that does not
4750 reside in memory and has non-BLKmode. */
4751
4752 static inline bool
4753 mem_ref_refers_to_non_mem_p (tree ref)
4754 {
4755 tree base = TREE_OPERAND (ref, 0);
4756 return addr_expr_of_non_mem_decl_p_1 (base, false);
4757 }
4758
4759 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4760 is true, try generating a nontemporal store. */
4761
4762 void
4763 expand_assignment (tree to, tree from, bool nontemporal)
4764 {
4765 rtx to_rtx = 0;
4766 rtx result;
4767 machine_mode mode;
4768 unsigned int align;
4769 enum insn_code icode;
4770
4771 /* Don't crash if the lhs of the assignment was erroneous. */
4772 if (TREE_CODE (to) == ERROR_MARK)
4773 {
4774 expand_normal (from);
4775 return;
4776 }
4777
4778 /* Optimize away no-op moves without side-effects. */
4779 if (operand_equal_p (to, from, 0))
4780 return;
4781
4782 /* Handle misaligned stores. */
4783 mode = TYPE_MODE (TREE_TYPE (to));
4784 if ((TREE_CODE (to) == MEM_REF
4785 || TREE_CODE (to) == TARGET_MEM_REF)
4786 && mode != BLKmode
4787 && !mem_ref_refers_to_non_mem_p (to)
4788 && ((align = get_object_alignment (to))
4789 < GET_MODE_ALIGNMENT (mode))
4790 && (((icode = optab_handler (movmisalign_optab, mode))
4791 != CODE_FOR_nothing)
4792 || SLOW_UNALIGNED_ACCESS (mode, align)))
4793 {
4794 rtx reg, mem;
4795
4796 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4797 reg = force_not_mem (reg);
4798 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4799 if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
4800 reg = flip_storage_order (mode, reg);
4801
4802 if (icode != CODE_FOR_nothing)
4803 {
4804 struct expand_operand ops[2];
4805
4806 create_fixed_operand (&ops[0], mem);
4807 create_input_operand (&ops[1], reg, mode);
4808 /* The movmisalign<mode> pattern cannot fail, else the assignment
4809 would silently be omitted. */
4810 expand_insn (icode, 2, ops);
4811 }
4812 else
4813 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
4814 false);
4815 return;
4816 }
4817
4818 /* Assignment of a structure component needs special treatment
4819 if the structure component's rtx is not simply a MEM.
4820 Assignment of an array element at a constant index, and assignment of
4821 an array element in an unaligned packed structure field, has the same
4822 problem. Same for (partially) storing into a non-memory object. */
4823 if (handled_component_p (to)
4824 || (TREE_CODE (to) == MEM_REF
4825 && (REF_REVERSE_STORAGE_ORDER (to)
4826 || mem_ref_refers_to_non_mem_p (to)))
4827 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4828 {
4829 machine_mode mode1;
4830 HOST_WIDE_INT bitsize, bitpos;
4831 unsigned HOST_WIDE_INT bitregion_start = 0;
4832 unsigned HOST_WIDE_INT bitregion_end = 0;
4833 tree offset;
4834 int unsignedp, reversep, volatilep = 0;
4835 tree tem;
4836
4837 push_temp_slots ();
4838 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4839 &unsignedp, &reversep, &volatilep, true);
4840
4841 /* Make sure bitpos is not negative, it can wreak havoc later. */
4842 if (bitpos < 0)
4843 {
4844 gcc_assert (offset == NULL_TREE);
4845 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4846 ? 3 : exact_log2 (BITS_PER_UNIT)));
4847 bitpos &= BITS_PER_UNIT - 1;
4848 }
4849
4850 if (TREE_CODE (to) == COMPONENT_REF
4851 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4852 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4853 /* The C++ memory model naturally applies to byte-aligned fields.
4854 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4855 BITSIZE are not byte-aligned, there is no need to limit the range
4856 we can access. This can occur with packed structures in Ada. */
4857 else if (bitsize > 0
4858 && bitsize % BITS_PER_UNIT == 0
4859 && bitpos % BITS_PER_UNIT == 0)
4860 {
4861 bitregion_start = bitpos;
4862 bitregion_end = bitpos + bitsize - 1;
4863 }
4864
4865 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4866
4867 /* If the field has a mode, we want to access it in the
4868 field's mode, not the computed mode.
4869 If a MEM has VOIDmode (external with incomplete type),
4870 use BLKmode for it instead. */
4871 if (MEM_P (to_rtx))
4872 {
4873 if (mode1 != VOIDmode)
4874 to_rtx = adjust_address (to_rtx, mode1, 0);
4875 else if (GET_MODE (to_rtx) == VOIDmode)
4876 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4877 }
4878
4879 if (offset != 0)
4880 {
4881 machine_mode address_mode;
4882 rtx offset_rtx;
4883
4884 if (!MEM_P (to_rtx))
4885 {
4886 /* We can get constant negative offsets into arrays with broken
4887 user code. Translate this to a trap instead of ICEing. */
4888 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4889 expand_builtin_trap ();
4890 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4891 }
4892
4893 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4894 address_mode = get_address_mode (to_rtx);
4895 if (GET_MODE (offset_rtx) != address_mode)
4896 {
4897 /* We cannot be sure that the RTL in offset_rtx is valid outside
4898 of a memory address context, so force it into a register
4899 before attempting to convert it to the desired mode. */
4900 offset_rtx = force_operand (offset_rtx, NULL_RTX);
4901 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4902 }
4903
4904 /* If we have an expression in OFFSET_RTX and a non-zero
4905 byte offset in BITPOS, adding the byte offset before the
4906 OFFSET_RTX results in better intermediate code, which makes
4907 later rtl optimization passes perform better.
4908
4909 We prefer intermediate code like this:
4910
4911 r124:DI=r123:DI+0x18
4912 [r124:DI]=r121:DI
4913
4914 ... instead of ...
4915
4916 r124:DI=r123:DI+0x10
4917 [r124:DI+0x8]=r121:DI
4918
4919 This is only done for aligned data values, as these can
4920 be expected to result in single move instructions. */
4921 if (mode1 != VOIDmode
4922 && bitpos != 0
4923 && bitsize > 0
4924 && (bitpos % bitsize) == 0
4925 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4926 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4927 {
4928 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4929 bitregion_start = 0;
4930 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4931 bitregion_end -= bitpos;
4932 bitpos = 0;
4933 }
4934
4935 to_rtx = offset_address (to_rtx, offset_rtx,
4936 highest_pow2_factor_for_target (to,
4937 offset));
4938 }
4939
4940 /* No action is needed if the target is not a memory and the field
4941 lies completely outside that target. This can occur if the source
4942 code contains an out-of-bounds access to a small array. */
4943 if (!MEM_P (to_rtx)
4944 && GET_MODE (to_rtx) != BLKmode
4945 && (unsigned HOST_WIDE_INT) bitpos
4946 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4947 {
4948 expand_normal (from);
4949 result = NULL;
4950 }
4951 /* Handle expand_expr of a complex value returning a CONCAT. */
4952 else if (GET_CODE (to_rtx) == CONCAT)
4953 {
4954 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4955 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4956 && bitpos == 0
4957 && bitsize == mode_bitsize)
4958 result = store_expr (from, to_rtx, false, nontemporal, reversep);
4959 else if (bitsize == mode_bitsize / 2
4960 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4961 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4962 nontemporal, reversep);
4963 else if (bitpos + bitsize <= mode_bitsize / 2)
4964 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4965 bitregion_start, bitregion_end,
4966 mode1, from, get_alias_set (to),
4967 nontemporal, reversep);
4968 else if (bitpos >= mode_bitsize / 2)
4969 result = store_field (XEXP (to_rtx, 1), bitsize,
4970 bitpos - mode_bitsize / 2,
4971 bitregion_start, bitregion_end,
4972 mode1, from, get_alias_set (to),
4973 nontemporal, reversep);
4974 else if (bitpos == 0 && bitsize == mode_bitsize)
4975 {
4976 rtx from_rtx;
4977 result = expand_normal (from);
4978 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4979 TYPE_MODE (TREE_TYPE (from)), 0);
4980 emit_move_insn (XEXP (to_rtx, 0),
4981 read_complex_part (from_rtx, false));
4982 emit_move_insn (XEXP (to_rtx, 1),
4983 read_complex_part (from_rtx, true));
4984 }
4985 else
4986 {
4987 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4988 GET_MODE_SIZE (GET_MODE (to_rtx)));
4989 write_complex_part (temp, XEXP (to_rtx, 0), false);
4990 write_complex_part (temp, XEXP (to_rtx, 1), true);
4991 result = store_field (temp, bitsize, bitpos,
4992 bitregion_start, bitregion_end,
4993 mode1, from, get_alias_set (to),
4994 nontemporal, reversep);
4995 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4996 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4997 }
4998 }
4999 else
5000 {
5001 if (MEM_P (to_rtx))
5002 {
5003 /* If the field is at offset zero, we could have been given the
5004 DECL_RTX of the parent struct. Don't munge it. */
5005 to_rtx = shallow_copy_rtx (to_rtx);
5006 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5007 if (volatilep)
5008 MEM_VOLATILE_P (to_rtx) = 1;
5009 }
5010
5011 if (optimize_bitfield_assignment_op (bitsize, bitpos,
5012 bitregion_start, bitregion_end,
5013 mode1, to_rtx, to, from,
5014 reversep))
5015 result = NULL;
5016 else
5017 result = store_field (to_rtx, bitsize, bitpos,
5018 bitregion_start, bitregion_end,
5019 mode1, from, get_alias_set (to),
5020 nontemporal, reversep);
5021 }
5022
5023 if (result)
5024 preserve_temp_slots (result);
5025 pop_temp_slots ();
5026 return;
5027 }
5028
5029 /* If the rhs is a function call and its value is not an aggregate,
5030 call the function before we start to compute the lhs.
5031 This is needed for correct code for cases such as
5032 val = setjmp (buf) on machines where reference to val
5033 requires loading up part of an address in a separate insn.
5034
5035 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5036 since it might be a promoted variable where the zero- or sign- extension
5037 needs to be done. Handling this in the normal way is safe because no
5038 computation is done before the call. The same is true for SSA names. */
5039 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5040 && COMPLETE_TYPE_P (TREE_TYPE (from))
5041 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5042 && ! (((TREE_CODE (to) == VAR_DECL
5043 || TREE_CODE (to) == PARM_DECL
5044 || TREE_CODE (to) == RESULT_DECL)
5045 && REG_P (DECL_RTL (to)))
5046 || TREE_CODE (to) == SSA_NAME))
5047 {
5048 rtx value;
5049 rtx bounds;
5050
5051 push_temp_slots ();
5052 value = expand_normal (from);
5053
5054 /* Split value and bounds to store them separately. */
5055 chkp_split_slot (value, &value, &bounds);
5056
5057 if (to_rtx == 0)
5058 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5059
5060 /* Handle calls that return values in multiple non-contiguous locations.
5061 The Irix 6 ABI has examples of this. */
5062 if (GET_CODE (to_rtx) == PARALLEL)
5063 {
5064 if (GET_CODE (value) == PARALLEL)
5065 emit_group_move (to_rtx, value);
5066 else
5067 emit_group_load (to_rtx, value, TREE_TYPE (from),
5068 int_size_in_bytes (TREE_TYPE (from)));
5069 }
5070 else if (GET_CODE (value) == PARALLEL)
5071 emit_group_store (to_rtx, value, TREE_TYPE (from),
5072 int_size_in_bytes (TREE_TYPE (from)));
5073 else if (GET_MODE (to_rtx) == BLKmode)
5074 {
5075 /* Handle calls that return BLKmode values in registers. */
5076 if (REG_P (value))
5077 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5078 else
5079 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5080 }
5081 else
5082 {
5083 if (POINTER_TYPE_P (TREE_TYPE (to)))
5084 value = convert_memory_address_addr_space
5085 (GET_MODE (to_rtx), value,
5086 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5087
5088 emit_move_insn (to_rtx, value);
5089 }
5090
5091 /* Store bounds if required. */
5092 if (bounds
5093 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5094 {
5095 gcc_assert (MEM_P (to_rtx));
5096 chkp_emit_bounds_store (bounds, value, to_rtx);
5097 }
5098
5099 preserve_temp_slots (to_rtx);
5100 pop_temp_slots ();
5101 return;
5102 }
5103
5104 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5105 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5106
5107 /* Don't move directly into a return register. */
5108 if (TREE_CODE (to) == RESULT_DECL
5109 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5110 {
5111 rtx temp;
5112
5113 push_temp_slots ();
5114
5115 /* If the source is itself a return value, it still is in a pseudo at
5116 this point so we can move it back to the return register directly. */
5117 if (REG_P (to_rtx)
5118 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5119 && TREE_CODE (from) != CALL_EXPR)
5120 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5121 else
5122 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5123
5124 /* Handle calls that return values in multiple non-contiguous locations.
5125 The Irix 6 ABI has examples of this. */
5126 if (GET_CODE (to_rtx) == PARALLEL)
5127 {
5128 if (GET_CODE (temp) == PARALLEL)
5129 emit_group_move (to_rtx, temp);
5130 else
5131 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5132 int_size_in_bytes (TREE_TYPE (from)));
5133 }
5134 else if (temp)
5135 emit_move_insn (to_rtx, temp);
5136
5137 preserve_temp_slots (to_rtx);
5138 pop_temp_slots ();
5139 return;
5140 }
5141
5142 /* In case we are returning the contents of an object which overlaps
5143 the place the value is being stored, use a safe function when copying
5144 a value through a pointer into a structure value return block. */
5145 if (TREE_CODE (to) == RESULT_DECL
5146 && TREE_CODE (from) == INDIRECT_REF
5147 && ADDR_SPACE_GENERIC_P
5148 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5149 && refs_may_alias_p (to, from)
5150 && cfun->returns_struct
5151 && !cfun->returns_pcc_struct)
5152 {
5153 rtx from_rtx, size;
5154
5155 push_temp_slots ();
5156 size = expr_size (from);
5157 from_rtx = expand_normal (from);
5158
5159 emit_library_call (memmove_libfunc, LCT_NORMAL,
5160 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5161 XEXP (from_rtx, 0), Pmode,
5162 convert_to_mode (TYPE_MODE (sizetype),
5163 size, TYPE_UNSIGNED (sizetype)),
5164 TYPE_MODE (sizetype));
5165
5166 preserve_temp_slots (to_rtx);
5167 pop_temp_slots ();
5168 return;
5169 }
5170
5171 /* Compute FROM and store the value in the rtx we got. */
5172
5173 push_temp_slots ();
5174 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to);
5175 preserve_temp_slots (result);
5176 pop_temp_slots ();
5177 return;
5178 }
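
/* To illustrate the misaligned-store path near the top of
   expand_assignment (a sketch with a made-up typedef):

       typedef int unaligned_int __attribute__ ((aligned (1)));
       void f (unaligned_int *p, int x) { *p = x; }

   Here the LHS is a MEM_REF whose known alignment is below
   GET_MODE_ALIGNMENT (SImode), so on a strict-alignment target the store
   is emitted through the movmisalign<mode> pattern when the target
   provides one, or through store_bit_field otherwise, rather than as an
   ordinary aligned move.  */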
5179
5180 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5181 succeeded, false otherwise. */
5182
5183 bool
5184 emit_storent_insn (rtx to, rtx from)
5185 {
5186 struct expand_operand ops[2];
5187 machine_mode mode = GET_MODE (to);
5188 enum insn_code code = optab_handler (storent_optab, mode);
5189
5190 if (code == CODE_FOR_nothing)
5191 return false;
5192
5193 create_fixed_operand (&ops[0], to);
5194 create_input_operand (&ops[1], from, mode);
5195 return maybe_expand_insn (code, 2, ops);
5196 }
5197
5198 /* Generate code for computing expression EXP,
5199 and storing the value into TARGET.
5200
5201 If the mode is BLKmode then we may return TARGET itself.
5202 It turns out that in BLKmode it doesn't cause a problem,
5203 because C has no operators that could combine two different
5204 assignments into the same BLKmode object with different values
5205 with no sequence point. Will other languages need this to
5206 be more thorough?
5207
5208 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5209 stack, and block moves may need to be treated specially.
5210
5211 If NONTEMPORAL is true, try using a nontemporal store instruction.
5212
5213 If REVERSE is true, the store is to be done in reverse order.
5214
5215 If BTARGET is not NULL then computed bounds of EXP are
5216 associated with BTARGET. */
5217
5218 rtx
5219 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5220 bool nontemporal, bool reverse, tree btarget)
5221 {
5222 rtx temp;
5223 rtx alt_rtl = NULL_RTX;
5224 location_t loc = curr_insn_location ();
5225
5226 if (VOID_TYPE_P (TREE_TYPE (exp)))
5227 {
5228 /* C++ can generate ?: expressions with a throw expression in one
5229 branch and an rvalue in the other. Here, we resolve attempts to
5230 store the throw expression's nonexistent result. */
5231 gcc_assert (!call_param_p);
5232 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5233 return NULL_RTX;
5234 }
5235 if (TREE_CODE (exp) == COMPOUND_EXPR)
5236 {
5237 /* Perform first part of compound expression, then assign from second
5238 part. */
5239 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5240 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5241 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5242 call_param_p, nontemporal, reverse,
5243 btarget);
5244 }
5245 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5246 {
5247 /* For conditional expression, get safe form of the target. Then
5248 test the condition, doing the appropriate assignment on either
5249 side. This avoids the creation of unnecessary temporaries.
5250 For non-BLKmode, it is more efficient not to do this. */
5251
5252 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5253
5254 do_pending_stack_adjust ();
5255 NO_DEFER_POP;
5256 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5257 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5258 nontemporal, reverse, btarget);
5259 emit_jump_insn (targetm.gen_jump (lab2));
5260 emit_barrier ();
5261 emit_label (lab1);
5262 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5263 nontemporal, reverse, btarget);
5264 emit_label (lab2);
5265 OK_DEFER_POP;
5266
5267 return NULL_RTX;
5268 }
5269 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5270 /* If this is a scalar in a register that is stored in a wider mode
5271 than the declared mode, compute the result into its declared mode
5272 and then convert to the wider mode. Our value is the computed
5273 expression. */
5274 {
5275 rtx inner_target = 0;
5276
5277 /* We can do the conversion inside EXP, which will often result
5278 in some optimizations. Do the conversion in two steps: first
5279 change the signedness, if needed, then the extend. But don't
5280 do this if the type of EXP is a subtype of something else
5281 since then the conversion might involve more than just
5282 converting modes. */
5283 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5284 && TREE_TYPE (TREE_TYPE (exp)) == 0
5285 && GET_MODE_PRECISION (GET_MODE (target))
5286 == TYPE_PRECISION (TREE_TYPE (exp)))
5287 {
5288 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5289 TYPE_UNSIGNED (TREE_TYPE (exp))))
5290 {
5291 /* Some types, e.g. Fortran's logical*4, won't have a signed
5292 version, so use the mode instead. */
5293 tree ntype
5294 = (signed_or_unsigned_type_for
5295 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5296 if (ntype == NULL)
5297 ntype = lang_hooks.types.type_for_mode
5298 (TYPE_MODE (TREE_TYPE (exp)),
5299 SUBREG_PROMOTED_SIGN (target));
5300
5301 exp = fold_convert_loc (loc, ntype, exp);
5302 }
5303
5304 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5305 (GET_MODE (SUBREG_REG (target)),
5306 SUBREG_PROMOTED_SIGN (target)),
5307 exp);
5308
5309 inner_target = SUBREG_REG (target);
5310 }
5311
5312 temp = expand_expr (exp, inner_target, VOIDmode,
5313 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5314
5315 /* Handle bounds returned by call. */
5316 if (TREE_CODE (exp) == CALL_EXPR)
5317 {
5318 rtx bounds;
5319 chkp_split_slot (temp, &temp, &bounds);
5320 if (bounds && btarget)
5321 {
5322 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5323 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5324 chkp_set_rtl_bounds (btarget, tmp);
5325 }
5326 }
5327
5328 /* If TEMP is a VOIDmode constant, use convert_modes to make
5329 sure that we properly convert it. */
5330 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5331 {
5332 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5333 temp, SUBREG_PROMOTED_SIGN (target));
5334 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5335 GET_MODE (target), temp,
5336 SUBREG_PROMOTED_SIGN (target));
5337 }
5338
5339 convert_move (SUBREG_REG (target), temp,
5340 SUBREG_PROMOTED_SIGN (target));
5341
5342 return NULL_RTX;
5343 }
5344 else if ((TREE_CODE (exp) == STRING_CST
5345 || (TREE_CODE (exp) == MEM_REF
5346 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5347 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5348 == STRING_CST
5349 && integer_zerop (TREE_OPERAND (exp, 1))))
5350 && !nontemporal && !call_param_p
5351 && MEM_P (target))
5352 {
5353 /* Optimize initialization of an array with a STRING_CST. */
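/* Illustration (not from the original sources): for
     char buf[64] = "abc";
   the bytes covered by the string constant are emitted with
   store_by_pieces below and the remaining tail of BUF is cleared with
   clear_storage, provided the target can store by pieces.  */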
5354 HOST_WIDE_INT exp_len, str_copy_len;
5355 rtx dest_mem;
5356 tree str = TREE_CODE (exp) == STRING_CST
5357 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5358
5359 exp_len = int_expr_size (exp);
5360 if (exp_len <= 0)
5361 goto normal_expr;
5362
5363 if (TREE_STRING_LENGTH (str) <= 0)
5364 goto normal_expr;
5365
5366 str_copy_len = strlen (TREE_STRING_POINTER (str));
5367 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5368 goto normal_expr;
5369
5370 str_copy_len = TREE_STRING_LENGTH (str);
5371 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5372 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5373 {
5374 str_copy_len += STORE_MAX_PIECES - 1;
5375 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5376 }
5377 str_copy_len = MIN (str_copy_len, exp_len);
5378 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5379 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5380 MEM_ALIGN (target), false))
5381 goto normal_expr;
5382
5383 dest_mem = target;
5384
5385 dest_mem = store_by_pieces (dest_mem,
5386 str_copy_len, builtin_strncpy_read_str,
5387 CONST_CAST (char *,
5388 TREE_STRING_POINTER (str)),
5389 MEM_ALIGN (target), false,
5390 exp_len > str_copy_len ? 1 : 0);
5391 if (exp_len > str_copy_len)
5392 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5393 GEN_INT (exp_len - str_copy_len),
5394 BLOCK_OP_NORMAL);
5395 return NULL_RTX;
5396 }
5397 else
5398 {
5399 rtx tmp_target;
5400
5401 normal_expr:
5402 /* If we want to use a nontemporal or a reverse order store, force the
5403 value into a register first. */
5404 tmp_target = nontemporal || reverse ? NULL_RTX : target;
5405 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5406 (call_param_p
5407 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5408 &alt_rtl, false);
5409
5410 /* Handle bounds returned by call. */
5411 if (TREE_CODE (exp) == CALL_EXPR)
5412 {
5413 rtx bounds;
5414 chkp_split_slot (temp, &temp, &bounds);
5415 if (bounds && btarget)
5416 {
5417 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5418 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5419 chkp_set_rtl_bounds (btarget, tmp);
5420 }
5421 }
5422 }
5423
5424 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5425 the same as that of TARGET, adjust the constant. This is needed, for
5426 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5427 only a word-sized value. */
5428 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5429 && TREE_CODE (exp) != ERROR_MARK
5430 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5431 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5432 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5433
5434 /* If value was not generated in the target, store it there.
5435 Convert the value to TARGET's type first if necessary and emit the
5436 pending incrementations that have been queued when expanding EXP.
5437 Note that we cannot emit the whole queue blindly because this will
5438 effectively disable the POST_INC optimization later.
5439
5440 If TEMP and TARGET compare equal according to rtx_equal_p, but
5441 one or both of them are volatile memory refs, we have to distinguish
5442 two cases:
5443 - expand_expr has used TARGET. In this case, we must not generate
5444 another copy. This can be detected by TEMP being equal to TARGET
5445 according to ==.
5446 - expand_expr has not used TARGET - that means that the source just
5447 happens to have the same RTX form. Since TEMP will have been created
5448 by expand_expr, it will compare unequal to TARGET according to ==.
5449 We must generate a copy in this case, to reach the correct number
5450 of volatile memory references. */
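/* Illustration (not from the original sources): given a declaration
     volatile int v;
   the self-assignment v = v must still perform both the load and the
   store, so when TEMP merely has the same RTX form as TARGET but was
   created separately by expand_expr, a copy is emitted below.  */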
5451
5452 if ((! rtx_equal_p (temp, target)
5453 || (temp != target && (side_effects_p (temp)
5454 || side_effects_p (target))))
5455 && TREE_CODE (exp) != ERROR_MARK
5456 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5457 but TARGET is not a valid memory reference, TEMP will differ
5458 from TARGET although it is really the same location. */
5459 && !(alt_rtl
5460 && rtx_equal_p (alt_rtl, target)
5461 && !side_effects_p (alt_rtl)
5462 && !side_effects_p (target))
5463 /* If there's nothing to copy, don't bother. Don't call
5464 expr_size unless necessary, because some front ends' (e.g. C++)
5465 expr_size hook must not be given objects that are not
5466 supposed to be bit-copied or bit-initialized. */
5467 && expr_size (exp) != const0_rtx)
5468 {
5469 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5470 {
5471 if (GET_MODE (target) == BLKmode)
5472 {
5473 /* Handle calls that return BLKmode values in registers. */
5474 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5475 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5476 else
5477 store_bit_field (target,
5478 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5479 0, 0, 0, GET_MODE (temp), temp, reverse);
5480 }
5481 else
5482 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5483 }
5484
5485 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5486 {
5487 /* Handle copying a string constant into an array. The string
5488 constant may be shorter than the array. So copy just the string's
5489 actual length, and clear the rest. First get the size of the data
5490 type of the string, which is actually the size of the target. */
5491 rtx size = expr_size (exp);
5492
5493 if (CONST_INT_P (size)
5494 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5495 emit_block_move (target, temp, size,
5496 (call_param_p
5497 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5498 else
5499 {
5500 machine_mode pointer_mode
5501 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5502 machine_mode address_mode = get_address_mode (target);
5503
5504 /* Compute the size of the data to copy from the string. */
5505 tree copy_size
5506 = size_binop_loc (loc, MIN_EXPR,
5507 make_tree (sizetype, size),
5508 size_int (TREE_STRING_LENGTH (exp)));
5509 rtx copy_size_rtx
5510 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5511 (call_param_p
5512 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5513 rtx_code_label *label = 0;
5514
5515 /* Copy that much. */
5516 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5517 TYPE_UNSIGNED (sizetype));
5518 emit_block_move (target, temp, copy_size_rtx,
5519 (call_param_p
5520 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5521
5522 /* Figure out how much is left in TARGET that we have to clear.
5523 Do all calculations in pointer_mode. */
5524 if (CONST_INT_P (copy_size_rtx))
5525 {
5526 size = plus_constant (address_mode, size,
5527 -INTVAL (copy_size_rtx));
5528 target = adjust_address (target, BLKmode,
5529 INTVAL (copy_size_rtx));
5530 }
5531 else
5532 {
5533 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5534 copy_size_rtx, NULL_RTX, 0,
5535 OPTAB_LIB_WIDEN);
5536
5537 if (GET_MODE (copy_size_rtx) != address_mode)
5538 copy_size_rtx = convert_to_mode (address_mode,
5539 copy_size_rtx,
5540 TYPE_UNSIGNED (sizetype));
5541
5542 target = offset_address (target, copy_size_rtx,
5543 highest_pow2_factor (copy_size));
5544 label = gen_label_rtx ();
5545 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5546 GET_MODE (size), 0, label);
5547 }
5548
5549 if (size != const0_rtx)
5550 clear_storage (target, size, BLOCK_OP_NORMAL);
5551
5552 if (label)
5553 emit_label (label);
5554 }
5555 }
5556 /* Handle calls that return values in multiple non-contiguous locations.
5557 The Irix 6 ABI has examples of this. */
5558 else if (GET_CODE (target) == PARALLEL)
5559 {
5560 if (GET_CODE (temp) == PARALLEL)
5561 emit_group_move (target, temp);
5562 else
5563 emit_group_load (target, temp, TREE_TYPE (exp),
5564 int_size_in_bytes (TREE_TYPE (exp)));
5565 }
5566 else if (GET_CODE (temp) == PARALLEL)
5567 emit_group_store (target, temp, TREE_TYPE (exp),
5568 int_size_in_bytes (TREE_TYPE (exp)));
5569 else if (GET_MODE (temp) == BLKmode)
5570 emit_block_move (target, temp, expr_size (exp),
5571 (call_param_p
5572 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5573 /* If we emit a nontemporal store, there is nothing else to do. */
5574 else if (nontemporal && emit_storent_insn (target, temp))
5575 ;
5576 else
5577 {
5578 if (reverse)
5579 temp = flip_storage_order (GET_MODE (target), temp);
5580 temp = force_operand (temp, target);
5581 if (temp != target)
5582 emit_move_insn (target, temp);
5583 }
5584 }
5585
5586 return NULL_RTX;
5587 }
5588
5589 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5590 rtx
5591 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal,
5592 bool reverse)
5593 {
5594 return store_expr_with_bounds (exp, target, call_param_p, nontemporal,
5595 reverse, NULL);
5596 }
5597 \f
5598 /* Return true if field F of structure TYPE is a flexible array. */
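/* For example (illustration only), in
     struct S { int n; char data[]; };
   DATA is a flexible array member: it is the last field, has array type
   with a zero lower bound and no upper bound, and S itself has a known
   constant size.  */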
5599
5600 static bool
5601 flexible_array_member_p (const_tree f, const_tree type)
5602 {
5603 const_tree tf;
5604
5605 tf = TREE_TYPE (f);
5606 return (DECL_CHAIN (f) == NULL
5607 && TREE_CODE (tf) == ARRAY_TYPE
5608 && TYPE_DOMAIN (tf)
5609 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5610 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5611 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5612 && int_size_in_bytes (type) >= 0);
5613 }
5614
5615 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5616 must have in order for it to completely initialize a value of type TYPE.
5617 Return -1 if the number isn't known.
5618
5619 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
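/* Worked example (illustration only): for
     struct P { int x; double d[3]; };
   FOR_CTOR_P yields 2, the two top-level fields a constructor must
   provide, while !FOR_CTOR_P yields 4, i.e. one scalar for X plus the
   three elements of D.  */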
5620
5621 static HOST_WIDE_INT
5622 count_type_elements (const_tree type, bool for_ctor_p)
5623 {
5624 switch (TREE_CODE (type))
5625 {
5626 case ARRAY_TYPE:
5627 {
5628 tree nelts;
5629
5630 nelts = array_type_nelts (type);
5631 if (nelts && tree_fits_uhwi_p (nelts))
5632 {
5633 unsigned HOST_WIDE_INT n;
5634
5635 n = tree_to_uhwi (nelts) + 1;
5636 if (n == 0 || for_ctor_p)
5637 return n;
5638 else
5639 return n * count_type_elements (TREE_TYPE (type), false);
5640 }
5641 return for_ctor_p ? -1 : 1;
5642 }
5643
5644 case RECORD_TYPE:
5645 {
5646 unsigned HOST_WIDE_INT n;
5647 tree f;
5648
5649 n = 0;
5650 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5651 if (TREE_CODE (f) == FIELD_DECL)
5652 {
5653 if (!for_ctor_p)
5654 n += count_type_elements (TREE_TYPE (f), false);
5655 else if (!flexible_array_member_p (f, type))
5656 /* Don't count flexible arrays, which are not supposed
5657 to be initialized. */
5658 n += 1;
5659 }
5660
5661 return n;
5662 }
5663
5664 case UNION_TYPE:
5665 case QUAL_UNION_TYPE:
5666 {
5667 tree f;
5668 HOST_WIDE_INT n, m;
5669
5670 gcc_assert (!for_ctor_p);
5671 /* Estimate the number of scalars in each field and pick the
5672 maximum. Other estimates would do instead; the idea is simply
5673 to make sure that the estimate is not sensitive to the ordering
5674 of the fields. */
5675 n = 1;
5676 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5677 if (TREE_CODE (f) == FIELD_DECL)
5678 {
5679 m = count_type_elements (TREE_TYPE (f), false);
5680 /* If the field doesn't span the whole union, add an extra
5681 scalar for the rest. */
5682 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5683 TYPE_SIZE (type)) != 1)
5684 m++;
5685 if (n < m)
5686 n = m;
5687 }
5688 return n;
5689 }
5690
5691 case COMPLEX_TYPE:
5692 return 2;
5693
5694 case VECTOR_TYPE:
5695 return TYPE_VECTOR_SUBPARTS (type);
5696
5697 case INTEGER_TYPE:
5698 case REAL_TYPE:
5699 case FIXED_POINT_TYPE:
5700 case ENUMERAL_TYPE:
5701 case BOOLEAN_TYPE:
5702 case POINTER_TYPE:
5703 case OFFSET_TYPE:
5704 case REFERENCE_TYPE:
5705 case NULLPTR_TYPE:
5706 return 1;
5707
5708 case ERROR_MARK:
5709 return 0;
5710
5711 case VOID_TYPE:
5712 case METHOD_TYPE:
5713 case FUNCTION_TYPE:
5714 case LANG_TYPE:
5715 default:
5716 gcc_unreachable ();
5717 }
5718 }
5719
5720 /* Helper for categorize_ctor_elements. Identical interface. */
5721
5722 static bool
5723 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5724 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5725 {
5726 unsigned HOST_WIDE_INT idx;
5727 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5728 tree value, purpose, elt_type;
5729
5730 /* Whether CTOR is a valid constant initializer, in accordance with what
5731 initializer_constant_valid_p does. If inferred from the constructor
5732 elements, true until proven otherwise. */
5733 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5734 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5735
5736 nz_elts = 0;
5737 init_elts = 0;
5738 num_fields = 0;
5739 elt_type = NULL_TREE;
5740
5741 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5742 {
5743 HOST_WIDE_INT mult = 1;
5744
5745 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5746 {
5747 tree lo_index = TREE_OPERAND (purpose, 0);
5748 tree hi_index = TREE_OPERAND (purpose, 1);
5749
5750 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5751 mult = (tree_to_uhwi (hi_index)
5752 - tree_to_uhwi (lo_index) + 1);
5753 }
5754 num_fields += mult;
5755 elt_type = TREE_TYPE (value);
5756
5757 switch (TREE_CODE (value))
5758 {
5759 case CONSTRUCTOR:
5760 {
5761 HOST_WIDE_INT nz = 0, ic = 0;
5762
5763 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5764 p_complete);
5765
5766 nz_elts += mult * nz;
5767 init_elts += mult * ic;
5768
5769 if (const_from_elts_p && const_p)
5770 const_p = const_elt_p;
5771 }
5772 break;
5773
5774 case INTEGER_CST:
5775 case REAL_CST:
5776 case FIXED_CST:
5777 if (!initializer_zerop (value))
5778 nz_elts += mult;
5779 init_elts += mult;
5780 break;
5781
5782 case STRING_CST:
5783 nz_elts += mult * TREE_STRING_LENGTH (value);
5784 init_elts += mult * TREE_STRING_LENGTH (value);
5785 break;
5786
5787 case COMPLEX_CST:
5788 if (!initializer_zerop (TREE_REALPART (value)))
5789 nz_elts += mult;
5790 if (!initializer_zerop (TREE_IMAGPART (value)))
5791 nz_elts += mult;
5792 init_elts += mult;
5793 break;
5794
5795 case VECTOR_CST:
5796 {
5797 unsigned i;
5798 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5799 {
5800 tree v = VECTOR_CST_ELT (value, i);
5801 if (!initializer_zerop (v))
5802 nz_elts += mult;
5803 init_elts += mult;
5804 }
5805 }
5806 break;
5807
5808 default:
5809 {
5810 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5811 nz_elts += mult * tc;
5812 init_elts += mult * tc;
5813
5814 if (const_from_elts_p && const_p)
5815 const_p
5816 = initializer_constant_valid_p (value,
5817 elt_type,
5818 TYPE_REVERSE_STORAGE_ORDER
5819 (TREE_TYPE (ctor)))
5820 != NULL_TREE;
5821 }
5822 break;
5823 }
5824 }
5825
5826 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5827 num_fields, elt_type))
5828 *p_complete = false;
5829
5830 *p_nz_elts += nz_elts;
5831 *p_init_elts += init_elts;
5832
5833 return const_p;
5834 }
5835
5836 /* Examine CTOR to discover:
5837 * how many scalar fields are set to nonzero values,
5838 and place it in *P_NZ_ELTS;
5839 * how many scalar fields in total are in CTOR,
5840 and place it in *P_INIT_ELTS;
5841 * whether the constructor is complete -- in the sense that every
5842 meaningful byte is explicitly given a value --
5843 and place it in *P_COMPLETE.
5844
5845 Return whether or not CTOR is a valid static constant initializer, the same
5846 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5847
5848 bool
5849 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5850 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5851 {
5852 *p_nz_elts = 0;
5853 *p_init_elts = 0;
5854 *p_complete = true;
5855
5856 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5857 }
5858
5859 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5860 of which had type LAST_TYPE. Each element was itself a complete
5861 initializer, in the sense that every meaningful byte was explicitly
5862 given a value. Return true if the same is true for the constructor
5863 as a whole. */
5864
5865 bool
5866 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5867 const_tree last_type)
5868 {
5869 if (TREE_CODE (type) == UNION_TYPE
5870 || TREE_CODE (type) == QUAL_UNION_TYPE)
5871 {
5872 if (num_elts == 0)
5873 return false;
5874
5875 gcc_assert (num_elts == 1 && last_type);
5876
5877 /* ??? We could look at each element of the union, and find the
5878 largest element. Which would avoid comparing the size of the
5879 initialized element against any tail padding in the union.
5880 Doesn't seem worth the effort... */
5881 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5882 }
5883
5884 return count_type_elements (type, true) == num_elts;
5885 }
5886
5887 /* Return 1 if EXP contains mostly (3/4) zeros. */
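/* Illustration (not from the original sources): the initializer of
     int a[16] = { 1 };
   is a CONSTRUCTOR with a single explicit element, hence incomplete, and
   is therefore treated as mostly zeros; store_constructor will clear the
   whole array before storing the lone nonzero element.  */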
5888
5889 static int
5890 mostly_zeros_p (const_tree exp)
5891 {
5892 if (TREE_CODE (exp) == CONSTRUCTOR)
5893 {
5894 HOST_WIDE_INT nz_elts, init_elts;
5895 bool complete_p;
5896
5897 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5898 return !complete_p || nz_elts < init_elts / 4;
5899 }
5900
5901 return initializer_zerop (exp);
5902 }
5903
5904 /* Return 1 if EXP contains all zeros. */
5905
5906 static int
5907 all_zeros_p (const_tree exp)
5908 {
5909 if (TREE_CODE (exp) == CONSTRUCTOR)
5910 {
5911 HOST_WIDE_INT nz_elts, init_elts;
5912 bool complete_p;
5913
5914 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5915 return nz_elts == 0;
5916 }
5917
5918 return initializer_zerop (exp);
5919 }
5920 \f
5921 /* Helper function for store_constructor.
5922 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5923 CLEARED is as for store_constructor.
5924 ALIAS_SET is the alias set to use for any stores.
5925 If REVERSE is true, the store is to be done in reverse order.
5926
5927 This provides a recursive shortcut back to store_constructor when it isn't
5928 necessary to go through store_field. This is so that we can pass through
5929 the cleared field to let store_constructor know that we may not have to
5930 clear a substructure if the outer structure has already been cleared. */
5931
5932 static void
5933 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5934 HOST_WIDE_INT bitpos, machine_mode mode,
5935 tree exp, int cleared,
5936 alias_set_type alias_set, bool reverse)
5937 {
5938 if (TREE_CODE (exp) == CONSTRUCTOR
5939 /* We can only call store_constructor recursively if the size and
5940 bit position are on a byte boundary. */
5941 && bitpos % BITS_PER_UNIT == 0
5942 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5943 /* If we have a nonzero bitpos for a register target, then we just
5944 let store_field do the bitfield handling. This is unlikely to
5945 generate unnecessary clear instructions anyway. */
5946 && (bitpos == 0 || MEM_P (target)))
5947 {
5948 if (MEM_P (target))
5949 target
5950 = adjust_address (target,
5951 GET_MODE (target) == BLKmode
5952 || 0 != (bitpos
5953 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5954 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5955
5956
5957 /* Update the alias set, if required. */
5958 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5959 && MEM_ALIAS_SET (target) != 0)
5960 {
5961 target = copy_rtx (target);
5962 set_mem_alias_set (target, alias_set);
5963 }
5964
5965 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT,
5966 reverse);
5967 }
5968 else
5969 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false,
5970 reverse);
5971 }
5972
5973
5974 /* Returns the number of FIELD_DECLs in TYPE. */
5975
5976 static int
5977 fields_length (const_tree type)
5978 {
5979 tree t = TYPE_FIELDS (type);
5980 int count = 0;
5981
5982 for (; t; t = DECL_CHAIN (t))
5983 if (TREE_CODE (t) == FIELD_DECL)
5984 ++count;
5985
5986 return count;
5987 }
5988
5989
5990 /* Store the value of constructor EXP into the rtx TARGET.
5991 TARGET is either a REG or a MEM; we know it cannot conflict, since
5992 safe_from_p has been called.
5993 CLEARED is true if TARGET is known to have been zero'd.
5994 SIZE is the number of bytes of TARGET we are allowed to modify: this
5995 may not be the same as the size of EXP if we are assigning to a field
5996 which has been packed to exclude padding bits.
5997 If REVERSE is true, the store is to be done in reverse order. */
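/* Illustration (not from the original sources): expanding
     struct P { int x, y; } p = { 1, 2 };
   reaches the RECORD_TYPE case below; each element of the CONSTRUCTOR is
   stored into the corresponding field of TARGET via
   store_constructor_field.  */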
5998
5999 static void
6000 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
6001 bool reverse)
6002 {
6003 tree type = TREE_TYPE (exp);
6004 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
6005
6006 switch (TREE_CODE (type))
6007 {
6008 case RECORD_TYPE:
6009 case UNION_TYPE:
6010 case QUAL_UNION_TYPE:
6011 {
6012 unsigned HOST_WIDE_INT idx;
6013 tree field, value;
6014
6015 /* The storage order is specified for every aggregate type. */
6016 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6017
6018 /* If size is zero or the target is already cleared, do nothing. */
6019 if (size == 0 || cleared)
6020 cleared = 1;
6021 /* We either clear the aggregate or indicate the value is dead. */
6022 else if ((TREE_CODE (type) == UNION_TYPE
6023 || TREE_CODE (type) == QUAL_UNION_TYPE)
6024 && ! CONSTRUCTOR_ELTS (exp))
6025 /* If the constructor is empty, clear the union. */
6026 {
6027 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6028 cleared = 1;
6029 }
6030
6031 /* If we are building a static constructor into a register,
6032 set the initial value as zero so we can fold the value into
6033 a constant. But if more than one register is involved,
6034 this probably loses. */
6035 else if (REG_P (target) && TREE_STATIC (exp)
6036 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
6037 {
6038 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6039 cleared = 1;
6040 }
6041
6042 /* If the constructor has fewer fields than the structure or
6043 if we are initializing the structure to mostly zeros, clear
6044 the whole structure first. Don't do this if TARGET is a
6045 register whose mode size isn't equal to SIZE since
6046 clear_storage can't handle this case. */
6047 else if (size > 0
6048 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
6049 != fields_length (type))
6050 || mostly_zeros_p (exp))
6051 && (!REG_P (target)
6052 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6053 == size)))
6054 {
6055 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6056 cleared = 1;
6057 }
6058
6059 if (REG_P (target) && !cleared)
6060 emit_clobber (target);
6061
6062 /* Store each element of the constructor into the
6063 corresponding field of TARGET. */
6064 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6065 {
6066 machine_mode mode;
6067 HOST_WIDE_INT bitsize;
6068 HOST_WIDE_INT bitpos = 0;
6069 tree offset;
6070 rtx to_rtx = target;
6071
6072 /* Just ignore missing fields. We cleared the whole
6073 structure, above, if any fields are missing. */
6074 if (field == 0)
6075 continue;
6076
6077 if (cleared && initializer_zerop (value))
6078 continue;
6079
6080 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6081 bitsize = tree_to_uhwi (DECL_SIZE (field));
6082 else
6083 bitsize = -1;
6084
6085 mode = DECL_MODE (field);
6086 if (DECL_BIT_FIELD (field))
6087 mode = VOIDmode;
6088
6089 offset = DECL_FIELD_OFFSET (field);
6090 if (tree_fits_shwi_p (offset)
6091 && tree_fits_shwi_p (bit_position (field)))
6092 {
6093 bitpos = int_bit_position (field);
6094 offset = 0;
6095 }
6096 else
6097 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6098
6099 if (offset)
6100 {
6101 machine_mode address_mode;
6102 rtx offset_rtx;
6103
6104 offset
6105 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6106 make_tree (TREE_TYPE (exp),
6107 target));
6108
6109 offset_rtx = expand_normal (offset);
6110 gcc_assert (MEM_P (to_rtx));
6111
6112 address_mode = get_address_mode (to_rtx);
6113 if (GET_MODE (offset_rtx) != address_mode)
6114 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6115
6116 to_rtx = offset_address (to_rtx, offset_rtx,
6117 highest_pow2_factor (offset));
6118 }
6119
6120 /* If this initializes a field that is smaller than a
6121 word, at the start of a word, try to widen it to a full
6122 word. This special case allows us to output C++ member
6123 function initializations in a form that the optimizers
6124 can understand. */
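/* E.g. (illustration) a 16-bit integer field stored at bit position 0 of
   a structure held in a register is widened to a BITS_PER_WORD-wide
   store of the constant, shifted into place on big-endian targets, so
   later passes see a full-word assignment.  */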
6125 if (WORD_REGISTER_OPERATIONS
6126 && REG_P (target)
6127 && bitsize < BITS_PER_WORD
6128 && bitpos % BITS_PER_WORD == 0
6129 && GET_MODE_CLASS (mode) == MODE_INT
6130 && TREE_CODE (value) == INTEGER_CST
6131 && exp_size >= 0
6132 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6133 {
6134 tree type = TREE_TYPE (value);
6135
6136 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6137 {
6138 type = lang_hooks.types.type_for_mode
6139 (word_mode, TYPE_UNSIGNED (type));
6140 value = fold_convert (type, value);
6141 }
6142
6143 if (BYTES_BIG_ENDIAN)
6144 value
6145 = fold_build2 (LSHIFT_EXPR, type, value,
6146 build_int_cst (type,
6147 BITS_PER_WORD - bitsize));
6148 bitsize = BITS_PER_WORD;
6149 mode = word_mode;
6150 }
6151
6152 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6153 && DECL_NONADDRESSABLE_P (field))
6154 {
6155 to_rtx = copy_rtx (to_rtx);
6156 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6157 }
6158
6159 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6160 value, cleared,
6161 get_alias_set (TREE_TYPE (field)),
6162 reverse);
6163 }
6164 break;
6165 }
6166 case ARRAY_TYPE:
6167 {
6168 tree value, index;
6169 unsigned HOST_WIDE_INT i;
6170 int need_to_clear;
6171 tree domain;
6172 tree elttype = TREE_TYPE (type);
6173 int const_bounds_p;
6174 HOST_WIDE_INT minelt = 0;
6175 HOST_WIDE_INT maxelt = 0;
6176
6177 /* The storage order is specified for every aggregate type. */
6178 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6179
6180 domain = TYPE_DOMAIN (type);
6181 const_bounds_p = (TYPE_MIN_VALUE (domain)
6182 && TYPE_MAX_VALUE (domain)
6183 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6184 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6185
6186 /* If we have constant bounds for the range of the type, get them. */
6187 if (const_bounds_p)
6188 {
6189 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6190 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6191 }
6192
6193 /* If the constructor has fewer elements than the array, clear
6194 the whole array first. Similarly if this is a static
6195 constructor of a non-BLKmode object. */
6196 if (cleared)
6197 need_to_clear = 0;
6198 else if (REG_P (target) && TREE_STATIC (exp))
6199 need_to_clear = 1;
6200 else
6201 {
6202 unsigned HOST_WIDE_INT idx;
6203 tree index, value;
6204 HOST_WIDE_INT count = 0, zero_count = 0;
6205 need_to_clear = ! const_bounds_p;
6206
6207 /* This loop is a more accurate version of the loop in
6208 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6209 is also needed to check for missing elements. */
6210 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6211 {
6212 HOST_WIDE_INT this_node_count;
6213
6214 if (need_to_clear)
6215 break;
6216
6217 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6218 {
6219 tree lo_index = TREE_OPERAND (index, 0);
6220 tree hi_index = TREE_OPERAND (index, 1);
6221
6222 if (! tree_fits_uhwi_p (lo_index)
6223 || ! tree_fits_uhwi_p (hi_index))
6224 {
6225 need_to_clear = 1;
6226 break;
6227 }
6228
6229 this_node_count = (tree_to_uhwi (hi_index)
6230 - tree_to_uhwi (lo_index) + 1);
6231 }
6232 else
6233 this_node_count = 1;
6234
6235 count += this_node_count;
6236 if (mostly_zeros_p (value))
6237 zero_count += this_node_count;
6238 }
6239
6240 /* Clear the entire array first if there are any missing
6241 elements, or if the incidence of zero elements is >=
6242 75%. */
6243 if (! need_to_clear
6244 && (count < maxelt - minelt + 1
6245 || 4 * zero_count >= 3 * count))
6246 need_to_clear = 1;
6247 }
6248
6249 if (need_to_clear && size > 0)
6250 {
6251 if (REG_P (target))
6252 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6253 else
6254 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6255 cleared = 1;
6256 }
6257
6258 if (!cleared && REG_P (target))
6259 /* Inform later passes that the old value is dead. */
6260 emit_clobber (target);
6261
6262 /* Store each element of the constructor into the
6263 corresponding element of TARGET, determined by counting the
6264 elements. */
6265 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6266 {
6267 machine_mode mode;
6268 HOST_WIDE_INT bitsize;
6269 HOST_WIDE_INT bitpos;
6270 rtx xtarget = target;
6271
6272 if (cleared && initializer_zerop (value))
6273 continue;
6274
6275 mode = TYPE_MODE (elttype);
6276 if (mode == BLKmode)
6277 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6278 ? tree_to_uhwi (TYPE_SIZE (elttype))
6279 : -1);
6280 else
6281 bitsize = GET_MODE_BITSIZE (mode);
6282
6283 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6284 {
6285 tree lo_index = TREE_OPERAND (index, 0);
6286 tree hi_index = TREE_OPERAND (index, 1);
6287 rtx index_r, pos_rtx;
6288 HOST_WIDE_INT lo, hi, count;
6289 tree position;
6290
6291 /* If the range is constant and "small", unroll the loop. */
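/* Illustration (not from the original sources): the GNU C range
   initializer
     int a[8] = { [0 ... 3] = 7 };
   has a small constant range, so the four stores are emitted directly;
   ranges that are unknown or too large fall through to the runtime loop
   below.  */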
6292 if (const_bounds_p
6293 && tree_fits_shwi_p (lo_index)
6294 && tree_fits_shwi_p (hi_index)
6295 && (lo = tree_to_shwi (lo_index),
6296 hi = tree_to_shwi (hi_index),
6297 count = hi - lo + 1,
6298 (!MEM_P (target)
6299 || count <= 2
6300 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6301 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6302 <= 40 * 8)))))
6303 {
6304 lo -= minelt; hi -= minelt;
6305 for (; lo <= hi; lo++)
6306 {
6307 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6308
6309 if (MEM_P (target)
6310 && !MEM_KEEP_ALIAS_SET_P (target)
6311 && TREE_CODE (type) == ARRAY_TYPE
6312 && TYPE_NONALIASED_COMPONENT (type))
6313 {
6314 target = copy_rtx (target);
6315 MEM_KEEP_ALIAS_SET_P (target) = 1;
6316 }
6317
6318 store_constructor_field
6319 (target, bitsize, bitpos, mode, value, cleared,
6320 get_alias_set (elttype), reverse);
6321 }
6322 }
6323 else
6324 {
6325 rtx_code_label *loop_start = gen_label_rtx ();
6326 rtx_code_label *loop_end = gen_label_rtx ();
6327 tree exit_cond;
6328
6329 expand_normal (hi_index);
6330
6331 index = build_decl (EXPR_LOCATION (exp),
6332 VAR_DECL, NULL_TREE, domain);
6333 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6334 SET_DECL_RTL (index, index_r);
6335 store_expr (lo_index, index_r, 0, false, reverse);
6336
6337 /* Build the head of the loop. */
6338 do_pending_stack_adjust ();
6339 emit_label (loop_start);
6340
6341 /* Assign value to element index. */
6342 position =
6343 fold_convert (ssizetype,
6344 fold_build2 (MINUS_EXPR,
6345 TREE_TYPE (index),
6346 index,
6347 TYPE_MIN_VALUE (domain)));
6348
6349 position =
6350 size_binop (MULT_EXPR, position,
6351 fold_convert (ssizetype,
6352 TYPE_SIZE_UNIT (elttype)));
6353
6354 pos_rtx = expand_normal (position);
6355 xtarget = offset_address (target, pos_rtx,
6356 highest_pow2_factor (position));
6357 xtarget = adjust_address (xtarget, mode, 0);
6358 if (TREE_CODE (value) == CONSTRUCTOR)
6359 store_constructor (value, xtarget, cleared,
6360 bitsize / BITS_PER_UNIT, reverse);
6361 else
6362 store_expr (value, xtarget, 0, false, reverse);
6363
6364 /* Generate a conditional jump to exit the loop. */
6365 exit_cond = build2 (LT_EXPR, integer_type_node,
6366 index, hi_index);
6367 jumpif (exit_cond, loop_end, -1);
6368
6369 /* Update the loop counter, and jump to the head of
6370 the loop. */
6371 expand_assignment (index,
6372 build2 (PLUS_EXPR, TREE_TYPE (index),
6373 index, integer_one_node),
6374 false);
6375
6376 emit_jump (loop_start);
6377
6378 /* Build the end of the loop. */
6379 emit_label (loop_end);
6380 }
6381 }
6382 else if ((index != 0 && ! tree_fits_shwi_p (index))
6383 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6384 {
6385 tree position;
6386
6387 if (index == 0)
6388 index = ssize_int (1);
6389
6390 if (minelt)
6391 index = fold_convert (ssizetype,
6392 fold_build2 (MINUS_EXPR,
6393 TREE_TYPE (index),
6394 index,
6395 TYPE_MIN_VALUE (domain)));
6396
6397 position =
6398 size_binop (MULT_EXPR, index,
6399 fold_convert (ssizetype,
6400 TYPE_SIZE_UNIT (elttype)));
6401 xtarget = offset_address (target,
6402 expand_normal (position),
6403 highest_pow2_factor (position));
6404 xtarget = adjust_address (xtarget, mode, 0);
6405 store_expr (value, xtarget, 0, false, reverse);
6406 }
6407 else
6408 {
6409 if (index != 0)
6410 bitpos = ((tree_to_shwi (index) - minelt)
6411 * tree_to_uhwi (TYPE_SIZE (elttype)));
6412 else
6413 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6414
6415 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6416 && TREE_CODE (type) == ARRAY_TYPE
6417 && TYPE_NONALIASED_COMPONENT (type))
6418 {
6419 target = copy_rtx (target);
6420 MEM_KEEP_ALIAS_SET_P (target) = 1;
6421 }
6422 store_constructor_field (target, bitsize, bitpos, mode, value,
6423 cleared, get_alias_set (elttype),
6424 reverse);
6425 }
6426 }
6427 break;
6428 }
6429
6430 case VECTOR_TYPE:
6431 {
6432 unsigned HOST_WIDE_INT idx;
6433 constructor_elt *ce;
6434 int i;
6435 int need_to_clear;
6436 int icode = CODE_FOR_nothing;
6437 tree elttype = TREE_TYPE (type);
6438 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6439 machine_mode eltmode = TYPE_MODE (elttype);
6440 HOST_WIDE_INT bitsize;
6441 HOST_WIDE_INT bitpos;
6442 rtvec vector = NULL;
6443 unsigned n_elts;
6444 alias_set_type alias;
6445
6446 gcc_assert (eltmode != BLKmode);
6447
6448 n_elts = TYPE_VECTOR_SUBPARTS (type);
6449 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6450 {
6451 machine_mode mode = GET_MODE (target);
6452
6453 icode = (int) optab_handler (vec_init_optab, mode);
6454 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6455 if (icode != CODE_FOR_nothing)
6456 {
6457 tree value;
6458
6459 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6460 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6461 {
6462 icode = CODE_FOR_nothing;
6463 break;
6464 }
6465 }
6466 if (icode != CODE_FOR_nothing)
6467 {
6468 unsigned int i;
6469
6470 vector = rtvec_alloc (n_elts);
6471 for (i = 0; i < n_elts; i++)
6472 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6473 }
6474 }
6475
6476 /* If the constructor has fewer elements than the vector,
6477 clear the whole vector first. Similarly if this is a static
6478 constructor of a non-BLKmode object. */
6479 if (cleared)
6480 need_to_clear = 0;
6481 else if (REG_P (target) && TREE_STATIC (exp))
6482 need_to_clear = 1;
6483 else
6484 {
6485 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6486 tree value;
6487
6488 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6489 {
6490 int n_elts_here = tree_to_uhwi
6491 (int_const_binop (TRUNC_DIV_EXPR,
6492 TYPE_SIZE (TREE_TYPE (value)),
6493 TYPE_SIZE (elttype)));
6494
6495 count += n_elts_here;
6496 if (mostly_zeros_p (value))
6497 zero_count += n_elts_here;
6498 }
6499
6500 /* Clear the entire vector first if there are any missing elements,
6501 or if the incidence of zero elements is >= 75%. */
6502 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6503 }
6504
6505 if (need_to_clear && size > 0 && !vector)
6506 {
6507 if (REG_P (target))
6508 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6509 else
6510 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6511 cleared = 1;
6512 }
6513
6514 /* Inform later passes that the old value is dead. */
6515 if (!cleared && !vector && REG_P (target))
6516 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6517
6518 if (MEM_P (target))
6519 alias = MEM_ALIAS_SET (target);
6520 else
6521 alias = get_alias_set (elttype);
6522
6523 /* Store each element of the constructor into the corresponding
6524 element of TARGET, determined by counting the elements. */
6525 for (idx = 0, i = 0;
6526 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6527 idx++, i += bitsize / elt_size)
6528 {
6529 HOST_WIDE_INT eltpos;
6530 tree value = ce->value;
6531
6532 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6533 if (cleared && initializer_zerop (value))
6534 continue;
6535
6536 if (ce->index)
6537 eltpos = tree_to_uhwi (ce->index);
6538 else
6539 eltpos = i;
6540
6541 if (vector)
6542 {
6543 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6544 elements. */
6545 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6546 RTVEC_ELT (vector, eltpos)
6547 = expand_normal (value);
6548 }
6549 else
6550 {
6551 machine_mode value_mode =
6552 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6553 ? TYPE_MODE (TREE_TYPE (value))
6554 : eltmode;
6555 bitpos = eltpos * elt_size;
6556 store_constructor_field (target, bitsize, bitpos, value_mode,
6557 value, cleared, alias, reverse);
6558 }
6559 }
6560
6561 if (vector)
6562 emit_insn (GEN_FCN (icode)
6563 (target,
6564 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6565 break;
6566 }
6567
6568 default:
6569 gcc_unreachable ();
6570 }
6571 }
6572
6573 /* Store the value of EXP (an expression tree)
6574 into a subfield of TARGET which has mode MODE and occupies
6575 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6576 If MODE is VOIDmode, it means that we are storing into a bit-field.
6577
6578 BITREGION_START is the bitpos of the first bitfield in this region.
6579 BITREGION_END is the bitpos of the ending bitfield in this region.
6580 These two fields are 0 if the C++ memory model does not apply,
6581 or if we are not interested in keeping track of bitfield regions.
6582
6583 Always return const0_rtx unless we have something particular to
6584 return.
6585
6586 ALIAS_SET is the alias set for the destination. This value will
6587 (in general) be different from that for TARGET, since TARGET is a
6588 reference to the containing structure.
6589
6590 If NONTEMPORAL is true, try generating a nontemporal store.
6591
6592 If REVERSE is true, the store is to be done in reverse order. */
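/* Illustration (not from the original sources): for
     struct B { unsigned int lo : 3, hi : 5; } b;
     b.hi = 9;
   this is typically reached with MODE == VOIDmode, BITSIZE == 5 and, on a
   little-endian target, BITPOS == 3, and the assignment is carried out by
   store_bit_field below.  */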
6593
6594 static rtx
6595 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6596 unsigned HOST_WIDE_INT bitregion_start,
6597 unsigned HOST_WIDE_INT bitregion_end,
6598 machine_mode mode, tree exp,
6599 alias_set_type alias_set, bool nontemporal, bool reverse)
6600 {
6601 if (TREE_CODE (exp) == ERROR_MARK)
6602 return const0_rtx;
6603
6604 /* If we have nothing to store, do nothing unless the expression has
6605 side-effects. */
6606 if (bitsize == 0)
6607 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6608
6609 if (GET_CODE (target) == CONCAT)
6610 {
6611 /* We're storing into a struct containing a single __complex. */
6612
6613 gcc_assert (!bitpos);
6614 return store_expr (exp, target, 0, nontemporal, reverse);
6615 }
6616
6617 /* If the structure is in a register or if the component
6618 is a bit field, we cannot use addressing to access it.
6619 Use bit-field techniques or SUBREG to store in it. */
6620
6621 if (mode == VOIDmode
6622 || (mode != BLKmode && ! direct_store[(int) mode]
6623 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6624 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6625 || REG_P (target)
6626 || GET_CODE (target) == SUBREG
6627 /* If the field isn't aligned enough to store as an ordinary memref,
6628 store it as a bit field. */
6629 || (mode != BLKmode
6630 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6631 || bitpos % GET_MODE_ALIGNMENT (mode))
6632 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6633 || (bitpos % BITS_PER_UNIT != 0)))
6634 || (bitsize >= 0 && mode != BLKmode
6635 && GET_MODE_BITSIZE (mode) > bitsize)
6636 /* If the RHS and field are a constant size and the size of the
6637 RHS isn't the same size as the bitfield, we must use bitfield
6638 operations. */
6639 || (bitsize >= 0
6640 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6641 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0
6642 /* Except for initialization of full bytes from a CONSTRUCTOR, which
6643 we will handle specially below. */
6644 && !(TREE_CODE (exp) == CONSTRUCTOR
6645 && bitsize % BITS_PER_UNIT == 0))
6646 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6647 decl we must use bitfield operations. */
6648 || (bitsize >= 0
6649 && TREE_CODE (exp) == MEM_REF
6650 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6651 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6652 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6653 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6654 {
6655 rtx temp;
6656 gimple *nop_def;
6657
6658 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6659 implies a mask operation. If the precision is the same size as
6660 the field we're storing into, that mask is redundant. This is
6661 particularly common with bit field assignments generated by the
6662 C front end. */
6663 nop_def = get_def_for_expr (exp, NOP_EXPR);
6664 if (nop_def)
6665 {
6666 tree type = TREE_TYPE (exp);
6667 if (INTEGRAL_TYPE_P (type)
6668 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6669 && bitsize == TYPE_PRECISION (type))
6670 {
6671 tree op = gimple_assign_rhs1 (nop_def);
6672 type = TREE_TYPE (op);
6673 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6674 exp = op;
6675 }
6676 }
6677
6678 temp = expand_normal (exp);
6679
6680 /* If the value has a record type and an integral mode then, if BITSIZE
6681 is narrower than this mode and this is for big-endian data, we must
6682 first put the value into the low-order bits. Moreover, the field may
6683 not be aligned on a byte boundary; in this case, if it has reverse
6684 storage order, it needs to be accessed as a scalar field with reverse
6685 storage order and we must first put the value into target order. */
6686 if (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
6687 && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT)
6688 {
6689 HOST_WIDE_INT size = GET_MODE_BITSIZE (GET_MODE (temp));
6690
6691 reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));
6692
6693 if (reverse)
6694 temp = flip_storage_order (GET_MODE (temp), temp);
6695
6696 if (bitsize < size
6697 && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6698 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6699 size - bitsize, NULL_RTX, 1);
6700 }
6701
6702 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6703 if (mode != VOIDmode && mode != BLKmode
6704 && mode != TYPE_MODE (TREE_TYPE (exp)))
6705 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6706
6707 /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6708 are both BLKmode, both must be in memory and BITPOS must be aligned
6709 on a byte boundary. If so, we simply do a block copy. Likewise for
6710 a BLKmode-like TARGET. */
6711 if (GET_CODE (temp) != PARALLEL
6712 && GET_MODE (temp) == BLKmode
6713 && (GET_MODE (target) == BLKmode
6714 || (MEM_P (target)
6715 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6716 && (bitpos % BITS_PER_UNIT) == 0
6717 && (bitsize % BITS_PER_UNIT) == 0)))
6718 {
6719 gcc_assert (MEM_P (target) && MEM_P (temp)
6720 && (bitpos % BITS_PER_UNIT) == 0);
6721
6722 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6723 emit_block_move (target, temp,
6724 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6725 / BITS_PER_UNIT),
6726 BLOCK_OP_NORMAL);
6727
6728 return const0_rtx;
6729 }
6730
6731 /* Handle calls that return values in multiple non-contiguous locations.
6732 The Irix 6 ABI has examples of this. */
6733 if (GET_CODE (temp) == PARALLEL)
6734 {
6735 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6736 rtx temp_target;
6737 if (mode == BLKmode || mode == VOIDmode)
6738 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6739 temp_target = gen_reg_rtx (mode);
6740 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6741 temp = temp_target;
6742 }
6743 else if (mode == BLKmode)
6744 {
6745 /* Handle calls that return BLKmode values in registers. */
6746 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6747 {
6748 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6749 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6750 temp = temp_target;
6751 }
6752 else
6753 {
6754 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6755 rtx temp_target;
6756 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6757 temp_target = gen_reg_rtx (mode);
6758 temp_target
6759 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6760 temp_target, mode, mode, false);
6761 temp = temp_target;
6762 }
6763 }
6764
6765 /* Store the value in the bitfield. */
6766 store_bit_field (target, bitsize, bitpos,
6767 bitregion_start, bitregion_end,
6768 mode, temp, reverse);
6769
6770 return const0_rtx;
6771 }
6772 else
6773 {
6774 /* Now build a reference to just the desired component. */
6775 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6776
6777 if (to_rtx == target)
6778 to_rtx = copy_rtx (to_rtx);
6779
6780 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6781 set_mem_alias_set (to_rtx, alias_set);
6782
6783 /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
6784 into a target smaller than its type; handle that case now. */
6785 if (TREE_CODE (exp) == CONSTRUCTOR && bitsize >= 0)
6786 {
6787 gcc_assert (bitsize % BITS_PER_UNIT == 0);
6788 store_constructor (exp, to_rtx, 0, bitsize / BITS_PER_UNIT, reverse);
6789 return to_rtx;
6790 }
6791
6792 return store_expr (exp, to_rtx, 0, nontemporal, reverse);
6793 }
6794 }
6795 \f
6796 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6797 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6798 codes and find the ultimate containing object, which we return.
6799
6800 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6801 bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
6802 storage order of the field.
6803 If the position of the field is variable, we store a tree
6804 giving the variable offset (in units) in *POFFSET.
6805 This offset is in addition to the bit position.
6806 If the position is not variable, we store 0 in *POFFSET.
6807
6808 If any of the extraction expressions is volatile,
6809 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6810
6811 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6812 Otherwise, it is a mode that can be used to access the field.
6813
6814 If the field describes a variable-sized object, *PMODE is set to
6815 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6816 this case, but the address of the object can be found.
6817
6818 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6819 look through nodes that serve as markers of a greater alignment than
6820 the one that can be deduced from the expression. These nodes make it
6821 possible for front-ends to prevent temporaries from being created by
6822 the middle-end on alignment considerations. For that purpose, the
6823 normal operating mode at high-level is to always pass FALSE so that
6824 the ultimate containing object is really returned; moreover, the
6825 associated predicate handled_component_p will always return TRUE
6826 on these nodes, thus indicating that they are essentially handled
6827 by get_inner_reference. TRUE should only be passed when the caller
6828 is scanning the expression in order to build another representation
6829 and specifically knows how to handle these nodes; as such, this is
6830 the normal operating mode in the RTL expanders. */
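/* Worked example (illustration only): for a reference like s.f[i].b where
   B is a bit-field, this returns the declaration of S, sets *PBITSIZE and
   *PBITPOS to B's size and constant bit position, puts the variable part
   of the displacement (I scaled by the element size) in *POFFSET, and
   sets *PMODE to VOIDmode because B is a bit-field.  */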
6831
6832 tree
6833 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6834 HOST_WIDE_INT *pbitpos, tree *poffset,
6835 machine_mode *pmode, int *punsignedp,
6836 int *preversep, int *pvolatilep, bool keep_aligning)
6837 {
6838 tree size_tree = 0;
6839 machine_mode mode = VOIDmode;
6840 bool blkmode_bitfield = false;
6841 tree offset = size_zero_node;
6842 offset_int bit_offset = 0;
6843
6844 /* First get the mode, signedness, storage order and size. We do this from
6845 just the outermost expression. */
6846 *pbitsize = -1;
6847 if (TREE_CODE (exp) == COMPONENT_REF)
6848 {
6849 tree field = TREE_OPERAND (exp, 1);
6850 size_tree = DECL_SIZE (field);
6851 if (flag_strict_volatile_bitfields > 0
6852 && TREE_THIS_VOLATILE (exp)
6853 && DECL_BIT_FIELD_TYPE (field)
6854 && DECL_MODE (field) != BLKmode)
6855 /* Volatile bitfields should be accessed in the mode of the
6856 field's type, not the mode computed based on the bit
6857 size. */
6858 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6859 else if (!DECL_BIT_FIELD (field))
6860 mode = DECL_MODE (field);
6861 else if (DECL_MODE (field) == BLKmode)
6862 blkmode_bitfield = true;
6863
6864 *punsignedp = DECL_UNSIGNED (field);
6865 }
6866 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6867 {
6868 size_tree = TREE_OPERAND (exp, 1);
6869 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6870 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6871
6872 /* For vector types, with the correct size of access, use the mode of the
6873 inner type. */
6874 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6875 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6876 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6877 mode = TYPE_MODE (TREE_TYPE (exp));
6878 }
6879 else
6880 {
6881 mode = TYPE_MODE (TREE_TYPE (exp));
6882 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6883
6884 if (mode == BLKmode)
6885 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6886 else
6887 *pbitsize = GET_MODE_BITSIZE (mode);
6888 }
6889
6890 if (size_tree != 0)
6891 {
6892 if (! tree_fits_uhwi_p (size_tree))
6893 mode = BLKmode, *pbitsize = -1;
6894 else
6895 *pbitsize = tree_to_uhwi (size_tree);
6896 }
6897
6898 *preversep = reverse_storage_order_for_component_p (exp);
6899
6900 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6901 and find the ultimate containing object. */
6902 while (1)
6903 {
6904 switch (TREE_CODE (exp))
6905 {
6906 case BIT_FIELD_REF:
6907 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6908 break;
6909
6910 case COMPONENT_REF:
6911 {
6912 tree field = TREE_OPERAND (exp, 1);
6913 tree this_offset = component_ref_field_offset (exp);
6914
6915 /* If this field hasn't been filled in yet, don't go past it.
6916 This should only happen when folding expressions made during
6917 type construction. */
6918 if (this_offset == 0)
6919 break;
6920
6921 offset = size_binop (PLUS_EXPR, offset, this_offset);
6922 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6923
6924 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6925 }
6926 break;
6927
6928 case ARRAY_REF:
6929 case ARRAY_RANGE_REF:
6930 {
6931 tree index = TREE_OPERAND (exp, 1);
6932 tree low_bound = array_ref_low_bound (exp);
6933 tree unit_size = array_ref_element_size (exp);
6934
6935 /* We assume all arrays have sizes that are a multiple of a byte.
6936 First subtract the lower bound, if any, in the type of the
6937 index, then convert to sizetype and multiply by the size of
6938 the array element. */
6939 if (! integer_zerop (low_bound))
6940 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6941 index, low_bound);
6942
6943 offset = size_binop (PLUS_EXPR, offset,
6944 size_binop (MULT_EXPR,
6945 fold_convert (sizetype, index),
6946 unit_size));
6947 }
6948 break;
6949
6950 case REALPART_EXPR:
6951 break;
6952
6953 case IMAGPART_EXPR:
6954 bit_offset += *pbitsize;
6955 break;
6956
6957 case VIEW_CONVERT_EXPR:
6958 if (keep_aligning && STRICT_ALIGNMENT
6959 && (TYPE_ALIGN (TREE_TYPE (exp))
6960 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6961 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6962 < BIGGEST_ALIGNMENT)
6963 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6964 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6965 goto done;
6966 break;
6967
6968 case MEM_REF:
6969 /* Hand back the decl for MEM[&decl, off]. */
6970 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6971 {
6972 tree off = TREE_OPERAND (exp, 1);
6973 if (!integer_zerop (off))
6974 {
6975 offset_int boff, coff = mem_ref_offset (exp);
6976 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6977 bit_offset += boff;
6978 }
6979 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6980 }
6981 goto done;
6982
6983 default:
6984 goto done;
6985 }
6986
6987 /* If any reference in the chain is volatile, the effect is volatile. */
6988 if (TREE_THIS_VOLATILE (exp))
6989 *pvolatilep = 1;
6990
6991 exp = TREE_OPERAND (exp, 0);
6992 }
6993 done:
6994
6995 /* If OFFSET is constant, see if we can return the whole thing as a
6996 constant bit position. Make sure to handle overflow during
6997 this conversion. */
6998 if (TREE_CODE (offset) == INTEGER_CST)
6999 {
7000 offset_int tem = wi::sext (wi::to_offset (offset),
7001 TYPE_PRECISION (sizetype));
7002 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
7003 tem += bit_offset;
7004 if (wi::fits_shwi_p (tem))
7005 {
7006 *pbitpos = tem.to_shwi ();
7007 *poffset = offset = NULL_TREE;
7008 }
7009 }
7010
7011 /* Otherwise, split it up. */
7012 if (offset)
7013 {
7014 /* Avoid returning a negative bitpos as this may wreak havoc later. */
7015 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
7016 {
7017 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
7018 offset_int tem = bit_offset.and_not (mask);
7019 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
7020 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
7021 bit_offset -= tem;
7022 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
7023 offset = size_binop (PLUS_EXPR, offset,
7024 wide_int_to_tree (sizetype, tem));
7025 }
7026
7027 *pbitpos = bit_offset.to_shwi ();
7028 *poffset = offset;
7029 }
7030
7031 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
7032 if (mode == VOIDmode
7033 && blkmode_bitfield
7034 && (*pbitpos % BITS_PER_UNIT) == 0
7035 && (*pbitsize % BITS_PER_UNIT) == 0)
7036 *pmode = BLKmode;
7037 else
7038 *pmode = mode;
7039
7040 return exp;
7041 }
7042
7043 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7044
7045 static unsigned HOST_WIDE_INT
7046 target_align (const_tree target)
7047 {
7048 /* We might have a chain of nested references with intermediate misaligning
7049 bit-field components, so we need to recurse to find out. */
7050
7051 unsigned HOST_WIDE_INT this_align, outer_align;
7052
7053 switch (TREE_CODE (target))
7054 {
7055 case BIT_FIELD_REF:
7056 return 1;
7057
7058 case COMPONENT_REF:
7059 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7060 outer_align = target_align (TREE_OPERAND (target, 0));
7061 return MIN (this_align, outer_align);
7062
7063 case ARRAY_REF:
7064 case ARRAY_RANGE_REF:
7065 this_align = TYPE_ALIGN (TREE_TYPE (target));
7066 outer_align = target_align (TREE_OPERAND (target, 0));
7067 return MIN (this_align, outer_align);
7068
7069 CASE_CONVERT:
7070 case NON_LVALUE_EXPR:
7071 case VIEW_CONVERT_EXPR:
7072 this_align = TYPE_ALIGN (TREE_TYPE (target));
7073 outer_align = target_align (TREE_OPERAND (target, 0));
7074 return MAX (this_align, outer_align);
7075
7076 default:
7077 return TYPE_ALIGN (TREE_TYPE (target));
7078 }
7079 }
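
/* For illustration: for a COMPONENT_REF such as s.f the result is the
   smaller of DECL_ALIGN of the field and the alignment computed for the
   enclosing object, while a BIT_FIELD_REF target is conservatively
   assumed to be aligned to only a single bit.  */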
7080
7081 \f
7082 /* Given an rtx VALUE that may contain additions and multiplications, return
7083 an equivalent value that just refers to a register, memory, or constant.
7084 This is done by generating instructions to perform the arithmetic and
7085 returning a pseudo-register containing the value.
7086
7087 The returned value may be a REG, SUBREG, MEM or constant. */
7088
7089 rtx
7090 force_operand (rtx value, rtx target)
7091 {
7092 rtx op1, op2;
7093 /* Use subtarget as the target for operand 0 of a binary operation. */
7094 rtx subtarget = get_subtarget (target);
7095 enum rtx_code code = GET_CODE (value);
7096
7097   /* Check for a subreg applied to an expression produced by the loop optimizer.  */
7098 if (code == SUBREG
7099 && !REG_P (SUBREG_REG (value))
7100 && !MEM_P (SUBREG_REG (value)))
7101 {
7102 value
7103 = simplify_gen_subreg (GET_MODE (value),
7104 force_reg (GET_MODE (SUBREG_REG (value)),
7105 force_operand (SUBREG_REG (value),
7106 NULL_RTX)),
7107 GET_MODE (SUBREG_REG (value)),
7108 SUBREG_BYTE (value));
7109 code = GET_CODE (value);
7110 }
7111
7112 /* Check for a PIC address load. */
7113 if ((code == PLUS || code == MINUS)
7114 && XEXP (value, 0) == pic_offset_table_rtx
7115 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7116 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7117 || GET_CODE (XEXP (value, 1)) == CONST))
7118 {
7119 if (!subtarget)
7120 subtarget = gen_reg_rtx (GET_MODE (value));
7121 emit_move_insn (subtarget, value);
7122 return subtarget;
7123 }
7124
7125 if (ARITHMETIC_P (value))
7126 {
7127 op2 = XEXP (value, 1);
7128 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7129 subtarget = 0;
7130 if (code == MINUS && CONST_INT_P (op2))
7131 {
7132 code = PLUS;
7133 op2 = negate_rtx (GET_MODE (value), op2);
7134 }
7135
7136 /* Check for an addition with OP2 a constant integer and our first
7137 operand a PLUS of a virtual register and something else. In that
7138 case, we want to emit the sum of the virtual register and the
7139 constant first and then add the other value. This allows virtual
7140 register instantiation to simply modify the constant rather than
7141 creating another one around this addition. */
7142 if (code == PLUS && CONST_INT_P (op2)
7143 && GET_CODE (XEXP (value, 0)) == PLUS
7144 && REG_P (XEXP (XEXP (value, 0), 0))
7145 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7146 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7147 {
7148 rtx temp = expand_simple_binop (GET_MODE (value), code,
7149 XEXP (XEXP (value, 0), 0), op2,
7150 subtarget, 0, OPTAB_LIB_WIDEN);
7151 return expand_simple_binop (GET_MODE (value), code, temp,
7152 force_operand (XEXP (XEXP (value,
7153 0), 1), 0),
7154 target, 0, OPTAB_LIB_WIDEN);
7155 }
7156
7157 op1 = force_operand (XEXP (value, 0), subtarget);
7158 op2 = force_operand (op2, NULL_RTX);
7159 switch (code)
7160 {
7161 case MULT:
7162 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7163 case DIV:
7164 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7165 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7166 target, 1, OPTAB_LIB_WIDEN);
7167 else
7168 return expand_divmod (0,
7169 FLOAT_MODE_P (GET_MODE (value))
7170 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7171 GET_MODE (value), op1, op2, target, 0);
7172 case MOD:
7173 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7174 target, 0);
7175 case UDIV:
7176 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7177 target, 1);
7178 case UMOD:
7179 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7180 target, 1);
7181 case ASHIFTRT:
7182 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7183 target, 0, OPTAB_LIB_WIDEN);
7184 default:
7185 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7186 target, 1, OPTAB_LIB_WIDEN);
7187 }
7188 }
7189 if (UNARY_P (value))
7190 {
7191 if (!target)
7192 target = gen_reg_rtx (GET_MODE (value));
7193 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7194 switch (code)
7195 {
7196 case ZERO_EXTEND:
7197 case SIGN_EXTEND:
7198 case TRUNCATE:
7199 case FLOAT_EXTEND:
7200 case FLOAT_TRUNCATE:
7201 convert_move (target, op1, code == ZERO_EXTEND);
7202 return target;
7203
7204 case FIX:
7205 case UNSIGNED_FIX:
7206 expand_fix (target, op1, code == UNSIGNED_FIX);
7207 return target;
7208
7209 case FLOAT:
7210 case UNSIGNED_FLOAT:
7211 expand_float (target, op1, code == UNSIGNED_FLOAT);
7212 return target;
7213
7214 default:
7215 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7216 }
7217 }
7218
7219 #ifdef INSN_SCHEDULING
7220   /* On machines that have insn scheduling, we want all memory references to be
7221 explicit, so we need to deal with such paradoxical SUBREGs. */
7222 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7223 value
7224 = simplify_gen_subreg (GET_MODE (value),
7225 force_reg (GET_MODE (SUBREG_REG (value)),
7226 force_operand (SUBREG_REG (value),
7227 NULL_RTX)),
7228 GET_MODE (SUBREG_REG (value)),
7229 SUBREG_BYTE (value));
7230 #endif
7231
7232 return value;
7233 }
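
/* For illustration: handing force_operand a PLUS of a register and a
   constant emits an add insn and returns a pseudo holding the sum,
   whereas a plain REG, MEM or constant value is returned unchanged.  */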
7234 \f
7235 /* Subroutine of expand_expr: return nonzero iff there is no way that
7236 EXP can reference X, which is being modified. TOP_P is nonzero if this
7237 call is going to be used to determine whether we need a temporary
7238 for EXP, as opposed to a recursive call to this function.
7239
7240 It is always safe for this routine to return zero since it merely
7241 searches for optimization opportunities. */
7242
7243 int
7244 safe_from_p (const_rtx x, tree exp, int top_p)
7245 {
7246 rtx exp_rtl = 0;
7247 int i, nops;
7248
7249 if (x == 0
7250 /* If EXP has varying size, we MUST use a target since we currently
7251 have no way of allocating temporaries of variable size
7252 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7253 So we assume here that something at a higher level has prevented a
7254 clash. This is somewhat bogus, but the best we can do. Only
7255 do this when X is BLKmode and when we are at the top level. */
7256 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7257 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7258 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7259 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7260 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7261 != INTEGER_CST)
7262 && GET_MODE (x) == BLKmode)
7263 /* If X is in the outgoing argument area, it is always safe. */
7264 || (MEM_P (x)
7265 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7266 || (GET_CODE (XEXP (x, 0)) == PLUS
7267 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7268 return 1;
7269
7270   /* If this is a subreg of a hard register, declare it unsafe; otherwise,
7271 find the underlying pseudo. */
7272 if (GET_CODE (x) == SUBREG)
7273 {
7274 x = SUBREG_REG (x);
7275 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7276 return 0;
7277 }
7278
7279 /* Now look at our tree code and possibly recurse. */
7280 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7281 {
7282 case tcc_declaration:
7283 exp_rtl = DECL_RTL_IF_SET (exp);
7284 break;
7285
7286 case tcc_constant:
7287 return 1;
7288
7289 case tcc_exceptional:
7290 if (TREE_CODE (exp) == TREE_LIST)
7291 {
7292 while (1)
7293 {
7294 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7295 return 0;
7296 exp = TREE_CHAIN (exp);
7297 if (!exp)
7298 return 1;
7299 if (TREE_CODE (exp) != TREE_LIST)
7300 return safe_from_p (x, exp, 0);
7301 }
7302 }
7303 else if (TREE_CODE (exp) == CONSTRUCTOR)
7304 {
7305 constructor_elt *ce;
7306 unsigned HOST_WIDE_INT idx;
7307
7308 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7309 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7310 || !safe_from_p (x, ce->value, 0))
7311 return 0;
7312 return 1;
7313 }
7314 else if (TREE_CODE (exp) == ERROR_MARK)
7315 return 1; /* An already-visited SAVE_EXPR? */
7316 else
7317 return 0;
7318
7319 case tcc_statement:
7320 /* The only case we look at here is the DECL_INITIAL inside a
7321 DECL_EXPR. */
7322 return (TREE_CODE (exp) != DECL_EXPR
7323 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7324 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7325 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7326
7327 case tcc_binary:
7328 case tcc_comparison:
7329 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7330 return 0;
7331 /* Fall through. */
7332
7333 case tcc_unary:
7334 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7335
7336 case tcc_expression:
7337 case tcc_reference:
7338 case tcc_vl_exp:
7339 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7340 the expression. If it is set, we conflict iff we are that rtx or
7341 both are in memory. Otherwise, we check all operands of the
7342 expression recursively. */
7343
7344 switch (TREE_CODE (exp))
7345 {
7346 case ADDR_EXPR:
7347 /* If the operand is static or we are static, we can't conflict.
7348 Likewise if we don't conflict with the operand at all. */
7349 if (staticp (TREE_OPERAND (exp, 0))
7350 || TREE_STATIC (exp)
7351 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7352 return 1;
7353
7354 /* Otherwise, the only way this can conflict is if we are taking
7355 	     the address of a DECL whose address is part of X, which is
7356 very rare. */
7357 exp = TREE_OPERAND (exp, 0);
7358 if (DECL_P (exp))
7359 {
7360 if (!DECL_RTL_SET_P (exp)
7361 || !MEM_P (DECL_RTL (exp)))
7362 return 0;
7363 else
7364 exp_rtl = XEXP (DECL_RTL (exp), 0);
7365 }
7366 break;
7367
7368 case MEM_REF:
7369 if (MEM_P (x)
7370 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7371 get_alias_set (exp)))
7372 return 0;
7373 break;
7374
7375 case CALL_EXPR:
7376 /* Assume that the call will clobber all hard registers and
7377 all of memory. */
7378 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7379 || MEM_P (x))
7380 return 0;
7381 break;
7382
7383 case WITH_CLEANUP_EXPR:
7384 case CLEANUP_POINT_EXPR:
7385 /* Lowered by gimplify.c. */
7386 gcc_unreachable ();
7387
7388 case SAVE_EXPR:
7389 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7390
7391 default:
7392 break;
7393 }
7394
7395 /* If we have an rtx, we do not need to scan our operands. */
7396 if (exp_rtl)
7397 break;
7398
7399 nops = TREE_OPERAND_LENGTH (exp);
7400 for (i = 0; i < nops; i++)
7401 if (TREE_OPERAND (exp, i) != 0
7402 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7403 return 0;
7404
7405 break;
7406
7407 case tcc_type:
7408 /* Should never get a type here. */
7409 gcc_unreachable ();
7410 }
7411
7412 /* If we have an rtl, find any enclosed object. Then see if we conflict
7413 with it. */
7414 if (exp_rtl)
7415 {
7416 if (GET_CODE (exp_rtl) == SUBREG)
7417 {
7418 exp_rtl = SUBREG_REG (exp_rtl);
7419 if (REG_P (exp_rtl)
7420 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7421 return 0;
7422 }
7423
7424 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7425 are memory and they conflict. */
7426 return ! (rtx_equal_p (x, exp_rtl)
7427 || (MEM_P (x) && MEM_P (exp_rtl)
7428 && true_dependence (exp_rtl, VOIDmode, x)));
7429 }
7430
7431 /* If we reach here, it is safe. */
7432 return 1;
7433 }
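
/* For example, expand_operands below uses safe_from_p to decide whether
   TARGET may be used while expanding the first operand without being
   clobbered by the expansion of the second one; when it returns 0 the
   caller simply avoids reusing TARGET.  */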
7434
7435 \f
7436 /* Return the highest power of two that EXP is known to be a multiple of.
7437 This is used in updating alignment of MEMs in array references. */
7438
7439 unsigned HOST_WIDE_INT
7440 highest_pow2_factor (const_tree exp)
7441 {
7442 unsigned HOST_WIDE_INT ret;
7443 int trailing_zeros = tree_ctz (exp);
7444 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7445 return BIGGEST_ALIGNMENT;
7446 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7447 if (ret > BIGGEST_ALIGNMENT)
7448 return BIGGEST_ALIGNMENT;
7449 return ret;
7450 }
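
/* E.g. for an INTEGER_CST of 24, tree_ctz reports 3 trailing zero bits
   and the result is 8; for a value about which nothing is known the
   result is 1.  The result is capped at BIGGEST_ALIGNMENT in all cases.  */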
7451
7452 /* Similar, except that the alignment requirements of TARGET are
7453 taken into account. Assume it is at least as aligned as its
7454 type, unless it is a COMPONENT_REF in which case the layout of
7455 the structure gives the alignment. */
7456
7457 static unsigned HOST_WIDE_INT
7458 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7459 {
7460 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7461 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7462
7463 return MAX (factor, talign);
7464 }
7465 \f
7466 /* Convert the tree comparison code TCODE to the rtl one where the
7467 signedness is UNSIGNEDP. */
7468
7469 static enum rtx_code
7470 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7471 {
7472 enum rtx_code code;
7473 switch (tcode)
7474 {
7475 case EQ_EXPR:
7476 code = EQ;
7477 break;
7478 case NE_EXPR:
7479 code = NE;
7480 break;
7481 case LT_EXPR:
7482 code = unsignedp ? LTU : LT;
7483 break;
7484 case LE_EXPR:
7485 code = unsignedp ? LEU : LE;
7486 break;
7487 case GT_EXPR:
7488 code = unsignedp ? GTU : GT;
7489 break;
7490 case GE_EXPR:
7491 code = unsignedp ? GEU : GE;
7492 break;
7493 case UNORDERED_EXPR:
7494 code = UNORDERED;
7495 break;
7496 case ORDERED_EXPR:
7497 code = ORDERED;
7498 break;
7499 case UNLT_EXPR:
7500 code = UNLT;
7501 break;
7502 case UNLE_EXPR:
7503 code = UNLE;
7504 break;
7505 case UNGT_EXPR:
7506 code = UNGT;
7507 break;
7508 case UNGE_EXPR:
7509 code = UNGE;
7510 break;
7511 case UNEQ_EXPR:
7512 code = UNEQ;
7513 break;
7514 case LTGT_EXPR:
7515 code = LTGT;
7516 break;
7517
7518 default:
7519 gcc_unreachable ();
7520 }
7521 return code;
7522 }
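
/* For instance, LT_EXPR maps to LTU when UNSIGNEDP is nonzero and to LT
   otherwise, while the UN* tree codes for possibly-unordered
   floating-point comparisons map directly to their rtx counterparts.
   This is used below when turning a COND_EXPR into a conditional move.  */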
7523
7524 /* Subroutine of expand_expr. Expand the two operands of a binary
7525 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7526 The value may be stored in TARGET if TARGET is nonzero. The
7527 MODIFIER argument is as documented by expand_expr. */
7528
7529 void
7530 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7531 enum expand_modifier modifier)
7532 {
7533 if (! safe_from_p (target, exp1, 1))
7534 target = 0;
7535 if (operand_equal_p (exp0, exp1, 0))
7536 {
7537 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7538 *op1 = copy_rtx (*op0);
7539 }
7540 else
7541 {
7542 /* If we need to preserve evaluation order, copy exp0 into its own
7543 temporary variable so that it can't be clobbered by exp1. */
7544 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7545 exp0 = save_expr (exp0);
7546 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7547 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7548 }
7549 }
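
/* A typical call, as in the binary cases of expand_expr_real_2 below:

     expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);

   leaves the two expanded operands in OP0 and OP1 and reuses SUBTARGET
   only when it is safe from TREEOP1.  */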
7550
7551 \f
7552 /* Return a MEM that contains constant EXP. DEFER is as for
7553 output_constant_def and MODIFIER is as for expand_expr. */
7554
7555 static rtx
7556 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7557 {
7558 rtx mem;
7559
7560 mem = output_constant_def (exp, defer);
7561 if (modifier != EXPAND_INITIALIZER)
7562 mem = use_anchored_address (mem);
7563 return mem;
7564 }
7565
7566 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7567 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7568
7569 static rtx
7570 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7571 enum expand_modifier modifier, addr_space_t as)
7572 {
7573 rtx result, subtarget;
7574 tree inner, offset;
7575 HOST_WIDE_INT bitsize, bitpos;
7576 int unsignedp, reversep, volatilep = 0;
7577 machine_mode mode1;
7578
7579 /* If we are taking the address of a constant and are at the top level,
7580 we have to use output_constant_def since we can't call force_const_mem
7581 at top level. */
7582 /* ??? This should be considered a front-end bug. We should not be
7583 generating ADDR_EXPR of something that isn't an LVALUE. The only
7584 exception here is STRING_CST. */
7585 if (CONSTANT_CLASS_P (exp))
7586 {
7587 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7588 if (modifier < EXPAND_SUM)
7589 result = force_operand (result, target);
7590 return result;
7591 }
7592
7593 /* Everything must be something allowed by is_gimple_addressable. */
7594 switch (TREE_CODE (exp))
7595 {
7596 case INDIRECT_REF:
7597 /* This case will happen via recursion for &a->b. */
7598 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7599
7600 case MEM_REF:
7601 {
7602 tree tem = TREE_OPERAND (exp, 0);
7603 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7604 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7605 return expand_expr (tem, target, tmode, modifier);
7606 }
7607
7608 case CONST_DECL:
7609 /* Expand the initializer like constants above. */
7610 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7611 0, modifier), 0);
7612 if (modifier < EXPAND_SUM)
7613 result = force_operand (result, target);
7614 return result;
7615
7616 case REALPART_EXPR:
7617 /* The real part of the complex number is always first, therefore
7618 the address is the same as the address of the parent object. */
7619 offset = 0;
7620 bitpos = 0;
7621 inner = TREE_OPERAND (exp, 0);
7622 break;
7623
7624 case IMAGPART_EXPR:
7625 /* The imaginary part of the complex number is always second.
7626 The expression is therefore always offset by the size of the
7627 scalar type. */
7628 offset = 0;
7629 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7630 inner = TREE_OPERAND (exp, 0);
7631 break;
7632
7633 case COMPOUND_LITERAL_EXPR:
7634 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7635 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7636 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7637 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7638 the initializers aren't gimplified. */
7639 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7640 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7641 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7642 target, tmode, modifier, as);
7643 /* FALLTHRU */
7644 default:
7645 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7646 	 expand_expr, as that can have various side effects; LABEL_DECLs, for
7647 example, may not have their DECL_RTL set yet. Expand the rtl of
7648 CONSTRUCTORs too, which should yield a memory reference for the
7649 constructor's contents. Assume language specific tree nodes can
7650 be expanded in some interesting way. */
7651 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7652 if (DECL_P (exp)
7653 || TREE_CODE (exp) == CONSTRUCTOR
7654 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7655 {
7656 result = expand_expr (exp, target, tmode,
7657 modifier == EXPAND_INITIALIZER
7658 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7659
7660 /* If the DECL isn't in memory, then the DECL wasn't properly
7661 marked TREE_ADDRESSABLE, which will be either a front-end
7662 or a tree optimizer bug. */
7663
7664 gcc_assert (MEM_P (result));
7665 result = XEXP (result, 0);
7666
7667 /* ??? Is this needed anymore? */
7668 if (DECL_P (exp))
7669 TREE_USED (exp) = 1;
7670
7671 if (modifier != EXPAND_INITIALIZER
7672 && modifier != EXPAND_CONST_ADDRESS
7673 && modifier != EXPAND_SUM)
7674 result = force_operand (result, target);
7675 return result;
7676 }
7677
7678 /* Pass FALSE as the last argument to get_inner_reference although
7679 we are expanding to RTL. The rationale is that we know how to
7680 handle "aligning nodes" here: we can just bypass them because
7681 they won't change the final object whose address will be returned
7682 (they actually exist only for that purpose). */
7683 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
7684 &unsignedp, &reversep, &volatilep, false);
7685 break;
7686 }
7687
7688 /* We must have made progress. */
7689 gcc_assert (inner != exp);
7690
7691 subtarget = offset || bitpos ? NULL_RTX : target;
7692 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7693 inner alignment, force the inner to be sufficiently aligned. */
7694 if (CONSTANT_CLASS_P (inner)
7695 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7696 {
7697 inner = copy_node (inner);
7698 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7699 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7700 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7701 }
7702 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7703
7704 if (offset)
7705 {
7706 rtx tmp;
7707
7708 if (modifier != EXPAND_NORMAL)
7709 result = force_operand (result, NULL);
7710 tmp = expand_expr (offset, NULL_RTX, tmode,
7711 modifier == EXPAND_INITIALIZER
7712 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7713
7714 /* expand_expr is allowed to return an object in a mode other
7715 than TMODE. If it did, we need to convert. */
7716 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7717 tmp = convert_modes (tmode, GET_MODE (tmp),
7718 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7719 result = convert_memory_address_addr_space (tmode, result, as);
7720 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7721
7722 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7723 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7724 else
7725 {
7726 subtarget = bitpos ? NULL_RTX : target;
7727 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7728 1, OPTAB_LIB_WIDEN);
7729 }
7730 }
7731
7732 if (bitpos)
7733 {
7734 /* Someone beforehand should have rejected taking the address
7735 of such an object. */
7736 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7737
7738 result = convert_memory_address_addr_space (tmode, result, as);
7739 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7740 if (modifier < EXPAND_SUM)
7741 result = force_operand (result, target);
7742 }
7743
7744 return result;
7745 }
7746
7747 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7748 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7749
7750 static rtx
7751 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7752 enum expand_modifier modifier)
7753 {
7754 addr_space_t as = ADDR_SPACE_GENERIC;
7755 machine_mode address_mode = Pmode;
7756 machine_mode pointer_mode = ptr_mode;
7757 machine_mode rmode;
7758 rtx result;
7759
7760 /* Target mode of VOIDmode says "whatever's natural". */
7761 if (tmode == VOIDmode)
7762 tmode = TYPE_MODE (TREE_TYPE (exp));
7763
7764 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7765 {
7766 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7767 address_mode = targetm.addr_space.address_mode (as);
7768 pointer_mode = targetm.addr_space.pointer_mode (as);
7769 }
7770
7771 /* We can get called with some Weird Things if the user does silliness
7772 like "(short) &a". In that case, convert_memory_address won't do
7773 the right thing, so ignore the given target mode. */
7774 if (tmode != address_mode && tmode != pointer_mode)
7775 tmode = address_mode;
7776
7777 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7778 tmode, modifier, as);
7779
7780   /* Despite expand_expr's claims about ignoring TMODE when not
7781      strictly convenient, things break if we don't honor it.  Note
7782 that combined with the above, we only do this for pointer modes. */
7783 rmode = GET_MODE (result);
7784 if (rmode == VOIDmode)
7785 rmode = tmode;
7786 if (rmode != tmode)
7787 result = convert_memory_address_addr_space (tmode, result, as);
7788
7789 return result;
7790 }
7791
7792 /* Generate code for computing CONSTRUCTOR EXP.
7793 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7794    is TRUE, instead of creating a temporary variable in memory,
7795 NULL is returned and the caller needs to handle it differently. */
7796
7797 static rtx
7798 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7799 bool avoid_temp_mem)
7800 {
7801 tree type = TREE_TYPE (exp);
7802 machine_mode mode = TYPE_MODE (type);
7803
7804 /* Try to avoid creating a temporary at all. This is possible
7805 if all of the initializer is zero.
7806 FIXME: try to handle all [0..255] initializers we can handle
7807 with memset. */
7808 if (TREE_STATIC (exp)
7809 && !TREE_ADDRESSABLE (exp)
7810 && target != 0 && mode == BLKmode
7811 && all_zeros_p (exp))
7812 {
7813 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7814 return target;
7815 }
7816
7817 /* All elts simple constants => refer to a constant in memory. But
7818 if this is a non-BLKmode mode, let it store a field at a time
7819 since that should make a CONST_INT, CONST_WIDE_INT or
7820 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7821 use, it is best to store directly into the target unless the type
7822 is large enough that memcpy will be used. If we are making an
7823 initializer and all operands are constant, put it in memory as
7824 well.
7825
7826 FIXME: Avoid trying to fill vector constructors piece-meal.
7827 Output them with output_constant_def below unless we're sure
7828 they're zeros. This should go away when vector initializers
7829 are treated like VECTOR_CST instead of arrays. */
7830 if ((TREE_STATIC (exp)
7831 && ((mode == BLKmode
7832 && ! (target != 0 && safe_from_p (target, exp, 1)))
7833 || TREE_ADDRESSABLE (exp)
7834 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7835 && (! can_move_by_pieces
7836 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7837 TYPE_ALIGN (type)))
7838 && ! mostly_zeros_p (exp))))
7839 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7840 && TREE_CONSTANT (exp)))
7841 {
7842 rtx constructor;
7843
7844 if (avoid_temp_mem)
7845 return NULL_RTX;
7846
7847 constructor = expand_expr_constant (exp, 1, modifier);
7848
7849 if (modifier != EXPAND_CONST_ADDRESS
7850 && modifier != EXPAND_INITIALIZER
7851 && modifier != EXPAND_SUM)
7852 constructor = validize_mem (constructor);
7853
7854 return constructor;
7855 }
7856
7857 /* Handle calls that pass values in multiple non-contiguous
7858 locations. The Irix 6 ABI has examples of this. */
7859 if (target == 0 || ! safe_from_p (target, exp, 1)
7860 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7861 {
7862 if (avoid_temp_mem)
7863 return NULL_RTX;
7864
7865 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7866 }
7867
7868 store_constructor (exp, target, 0, int_expr_size (exp), false);
7869 return target;
7870 }
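
/* For example, a static aggregate initializer that is entirely zero and
   has a BLKmode target is expanded above as a single clear_storage call
   rather than being forced into the constant pool.  */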
7871
7872
7873 /* expand_expr: generate code for computing expression EXP.
7874 An rtx for the computed value is returned. The value is never null.
7875 In the case of a void EXP, const0_rtx is returned.
7876
7877 The value may be stored in TARGET if TARGET is nonzero.
7878 TARGET is just a suggestion; callers must assume that
7879 the rtx returned may not be the same as TARGET.
7880
7881 If TARGET is CONST0_RTX, it means that the value will be ignored.
7882
7883 If TMODE is not VOIDmode, it suggests generating the
7884 result in mode TMODE. But this is done only when convenient.
7885    Otherwise, TMODE is ignored and the value is generated in its natural mode.
7886 TMODE is just a suggestion; callers must assume that
7887 the rtx returned may not have mode TMODE.
7888
7889 Note that TARGET may have neither TMODE nor MODE. In that case, it
7890 probably will not be used.
7891
7892 If MODIFIER is EXPAND_SUM then when EXP is an addition
7893 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7894 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7895 products as above, or REG or MEM, or constant.
7896 Ordinarily in such cases we would output mul or add instructions
7897 and then return a pseudo reg containing the sum.
7898
7899 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7900 it also marks a label as absolutely required (it can't be dead).
7901 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7902 This is used for outputting expressions used in initializers.
7903
7904 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7905 with a constant address even if that address is not normally legitimate.
7906 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7907
7908 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7909 a call parameter. Such targets require special care as we haven't yet
7910 marked TARGET so that it's safe from being trashed by libcalls. We
7911 don't want to use TARGET for anything but the final result;
7912    intermediate values must go elsewhere.  Additionally, calls to
7913 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7914
7915 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7916 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7917 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7918 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7919 recursively.
7920
7921 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7922 In this case, we don't adjust a returned MEM rtx that wouldn't be
7923 sufficiently aligned for its mode; instead, it's up to the caller
7924 to deal with it afterwards. This is used to make sure that unaligned
7925 base objects for which out-of-bounds accesses are supported, for
7926 example record types with trailing arrays, aren't realigned behind
7927 the back of the caller.
7928 The normal operating mode is to pass FALSE for this parameter. */
7929
7930 rtx
7931 expand_expr_real (tree exp, rtx target, machine_mode tmode,
7932 enum expand_modifier modifier, rtx *alt_rtl,
7933 bool inner_reference_p)
7934 {
7935 rtx ret;
7936
7937 /* Handle ERROR_MARK before anybody tries to access its type. */
7938 if (TREE_CODE (exp) == ERROR_MARK
7939 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7940 {
7941 ret = CONST0_RTX (tmode);
7942 return ret ? ret : const0_rtx;
7943 }
7944
7945 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7946 inner_reference_p);
7947 return ret;
7948 }
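
/* To illustrate the EXPAND_SUM modifier described above: expanding an
   address computation such as &arr[i] may yield a nest of PLUS and MULT
   rtxes, e.g. (plus (mult (reg) (const_int 4)) (symbol_ref "arr")),
   instead of forcing the sum into a pseudo register, so that the caller
   can fold it directly into an address.  */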
7949
7950 /* Try to expand the conditional expression which is represented by
7951    TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds,
7952 return the rtl reg which represents the result. Otherwise return
7953 NULL_RTX. */
7954
7955 static rtx
7956 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7957 tree treeop1 ATTRIBUTE_UNUSED,
7958 tree treeop2 ATTRIBUTE_UNUSED)
7959 {
7960 rtx insn;
7961 rtx op00, op01, op1, op2;
7962 enum rtx_code comparison_code;
7963 machine_mode comparison_mode;
7964 gimple *srcstmt;
7965 rtx temp;
7966 tree type = TREE_TYPE (treeop1);
7967 int unsignedp = TYPE_UNSIGNED (type);
7968 machine_mode mode = TYPE_MODE (type);
7969 machine_mode orig_mode = mode;
7970
7971 /* If we cannot do a conditional move on the mode, try doing it
7972 with the promoted mode. */
7973 if (!can_conditionally_move_p (mode))
7974 {
7975 mode = promote_mode (type, mode, &unsignedp);
7976 if (!can_conditionally_move_p (mode))
7977 return NULL_RTX;
7978 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7979 }
7980 else
7981 temp = assign_temp (type, 0, 1);
7982
7983 start_sequence ();
7984 expand_operands (treeop1, treeop2,
7985 temp, &op1, &op2, EXPAND_NORMAL);
7986
7987 if (TREE_CODE (treeop0) == SSA_NAME
7988 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7989 {
7990 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7991 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7992 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7993 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7994 comparison_mode = TYPE_MODE (type);
7995 unsignedp = TYPE_UNSIGNED (type);
7996 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7997 }
7998 else if (COMPARISON_CLASS_P (treeop0))
7999 {
8000 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8001 enum tree_code cmpcode = TREE_CODE (treeop0);
8002 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8003 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8004 unsignedp = TYPE_UNSIGNED (type);
8005 comparison_mode = TYPE_MODE (type);
8006 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8007 }
8008 else
8009 {
8010 op00 = expand_normal (treeop0);
8011 op01 = const0_rtx;
8012 comparison_code = NE;
8013 comparison_mode = GET_MODE (op00);
8014 if (comparison_mode == VOIDmode)
8015 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8016 }
8017
8018 if (GET_MODE (op1) != mode)
8019 op1 = gen_lowpart (mode, op1);
8020
8021 if (GET_MODE (op2) != mode)
8022 op2 = gen_lowpart (mode, op2);
8023
8024 /* Try to emit the conditional move. */
8025 insn = emit_conditional_move (temp, comparison_code,
8026 op00, op01, comparison_mode,
8027 op1, op2, mode,
8028 unsignedp);
8029
8030 /* If we could do the conditional move, emit the sequence,
8031 and return. */
8032 if (insn)
8033 {
8034 rtx_insn *seq = get_insns ();
8035 end_sequence ();
8036 emit_insn (seq);
8037 return convert_modes (orig_mode, mode, temp, 0);
8038 }
8039
8040 /* Otherwise discard the sequence and fall back to code with
8041 branches. */
8042 end_sequence ();
8043 return NULL_RTX;
8044 }
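
/* E.g. on a target with conditional moves, x = a < b ? c : d is expanded
   by the function above into a compare followed by a single conditional
   move, avoiding a branch; when no cmove pattern exists for the mode,
   NULL_RTX is returned and the branching code path is used instead.  */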
8045
8046 rtx
8047 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8048 enum expand_modifier modifier)
8049 {
8050 rtx op0, op1, op2, temp;
8051 rtx_code_label *lab;
8052 tree type;
8053 int unsignedp;
8054 machine_mode mode;
8055 enum tree_code code = ops->code;
8056 optab this_optab;
8057 rtx subtarget, original_target;
8058 int ignore;
8059 bool reduce_bit_field;
8060 location_t loc = ops->location;
8061 tree treeop0, treeop1, treeop2;
8062 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8063 ? reduce_to_bit_field_precision ((expr), \
8064 target, \
8065 type) \
8066 : (expr))
8067
8068 type = ops->type;
8069 mode = TYPE_MODE (type);
8070 unsignedp = TYPE_UNSIGNED (type);
8071
8072 treeop0 = ops->op0;
8073 treeop1 = ops->op1;
8074 treeop2 = ops->op2;
8075
8076 /* We should be called only on simple (binary or unary) expressions,
8077 exactly those that are valid in gimple expressions that aren't
8078 GIMPLE_SINGLE_RHS (or invalid). */
8079 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8080 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8081 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8082
8083 ignore = (target == const0_rtx
8084 || ((CONVERT_EXPR_CODE_P (code)
8085 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8086 && TREE_CODE (type) == VOID_TYPE));
8087
8088 /* We should be called only if we need the result. */
8089 gcc_assert (!ignore);
8090
8091 /* An operation in what may be a bit-field type needs the
8092 result to be reduced to the precision of the bit-field type,
8093 which is narrower than that of the type's mode. */
8094 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8095 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8096
8097 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8098 target = 0;
8099
8100 /* Use subtarget as the target for operand 0 of a binary operation. */
8101 subtarget = get_subtarget (target);
8102 original_target = target;
8103
8104 switch (code)
8105 {
8106 case NON_LVALUE_EXPR:
8107 case PAREN_EXPR:
8108 CASE_CONVERT:
8109 if (treeop0 == error_mark_node)
8110 return const0_rtx;
8111
8112 if (TREE_CODE (type) == UNION_TYPE)
8113 {
8114 tree valtype = TREE_TYPE (treeop0);
8115
8116 /* If both input and output are BLKmode, this conversion isn't doing
8117 	     anything except possibly changing memory attributes.  */
8118 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8119 {
8120 rtx result = expand_expr (treeop0, target, tmode,
8121 modifier);
8122
8123 result = copy_rtx (result);
8124 set_mem_attributes (result, type, 0);
8125 return result;
8126 }
8127
8128 if (target == 0)
8129 {
8130 if (TYPE_MODE (type) != BLKmode)
8131 target = gen_reg_rtx (TYPE_MODE (type));
8132 else
8133 target = assign_temp (type, 1, 1);
8134 }
8135
8136 if (MEM_P (target))
8137 /* Store data into beginning of memory target. */
8138 store_expr (treeop0,
8139 adjust_address (target, TYPE_MODE (valtype), 0),
8140 modifier == EXPAND_STACK_PARM,
8141 false, TYPE_REVERSE_STORAGE_ORDER (type));
8142
8143 else
8144 {
8145 gcc_assert (REG_P (target)
8146 && !TYPE_REVERSE_STORAGE_ORDER (type));
8147
8148 /* Store this field into a union of the proper type. */
8149 store_field (target,
8150 MIN ((int_size_in_bytes (TREE_TYPE
8151 (treeop0))
8152 * BITS_PER_UNIT),
8153 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8154 0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
8155 false, false);
8156 }
8157
8158 /* Return the entire union. */
8159 return target;
8160 }
8161
8162 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8163 {
8164 op0 = expand_expr (treeop0, target, VOIDmode,
8165 modifier);
8166
8167 /* If the signedness of the conversion differs and OP0 is
8168 a promoted SUBREG, clear that indication since we now
8169 have to do the proper extension. */
8170 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8171 && GET_CODE (op0) == SUBREG)
8172 SUBREG_PROMOTED_VAR_P (op0) = 0;
8173
8174 return REDUCE_BIT_FIELD (op0);
8175 }
8176
8177 op0 = expand_expr (treeop0, NULL_RTX, mode,
8178 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8179 if (GET_MODE (op0) == mode)
8180 ;
8181
8182 /* If OP0 is a constant, just convert it into the proper mode. */
8183 else if (CONSTANT_P (op0))
8184 {
8185 tree inner_type = TREE_TYPE (treeop0);
8186 machine_mode inner_mode = GET_MODE (op0);
8187
8188 if (inner_mode == VOIDmode)
8189 inner_mode = TYPE_MODE (inner_type);
8190
8191 if (modifier == EXPAND_INITIALIZER)
8192 op0 = lowpart_subreg (mode, op0, inner_mode);
8193 else
8194 	    op0 = convert_modes (mode, inner_mode, op0,
8195 TYPE_UNSIGNED (inner_type));
8196 }
8197
8198 else if (modifier == EXPAND_INITIALIZER)
8199 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8200
8201 else if (target == 0)
8202 op0 = convert_to_mode (mode, op0,
8203 TYPE_UNSIGNED (TREE_TYPE
8204 (treeop0)));
8205 else
8206 {
8207 convert_move (target, op0,
8208 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8209 op0 = target;
8210 }
8211
8212 return REDUCE_BIT_FIELD (op0);
8213
8214 case ADDR_SPACE_CONVERT_EXPR:
8215 {
8216 tree treeop0_type = TREE_TYPE (treeop0);
8217
8218 gcc_assert (POINTER_TYPE_P (type));
8219 gcc_assert (POINTER_TYPE_P (treeop0_type));
8220
8221 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8222 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8223
8224 /* Conversions between pointers to the same address space should
8225 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8226 gcc_assert (as_to != as_from);
8227
8228 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8229
8230 /* Ask target code to handle conversion between pointers
8231 to overlapping address spaces. */
8232 if (targetm.addr_space.subset_p (as_to, as_from)
8233 || targetm.addr_space.subset_p (as_from, as_to))
8234 {
8235 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8236 }
8237 else
8238 {
8239 /* For disjoint address spaces, converting anything but a null
8240 pointer invokes undefined behaviour. We truncate or extend the
8241 value as if we'd converted via integers, which handles 0 as
8242 required, and all others as the programmer likely expects. */
8243 #ifndef POINTERS_EXTEND_UNSIGNED
8244 const int POINTERS_EXTEND_UNSIGNED = 1;
8245 #endif
8246 op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
8247 op0, POINTERS_EXTEND_UNSIGNED);
8248 }
8249 gcc_assert (op0);
8250 return op0;
8251 }
8252
8253 case POINTER_PLUS_EXPR:
8254       /* Even though the sizetype mode and the pointer's mode can be different,
8255 expand is able to handle this correctly and get the correct result out
8256 of the PLUS_EXPR code. */
8257 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8258 if sizetype precision is smaller than pointer precision. */
8259 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8260 treeop1 = fold_convert_loc (loc, type,
8261 fold_convert_loc (loc, ssizetype,
8262 treeop1));
8263 /* If sizetype precision is larger than pointer precision, truncate the
8264 offset to have matching modes. */
8265 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8266 treeop1 = fold_convert_loc (loc, type, treeop1);
8267
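      /* FALLTHRU */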
8268 case PLUS_EXPR:
8269 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8270 something else, make sure we add the register to the constant and
8271 then to the other thing. This case can occur during strength
8272 reduction and doing it this way will produce better code if the
8273 frame pointer or argument pointer is eliminated.
8274
8275 fold-const.c will ensure that the constant is always in the inner
8276 PLUS_EXPR, so the only case we need to do anything about is if
8277 sp, ap, or fp is our second argument, in which case we must swap
8278 the innermost first argument and our second argument. */
8279
8280 if (TREE_CODE (treeop0) == PLUS_EXPR
8281 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8282 && TREE_CODE (treeop1) == VAR_DECL
8283 && (DECL_RTL (treeop1) == frame_pointer_rtx
8284 || DECL_RTL (treeop1) == stack_pointer_rtx
8285 || DECL_RTL (treeop1) == arg_pointer_rtx))
8286 {
8287 gcc_unreachable ();
8288 }
8289
8290 /* If the result is to be ptr_mode and we are adding an integer to
8291 something, we might be forming a constant. So try to use
8292 plus_constant. If it produces a sum and we can't accept it,
8293 use force_operand. This allows P = &ARR[const] to generate
8294 efficient code on machines where a SYMBOL_REF is not a valid
8295 address.
8296
8297 If this is an EXPAND_SUM call, always return the sum. */
8298 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8299 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8300 {
8301 if (modifier == EXPAND_STACK_PARM)
8302 target = 0;
8303 if (TREE_CODE (treeop0) == INTEGER_CST
8304 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8305 && TREE_CONSTANT (treeop1))
8306 {
8307 rtx constant_part;
8308 HOST_WIDE_INT wc;
8309 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8310
8311 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8312 EXPAND_SUM);
8313 /* Use wi::shwi to ensure that the constant is
8314 truncated according to the mode of OP1, then sign extended
8315 to a HOST_WIDE_INT. Using the constant directly can result
8316 in non-canonical RTL in a 64x32 cross compile. */
8317 wc = TREE_INT_CST_LOW (treeop0);
8318 constant_part =
8319 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8320 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8321 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8322 op1 = force_operand (op1, target);
8323 return REDUCE_BIT_FIELD (op1);
8324 }
8325
8326 else if (TREE_CODE (treeop1) == INTEGER_CST
8327 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8328 && TREE_CONSTANT (treeop0))
8329 {
8330 rtx constant_part;
8331 HOST_WIDE_INT wc;
8332 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8333
8334 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8335 (modifier == EXPAND_INITIALIZER
8336 ? EXPAND_INITIALIZER : EXPAND_SUM));
8337 if (! CONSTANT_P (op0))
8338 {
8339 op1 = expand_expr (treeop1, NULL_RTX,
8340 VOIDmode, modifier);
8341 /* Return a PLUS if modifier says it's OK. */
8342 if (modifier == EXPAND_SUM
8343 || modifier == EXPAND_INITIALIZER)
8344 return simplify_gen_binary (PLUS, mode, op0, op1);
8345 goto binop2;
8346 }
8347 /* Use wi::shwi to ensure that the constant is
8348 truncated according to the mode of OP1, then sign extended
8349 to a HOST_WIDE_INT. Using the constant directly can result
8350 in non-canonical RTL in a 64x32 cross compile. */
8351 wc = TREE_INT_CST_LOW (treeop1);
8352 constant_part
8353 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8354 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8355 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8356 op0 = force_operand (op0, target);
8357 return REDUCE_BIT_FIELD (op0);
8358 }
8359 }
8360
8361 /* Use TER to expand pointer addition of a negated value
8362 as pointer subtraction. */
8363 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8364 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8365 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8366 && TREE_CODE (treeop1) == SSA_NAME
8367 && TYPE_MODE (TREE_TYPE (treeop0))
8368 == TYPE_MODE (TREE_TYPE (treeop1)))
8369 {
8370 gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
8371 if (def)
8372 {
8373 treeop1 = gimple_assign_rhs1 (def);
8374 code = MINUS_EXPR;
8375 goto do_minus;
8376 }
8377 }
8378
8379 /* No sense saving up arithmetic to be done
8380 if it's all in the wrong mode to form part of an address.
8381 And force_operand won't know whether to sign-extend or
8382 zero-extend. */
8383 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8384 || mode != ptr_mode)
8385 {
8386 expand_operands (treeop0, treeop1,
8387 subtarget, &op0, &op1, EXPAND_NORMAL);
8388 if (op0 == const0_rtx)
8389 return op1;
8390 if (op1 == const0_rtx)
8391 return op0;
8392 goto binop2;
8393 }
8394
8395 expand_operands (treeop0, treeop1,
8396 subtarget, &op0, &op1, modifier);
8397 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8398
8399 case MINUS_EXPR:
8400 do_minus:
8401 /* For initializers, we are allowed to return a MINUS of two
8402 symbolic constants. Here we handle all cases when both operands
8403 are constant. */
8404 /* Handle difference of two symbolic constants,
8405 for the sake of an initializer. */
8406 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8407 && really_constant_p (treeop0)
8408 && really_constant_p (treeop1))
8409 {
8410 expand_operands (treeop0, treeop1,
8411 NULL_RTX, &op0, &op1, modifier);
8412
8413 /* If the last operand is a CONST_INT, use plus_constant of
8414 the negated constant. Else make the MINUS. */
8415 if (CONST_INT_P (op1))
8416 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8417 -INTVAL (op1)));
8418 else
8419 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8420 }
8421
8422 /* No sense saving up arithmetic to be done
8423 if it's all in the wrong mode to form part of an address.
8424 And force_operand won't know whether to sign-extend or
8425 zero-extend. */
8426 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8427 || mode != ptr_mode)
8428 goto binop;
8429
8430 expand_operands (treeop0, treeop1,
8431 subtarget, &op0, &op1, modifier);
8432
8433 /* Convert A - const to A + (-const). */
8434 if (CONST_INT_P (op1))
8435 {
8436 op1 = negate_rtx (mode, op1);
8437 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8438 }
8439
8440 goto binop2;
8441
8442 case WIDEN_MULT_PLUS_EXPR:
8443 case WIDEN_MULT_MINUS_EXPR:
8444 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8445 op2 = expand_normal (treeop2);
8446 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8447 target, unsignedp);
8448 return target;
8449
8450 case WIDEN_MULT_EXPR:
8451 /* If first operand is constant, swap them.
8452 Thus the following special case checks need only
8453 check the second operand. */
8454 if (TREE_CODE (treeop0) == INTEGER_CST)
8455 std::swap (treeop0, treeop1);
8456
8457 /* First, check if we have a multiplication of one signed and one
8458 unsigned operand. */
8459 if (TREE_CODE (treeop1) != INTEGER_CST
8460 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8461 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8462 {
8463 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8464 this_optab = usmul_widen_optab;
8465 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8466 != CODE_FOR_nothing)
8467 {
8468 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8469 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8470 EXPAND_NORMAL);
8471 else
8472 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8473 EXPAND_NORMAL);
8474 /* op0 and op1 might still be constant, despite the above
8475 != INTEGER_CST check. Handle it. */
8476 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8477 {
8478 op0 = convert_modes (innermode, mode, op0, true);
8479 op1 = convert_modes (innermode, mode, op1, false);
8480 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8481 target, unsignedp));
8482 }
8483 goto binop3;
8484 }
8485 }
8486 /* Check for a multiplication with matching signedness. */
8487 else if ((TREE_CODE (treeop1) == INTEGER_CST
8488 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8489 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8490 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8491 {
8492 tree op0type = TREE_TYPE (treeop0);
8493 machine_mode innermode = TYPE_MODE (op0type);
8494 bool zextend_p = TYPE_UNSIGNED (op0type);
8495 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8496 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8497
8498 if (TREE_CODE (treeop0) != INTEGER_CST)
8499 {
8500 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8501 != CODE_FOR_nothing)
8502 {
8503 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8504 EXPAND_NORMAL);
8505 /* op0 and op1 might still be constant, despite the above
8506 != INTEGER_CST check. Handle it. */
8507 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8508 {
8509 widen_mult_const:
8510 op0 = convert_modes (innermode, mode, op0, zextend_p);
8511 op1
8512 = convert_modes (innermode, mode, op1,
8513 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8514 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8515 target,
8516 unsignedp));
8517 }
8518 temp = expand_widening_mult (mode, op0, op1, target,
8519 unsignedp, this_optab);
8520 return REDUCE_BIT_FIELD (temp);
8521 }
8522 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8523 != CODE_FOR_nothing
8524 && innermode == word_mode)
8525 {
8526 rtx htem, hipart;
8527 op0 = expand_normal (treeop0);
8528 if (TREE_CODE (treeop1) == INTEGER_CST)
8529 op1 = convert_modes (innermode, mode,
8530 expand_normal (treeop1),
8531 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8532 else
8533 op1 = expand_normal (treeop1);
8534 /* op0 and op1 might still be constant, despite the above
8535 != INTEGER_CST check. Handle it. */
8536 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8537 goto widen_mult_const;
8538 temp = expand_binop (mode, other_optab, op0, op1, target,
8539 unsignedp, OPTAB_LIB_WIDEN);
8540 hipart = gen_highpart (innermode, temp);
8541 htem = expand_mult_highpart_adjust (innermode, hipart,
8542 op0, op1, hipart,
8543 zextend_p);
8544 if (htem != hipart)
8545 emit_move_insn (hipart, htem);
8546 return REDUCE_BIT_FIELD (temp);
8547 }
8548 }
8549 }
8550 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8551 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8552 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8553 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8554
8555 case FMA_EXPR:
8556 {
8557 optab opt = fma_optab;
8558 gimple *def0, *def2;
8559
8560 	/* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
8561 call. */
8562 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8563 {
8564 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8565 tree call_expr;
8566
8567 gcc_assert (fn != NULL_TREE);
8568 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8569 return expand_builtin (call_expr, target, subtarget, mode, false);
8570 }
8571
8572 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8573 /* The multiplication is commutative - look at its 2nd operand
8574 if the first isn't fed by a negate. */
8575 if (!def0)
8576 {
8577 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8578 /* Swap operands if the 2nd operand is fed by a negate. */
8579 if (def0)
8580 std::swap (treeop0, treeop1);
8581 }
8582 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8583
8584 op0 = op2 = NULL;
8585
8586 if (def0 && def2
8587 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8588 {
8589 opt = fnms_optab;
8590 op0 = expand_normal (gimple_assign_rhs1 (def0));
8591 op2 = expand_normal (gimple_assign_rhs1 (def2));
8592 }
8593 else if (def0
8594 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8595 {
8596 opt = fnma_optab;
8597 op0 = expand_normal (gimple_assign_rhs1 (def0));
8598 }
8599 else if (def2
8600 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8601 {
8602 opt = fms_optab;
8603 op2 = expand_normal (gimple_assign_rhs1 (def2));
8604 }
8605
8606 if (op0 == NULL)
8607 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8608 if (op2 == NULL)
8609 op2 = expand_normal (treeop2);
8610 op1 = expand_normal (treeop1);
8611
8612 return expand_ternary_op (TYPE_MODE (type), opt,
8613 op0, op1, op2, target, 0);
8614 }
8615
8616 case MULT_EXPR:
8617 /* If this is a fixed-point operation, then we cannot use the code
8618 below because "expand_mult" doesn't support sat/no-sat fixed-point
8619 multiplications. */
8620 if (ALL_FIXED_POINT_MODE_P (mode))
8621 goto binop;
8622
8623 /* If first operand is constant, swap them.
8624 Thus the following special case checks need only
8625 check the second operand. */
8626 if (TREE_CODE (treeop0) == INTEGER_CST)
8627 std::swap (treeop0, treeop1);
8628
8629 /* Attempt to return something suitable for generating an
8630 indexed address, for machines that support that. */
8631
8632 if (modifier == EXPAND_SUM && mode == ptr_mode
8633 && tree_fits_shwi_p (treeop1))
8634 {
8635 tree exp1 = treeop1;
8636
8637 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8638 EXPAND_SUM);
8639
8640 if (!REG_P (op0))
8641 op0 = force_operand (op0, NULL_RTX);
8642 if (!REG_P (op0))
8643 op0 = copy_to_mode_reg (mode, op0);
8644
8645 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8646 gen_int_mode (tree_to_shwi (exp1),
8647 TYPE_MODE (TREE_TYPE (exp1)))));
8648 }
8649
8650 if (modifier == EXPAND_STACK_PARM)
8651 target = 0;
8652
8653 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8654 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8655
8656 case TRUNC_DIV_EXPR:
8657 case FLOOR_DIV_EXPR:
8658 case CEIL_DIV_EXPR:
8659 case ROUND_DIV_EXPR:
8660 case EXACT_DIV_EXPR:
8661 /* If this is a fixed-point operation, then we cannot use the code
8662 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8663 divisions. */
8664 if (ALL_FIXED_POINT_MODE_P (mode))
8665 goto binop;
8666
8667 if (modifier == EXPAND_STACK_PARM)
8668 target = 0;
8669 /* Possible optimization: compute the dividend with EXPAND_SUM
8670 	 then, if the divisor is constant, we can optimize the case
8671 where some terms of the dividend have coeffs divisible by it. */
8672 expand_operands (treeop0, treeop1,
8673 subtarget, &op0, &op1, EXPAND_NORMAL);
8674 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8675
8676 case RDIV_EXPR:
8677 goto binop;
8678
8679 case MULT_HIGHPART_EXPR:
8680 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8681 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8682 gcc_assert (temp);
8683 return temp;
8684
8685 case TRUNC_MOD_EXPR:
8686 case FLOOR_MOD_EXPR:
8687 case CEIL_MOD_EXPR:
8688 case ROUND_MOD_EXPR:
8689 if (modifier == EXPAND_STACK_PARM)
8690 target = 0;
8691 expand_operands (treeop0, treeop1,
8692 subtarget, &op0, &op1, EXPAND_NORMAL);
8693 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8694
8695 case FIXED_CONVERT_EXPR:
8696 op0 = expand_normal (treeop0);
8697 if (target == 0 || modifier == EXPAND_STACK_PARM)
8698 target = gen_reg_rtx (mode);
8699
8700 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8701 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8702 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8703 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8704 else
8705 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8706 return target;
8707
8708 case FIX_TRUNC_EXPR:
8709 op0 = expand_normal (treeop0);
8710 if (target == 0 || modifier == EXPAND_STACK_PARM)
8711 target = gen_reg_rtx (mode);
8712 expand_fix (target, op0, unsignedp);
8713 return target;
8714
8715 case FLOAT_EXPR:
8716 op0 = expand_normal (treeop0);
8717 if (target == 0 || modifier == EXPAND_STACK_PARM)
8718 target = gen_reg_rtx (mode);
8719 /* expand_float can't figure out what to do if FROM has VOIDmode.
8720 So give it the correct mode. With -O, cse will optimize this. */
8721 if (GET_MODE (op0) == VOIDmode)
8722 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8723 op0);
8724 expand_float (target, op0,
8725 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8726 return target;
8727
8728 case NEGATE_EXPR:
8729 op0 = expand_expr (treeop0, subtarget,
8730 VOIDmode, EXPAND_NORMAL);
8731 if (modifier == EXPAND_STACK_PARM)
8732 target = 0;
8733 temp = expand_unop (mode,
8734 optab_for_tree_code (NEGATE_EXPR, type,
8735 optab_default),
8736 op0, target, 0);
8737 gcc_assert (temp);
8738 return REDUCE_BIT_FIELD (temp);
8739
8740 case ABS_EXPR:
8741 op0 = expand_expr (treeop0, subtarget,
8742 VOIDmode, EXPAND_NORMAL);
8743 if (modifier == EXPAND_STACK_PARM)
8744 target = 0;
8745
8746 /* ABS_EXPR is not valid for complex arguments. */
8747 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8748 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8749
8750 /* Unsigned abs is simply the operand. Testing here means we don't
8751 risk generating incorrect code below. */
8752 if (TYPE_UNSIGNED (type))
8753 return op0;
8754
8755 return expand_abs (mode, op0, target, unsignedp,
8756 safe_from_p (target, treeop0, 1));
8757
8758 case MAX_EXPR:
8759 case MIN_EXPR:
8760 target = original_target;
8761 if (target == 0
8762 || modifier == EXPAND_STACK_PARM
8763 || (MEM_P (target) && MEM_VOLATILE_P (target))
8764 || GET_MODE (target) != mode
8765 || (REG_P (target)
8766 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8767 target = gen_reg_rtx (mode);
8768 expand_operands (treeop0, treeop1,
8769 target, &op0, &op1, EXPAND_NORMAL);
8770
8771 /* First try to do it with a special MIN or MAX instruction.
8772 If that does not win, use a conditional jump to select the proper
8773 value. */
8774 this_optab = optab_for_tree_code (code, type, optab_default);
8775 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8776 OPTAB_WIDEN);
8777 if (temp != 0)
8778 return temp;
8779
8780 /* At this point, a MEM target is no longer useful; we will get better
8781 code without it. */
8782
8783 if (! REG_P (target))
8784 target = gen_reg_rtx (mode);
8785
8786 /* If op1 was placed in target, swap op0 and op1. */
8787 if (target != op0 && target == op1)
8788 std::swap (op0, op1);
8789
8790 /* We generate better code and avoid problems with op1 mentioning
8791 target by forcing op1 into a pseudo if it isn't a constant. */
8792 if (! CONSTANT_P (op1))
8793 op1 = force_reg (mode, op1);
8794
8795 {
8796 enum rtx_code comparison_code;
8797 rtx cmpop1 = op1;
8798
8799 if (code == MAX_EXPR)
8800 comparison_code = unsignedp ? GEU : GE;
8801 else
8802 comparison_code = unsignedp ? LEU : LE;
8803
8804 /* Canonicalize to comparisons against 0. */
8805 if (op1 == const1_rtx)
8806 {
8807 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8808 or (a != 0 ? a : 1) for unsigned.
8809 For MIN we are safe converting (a <= 1 ? a : 1)
8810 into (a <= 0 ? a : 1) */
8811 cmpop1 = const0_rtx;
8812 if (code == MAX_EXPR)
8813 comparison_code = unsignedp ? NE : GT;
8814 }
8815 if (op1 == constm1_rtx && !unsignedp)
8816 {
8817 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8818 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8819 cmpop1 = const0_rtx;
8820 if (code == MIN_EXPR)
8821 comparison_code = LT;
8822 }
8823
8824 /* Use a conditional move if possible. */
8825 if (can_conditionally_move_p (mode))
8826 {
8827 rtx insn;
8828
8829 start_sequence ();
8830
8831 /* Try to emit the conditional move. */
8832 insn = emit_conditional_move (target, comparison_code,
8833 op0, cmpop1, mode,
8834 op0, op1, mode,
8835 unsignedp);
8836
8837 /* If we could do the conditional move, emit the sequence,
8838 and return. */
8839 if (insn)
8840 {
8841 rtx_insn *seq = get_insns ();
8842 end_sequence ();
8843 emit_insn (seq);
8844 return target;
8845 }
8846
8847 /* Otherwise discard the sequence and fall back to code with
8848 branches. */
8849 end_sequence ();
8850 }
8851
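/* Illustrative summary (hypothetical shape, no target-specific
   claim): the branching fallback emitted below is

       target = op0;
       if (target <comparison_code> cmpop1) goto lab;
       target = op1;
     lab:

   so op0 is kept when the comparison holds and op1 is used
   otherwise.  */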
8852 if (target != op0)
8853 emit_move_insn (target, op0);
8854
8855 lab = gen_label_rtx ();
8856 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8857 unsignedp, mode, NULL_RTX, NULL, lab,
8858 -1);
8859 }
8860 emit_move_insn (target, op1);
8861 emit_label (lab);
8862 return target;
8863
8864 case BIT_NOT_EXPR:
8865 op0 = expand_expr (treeop0, subtarget,
8866 VOIDmode, EXPAND_NORMAL);
8867 if (modifier == EXPAND_STACK_PARM)
8868 target = 0;
8869 /* In case we have to reduce the result to bitfield precision
8870 for an unsigned bitfield, expand this as XOR with a suitable
8871 constant instead. */
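/* Illustrative example (hypothetical 3-bit unsigned bit-field held
   in QImode): the mask below is 0x07, so ~x is emitted as x ^ 0x07
   and the bits above the precision remain zero.  */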
8872 if (reduce_bit_field && TYPE_UNSIGNED (type))
8873 {
8874 wide_int mask = wi::mask (TYPE_PRECISION (type),
8875 false, GET_MODE_PRECISION (mode));
8876
8877 temp = expand_binop (mode, xor_optab, op0,
8878 immed_wide_int_const (mask, mode),
8879 target, 1, OPTAB_LIB_WIDEN);
8880 }
8881 else
8882 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8883 gcc_assert (temp);
8884 return temp;
8885
8886 /* ??? Can optimize bitwise operations with one arg constant.
8887 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8888 and (a bitwise1 b) bitwise2 b (etc)
8889 but that is probably not worthwhile. */
8890
8891 case BIT_AND_EXPR:
8892 case BIT_IOR_EXPR:
8893 case BIT_XOR_EXPR:
8894 goto binop;
8895
8896 case LROTATE_EXPR:
8897 case RROTATE_EXPR:
8898 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8899 || (GET_MODE_PRECISION (TYPE_MODE (type))
8900 == TYPE_PRECISION (type)));
8901 /* fall through */
8902
8903 case LSHIFT_EXPR:
8904 case RSHIFT_EXPR:
8905 {
8906 /* If this is a fixed-point operation, then we cannot use the code
8907 below because "expand_shift" doesn't support sat/no-sat fixed-point
8908 shifts. */
8909 if (ALL_FIXED_POINT_MODE_P (mode))
8910 goto binop;
8911
8912 if (! safe_from_p (subtarget, treeop1, 1))
8913 subtarget = 0;
8914 if (modifier == EXPAND_STACK_PARM)
8915 target = 0;
8916 op0 = expand_expr (treeop0, subtarget,
8917 VOIDmode, EXPAND_NORMAL);
8918
8919 /* Left shift optimization when shifting across the word_size boundary.
8920 
8921 If mode == GET_MODE_WIDER_MODE (word_mode), then normally there is no
8922 native instruction to support this wide-mode left shift.  Given the
8923 scenario below:
8924
8925 Type A = (Type) B << C
8926
8927 |< T >|
8928 | dest_high | dest_low |
8929
8930 | word_size |
8931
8932 If the shift amount C causes B to be shifted across the word size
8933 boundary, i.e. part of B is shifted into the high half of the destination
8934 register while part of B remains in the low half, then GCC uses
8935 the following left shift expand logic:
8936
8937 1. Initialize dest_low to B.
8938 2. Initialize every bit of dest_high to the sign bit of B.
8939 3. Logically left-shift dest_low by C bits to finalize dest_low.
8940 The value of dest_low before this shift is kept in a temp D.
8941 4. Logically left-shift dest_high by C.
8942 5. Logically right-shift D by (word_size - C).
8943 6. Or the results of 4 and 5 to finalize dest_high.
8944
8945 However, by checking gimple statements, if operand B comes from a
8946 sign extension, then we can simplify the above expand logic into:
8947
8948 1. dest_high = src_low >> (word_size - C).
8949 2. dest_low = src_low << C.
8950
8951 A single arithmetic right shift accomplishes the purpose of
8952 steps 2, 4, 5 and 6, reducing the number of steps needed from 6 to 2. */
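/* Illustrative example (assumes a hypothetical 32-bit target): when
   expanding "(long long)(int) x << 20" with this shortcut, GCC emits

       dest_high = x >> 12;    (arithmetic shift; 12 == 32 - 20)
       dest_low  = x << 20;

   instead of the six-step generic double-word sequence above.  */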
8953
8954 temp = NULL_RTX;
8955 if (code == LSHIFT_EXPR
8956 && target
8957 && REG_P (target)
8958 && ! unsignedp
8959 && mode == GET_MODE_WIDER_MODE (word_mode)
8960 && GET_MODE_SIZE (mode) == 2 * GET_MODE_SIZE (word_mode)
8961 && TREE_CONSTANT (treeop1)
8962 && TREE_CODE (treeop0) == SSA_NAME)
8963 {
8964 gimple *def = SSA_NAME_DEF_STMT (treeop0);
8965 if (is_gimple_assign (def)
8966 && gimple_assign_rhs_code (def) == NOP_EXPR)
8967 {
8968 machine_mode rmode = TYPE_MODE
8969 (TREE_TYPE (gimple_assign_rhs1 (def)));
8970
8971 if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (mode)
8972 && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
8973 && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
8974 >= GET_MODE_BITSIZE (word_mode)))
8975 {
8976 rtx_insn *seq, *seq_old;
8977 unsigned int high_off = subreg_highpart_offset (word_mode,
8978 mode);
8979 rtx low = lowpart_subreg (word_mode, op0, mode);
8980 rtx dest_low = lowpart_subreg (word_mode, target, mode);
8981 rtx dest_high = simplify_gen_subreg (word_mode, target,
8982 mode, high_off);
8983 HOST_WIDE_INT ramount = (BITS_PER_WORD
8984 - TREE_INT_CST_LOW (treeop1));
8985 tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);
8986
8987 start_sequence ();
8988 /* dest_high = src_low >> (word_size - C). */
8989 temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
8990 rshift, dest_high, unsignedp);
8991 if (temp != dest_high)
8992 emit_move_insn (dest_high, temp);
8993
8994 /* dest_low = src_low << C. */
8995 temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
8996 treeop1, dest_low, unsignedp);
8997 if (temp != dest_low)
8998 emit_move_insn (dest_low, temp);
8999
9000 seq = get_insns ();
9001 end_sequence ();
9002 temp = target;
9003
9004 if (have_insn_for (ASHIFT, mode))
9005 {
9006 bool speed_p = optimize_insn_for_speed_p ();
9007 start_sequence ();
9008 rtx ret_old = expand_variable_shift (code, mode, op0,
9009 treeop1, target,
9010 unsignedp);
9011
9012 seq_old = get_insns ();
9013 end_sequence ();
9014 if (seq_cost (seq, speed_p)
9015 >= seq_cost (seq_old, speed_p))
9016 {
9017 seq = seq_old;
9018 temp = ret_old;
9019 }
9020 }
9021 emit_insn (seq);
9022 }
9023 }
9024 }
9025
9026 if (temp == NULL_RTX)
9027 temp = expand_variable_shift (code, mode, op0, treeop1, target,
9028 unsignedp);
9029 if (code == LSHIFT_EXPR)
9030 temp = REDUCE_BIT_FIELD (temp);
9031 return temp;
9032 }
9033
9034 /* Could determine the answer when only additive constants differ. Also,
9035 the addition of one can be handled by changing the condition. */
9036 case LT_EXPR:
9037 case LE_EXPR:
9038 case GT_EXPR:
9039 case GE_EXPR:
9040 case EQ_EXPR:
9041 case NE_EXPR:
9042 case UNORDERED_EXPR:
9043 case ORDERED_EXPR:
9044 case UNLT_EXPR:
9045 case UNLE_EXPR:
9046 case UNGT_EXPR:
9047 case UNGE_EXPR:
9048 case UNEQ_EXPR:
9049 case LTGT_EXPR:
9050 {
9051 temp = do_store_flag (ops,
9052 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9053 tmode != VOIDmode ? tmode : mode);
9054 if (temp)
9055 return temp;
9056
9057 /* Use a compare and a jump for BLKmode comparisons, or for function
9058 type comparisons if have_canonicalize_funcptr_for_compare. */
9059
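/* Illustrative summary (hypothetical shape) of the fallback emitted
   below when do_store_flag could not materialize the flag directly:

       target = 0;
       if (!(op0 <code> op1)) goto lab1;
       target = 1;    (or -1 for a signed 1-bit type)
     lab1:                                                          */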
9060 if ((target == 0
9061 || modifier == EXPAND_STACK_PARM
9062 || ! safe_from_p (target, treeop0, 1)
9063 || ! safe_from_p (target, treeop1, 1)
9064 /* Make sure we don't have a hard reg (such as function's return
9065 value) live across basic blocks, if not optimizing. */
9066 || (!optimize && REG_P (target)
9067 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9068 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9069
9070 emit_move_insn (target, const0_rtx);
9071
9072 rtx_code_label *lab1 = gen_label_rtx ();
9073 jumpifnot_1 (code, treeop0, treeop1, lab1, -1);
9074
9075 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9076 emit_move_insn (target, constm1_rtx);
9077 else
9078 emit_move_insn (target, const1_rtx);
9079
9080 emit_label (lab1);
9081 return target;
9082 }
9083 case COMPLEX_EXPR:
9084 /* Get the rtx code of the operands. */
9085 op0 = expand_normal (treeop0);
9086 op1 = expand_normal (treeop1);
9087
9088 if (!target)
9089 target = gen_reg_rtx (TYPE_MODE (type));
9090 else
9091 /* If target overlaps with op1, then either we need to force
9092 op1 into a pseudo (if target also overlaps with op0),
9093 or write the complex parts in reverse order. */
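/* Illustrative example (hypothetical registers): if target is
   (concat r1 r2) and op1 happens to live in r1, writing the real
   part first would clobber op1, so the imaginary part is written
   first.  If op0 also lives in r2, neither order is safe and op1
   is copied into a fresh pseudo instead.  */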
9094 switch (GET_CODE (target))
9095 {
9096 case CONCAT:
9097 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9098 {
9099 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9100 {
9101 complex_expr_force_op1:
9102 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9103 emit_move_insn (temp, op1);
9104 op1 = temp;
9105 break;
9106 }
9107 complex_expr_swap_order:
9108 /* Move the imaginary (op1) and real (op0) parts to their
9109 location. */
9110 write_complex_part (target, op1, true);
9111 write_complex_part (target, op0, false);
9112
9113 return target;
9114 }
9115 break;
9116 case MEM:
9117 temp = adjust_address_nv (target,
9118 GET_MODE_INNER (GET_MODE (target)), 0);
9119 if (reg_overlap_mentioned_p (temp, op1))
9120 {
9121 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9122 temp = adjust_address_nv (target, imode,
9123 GET_MODE_SIZE (imode));
9124 if (reg_overlap_mentioned_p (temp, op0))
9125 goto complex_expr_force_op1;
9126 goto complex_expr_swap_order;
9127 }
9128 break;
9129 default:
9130 if (reg_overlap_mentioned_p (target, op1))
9131 {
9132 if (reg_overlap_mentioned_p (target, op0))
9133 goto complex_expr_force_op1;
9134 goto complex_expr_swap_order;
9135 }
9136 break;
9137 }
9138
9139 /* Move the real (op0) and imaginary (op1) parts to their location. */
9140 write_complex_part (target, op0, false);
9141 write_complex_part (target, op1, true);
9142
9143 return target;
9144
9145 case WIDEN_SUM_EXPR:
9146 {
9147 tree oprnd0 = treeop0;
9148 tree oprnd1 = treeop1;
9149
9150 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9151 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9152 target, unsignedp);
9153 return target;
9154 }
9155
9156 case REDUC_MAX_EXPR:
9157 case REDUC_MIN_EXPR:
9158 case REDUC_PLUS_EXPR:
9159 {
9160 op0 = expand_normal (treeop0);
9161 this_optab = optab_for_tree_code (code, type, optab_default);
9162 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9163
9164 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9165 {
9166 struct expand_operand ops[2];
9167 enum insn_code icode = optab_handler (this_optab, vec_mode);
9168
9169 create_output_operand (&ops[0], target, mode);
9170 create_input_operand (&ops[1], op0, vec_mode);
9171 if (maybe_expand_insn (icode, 2, ops))
9172 {
9173 target = ops[0].value;
9174 if (GET_MODE (target) != mode)
9175 return gen_lowpart (tmode, target);
9176 return target;
9177 }
9178 }
9179 /* Fall back to optab with vector result, and then extract scalar. */
9180 this_optab = scalar_reduc_to_vector (this_optab, type);
9181 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9182 gcc_assert (temp);
9183 /* The tree code produces a scalar result, but (somewhat by convention)
9184 the optab produces a vector with the result in element 0 if
9185 little-endian, or element N-1 if big-endian. So pull the scalar
9186 result out of that element. */
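/* Illustrative example (hypothetical V4SI reduction): the element
   index below is 0 on a little-endian target and 3 on a big-endian
   one, so the extraction reads the 32 bits starting at bit
   32 * index of the vector result.  */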
9187 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9188 int bitsize = GET_MODE_UNIT_BITSIZE (vec_mode);
9189 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9190 target, mode, mode, false);
9191 gcc_assert (temp);
9192 return temp;
9193 }
9194
9195 case VEC_UNPACK_HI_EXPR:
9196 case VEC_UNPACK_LO_EXPR:
9197 {
9198 op0 = expand_normal (treeop0);
9199 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9200 target, unsignedp);
9201 gcc_assert (temp);
9202 return temp;
9203 }
9204
9205 case VEC_UNPACK_FLOAT_HI_EXPR:
9206 case VEC_UNPACK_FLOAT_LO_EXPR:
9207 {
9208 op0 = expand_normal (treeop0);
9209 /* The signedness is determined from the input operand. */
9210 temp = expand_widen_pattern_expr
9211 (ops, op0, NULL_RTX, NULL_RTX,
9212 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9213
9214 gcc_assert (temp);
9215 return temp;
9216 }
9217
9218 case VEC_WIDEN_MULT_HI_EXPR:
9219 case VEC_WIDEN_MULT_LO_EXPR:
9220 case VEC_WIDEN_MULT_EVEN_EXPR:
9221 case VEC_WIDEN_MULT_ODD_EXPR:
9222 case VEC_WIDEN_LSHIFT_HI_EXPR:
9223 case VEC_WIDEN_LSHIFT_LO_EXPR:
9224 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9225 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9226 target, unsignedp);
9227 gcc_assert (target);
9228 return target;
9229
9230 case VEC_PACK_TRUNC_EXPR:
9231 case VEC_PACK_SAT_EXPR:
9232 case VEC_PACK_FIX_TRUNC_EXPR:
9233 mode = TYPE_MODE (TREE_TYPE (treeop0));
9234 goto binop;
9235
9236 case VEC_PERM_EXPR:
9237 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9238 op2 = expand_normal (treeop2);
9239
9240 /* Careful here: if the target doesn't support integral vector modes,
9241 a constant selection vector could wind up smooshed into a normal
9242 integral constant. */
9243 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9244 {
9245 tree sel_type = TREE_TYPE (treeop2);
9246 machine_mode vmode
9247 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9248 TYPE_VECTOR_SUBPARTS (sel_type));
9249 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9250 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9251 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9252 }
9253 else
9254 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9255
9256 temp = expand_vec_perm (mode, op0, op1, op2, target);
9257 gcc_assert (temp);
9258 return temp;
9259
9260 case DOT_PROD_EXPR:
9261 {
9262 tree oprnd0 = treeop0;
9263 tree oprnd1 = treeop1;
9264 tree oprnd2 = treeop2;
9265 rtx op2;
9266
9267 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9268 op2 = expand_normal (oprnd2);
9269 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9270 target, unsignedp);
9271 return target;
9272 }
9273
9274 case SAD_EXPR:
9275 {
9276 tree oprnd0 = treeop0;
9277 tree oprnd1 = treeop1;
9278 tree oprnd2 = treeop2;
9279 rtx op2;
9280
9281 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9282 op2 = expand_normal (oprnd2);
9283 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9284 target, unsignedp);
9285 return target;
9286 }
9287
9288 case REALIGN_LOAD_EXPR:
9289 {
9290 tree oprnd0 = treeop0;
9291 tree oprnd1 = treeop1;
9292 tree oprnd2 = treeop2;
9293 rtx op2;
9294
9295 this_optab = optab_for_tree_code (code, type, optab_default);
9296 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9297 op2 = expand_normal (oprnd2);
9298 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9299 target, unsignedp);
9300 gcc_assert (temp);
9301 return temp;
9302 }
9303
9304 case COND_EXPR:
9305 {
9306 /* A COND_EXPR with its type being VOID_TYPE represents a
9307 conditional jump and is handled in
9308 expand_gimple_cond_expr. */
9309 gcc_assert (!VOID_TYPE_P (type));
9310
9311 /* Note that COND_EXPRs whose type is a structure or union
9312 are required to be constructed to contain assignments of
9313 a temporary variable, so that we can evaluate them here
9314 for side effect only. If type is void, we must do likewise. */
9315
9316 gcc_assert (!TREE_ADDRESSABLE (type)
9317 && !ignore
9318 && TREE_TYPE (treeop1) != void_type_node
9319 && TREE_TYPE (treeop2) != void_type_node);
9320
9321 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9322 if (temp)
9323 return temp;
9324
9325 /* If we are not to produce a result, we have no target. Otherwise,
9326 if a target was specified use it; it will not be used as an
9327 intermediate target unless it is safe. If no target, use a
9328 temporary. */
9329
9330 if (modifier != EXPAND_STACK_PARM
9331 && original_target
9332 && safe_from_p (original_target, treeop0, 1)
9333 && GET_MODE (original_target) == mode
9334 && !MEM_P (original_target))
9335 temp = original_target;
9336 else
9337 temp = assign_temp (type, 0, 1);
9338
9339 do_pending_stack_adjust ();
9340 NO_DEFER_POP;
9341 rtx_code_label *lab0 = gen_label_rtx ();
9342 rtx_code_label *lab1 = gen_label_rtx ();
9343 jumpifnot (treeop0, lab0, -1);
9344 store_expr (treeop1, temp,
9345 modifier == EXPAND_STACK_PARM,
9346 false, false);
9347
9348 emit_jump_insn (targetm.gen_jump (lab1));
9349 emit_barrier ();
9350 emit_label (lab0);
9351 store_expr (treeop2, temp,
9352 modifier == EXPAND_STACK_PARM,
9353 false, false);
9354
9355 emit_label (lab1);
9356 OK_DEFER_POP;
9357 return temp;
9358 }
9359
9360 case VEC_COND_EXPR:
9361 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9362 return target;
9363
9364 default:
9365 gcc_unreachable ();
9366 }
9367
9368 /* Here to do an ordinary binary operator. */
9369 binop:
9370 expand_operands (treeop0, treeop1,
9371 subtarget, &op0, &op1, EXPAND_NORMAL);
9372 binop2:
9373 this_optab = optab_for_tree_code (code, type, optab_default);
9374 binop3:
9375 if (modifier == EXPAND_STACK_PARM)
9376 target = 0;
9377 temp = expand_binop (mode, this_optab, op0, op1, target,
9378 unsignedp, OPTAB_LIB_WIDEN);
9379 gcc_assert (temp);
9380 /* Bitwise operations do not need bitfield reduction as we expect their
9381 operands being properly truncated. */
9382 if (code == BIT_XOR_EXPR
9383 || code == BIT_AND_EXPR
9384 || code == BIT_IOR_EXPR)
9385 return temp;
9386 return REDUCE_BIT_FIELD (temp);
9387 }
9388 #undef REDUCE_BIT_FIELD
9389
9390
9391 /* Return TRUE if expression STMT is suitable for replacement.
9392 Never consider memory loads as replaceable, because those don't ever lead
9393 into constant expressions. */
9394
9395 static bool
9396 stmt_is_replaceable_p (gimple *stmt)
9397 {
9398 if (ssa_is_replaceable_p (stmt))
9399 {
9400 /* Don't move around loads. */
9401 if (!gimple_assign_single_p (stmt)
9402 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9403 return true;
9404 }
9405 return false;
9406 }
9407
9408 rtx
9409 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9410 enum expand_modifier modifier, rtx *alt_rtl,
9411 bool inner_reference_p)
9412 {
9413 rtx op0, op1, temp, decl_rtl;
9414 tree type;
9415 int unsignedp;
9416 machine_mode mode, dmode;
9417 enum tree_code code = TREE_CODE (exp);
9418 rtx subtarget, original_target;
9419 int ignore;
9420 tree context;
9421 bool reduce_bit_field;
9422 location_t loc = EXPR_LOCATION (exp);
9423 struct separate_ops ops;
9424 tree treeop0, treeop1, treeop2;
9425 tree ssa_name = NULL_TREE;
9426 gimple *g;
9427
9428 type = TREE_TYPE (exp);
9429 mode = TYPE_MODE (type);
9430 unsignedp = TYPE_UNSIGNED (type);
9431
9432 treeop0 = treeop1 = treeop2 = NULL_TREE;
9433 if (!VL_EXP_CLASS_P (exp))
9434 switch (TREE_CODE_LENGTH (code))
9435 {
9436 default:
9437 case 3: treeop2 = TREE_OPERAND (exp, 2);
9438 case 2: treeop1 = TREE_OPERAND (exp, 1);
9439 case 1: treeop0 = TREE_OPERAND (exp, 0);
9440 case 0: break;
9441 }
9442 ops.code = code;
9443 ops.type = type;
9444 ops.op0 = treeop0;
9445 ops.op1 = treeop1;
9446 ops.op2 = treeop2;
9447 ops.location = loc;
9448
9449 ignore = (target == const0_rtx
9450 || ((CONVERT_EXPR_CODE_P (code)
9451 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9452 && TREE_CODE (type) == VOID_TYPE));
9453
9454 /* An operation in what may be a bit-field type needs the
9455 result to be reduced to the precision of the bit-field type,
9456 which is narrower than that of the type's mode. */
9457 reduce_bit_field = (!ignore
9458 && INTEGRAL_TYPE_P (type)
9459 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9460
9461 /* If we are going to ignore this result, we need only do something
9462 if there is a side-effect somewhere in the expression. If there
9463 is, short-circuit the most common cases here. Note that we must
9464 not call expand_expr with anything but const0_rtx in case this
9465 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9466
9467 if (ignore)
9468 {
9469 if (! TREE_SIDE_EFFECTS (exp))
9470 return const0_rtx;
9471
9472 /* Ensure we reference a volatile object even if value is ignored, but
9473 don't do this if all we are doing is taking its address. */
9474 if (TREE_THIS_VOLATILE (exp)
9475 && TREE_CODE (exp) != FUNCTION_DECL
9476 && mode != VOIDmode && mode != BLKmode
9477 && modifier != EXPAND_CONST_ADDRESS)
9478 {
9479 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9480 if (MEM_P (temp))
9481 copy_to_reg (temp);
9482 return const0_rtx;
9483 }
9484
9485 if (TREE_CODE_CLASS (code) == tcc_unary
9486 || code == BIT_FIELD_REF
9487 || code == COMPONENT_REF
9488 || code == INDIRECT_REF)
9489 return expand_expr (treeop0, const0_rtx, VOIDmode,
9490 modifier);
9491
9492 else if (TREE_CODE_CLASS (code) == tcc_binary
9493 || TREE_CODE_CLASS (code) == tcc_comparison
9494 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9495 {
9496 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9497 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9498 return const0_rtx;
9499 }
9500
9501 target = 0;
9502 }
9503
9504 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9505 target = 0;
9506
9507 /* Use subtarget as the target for operand 0 of a binary operation. */
9508 subtarget = get_subtarget (target);
9509 original_target = target;
9510
9511 switch (code)
9512 {
9513 case LABEL_DECL:
9514 {
9515 tree function = decl_function_context (exp);
9516
9517 temp = label_rtx (exp);
9518 temp = gen_rtx_LABEL_REF (Pmode, temp);
9519
9520 if (function != current_function_decl
9521 && function != 0)
9522 LABEL_REF_NONLOCAL_P (temp) = 1;
9523
9524 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9525 return temp;
9526 }
9527
9528 case SSA_NAME:
9529 /* ??? ivopts calls expander, without any preparation from
9530 out-of-ssa. So fake instructions as if this were an access to the
9531 base variable. This unnecessarily allocates a pseudo; see how we can
9532 reuse it if partition base vars have it set already. */
9533 if (!currently_expanding_to_rtl)
9534 {
9535 tree var = SSA_NAME_VAR (exp);
9536 if (var && DECL_RTL_SET_P (var))
9537 return DECL_RTL (var);
9538 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9539 LAST_VIRTUAL_REGISTER + 1);
9540 }
9541
9542 g = get_gimple_for_ssa_name (exp);
9543 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9544 if (g == NULL
9545 && modifier == EXPAND_INITIALIZER
9546 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9547 && (optimize || !SSA_NAME_VAR (exp)
9548 || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9549 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9550 g = SSA_NAME_DEF_STMT (exp);
9551 if (g)
9552 {
9553 rtx r;
9554 location_t saved_loc = curr_insn_location ();
9555 location_t loc = gimple_location (g);
9556 if (loc != UNKNOWN_LOCATION)
9557 set_curr_insn_location (loc);
9558 ops.code = gimple_assign_rhs_code (g);
9559 switch (get_gimple_rhs_class (ops.code))
9560 {
9561 case GIMPLE_TERNARY_RHS:
9562 ops.op2 = gimple_assign_rhs3 (g);
9563 /* Fallthru */
9564 case GIMPLE_BINARY_RHS:
9565 ops.op1 = gimple_assign_rhs2 (g);
9566
9567 /* Try to expand conditional compare. */
9568 if (targetm.gen_ccmp_first)
9569 {
9570 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9571 r = expand_ccmp_expr (g);
9572 if (r)
9573 break;
9574 }
9575 /* Fallthru */
9576 case GIMPLE_UNARY_RHS:
9577 ops.op0 = gimple_assign_rhs1 (g);
9578 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9579 ops.location = loc;
9580 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9581 break;
9582 case GIMPLE_SINGLE_RHS:
9583 {
9584 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9585 tmode, modifier, NULL, inner_reference_p);
9586 break;
9587 }
9588 default:
9589 gcc_unreachable ();
9590 }
9591 set_curr_insn_location (saved_loc);
9592 if (REG_P (r) && !REG_EXPR (r))
9593 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9594 return r;
9595 }
9596
9597 ssa_name = exp;
9598 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9599 exp = SSA_NAME_VAR (ssa_name);
9600 goto expand_decl_rtl;
9601
9602 case PARM_DECL:
9603 case VAR_DECL:
9604 /* If a static var's type was incomplete when the decl was written,
9605 but the type is complete now, lay out the decl now. */
9606 if (DECL_SIZE (exp) == 0
9607 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9608 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9609 layout_decl (exp, 0);
9610
9611 /* ... fall through ... */
9612
9613 case FUNCTION_DECL:
9614 case RESULT_DECL:
9615 decl_rtl = DECL_RTL (exp);
9616 expand_decl_rtl:
9617 gcc_assert (decl_rtl);
9618 decl_rtl = copy_rtx (decl_rtl);
9619 /* Record writes to register variables. */
9620 if (modifier == EXPAND_WRITE
9621 && REG_P (decl_rtl)
9622 && HARD_REGISTER_P (decl_rtl))
9623 add_to_hard_reg_set (&crtl->asm_clobbers,
9624 GET_MODE (decl_rtl), REGNO (decl_rtl));
9625
9626 /* Ensure the variable is marked as used even if it doesn't go through
9627 a parser. If it hasn't been used yet, write out an external
9628 definition. */
9629 if (exp)
9630 TREE_USED (exp) = 1;
9631
9632 /* Show we haven't gotten RTL for this yet. */
9633 temp = 0;
9634
9635 /* Variables inherited from containing functions should have
9636 been lowered by this point. */
9637 if (exp)
9638 context = decl_function_context (exp);
9639 gcc_assert (!exp
9640 || SCOPE_FILE_SCOPE_P (context)
9641 || context == current_function_decl
9642 || TREE_STATIC (exp)
9643 || DECL_EXTERNAL (exp)
9644 /* ??? C++ creates functions that are not TREE_STATIC. */
9645 || TREE_CODE (exp) == FUNCTION_DECL);
9646
9647 /* This is the case of an array whose size is to be determined
9648 from its initializer, while the initializer is still being parsed.
9649 ??? We aren't parsing while expanding anymore. */
9650
9651 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9652 temp = validize_mem (decl_rtl);
9653
9654 /* If DECL_RTL is memory, we are in the normal case and the
9655 address is not valid, get the address into a register. */
9656
9657 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9658 {
9659 if (alt_rtl)
9660 *alt_rtl = decl_rtl;
9661 decl_rtl = use_anchored_address (decl_rtl);
9662 if (modifier != EXPAND_CONST_ADDRESS
9663 && modifier != EXPAND_SUM
9664 && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
9665 : GET_MODE (decl_rtl),
9666 XEXP (decl_rtl, 0),
9667 MEM_ADDR_SPACE (decl_rtl)))
9668 temp = replace_equiv_address (decl_rtl,
9669 copy_rtx (XEXP (decl_rtl, 0)));
9670 }
9671
9672 /* If we got something, return it. But first, set the alignment
9673 if the address is a register. */
9674 if (temp != 0)
9675 {
9676 if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
9677 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9678
9679 return temp;
9680 }
9681
9682 if (exp)
9683 dmode = DECL_MODE (exp);
9684 else
9685 dmode = TYPE_MODE (TREE_TYPE (ssa_name));
9686
9687 /* If the mode of DECL_RTL does not match that of the decl,
9688 there are two cases: we are dealing with a BLKmode value
9689 that is returned in a register, or we are dealing with
9690 a promoted value. In the latter case, return a SUBREG
9691 of the wanted mode, but mark it so that we know that it
9692 was already extended. */
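/* Illustrative example (hypothetical ABI): a "short" variable that
   the target promotes to a full SImode register comes back here as
   a SUBREG of that register, e.g. (subreg:HI (reg:SI n) 0) on a
   little-endian target, with SUBREG_PROMOTED_VAR_P set so later
   code knows the upper bits are already extended.  */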
9693 if (REG_P (decl_rtl)
9694 && dmode != BLKmode
9695 && GET_MODE (decl_rtl) != dmode)
9696 {
9697 machine_mode pmode;
9698
9699 /* Get the signedness to be used for this variable. Ensure we get
9700 the same mode we got when the variable was declared. */
9701 if (code != SSA_NAME)
9702 pmode = promote_decl_mode (exp, &unsignedp);
9703 else if ((g = SSA_NAME_DEF_STMT (ssa_name))
9704 && gimple_code (g) == GIMPLE_CALL
9705 && !gimple_call_internal_p (g))
9706 pmode = promote_function_mode (type, mode, &unsignedp,
9707 gimple_call_fntype (g),
9708 2);
9709 else
9710 pmode = promote_ssa_mode (ssa_name, &unsignedp);
9711 gcc_assert (GET_MODE (decl_rtl) == pmode);
9712
9713 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9714 SUBREG_PROMOTED_VAR_P (temp) = 1;
9715 SUBREG_PROMOTED_SET (temp, unsignedp);
9716 return temp;
9717 }
9718
9719 return decl_rtl;
9720
9721 case INTEGER_CST:
9722 /* Given that TYPE_PRECISION (type) is not always equal to
9723 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9724 the former to the latter according to the signedness of the
9725 type. */
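/* Illustrative example (hypothetical 3-bit signed type whose mode
   is QImode): the constant -1, i.e. the 3-bit pattern 0b111, is
   sign-extended to the 8-bit value 0xff before the CONST_INT is
   built.  */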
9726 temp = immed_wide_int_const (wide_int::from
9727 (exp,
9728 GET_MODE_PRECISION (TYPE_MODE (type)),
9729 TYPE_SIGN (type)),
9730 TYPE_MODE (type));
9731 return temp;
9732
9733 case VECTOR_CST:
9734 {
9735 tree tmp = NULL_TREE;
9736 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9737 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9738 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9739 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9740 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9741 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9742 return const_vector_from_tree (exp);
9743 if (GET_MODE_CLASS (mode) == MODE_INT)
9744 {
9745 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9746 if (type_for_mode)
9747 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9748 }
9749 if (!tmp)
9750 {
9751 vec<constructor_elt, va_gc> *v;
9752 unsigned i;
9753 vec_alloc (v, VECTOR_CST_NELTS (exp));
9754 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9755 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9756 tmp = build_constructor (type, v);
9757 }
9758 return expand_expr (tmp, ignore ? const0_rtx : target,
9759 tmode, modifier);
9760 }
9761
9762 case CONST_DECL:
9763 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9764
9765 case REAL_CST:
9766 /* If optimized, generate immediate CONST_DOUBLE
9767 which will be turned into memory by reload if necessary.
9768
9769 We used to force a register so that loop.c could see it. But
9770 this does not allow gen_* patterns to perform optimizations with
9771 the constants. It also produces two insns in cases like "x = 1.0;".
9772 On most machines, floating-point constants are not permitted in
9773 many insns, so we'd end up copying it to a register in any case.
9774
9775 Now, we do the copying in expand_binop, if appropriate. */
9776 return const_double_from_real_value (TREE_REAL_CST (exp),
9777 TYPE_MODE (TREE_TYPE (exp)));
9778
9779 case FIXED_CST:
9780 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9781 TYPE_MODE (TREE_TYPE (exp)));
9782
9783 case COMPLEX_CST:
9784 /* Handle evaluating a complex constant in a CONCAT target. */
9785 if (original_target && GET_CODE (original_target) == CONCAT)
9786 {
9787 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9788 rtx rtarg, itarg;
9789
9790 rtarg = XEXP (original_target, 0);
9791 itarg = XEXP (original_target, 1);
9792
9793 /* Move the real and imaginary parts separately. */
9794 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9795 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9796
9797 if (op0 != rtarg)
9798 emit_move_insn (rtarg, op0);
9799 if (op1 != itarg)
9800 emit_move_insn (itarg, op1);
9801
9802 return original_target;
9803 }
9804
9805 /* ... fall through ... */
9806
9807 case STRING_CST:
9808 temp = expand_expr_constant (exp, 1, modifier);
9809
9810 /* temp contains a constant address.
9811 On RISC machines where a constant address isn't valid,
9812 make some insns to get that address into a register. */
9813 if (modifier != EXPAND_CONST_ADDRESS
9814 && modifier != EXPAND_INITIALIZER
9815 && modifier != EXPAND_SUM
9816 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9817 MEM_ADDR_SPACE (temp)))
9818 return replace_equiv_address (temp,
9819 copy_rtx (XEXP (temp, 0)));
9820 return temp;
9821
9822 case SAVE_EXPR:
9823 {
9824 tree val = treeop0;
9825 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9826 inner_reference_p);
9827
9828 if (!SAVE_EXPR_RESOLVED_P (exp))
9829 {
9830 /* We can indeed still hit this case, typically via builtin
9831 expanders calling save_expr immediately before expanding
9832 something. Assume this means that we only have to deal
9833 with non-BLKmode values. */
9834 gcc_assert (GET_MODE (ret) != BLKmode);
9835
9836 val = build_decl (curr_insn_location (),
9837 VAR_DECL, NULL, TREE_TYPE (exp));
9838 DECL_ARTIFICIAL (val) = 1;
9839 DECL_IGNORED_P (val) = 1;
9840 treeop0 = val;
9841 TREE_OPERAND (exp, 0) = treeop0;
9842 SAVE_EXPR_RESOLVED_P (exp) = 1;
9843
9844 if (!CONSTANT_P (ret))
9845 ret = copy_to_reg (ret);
9846 SET_DECL_RTL (val, ret);
9847 }
9848
9849 return ret;
9850 }
9851
9852
9853 case CONSTRUCTOR:
9854 /* If we don't need the result, just ensure we evaluate any
9855 subexpressions. */
9856 if (ignore)
9857 {
9858 unsigned HOST_WIDE_INT idx;
9859 tree value;
9860
9861 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9862 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9863
9864 return const0_rtx;
9865 }
9866
9867 return expand_constructor (exp, target, modifier, false);
9868
9869 case TARGET_MEM_REF:
9870 {
9871 addr_space_t as
9872 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9873 enum insn_code icode;
9874 unsigned int align;
9875
9876 op0 = addr_for_mem_ref (exp, as, true);
9877 op0 = memory_address_addr_space (mode, op0, as);
9878 temp = gen_rtx_MEM (mode, op0);
9879 set_mem_attributes (temp, exp, 0);
9880 set_mem_addr_space (temp, as);
9881 align = get_object_alignment (exp);
9882 if (modifier != EXPAND_WRITE
9883 && modifier != EXPAND_MEMORY
9884 && mode != BLKmode
9885 && align < GET_MODE_ALIGNMENT (mode)
9886 /* If the target does not have special handling for unaligned
9887 loads of mode then it can use regular moves for them. */
9888 && ((icode = optab_handler (movmisalign_optab, mode))
9889 != CODE_FOR_nothing))
9890 {
9891 struct expand_operand ops[2];
9892
9893 /* We've already validated the memory, and we're creating a
9894 new pseudo destination. The predicates really can't fail,
9895 nor can the generator. */
9896 create_output_operand (&ops[0], NULL_RTX, mode);
9897 create_fixed_operand (&ops[1], temp);
9898 expand_insn (icode, 2, ops);
9899 temp = ops[0].value;
9900 }
9901 return temp;
9902 }
9903
9904 case MEM_REF:
9905 {
9906 const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
9907 addr_space_t as
9908 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9909 machine_mode address_mode;
9910 tree base = TREE_OPERAND (exp, 0);
9911 gimple *def_stmt;
9912 enum insn_code icode;
9913 unsigned align;
9914 /* Handle expansion of non-aliased memory with non-BLKmode. That
9915 might end up in a register. */
9916 if (mem_ref_refers_to_non_mem_p (exp))
9917 {
9918 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9919 base = TREE_OPERAND (base, 0);
9920 if (offset == 0
9921 && !reverse
9922 && tree_fits_uhwi_p (TYPE_SIZE (type))
9923 && (GET_MODE_BITSIZE (DECL_MODE (base))
9924 == tree_to_uhwi (TYPE_SIZE (type))))
9925 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9926 target, tmode, modifier);
9927 if (TYPE_MODE (type) == BLKmode)
9928 {
9929 temp = assign_stack_temp (DECL_MODE (base),
9930 GET_MODE_SIZE (DECL_MODE (base)));
9931 store_expr (base, temp, 0, false, false);
9932 temp = adjust_address (temp, BLKmode, offset);
9933 set_mem_size (temp, int_size_in_bytes (type));
9934 return temp;
9935 }
9936 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9937 bitsize_int (offset * BITS_PER_UNIT));
9938 REF_REVERSE_STORAGE_ORDER (exp) = reverse;
9939 return expand_expr (exp, target, tmode, modifier);
9940 }
9941 address_mode = targetm.addr_space.address_mode (as);
9942 base = TREE_OPERAND (exp, 0);
9943 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9944 {
9945 tree mask = gimple_assign_rhs2 (def_stmt);
9946 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9947 gimple_assign_rhs1 (def_stmt), mask);
9948 TREE_OPERAND (exp, 0) = base;
9949 }
9950 align = get_object_alignment (exp);
9951 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9952 op0 = memory_address_addr_space (mode, op0, as);
9953 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9954 {
9955 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9956 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9957 op0 = memory_address_addr_space (mode, op0, as);
9958 }
9959 temp = gen_rtx_MEM (mode, op0);
9960 set_mem_attributes (temp, exp, 0);
9961 set_mem_addr_space (temp, as);
9962 if (TREE_THIS_VOLATILE (exp))
9963 MEM_VOLATILE_P (temp) = 1;
9964 if (modifier != EXPAND_WRITE
9965 && modifier != EXPAND_MEMORY
9966 && !inner_reference_p
9967 && mode != BLKmode
9968 && align < GET_MODE_ALIGNMENT (mode))
9969 {
9970 if ((icode = optab_handler (movmisalign_optab, mode))
9971 != CODE_FOR_nothing)
9972 {
9973 struct expand_operand ops[2];
9974
9975 /* We've already validated the memory, and we're creating a
9976 new pseudo destination. The predicates really can't fail,
9977 nor can the generator. */
9978 create_output_operand (&ops[0], NULL_RTX, mode);
9979 create_fixed_operand (&ops[1], temp);
9980 expand_insn (icode, 2, ops);
9981 temp = ops[0].value;
9982 }
9983 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9984 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9985 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9986 (modifier == EXPAND_STACK_PARM
9987 ? NULL_RTX : target),
9988 mode, mode, false);
9989 }
9990 if (reverse
9991 && modifier != EXPAND_MEMORY
9992 && modifier != EXPAND_WRITE)
9993 temp = flip_storage_order (mode, temp);
9994 return temp;
9995 }
9996
9997 case ARRAY_REF:
9998
9999 {
10000 tree array = treeop0;
10001 tree index = treeop1;
10002 tree init;
10003
10004 /* Fold an expression like: "foo"[2].
10005 This is not done in fold so it won't happen inside &.
10006 Don't fold if this is for wide characters since it's too
10007 difficult to do correctly and this is a very rare case. */
10008
10009 if (modifier != EXPAND_CONST_ADDRESS
10010 && modifier != EXPAND_INITIALIZER
10011 && modifier != EXPAND_MEMORY)
10012 {
10013 tree t = fold_read_from_constant_string (exp);
10014
10015 if (t)
10016 return expand_expr (t, target, tmode, modifier);
10017 }
10018
10019 /* If this is a constant index into a constant array,
10020 just get the value from the array. Handle both the cases when
10021 we have an explicit constructor and when our operand is a variable
10022 that was declared const. */
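/* Illustrative example (hypothetical declaration): given
   "static const int t[3] = { 1, 2, 3 };", a use of t[1] is resolved
   here to the constant 2 (when optimizing, via ctor_for_folding)
   rather than being loaded from memory.  */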
10023
10024 if (modifier != EXPAND_CONST_ADDRESS
10025 && modifier != EXPAND_INITIALIZER
10026 && modifier != EXPAND_MEMORY
10027 && TREE_CODE (array) == CONSTRUCTOR
10028 && ! TREE_SIDE_EFFECTS (array)
10029 && TREE_CODE (index) == INTEGER_CST)
10030 {
10031 unsigned HOST_WIDE_INT ix;
10032 tree field, value;
10033
10034 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
10035 field, value)
10036 if (tree_int_cst_equal (field, index))
10037 {
10038 if (!TREE_SIDE_EFFECTS (value))
10039 return expand_expr (fold (value), target, tmode, modifier);
10040 break;
10041 }
10042 }
10043
10044 else if (optimize >= 1
10045 && modifier != EXPAND_CONST_ADDRESS
10046 && modifier != EXPAND_INITIALIZER
10047 && modifier != EXPAND_MEMORY
10048 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
10049 && TREE_CODE (index) == INTEGER_CST
10050 && (TREE_CODE (array) == VAR_DECL
10051 || TREE_CODE (array) == CONST_DECL)
10052 && (init = ctor_for_folding (array)) != error_mark_node)
10053 {
10054 if (init == NULL_TREE)
10055 {
10056 tree value = build_zero_cst (type);
10057 if (TREE_CODE (value) == CONSTRUCTOR)
10058 {
10059 /* If VALUE is a CONSTRUCTOR, this optimization is only
10060 useful if this doesn't store the CONSTRUCTOR into
10061 memory. If it does, it is more efficient to just
10062 load the data from the array directly. */
10063 rtx ret = expand_constructor (value, target,
10064 modifier, true);
10065 if (ret == NULL_RTX)
10066 value = NULL_TREE;
10067 }
10068
10069 if (value)
10070 return expand_expr (value, target, tmode, modifier);
10071 }
10072 else if (TREE_CODE (init) == CONSTRUCTOR)
10073 {
10074 unsigned HOST_WIDE_INT ix;
10075 tree field, value;
10076
10077 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10078 field, value)
10079 if (tree_int_cst_equal (field, index))
10080 {
10081 if (TREE_SIDE_EFFECTS (value))
10082 break;
10083
10084 if (TREE_CODE (value) == CONSTRUCTOR)
10085 {
10086 /* If VALUE is a CONSTRUCTOR, this
10087 optimization is only useful if
10088 this doesn't store the CONSTRUCTOR
10089 into memory. If it does, it is more
10090 efficient to just load the data from
10091 the array directly. */
10092 rtx ret = expand_constructor (value, target,
10093 modifier, true);
10094 if (ret == NULL_RTX)
10095 break;
10096 }
10097
10098 return
10099 expand_expr (fold (value), target, tmode, modifier);
10100 }
10101 }
10102 else if (TREE_CODE (init) == STRING_CST)
10103 {
10104 tree low_bound = array_ref_low_bound (exp);
10105 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10106
10107 /* Optimize the special case of a zero lower bound.
10108
10109 We convert the lower bound to sizetype to avoid problems
10110 with constant folding. E.g. suppose the lower bound is
10111 1 and its mode is QI. Without the conversion
10112 (ARRAY + (INDEX - (unsigned char)1))
10113 becomes
10114 (ARRAY + (-(unsigned char)1) + INDEX)
10115 which becomes
10116 (ARRAY + 255 + INDEX). Oops! */
10117 if (!integer_zerop (low_bound))
10118 index1 = size_diffop_loc (loc, index1,
10119 fold_convert_loc (loc, sizetype,
10120 low_bound));
10121
10122 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10123 {
10124 tree type = TREE_TYPE (TREE_TYPE (init));
10125 machine_mode mode = TYPE_MODE (type);
10126
10127 if (GET_MODE_CLASS (mode) == MODE_INT
10128 && GET_MODE_SIZE (mode) == 1)
10129 return gen_int_mode (TREE_STRING_POINTER (init)
10130 [TREE_INT_CST_LOW (index1)],
10131 mode);
10132 }
10133 }
10134 }
10135 }
10136 goto normal_inner_ref;
10137
10138 case COMPONENT_REF:
10139 /* If the operand is a CONSTRUCTOR, we can just extract the
10140 appropriate field if it is present. */
10141 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10142 {
10143 unsigned HOST_WIDE_INT idx;
10144 tree field, value;
10145
10146 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10147 idx, field, value)
10148 if (field == treeop1
10149 /* We can normally use the value of the field in the
10150 CONSTRUCTOR. However, if this is a bitfield in
10151 an integral mode that we can fit in a HOST_WIDE_INT,
10152 we must mask only the number of bits in the bitfield,
10153 since this is done implicitly by the constructor. If
10154 the bitfield does not meet either of those conditions,
10155 we can't do this optimization. */
10156 && (! DECL_BIT_FIELD (field)
10157 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10158 && (GET_MODE_PRECISION (DECL_MODE (field))
10159 <= HOST_BITS_PER_WIDE_INT))))
10160 {
10161 if (DECL_BIT_FIELD (field)
10162 && modifier == EXPAND_STACK_PARM)
10163 target = 0;
10164 op0 = expand_expr (value, target, tmode, modifier);
10165 if (DECL_BIT_FIELD (field))
10166 {
10167 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10168 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10169
10170 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10171 {
10172 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10173 imode);
10174 op0 = expand_and (imode, op0, op1, target);
10175 }
10176 else
10177 {
10178 int count = GET_MODE_PRECISION (imode) - bitsize;
10179
10180 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10181 target, 0);
10182 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10183 target, 0);
10184 }
10185 }
10186
10187 return op0;
10188 }
10189 }
10190 goto normal_inner_ref;
10191
10192 case BIT_FIELD_REF:
10193 case ARRAY_RANGE_REF:
10194 normal_inner_ref:
10195 {
10196 machine_mode mode1, mode2;
10197 HOST_WIDE_INT bitsize, bitpos;
10198 tree offset;
10199 int reversep, volatilep = 0, must_force_mem;
10200 tree tem
10201 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
10202 &unsignedp, &reversep, &volatilep, true);
10203 rtx orig_op0, memloc;
10204 bool clear_mem_expr = false;
10205
10206 /* If we got back the original object, something is wrong. Perhaps
10207 we are evaluating an expression too early. In any event, don't
10208 infinitely recurse. */
10209 gcc_assert (tem != exp);
10210
10211 /* If TEM's type is a union of variable size, pass TARGET to the inner
10212 computation, since it will need a temporary and TARGET is known
10213 to be suitable for that. This occurs in unchecked conversion in Ada. */
10214 orig_op0 = op0
10215 = expand_expr_real (tem,
10216 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10217 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10218 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10219 != INTEGER_CST)
10220 && modifier != EXPAND_STACK_PARM
10221 ? target : NULL_RTX),
10222 VOIDmode,
10223 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10224 NULL, true);
10225
10226 /* If the field has a mode, we want to access it in the
10227 field's mode, not the computed mode.
10228 If a MEM has VOIDmode (external with incomplete type),
10229 use BLKmode for it instead. */
10230 if (MEM_P (op0))
10231 {
10232 if (mode1 != VOIDmode)
10233 op0 = adjust_address (op0, mode1, 0);
10234 else if (GET_MODE (op0) == VOIDmode)
10235 op0 = adjust_address (op0, BLKmode, 0);
10236 }
10237
10238 mode2
10239 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10240
10241 /* If we have either an offset, a BLKmode result, or a reference
10242 outside the underlying object, we must force it to memory.
10243 Such a case can occur in Ada if we have unchecked conversion
10244 of an expression from a scalar type to an aggregate type or
10245 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10246 passed a partially uninitialized object or a view-conversion
10247 to a larger size. */
10248 must_force_mem = (offset
10249 || mode1 == BLKmode
10250 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10251
10252 /* Handle CONCAT first. */
10253 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10254 {
10255 if (bitpos == 0
10256 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10257 {
10258 if (reversep)
10259 op0 = flip_storage_order (GET_MODE (op0), op0);
10260 return op0;
10261 }
10262 if (bitpos == 0
10263 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10264 && bitsize)
10265 {
10266 op0 = XEXP (op0, 0);
10267 mode2 = GET_MODE (op0);
10268 }
10269 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10270 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10271 && bitpos
10272 && bitsize)
10273 {
10274 op0 = XEXP (op0, 1);
10275 bitpos = 0;
10276 mode2 = GET_MODE (op0);
10277 }
10278 else
10279 /* Otherwise force into memory. */
10280 must_force_mem = 1;
10281 }
10282
10283 /* If this is a constant, put it in a register if it is a legitimate
10284 constant and we don't need a memory reference. */
10285 if (CONSTANT_P (op0)
10286 && mode2 != BLKmode
10287 && targetm.legitimate_constant_p (mode2, op0)
10288 && !must_force_mem)
10289 op0 = force_reg (mode2, op0);
10290
10291 /* Otherwise, if this is a constant, try to force it to the constant
10292 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10293 is a legitimate constant. */
10294 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10295 op0 = validize_mem (memloc);
10296
10297 /* Otherwise, if this is a constant or the object is not in memory
10298 and need be, put it there. */
10299 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10300 {
10301 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10302 emit_move_insn (memloc, op0);
10303 op0 = memloc;
10304 clear_mem_expr = true;
10305 }
10306
10307 if (offset)
10308 {
10309 machine_mode address_mode;
10310 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10311 EXPAND_SUM);
10312
10313 gcc_assert (MEM_P (op0));
10314
10315 address_mode = get_address_mode (op0);
10316 if (GET_MODE (offset_rtx) != address_mode)
10317 {
10318 /* We cannot be sure that the RTL in offset_rtx is valid outside
10319 of a memory address context, so force it into a register
10320 before attempting to convert it to the desired mode. */
10321 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10322 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10323 }
10324
10325 /* See the comment in expand_assignment for the rationale. */
10326 if (mode1 != VOIDmode
10327 && bitpos != 0
10328 && bitsize > 0
10329 && (bitpos % bitsize) == 0
10330 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10331 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10332 {
10333 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10334 bitpos = 0;
10335 }
10336
10337 op0 = offset_address (op0, offset_rtx,
10338 highest_pow2_factor (offset));
10339 }
10340
10341 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10342 record its alignment as BIGGEST_ALIGNMENT. */
10343 if (MEM_P (op0) && bitpos == 0 && offset != 0
10344 && is_aligning_offset (offset, tem))
10345 set_mem_align (op0, BIGGEST_ALIGNMENT);
10346
10347 /* Don't forget about volatility even if this is a bitfield. */
10348 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10349 {
10350 if (op0 == orig_op0)
10351 op0 = copy_rtx (op0);
10352
10353 MEM_VOLATILE_P (op0) = 1;
10354 }
10355
10356 /* In cases where an aligned union has an unaligned object
10357 as a field, we might be extracting a BLKmode value from
10358 an integer-mode (e.g., SImode) object. Handle this case
10359 by doing the extract into an object as wide as the field
10360 (which we know to be the width of a basic mode), then
10361 storing into memory, and changing the mode to BLKmode. */
10362 if (mode1 == VOIDmode
10363 || REG_P (op0) || GET_CODE (op0) == SUBREG
10364 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10365 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10366 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10367 && modifier != EXPAND_CONST_ADDRESS
10368 && modifier != EXPAND_INITIALIZER
10369 && modifier != EXPAND_MEMORY)
10370 /* If the bitfield is volatile and the bitsize
10371 is narrower than the access size of the bitfield,
10372 we need to extract bitfields from the access. */
10373 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10374 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10375 && mode1 != BLKmode
10376 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10377 /* If the field isn't aligned enough to fetch as a memref,
10378 fetch it as a bit field. */
10379 || (mode1 != BLKmode
10380 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10381 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10382 || (MEM_P (op0)
10383 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10384 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10385 && modifier != EXPAND_MEMORY
10386 && ((modifier == EXPAND_CONST_ADDRESS
10387 || modifier == EXPAND_INITIALIZER)
10388 ? STRICT_ALIGNMENT
10389 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10390 || (bitpos % BITS_PER_UNIT != 0)))
10391 /* If the type and the field are a constant size and the
10392 size of the type isn't the same size as the bitfield,
10393 we must use bitfield operations. */
10394 || (bitsize >= 0
10395 && TYPE_SIZE (TREE_TYPE (exp))
10396 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10397 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10398 bitsize)))
10399 {
10400 machine_mode ext_mode = mode;
10401
10402 if (ext_mode == BLKmode
10403 && ! (target != 0 && MEM_P (op0)
10404 && MEM_P (target)
10405 && bitpos % BITS_PER_UNIT == 0))
10406 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10407
10408 if (ext_mode == BLKmode)
10409 {
10410 if (target == 0)
10411 target = assign_temp (type, 1, 1);
10412
10413 /* ??? Unlike the similar test a few lines below, this one is
10414 very likely obsolete. */
10415 if (bitsize == 0)
10416 return target;
10417
10418 /* In this case, BITPOS must start at a byte boundary and
10419 TARGET, if specified, must be a MEM. */
10420 gcc_assert (MEM_P (op0)
10421 && (!target || MEM_P (target))
10422 && !(bitpos % BITS_PER_UNIT));
10423
10424 emit_block_move (target,
10425 adjust_address (op0, VOIDmode,
10426 bitpos / BITS_PER_UNIT),
10427 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10428 / BITS_PER_UNIT),
10429 (modifier == EXPAND_STACK_PARM
10430 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10431
10432 return target;
10433 }
10434
10435 /* If we have nothing to extract, the result will be 0 for targets
10436 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10437 return 0 for the sake of consistency, as reading a zero-sized
10438 bitfield is valid in Ada and the value is fully specified. */
10439 if (bitsize == 0)
10440 return const0_rtx;
10441
10442 op0 = validize_mem (op0);
10443
10444 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10445 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10446
10447 /* If the result has a record type and the extraction is done in
10448 an integral mode, then the field may be not aligned on a byte
10449 boundary; in this case, if it has reverse storage order, it
10450 needs to be extracted as a scalar field with reverse storage
10451 order and put back into memory order afterwards. */
10452 if (TREE_CODE (type) == RECORD_TYPE
10453 && GET_MODE_CLASS (ext_mode) == MODE_INT)
10454 reversep = TYPE_REVERSE_STORAGE_ORDER (type);
10455
10456 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10457 (modifier == EXPAND_STACK_PARM
10458 ? NULL_RTX : target),
10459 ext_mode, ext_mode, reversep);
10460
10461 /* If the result has a record type and the mode of OP0 is an
10462 integral mode then, if BITSIZE is narrower than this mode
10463 and this is for big-endian data, we must put the field
10464 into the high-order bits. And we must also put it back
10465 into memory order if it has been previously reversed. */
10466 if (TREE_CODE (type) == RECORD_TYPE
10467 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
10468 {
10469 HOST_WIDE_INT size = GET_MODE_BITSIZE (GET_MODE (op0));
10470
10471 if (bitsize < size
10472 && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
10473 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10474 size - bitsize, op0, 1);
10475
10476 if (reversep)
10477 op0 = flip_storage_order (GET_MODE (op0), op0);
10478 }
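/* Illustrative note (added example, not from the original source): if a
3-bit record field has been extracted into a 32-bit integer mode on a
big-endian target, the shift above moves it left by 32 - 3 == 29 bits so
that it lands in the high-order bits, i.e. in memory order;
flip_storage_order then undoes the byte reversal when the field was
fetched in reverse storage order. */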
10479
10480 /* If the result type is BLKmode, store the data into a temporary
10481 of the appropriate type, but with the mode corresponding to the
10482 mode for the data we have (op0's mode). */
10483 if (mode == BLKmode)
10484 {
10485 rtx new_rtx
10486 = assign_stack_temp_for_type (ext_mode,
10487 GET_MODE_BITSIZE (ext_mode),
10488 type);
10489 emit_move_insn (new_rtx, op0);
10490 op0 = copy_rtx (new_rtx);
10491 PUT_MODE (op0, BLKmode);
10492 }
10493
10494 return op0;
10495 }
10496
10497 /* If the result is BLKmode, use that to access the object
10498 now as well. */
10499 if (mode == BLKmode)
10500 mode1 = BLKmode;
10501
10502 /* Get a reference to just this component. */
10503 if (modifier == EXPAND_CONST_ADDRESS
10504 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10505 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10506 else
10507 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10508
10509 if (op0 == orig_op0)
10510 op0 = copy_rtx (op0);
10511
10512 set_mem_attributes (op0, exp, 0);
10513
10514 if (REG_P (XEXP (op0, 0)))
10515 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10516
10517 /* If op0 is a temporary because the original expression was forced
10518 to memory, clear MEM_EXPR so that the original expression cannot
10519 be marked as addressable through MEM_EXPR of the temporary. */
10520 if (clear_mem_expr)
10521 set_mem_expr (op0, NULL_TREE);
10522
10523 MEM_VOLATILE_P (op0) |= volatilep;
10524
10525 if (reversep
10526 && modifier != EXPAND_MEMORY
10527 && modifier != EXPAND_WRITE)
10528 op0 = flip_storage_order (mode1, op0);
10529
10530 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10531 || modifier == EXPAND_CONST_ADDRESS
10532 || modifier == EXPAND_INITIALIZER)
10533 return op0;
10534
10535 if (target == 0)
10536 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10537
10538 convert_move (target, op0, unsignedp);
10539 return target;
10540 }
10541
10542 case OBJ_TYPE_REF:
10543 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10544
10545 case CALL_EXPR:
10546 /* All valid uses of __builtin_va_arg_pack () are removed during
10547 inlining. */
10548 if (CALL_EXPR_VA_ARG_PACK (exp))
10549 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10550 {
10551 tree fndecl = get_callee_fndecl (exp), attr;
10552
10553 if (fndecl
10554 && (attr = lookup_attribute ("error",
10555 DECL_ATTRIBUTES (fndecl))) != NULL)
10556 error ("%Kcall to %qs declared with attribute error: %s",
10557 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10558 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10559 if (fndecl
10560 && (attr = lookup_attribute ("warning",
10561 DECL_ATTRIBUTES (fndecl))) != NULL)
10562 warning_at (tree_nonartificial_location (exp),
10563 0, "%Kcall to %qs declared with attribute warning: %s",
10564 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10565 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10566
10567 /* Check for a built-in function. */
10568 if (fndecl && DECL_BUILT_IN (fndecl))
10569 {
10570 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10571 if (CALL_WITH_BOUNDS_P (exp))
10572 return expand_builtin_with_bounds (exp, target, subtarget,
10573 tmode, ignore);
10574 else
10575 return expand_builtin (exp, target, subtarget, tmode, ignore);
10576 }
10577 }
10578 return expand_call (exp, target, ignore);
10579
10580 case VIEW_CONVERT_EXPR:
10581 op0 = NULL_RTX;
10582
10583 /* If we are converting to BLKmode, try to avoid an intermediate
10584 temporary by fetching an inner memory reference. */
10585 if (mode == BLKmode
10586 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10587 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10588 && handled_component_p (treeop0))
10589 {
10590 machine_mode mode1;
10591 HOST_WIDE_INT bitsize, bitpos;
10592 tree offset;
10593 int unsignedp, reversep, volatilep = 0;
10594 tree tem
10595 = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
10596 &unsignedp, &reversep, &volatilep, true);
10597 rtx orig_op0;
10598
10599 /* ??? We should work harder and deal with non-zero offsets. */
10600 if (!offset
10601 && (bitpos % BITS_PER_UNIT) == 0
10602 && !reversep
10603 && bitsize >= 0
10604 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10605 {
10606 /* See the normal_inner_ref case for the rationale. */
10607 orig_op0
10608 = expand_expr_real (tem,
10609 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10610 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10611 != INTEGER_CST)
10612 && modifier != EXPAND_STACK_PARM
10613 ? target : NULL_RTX),
10614 VOIDmode,
10615 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10616 NULL, true);
10617
10618 if (MEM_P (orig_op0))
10619 {
10620 op0 = orig_op0;
10621
10622 /* Get a reference to just this component. */
10623 if (modifier == EXPAND_CONST_ADDRESS
10624 || modifier == EXPAND_SUM
10625 || modifier == EXPAND_INITIALIZER)
10626 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10627 else
10628 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10629
10630 if (op0 == orig_op0)
10631 op0 = copy_rtx (op0);
10632
10633 set_mem_attributes (op0, treeop0, 0);
10634 if (REG_P (XEXP (op0, 0)))
10635 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10636
10637 MEM_VOLATILE_P (op0) |= volatilep;
10638 }
10639 }
10640 }
10641
10642 if (!op0)
10643 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10644 NULL, inner_reference_p);
10645
10646 /* If the input and output modes are both the same, we are done. */
10647 if (mode == GET_MODE (op0))
10648 ;
10649 /* If neither mode is BLKmode, and both modes are the same size
10650 then we can use gen_lowpart. */
10651 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10652 && (GET_MODE_PRECISION (mode)
10653 == GET_MODE_PRECISION (GET_MODE (op0)))
10654 && !COMPLEX_MODE_P (GET_MODE (op0)))
10655 {
10656 if (GET_CODE (op0) == SUBREG)
10657 op0 = force_reg (GET_MODE (op0), op0);
10658 temp = gen_lowpart_common (mode, op0);
10659 if (temp)
10660 op0 = temp;
10661 else
10662 {
10663 if (!REG_P (op0) && !MEM_P (op0))
10664 op0 = force_reg (GET_MODE (op0), op0);
10665 op0 = gen_lowpart (mode, op0);
10666 }
10667 }
10668 /* If both types are integral, convert from one mode to the other. */
10669 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10670 op0 = convert_modes (mode, GET_MODE (op0), op0,
10671 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10672 /* If the output type is a bit-field type, do an extraction. */
10673 else if (reduce_bit_field)
10674 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10675 TYPE_UNSIGNED (type), NULL_RTX,
10676 mode, mode, false);
10677 /* As a last resort, spill op0 to memory, and reload it in a
10678 different mode. */
10679 else if (!MEM_P (op0))
10680 {
10681 /* If the operand is not a MEM, force it into memory. Since we
10682 are going to be changing the mode of the MEM, don't call
10683 force_const_mem for constants because we don't allow pool
10684 constants to change mode. */
10685 tree inner_type = TREE_TYPE (treeop0);
10686
10687 gcc_assert (!TREE_ADDRESSABLE (exp));
10688
10689 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10690 target
10691 = assign_stack_temp_for_type
10692 (TYPE_MODE (inner_type),
10693 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10694
10695 emit_move_insn (target, op0);
10696 op0 = target;
10697 }
10698
10699 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10700 output type is such that the operand is known to be aligned, indicate
10701 that it is. Otherwise, we need only be concerned about alignment for
10702 non-BLKmode results. */
10703 if (MEM_P (op0))
10704 {
10705 enum insn_code icode;
10706
10707 if (TYPE_ALIGN_OK (type))
10708 {
10709 /* ??? Copying the MEM without substantially changing it might
10710 run afoul of the code handling volatile memory references in
10711 store_expr, which assumes that TARGET is returned unmodified
10712 if it has been used. */
10713 op0 = copy_rtx (op0);
10714 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10715 }
10716 else if (modifier != EXPAND_WRITE
10717 && modifier != EXPAND_MEMORY
10718 && !inner_reference_p
10719 && mode != BLKmode
10720 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10721 {
10722 /* If the target has special handling for unaligned loads of this
10723 mode, use it. */
10724 if ((icode = optab_handler (movmisalign_optab, mode))
10725 != CODE_FOR_nothing)
10726 {
10727 rtx reg;
10728
10729 op0 = adjust_address (op0, mode, 0);
10730 /* We've already validated the memory, and we're creating a
10731 new pseudo destination. The predicates really can't
10732 fail. */
10733 reg = gen_reg_rtx (mode);
10734
10735 /* Nor can the insn generator. */
10736 rtx_insn *insn = GEN_FCN (icode) (reg, op0);
10737 emit_insn (insn);
10738 return reg;
10739 }
10740 else if (STRICT_ALIGNMENT)
10741 {
10742 tree inner_type = TREE_TYPE (treeop0);
10743 HOST_WIDE_INT temp_size
10744 = MAX (int_size_in_bytes (inner_type),
10745 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10746 rtx new_rtx
10747 = assign_stack_temp_for_type (mode, temp_size, type);
10748 rtx new_with_op0_mode
10749 = adjust_address (new_rtx, GET_MODE (op0), 0);
10750
10751 gcc_assert (!TREE_ADDRESSABLE (exp));
10752
10753 if (GET_MODE (op0) == BLKmode)
10754 emit_block_move (new_with_op0_mode, op0,
10755 GEN_INT (GET_MODE_SIZE (mode)),
10756 (modifier == EXPAND_STACK_PARM
10757 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10758 else
10759 emit_move_insn (new_with_op0_mode, op0);
10760
10761 op0 = new_rtx;
10762 }
10763 }
10764
10765 op0 = adjust_address (op0, mode, 0);
10766 }
10767
10768 return op0;
10769
10770 case MODIFY_EXPR:
10771 {
10772 tree lhs = treeop0;
10773 tree rhs = treeop1;
10774 gcc_assert (ignore);
10775
10776 /* Check for |= or &= of a bitfield of size 1 into another bitfield
10777 of size 1. In this case, (unless we need the result of the
10778 assignment) we can do this more efficiently with a
10779 test followed by an assignment, if necessary.
10780
10781 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10782 things change so that we do, this code should be enhanced to
10783 support it. */
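/* Added illustration (not part of the original comment): for s.a |= s.b
with single-bit fields, the code below jumps over the store when s.b is
clear and otherwise stores 1 into s.a; for s.a &= s.b it stores 0 into
s.a only when s.b is clear. */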
10784 if (TREE_CODE (lhs) == COMPONENT_REF
10785 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10786 || TREE_CODE (rhs) == BIT_AND_EXPR)
10787 && TREE_OPERAND (rhs, 0) == lhs
10788 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10789 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10790 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10791 {
10792 rtx_code_label *label = gen_label_rtx ();
10793 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10794 do_jump (TREE_OPERAND (rhs, 1),
10795 value ? label : 0,
10796 value ? 0 : label, -1);
10797 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10798 false);
10799 do_pending_stack_adjust ();
10800 emit_label (label);
10801 return const0_rtx;
10802 }
10803
10804 expand_assignment (lhs, rhs, false);
10805 return const0_rtx;
10806 }
10807
10808 case ADDR_EXPR:
10809 return expand_expr_addr_expr (exp, target, tmode, modifier);
10810
10811 case REALPART_EXPR:
10812 op0 = expand_normal (treeop0);
10813 return read_complex_part (op0, false);
10814
10815 case IMAGPART_EXPR:
10816 op0 = expand_normal (treeop0);
10817 return read_complex_part (op0, true);
10818
10819 case RETURN_EXPR:
10820 case LABEL_EXPR:
10821 case GOTO_EXPR:
10822 case SWITCH_EXPR:
10823 case ASM_EXPR:
10824 /* Expanded in cfgexpand.c. */
10825 gcc_unreachable ();
10826
10827 case TRY_CATCH_EXPR:
10828 case CATCH_EXPR:
10829 case EH_FILTER_EXPR:
10830 case TRY_FINALLY_EXPR:
10831 /* Lowered by tree-eh.c. */
10832 gcc_unreachable ();
10833
10834 case WITH_CLEANUP_EXPR:
10835 case CLEANUP_POINT_EXPR:
10836 case TARGET_EXPR:
10837 case CASE_LABEL_EXPR:
10838 case VA_ARG_EXPR:
10839 case BIND_EXPR:
10840 case INIT_EXPR:
10841 case CONJ_EXPR:
10842 case COMPOUND_EXPR:
10843 case PREINCREMENT_EXPR:
10844 case PREDECREMENT_EXPR:
10845 case POSTINCREMENT_EXPR:
10846 case POSTDECREMENT_EXPR:
10847 case LOOP_EXPR:
10848 case EXIT_EXPR:
10849 case COMPOUND_LITERAL_EXPR:
10850 /* Lowered by gimplify.c. */
10851 gcc_unreachable ();
10852
10853 case FDESC_EXPR:
10854 /* Function descriptors are not valid except as
10855 initialization constants, and should not be expanded. */
10856 gcc_unreachable ();
10857
10858 case WITH_SIZE_EXPR:
10859 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10860 have pulled out the size to use in whatever context it needed. */
10861 return expand_expr_real (treeop0, original_target, tmode,
10862 modifier, alt_rtl, inner_reference_p);
10863
10864 default:
10865 return expand_expr_real_2 (&ops, target, tmode, modifier);
10866 }
10867 }
10868 \f
10869 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10870 signedness of TYPE), possibly returning the result in TARGET. */
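/* Added illustration (assuming a 32-bit mode for concreteness): with
TYPE_PRECISION (type) == 5, the unsigned branch below ANDs EXP with the
mask 0x1f, while the signed branch shifts left by 32 - 5 == 27 and then
arithmetically right by 27 so that bit 4 is sign-extended. */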
10871 static rtx
10872 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10873 {
10874 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10875 if (target && GET_MODE (target) != GET_MODE (exp))
10876 target = 0;
10877 /* For constant values, reduce using build_int_cst_type. */
10878 if (CONST_INT_P (exp))
10879 {
10880 HOST_WIDE_INT value = INTVAL (exp);
10881 tree t = build_int_cst_type (type, value);
10882 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10883 }
10884 else if (TYPE_UNSIGNED (type))
10885 {
10886 machine_mode mode = GET_MODE (exp);
10887 rtx mask = immed_wide_int_const
10888 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10889 return expand_and (mode, exp, mask, target);
10890 }
10891 else
10892 {
10893 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10894 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10895 exp, count, target, 0);
10896 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10897 exp, count, target, 0);
10898 }
10899 }
10900 \f
10901 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10902 when applied to the address of EXP produces an address known to be
10903 aligned more than BIGGEST_ALIGNMENT. */
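/* Added illustration (not from the original source): the accepted OFFSET
has the shape (-(sizetype) &EXP) & C, possibly wrapped in conversions,
where C + 1 is a power of 2 and C is larger than BIGGEST_ALIGNMENT in
bytes; adding such an offset to &EXP rounds the address up to a C + 1
byte boundary. */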
10904
10905 static int
10906 is_aligning_offset (const_tree offset, const_tree exp)
10907 {
10908 /* Strip off any conversions. */
10909 while (CONVERT_EXPR_P (offset))
10910 offset = TREE_OPERAND (offset, 0);
10911
10912 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10913 a power of 2 and which is larger than BIGGEST_ALIGNMENT in bytes. */
10914 if (TREE_CODE (offset) != BIT_AND_EXPR
10915 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10916 || compare_tree_int (TREE_OPERAND (offset, 1),
10917 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10918 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10919 return 0;
10920
10921 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10922 It must be NEGATE_EXPR. Then strip any more conversions. */
10923 offset = TREE_OPERAND (offset, 0);
10924 while (CONVERT_EXPR_P (offset))
10925 offset = TREE_OPERAND (offset, 0);
10926
10927 if (TREE_CODE (offset) != NEGATE_EXPR)
10928 return 0;
10929
10930 offset = TREE_OPERAND (offset, 0);
10931 while (CONVERT_EXPR_P (offset))
10932 offset = TREE_OPERAND (offset, 0);
10933
10934 /* This must now be the address of EXP. */
10935 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10936 }
10937 \f
10938 /* Return the tree node if ARG corresponds to a string constant, or zero
10939 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10940 in bytes within the string that ARG is accessing. The type of the
10941 offset will be `sizetype'. */
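/* Added illustration (not from the original source): for an argument such
as &"hello"[2] or "hello" + 2, the STRING_CST for "hello" is returned and
*PTR_OFFSET is set to 2; a VAR_DECL or CONST_DECL whose initializer is a
string literal is handled the same way, returning the initializer. */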
10942
10943 tree
10944 string_constant (tree arg, tree *ptr_offset)
10945 {
10946 tree array, offset, lower_bound;
10947 STRIP_NOPS (arg);
10948
10949 if (TREE_CODE (arg) == ADDR_EXPR)
10950 {
10951 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10952 {
10953 *ptr_offset = size_zero_node;
10954 return TREE_OPERAND (arg, 0);
10955 }
10956 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10957 {
10958 array = TREE_OPERAND (arg, 0);
10959 offset = size_zero_node;
10960 }
10961 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10962 {
10963 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10964 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10965 if (TREE_CODE (array) != STRING_CST
10966 && TREE_CODE (array) != VAR_DECL)
10967 return 0;
10968
10969 /* Check if the array has a nonzero lower bound. */
10970 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10971 if (!integer_zerop (lower_bound))
10972 {
10973 /* If the offset and base aren't both constants, return 0. */
10974 if (TREE_CODE (lower_bound) != INTEGER_CST)
10975 return 0;
10976 if (TREE_CODE (offset) != INTEGER_CST)
10977 return 0;
10978 /* Adjust offset by the lower bound. */
10979 offset = size_diffop (fold_convert (sizetype, offset),
10980 fold_convert (sizetype, lower_bound));
10981 }
10982 }
10983 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10984 {
10985 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10986 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10987 if (TREE_CODE (array) != ADDR_EXPR)
10988 return 0;
10989 array = TREE_OPERAND (array, 0);
10990 if (TREE_CODE (array) != STRING_CST
10991 && TREE_CODE (array) != VAR_DECL)
10992 return 0;
10993 }
10994 else
10995 return 0;
10996 }
10997 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10998 {
10999 tree arg0 = TREE_OPERAND (arg, 0);
11000 tree arg1 = TREE_OPERAND (arg, 1);
11001
11002 STRIP_NOPS (arg0);
11003 STRIP_NOPS (arg1);
11004
11005 if (TREE_CODE (arg0) == ADDR_EXPR
11006 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
11007 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
11008 {
11009 array = TREE_OPERAND (arg0, 0);
11010 offset = arg1;
11011 }
11012 else if (TREE_CODE (arg1) == ADDR_EXPR
11013 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
11014 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
11015 {
11016 array = TREE_OPERAND (arg1, 0);
11017 offset = arg0;
11018 }
11019 else
11020 return 0;
11021 }
11022 else
11023 return 0;
11024
11025 if (TREE_CODE (array) == STRING_CST)
11026 {
11027 *ptr_offset = fold_convert (sizetype, offset);
11028 return array;
11029 }
11030 else if (TREE_CODE (array) == VAR_DECL
11031 || TREE_CODE (array) == CONST_DECL)
11032 {
11033 int length;
11034 tree init = ctor_for_folding (array);
11035
11036 /* Variables initialized to string literals can be handled too. */
11037 if (init == error_mark_node
11038 || !init
11039 || TREE_CODE (init) != STRING_CST)
11040 return 0;
11041
11042 /* Avoid const char foo[4] = "abcde"; */
11043 if (DECL_SIZE_UNIT (array) == NULL_TREE
11044 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
11045 || (length = TREE_STRING_LENGTH (init)) <= 0
11046 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
11047 return 0;
11048
11049 /* If the variable is bigger than the string literal, OFFSET must be a
11050 constant and within the bounds of the string literal. */
11051 offset = fold_convert (sizetype, offset);
11052 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
11053 && (! tree_fits_uhwi_p (offset)
11054 || compare_tree_int (offset, length) >= 0))
11055 return 0;
11056
11057 *ptr_offset = offset;
11058 return init;
11059 }
11060
11061 return 0;
11062 }
11063 \f
11064 /* Generate code to calculate OPS, an exploded expression,
11065 using a store-flag instruction and return an rtx for the result.
11066 OPS reflects a comparison.
11067
11068 If TARGET is nonzero, store the result there if convenient.
11069
11070 Return zero if there is no suitable set-flag instruction
11071 available on this machine.
11072
11073 Once expand_expr has been called on the arguments of the comparison,
11074 we are committed to doing the store flag, since it is not safe to
11075 re-evaluate the expression. We emit the store-flag insn by calling
11076 emit_store_flag, but only expand the arguments if we have a reason
11077 to believe that emit_store_flag will be successful. If we think that
11078 it will, but it isn't, we have to simulate the store-flag with a
11079 set/jump/set sequence. */
11080
11081 static rtx
11082 do_store_flag (sepops ops, rtx target, machine_mode mode)
11083 {
11084 enum rtx_code code;
11085 tree arg0, arg1, type;
11086 machine_mode operand_mode;
11087 int unsignedp;
11088 rtx op0, op1;
11089 rtx subtarget = target;
11090 location_t loc = ops->location;
11091
11092 arg0 = ops->op0;
11093 arg1 = ops->op1;
11094
11095 /* Don't crash if the comparison was erroneous. */
11096 if (arg0 == error_mark_node || arg1 == error_mark_node)
11097 return const0_rtx;
11098
11099 type = TREE_TYPE (arg0);
11100 operand_mode = TYPE_MODE (type);
11101 unsignedp = TYPE_UNSIGNED (type);
11102
11103 /* We won't bother with BLKmode store-flag operations because it would mean
11104 passing a lot of information to emit_store_flag. */
11105 if (operand_mode == BLKmode)
11106 return 0;
11107
11108 /* We won't bother with store-flag operations involving function pointers
11109 when function pointers must be canonicalized before comparisons. */
11110 if (targetm.have_canonicalize_funcptr_for_compare ()
11111 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
11112 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
11113 == FUNCTION_TYPE))
11114 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
11115 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
11116 == FUNCTION_TYPE))))
11117 return 0;
11118
11119 STRIP_NOPS (arg0);
11120 STRIP_NOPS (arg1);
11121
11122 /* For vector typed comparisons emit code to generate the desired
11123 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
11124 expander for this. */
11125 if (TREE_CODE (ops->type) == VECTOR_TYPE)
11126 {
11127 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11128 if (VECTOR_BOOLEAN_TYPE_P (ops->type)
11129 && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type))
11130 return expand_vec_cmp_expr (ops->type, ifexp, target);
11131 else
11132 {
11133 tree if_true = constant_boolean_node (true, ops->type);
11134 tree if_false = constant_boolean_node (false, ops->type);
11135 return expand_vec_cond_expr (ops->type, ifexp, if_true,
11136 if_false, target);
11137 }
11138 }
11139
11140 /* Get the rtx comparison code to use. We know that OPS is a comparison
11141 operation of some type. Some comparisons against 1 and -1 can be
11142 converted to comparisons with zero. Do so here so that the tests
11143 below will be aware that we have a comparison with zero. These
11144 tests will not catch constants in the first operand, but constants
11145 are rarely passed as the first operand. */
11146
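/* Added examples (derived from the cases below): x < 1 becomes x <= 0
(LE or LEU against zero), and for signed operands x <= -1 becomes x < 0
and x > -1 becomes x >= 0. */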
11147 switch (ops->code)
11148 {
11149 case EQ_EXPR:
11150 code = EQ;
11151 break;
11152 case NE_EXPR:
11153 code = NE;
11154 break;
11155 case LT_EXPR:
11156 if (integer_onep (arg1))
11157 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11158 else
11159 code = unsignedp ? LTU : LT;
11160 break;
11161 case LE_EXPR:
11162 if (! unsignedp && integer_all_onesp (arg1))
11163 arg1 = integer_zero_node, code = LT;
11164 else
11165 code = unsignedp ? LEU : LE;
11166 break;
11167 case GT_EXPR:
11168 if (! unsignedp && integer_all_onesp (arg1))
11169 arg1 = integer_zero_node, code = GE;
11170 else
11171 code = unsignedp ? GTU : GT;
11172 break;
11173 case GE_EXPR:
11174 if (integer_onep (arg1))
11175 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11176 else
11177 code = unsignedp ? GEU : GE;
11178 break;
11179
11180 case UNORDERED_EXPR:
11181 code = UNORDERED;
11182 break;
11183 case ORDERED_EXPR:
11184 code = ORDERED;
11185 break;
11186 case UNLT_EXPR:
11187 code = UNLT;
11188 break;
11189 case UNLE_EXPR:
11190 code = UNLE;
11191 break;
11192 case UNGT_EXPR:
11193 code = UNGT;
11194 break;
11195 case UNGE_EXPR:
11196 code = UNGE;
11197 break;
11198 case UNEQ_EXPR:
11199 code = UNEQ;
11200 break;
11201 case LTGT_EXPR:
11202 code = LTGT;
11203 break;
11204
11205 default:
11206 gcc_unreachable ();
11207 }
11208
11209 /* Put a constant second. */
11210 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11211 || TREE_CODE (arg0) == FIXED_CST)
11212 {
11213 std::swap (arg0, arg1);
11214 code = swap_condition (code);
11215 }
11216
11217 /* If this is an equality or inequality test of a single bit, we can
11218 do this by shifting the bit being tested to the low-order bit and
11219 masking the result with the constant 1. If the condition was EQ,
11220 we xor it with 1. This does not require an scc insn and is faster
11221 than an scc insn even if we have it.
11222
11223 The code to make this transformation was moved into fold_single_bit_test,
11224 so we just call into the folder and expand its result. */
11225
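/* Added illustration (not part of the original comment): a test such as
(x & 8) != 0 is expanded along the lines of (x >> 3) & 1, and
(x & 8) == 0 additionally XORs the result with 1, as described above;
fold_single_bit_test builds the corresponding tree, which is then
expanded normally. */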
11226 if ((code == NE || code == EQ)
11227 && integer_zerop (arg1)
11228 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11229 {
11230 gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11231 if (srcstmt
11232 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11233 {
11234 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11235 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11236 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11237 gimple_assign_rhs1 (srcstmt),
11238 gimple_assign_rhs2 (srcstmt));
11239 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11240 if (temp)
11241 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11242 }
11243 }
11244
11245 if (! get_subtarget (target)
11246 || GET_MODE (subtarget) != operand_mode)
11247 subtarget = 0;
11248
11249 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11250
11251 if (target == 0)
11252 target = gen_reg_rtx (mode);
11253
11254 /* Try a cstore if possible. */
11255 return emit_store_flag_force (target, code, op0, op1,
11256 operand_mode, unsignedp,
11257 (TYPE_PRECISION (ops->type) == 1
11258 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11259 }
11260 \f
11261 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11262 0 otherwise (i.e. if there is no casesi instruction).
11263
11264 DEFAULT_PROBABILITY is the probability of jumping to the default
11265 label. */
11266 int
11267 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11268 rtx table_label, rtx default_label, rtx fallback_label,
11269 int default_probability)
11270 {
11271 struct expand_operand ops[5];
11272 machine_mode index_mode = SImode;
11273 rtx op1, op2, index;
11274
11275 if (! targetm.have_casesi ())
11276 return 0;
11277
11278 /* Convert the index to SImode. */
11279 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11280 {
11281 machine_mode omode = TYPE_MODE (index_type);
11282 rtx rangertx = expand_normal (range);
11283
11284 /* We must handle the endpoints in the original mode. */
11285 index_expr = build2 (MINUS_EXPR, index_type,
11286 index_expr, minval);
11287 minval = integer_zero_node;
11288 index = expand_normal (index_expr);
11289 if (default_label)
11290 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11291 omode, 1, default_label,
11292 default_probability);
11293 /* Now we can safely truncate. */
11294 index = convert_to_mode (index_mode, index, 0);
11295 }
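/* Added note (not from the original source): a wider-than-SImode index,
e.g. a DImode one, is thus range-checked against RANGE in its original
mode (when a default label exists) before being narrowed, so the
truncation above cannot discard bits that still mattered for the range
check. */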
11296 else
11297 {
11298 if (TYPE_MODE (index_type) != index_mode)
11299 {
11300 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11301 index_expr = fold_convert (index_type, index_expr);
11302 }
11303
11304 index = expand_normal (index_expr);
11305 }
11306
11307 do_pending_stack_adjust ();
11308
11309 op1 = expand_normal (minval);
11310 op2 = expand_normal (range);
11311
11312 create_input_operand (&ops[0], index, index_mode);
11313 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11314 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11315 create_fixed_operand (&ops[3], table_label);
11316 create_fixed_operand (&ops[4], (default_label
11317 ? default_label
11318 : fallback_label));
11319 expand_jump_insn (targetm.code_for_casesi, 5, ops);
11320 return 1;
11321 }
11322
11323 /* Attempt to generate a tablejump instruction; same concept as try_casesi. */
11324 /* Subroutine of try_tablejump below.
11325
11326 INDEX is the value being switched on, with the lowest value
11327 in the table already subtracted.
11328 MODE is its expected mode (needed if INDEX is constant).
11329 RANGE is the length of the jump table.
11330 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11331
11332 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11333 index value is out of range.
11334 DEFAULT_PROBABILITY is the probability of jumping to
11335 the default label. */
11336
11337 static void
11338 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11339 rtx default_label, int default_probability)
11340 {
11341 rtx temp, vector;
11342
11343 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11344 cfun->cfg->max_jumptable_ents = INTVAL (range);
11345
11346 /* Do an unsigned comparison (in the proper mode) between the index
11347 expression and the value which represents the length of the range.
11348 Since we just finished subtracting the lower bound of the range
11349 from the index expression, this comparison allows us to simultaneously
11350 check that the original index expression value is both greater than
11351 or equal to the minimum value of the range and less than or equal to
11352 the maximum value of the range. */
11353
11354 if (default_label)
11355 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11356 default_label, default_probability);
11357
11358
11359 /* If index is in range, it must fit in Pmode.
11360 Convert to Pmode so we can index with it. */
11361 if (mode != Pmode)
11362 index = convert_to_mode (Pmode, index, 1);
11363
11364 /* Don't let a MEM slip through, because then INDEX that comes
11365 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11366 and break_out_memory_refs will go to work on it and mess it up. */
11367 #ifdef PIC_CASE_VECTOR_ADDRESS
11368 if (flag_pic && !REG_P (index))
11369 index = copy_to_mode_reg (Pmode, index);
11370 #endif
11371
11372 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11373 GET_MODE_SIZE, because this indicates how large insns are. The other
11374 uses should all be Pmode, because they are addresses. This code
11375 could fail if addresses and insns are not the same size. */
11376 index = simplify_gen_binary (MULT, Pmode, index,
11377 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11378 Pmode));
11379 index = simplify_gen_binary (PLUS, Pmode, index,
11380 gen_rtx_LABEL_REF (Pmode, table_label));
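/* Added note (not from the original source): the address computed above
is, informally, &TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE);
e.g. with 4-byte case-vector entries an index of 3 addresses the fourth
entry of the dispatch table. */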
11381
11382 #ifdef PIC_CASE_VECTOR_ADDRESS
11383 if (flag_pic)
11384 index = PIC_CASE_VECTOR_ADDRESS (index);
11385 else
11386 #endif
11387 index = memory_address (CASE_VECTOR_MODE, index);
11388 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11389 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11390 convert_move (temp, vector, 0);
11391
11392 emit_jump_insn (targetm.gen_tablejump (temp, table_label));
11393
11394 /* If we are generating PIC code or if the table is PC-relative, the
11395 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11396 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11397 emit_barrier ();
11398 }
11399
11400 int
11401 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11402 rtx table_label, rtx default_label, int default_probability)
11403 {
11404 rtx index;
11405
11406 if (! targetm.have_tablejump ())
11407 return 0;
11408
11409 index_expr = fold_build2 (MINUS_EXPR, index_type,
11410 fold_convert (index_type, index_expr),
11411 fold_convert (index_type, minval));
11412 index = expand_normal (index_expr);
11413 do_pending_stack_adjust ();
11414
11415 do_tablejump (index, TYPE_MODE (index_type),
11416 convert_modes (TYPE_MODE (index_type),
11417 TYPE_MODE (TREE_TYPE (range)),
11418 expand_normal (range),
11419 TYPE_UNSIGNED (TREE_TYPE (range))),
11420 table_label, default_label, default_probability);
11421 return 1;
11422 }
11423
11424 /* Return a CONST_VECTOR rtx representing vector mask for
11425 a VECTOR_CST of booleans. */
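/* Added illustration (not from the original source): a boolean VECTOR_CST
such as { 0, 1, 0, 1 } becomes a CONST_VECTOR { 0, -1, 0, -1 } in the
corresponding mask mode, since vector comparisons represent true as
all-ones in each element. */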
11426 static rtx
11427 const_vector_mask_from_tree (tree exp)
11428 {
11429 rtvec v;
11430 unsigned i;
11431 int units;
11432 tree elt;
11433 machine_mode inner, mode;
11434
11435 mode = TYPE_MODE (TREE_TYPE (exp));
11436 units = GET_MODE_NUNITS (mode);
11437 inner = GET_MODE_INNER (mode);
11438
11439 v = rtvec_alloc (units);
11440
11441 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11442 {
11443 elt = VECTOR_CST_ELT (exp, i);
11444
11445 gcc_assert (TREE_CODE (elt) == INTEGER_CST);
11446 if (integer_zerop (elt))
11447 RTVEC_ELT (v, i) = CONST0_RTX (inner);
11448 else if (integer_onep (elt)
11449 || integer_minus_onep (elt))
11450 RTVEC_ELT (v, i) = CONSTM1_RTX (inner);
11451 else
11452 gcc_unreachable ();
11453 }
11454
11455 return gen_rtx_CONST_VECTOR (mode, v);
11456 }
11457
11458 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11459 static rtx
11460 const_vector_from_tree (tree exp)
11461 {
11462 rtvec v;
11463 unsigned i;
11464 int units;
11465 tree elt;
11466 machine_mode inner, mode;
11467
11468 mode = TYPE_MODE (TREE_TYPE (exp));
11469
11470 if (initializer_zerop (exp))
11471 return CONST0_RTX (mode);
11472
11473 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
11474 return const_vector_mask_from_tree (exp);
11475
11476 units = GET_MODE_NUNITS (mode);
11477 inner = GET_MODE_INNER (mode);
11478
11479 v = rtvec_alloc (units);
11480
11481 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11482 {
11483 elt = VECTOR_CST_ELT (exp, i);
11484
11485 if (TREE_CODE (elt) == REAL_CST)
11486 RTVEC_ELT (v, i) = const_double_from_real_value (TREE_REAL_CST (elt),
11487 inner);
11488 else if (TREE_CODE (elt) == FIXED_CST)
11489 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11490 inner);
11491 else
11492 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11493 }
11494
11495 return gen_rtx_CONST_VECTOR (mode, v);
11496 }
11497
11498 /* Build a decl for a personality function given a language prefix. */
11499
11500 tree
11501 build_personality_function (const char *lang)
11502 {
11503 const char *unwind_and_version;
11504 tree decl, type;
11505 char *name;
11506
11507 switch (targetm_common.except_unwind_info (&global_options))
11508 {
11509 case UI_NONE:
11510 return NULL;
11511 case UI_SJLJ:
11512 unwind_and_version = "_sj0";
11513 break;
11514 case UI_DWARF2:
11515 case UI_TARGET:
11516 unwind_and_version = "_v0";
11517 break;
11518 case UI_SEH:
11519 unwind_and_version = "_seh0";
11520 break;
11521 default:
11522 gcc_unreachable ();
11523 }
11524
11525 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
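/* Added example (an assumption about a typical configuration, not from
the original source): with LANG == "gxx" and DWARF2 unwind info the name
built above is "__gxx_personality_v0". */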
11526
11527 type = build_function_type_list (integer_type_node, integer_type_node,
11528 long_long_unsigned_type_node,
11529 ptr_type_node, ptr_type_node, NULL_TREE);
11530 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11531 get_identifier (name), type);
11532 DECL_ARTIFICIAL (decl) = 1;
11533 DECL_EXTERNAL (decl) = 1;
11534 TREE_PUBLIC (decl) = 1;
11535
11536 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11537 are the flags assigned by targetm.encode_section_info. */
11538 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11539
11540 return decl;
11541 }
11542
11543 /* Extracts the personality function of DECL and returns the corresponding
11544 libfunc. */
11545
11546 rtx
11547 get_personality_function (tree decl)
11548 {
11549 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11550 enum eh_personality_kind pk;
11551
11552 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11553 if (pk == eh_personality_none)
11554 return NULL;
11555
11556 if (!personality
11557 && pk == eh_personality_any)
11558 personality = lang_hooks.eh_personality ();
11559
11560 if (pk == eh_personality_lang)
11561 gcc_assert (personality != NULL_TREE);
11562
11563 return XEXP (DECL_RTL (personality), 0);
11564 }
11565
11566 /* Returns a tree for the size of EXP in bytes. */
11567
11568 static tree
11569 tree_expr_size (const_tree exp)
11570 {
11571 if (DECL_P (exp)
11572 && DECL_SIZE_UNIT (exp) != 0)
11573 return DECL_SIZE_UNIT (exp);
11574 else
11575 return size_in_bytes (TREE_TYPE (exp));
11576 }
11577
11578 /* Return an rtx for the size in bytes of the value of EXP. */
11579
11580 rtx
11581 expr_size (tree exp)
11582 {
11583 tree size;
11584
11585 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11586 size = TREE_OPERAND (exp, 1);
11587 else
11588 {
11589 size = tree_expr_size (exp);
11590 gcc_assert (size);
11591 gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11592 }
11593
11594 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11595 }
11596
11597 /* Return a wide integer for the size in bytes of the value of EXP, or -1
11598 if the size can vary or is larger than an integer. */
11599
11600 static HOST_WIDE_INT
11601 int_expr_size (tree exp)
11602 {
11603 tree size;
11604
11605 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11606 size = TREE_OPERAND (exp, 1);
11607 else
11608 {
11609 size = tree_expr_size (exp);
11610 gcc_assert (size);
11611 }
11612
11613 if (size == 0 || !tree_fits_shwi_p (size))
11614 return -1;
11615
11616 return tree_to_shwi (size);
11617 }
11618
11619 #include "gt-expr.h"