1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "hash-set.h"
26 #include "vec.h"
27 #include "input.h"
28 #include "alias.h"
29 #include "symtab.h"
30 #include "inchash.h"
31 #include "tree.h"
32 #include "fold-const.h"
33 #include "stringpool.h"
34 #include "stor-layout.h"
35 #include "attribs.h"
36 #include "varasm.h"
37 #include "flags.h"
38 #include "regs.h"
39 #include "hard-reg-set.h"
40 #include "except.h"
41 #include "function.h"
42 #include "insn-config.h"
43 #include "insn-attr.h"
44 #include "hashtab.h"
45 #include "statistics.h"
46 #include "expmed.h"
47 #include "dojump.h"
48 #include "explow.h"
49 #include "calls.h"
50 #include "emit-rtl.h"
51 #include "stmt.h"
52 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
53 #include "expr.h"
54 #include "insn-codes.h"
55 #include "optabs.h"
56 #include "libfuncs.h"
57 #include "recog.h"
58 #include "reload.h"
59 #include "typeclass.h"
60 #include "toplev.h"
61 #include "langhooks.h"
62 #include "intl.h"
63 #include "tm_p.h"
64 #include "tree-iterator.h"
65 #include "predict.h"
66 #include "dominance.h"
67 #include "cfg.h"
68 #include "basic-block.h"
69 #include "tree-ssa-alias.h"
70 #include "internal-fn.h"
71 #include "gimple-expr.h"
72 #include "is-a.h"
73 #include "gimple.h"
74 #include "gimple-ssa.h"
75 #include "hash-map.h"
76 #include "plugin-api.h"
77 #include "ipa-ref.h"
78 #include "cgraph.h"
79 #include "tree-ssanames.h"
80 #include "target.h"
81 #include "common/common-target.h"
82 #include "timevar.h"
83 #include "df.h"
84 #include "diagnostic.h"
85 #include "tree-ssa-live.h"
86 #include "tree-outof-ssa.h"
87 #include "target-globals.h"
88 #include "params.h"
89 #include "tree-ssa-address.h"
90 #include "cfgexpand.h"
91 #include "builtins.h"
92 #include "tree-chkp.h"
93 #include "rtl-chkp.h"
94 #include "ccmp.h"
95
96
97 /* If this is nonzero, we do not bother generating VOLATILE
98 around volatile memory references, and we are willing to
99 output indirect addresses. If cse is to follow, we reject
100 indirect addresses so a useful potential cse is generated;
101 if it is used only once, instruction combination will produce
102 the same indirect address eventually. */
103 int cse_not_expected;
104
105 /* This structure is used by move_by_pieces to describe the move to
106 be performed. */
107 struct move_by_pieces_d
108 {
109 rtx to;
110 rtx to_addr;
111 int autinc_to;
112 int explicit_inc_to;
113 rtx from;
114 rtx from_addr;
115 int autinc_from;
116 int explicit_inc_from;
117 unsigned HOST_WIDE_INT len;
118 HOST_WIDE_INT offset;
119 int reverse;
120 };
121
122 /* This structure is used by store_by_pieces to describe the clear to
123 be performed. */
124
125 struct store_by_pieces_d
126 {
127 rtx to;
128 rtx to_addr;
129 int autinc_to;
130 int explicit_inc_to;
131 unsigned HOST_WIDE_INT len;
132 HOST_WIDE_INT offset;
133 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
134 void *constfundata;
135 int reverse;
136 };
137
138 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
139 struct move_by_pieces_d *);
140 static bool block_move_libcall_safe_for_call_parm (void);
141 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
142 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
143 unsigned HOST_WIDE_INT);
144 static tree emit_block_move_libcall_fn (int);
145 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
146 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
147 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
148 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
149 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
150 struct store_by_pieces_d *);
151 static tree clear_storage_libcall_fn (int);
152 static rtx_insn *compress_float_constant (rtx, rtx);
153 static rtx get_subtarget (rtx);
154 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
155 HOST_WIDE_INT, machine_mode,
156 tree, int, alias_set_type);
157 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
158 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
159 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
160 machine_mode, tree, alias_set_type, bool);
161
162 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
163
164 static int is_aligning_offset (const_tree, const_tree);
165 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
166 static rtx do_store_flag (sepops, rtx, machine_mode);
167 #ifdef PUSH_ROUNDING
168 static void emit_single_push_insn (machine_mode, rtx, tree);
169 #endif
170 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
171 static rtx const_vector_from_tree (tree);
172 static tree tree_expr_size (const_tree);
173 static HOST_WIDE_INT int_expr_size (tree);
174
175 \f
176 /* This is run to set up which modes can be used
177 directly in memory and to initialize the block move optab. It is run
178 at the beginning of compilation and when the target is reinitialized. */
179
180 void
181 init_expr_target (void)
182 {
183 rtx insn, pat;
184 machine_mode mode;
185 int num_clobbers;
186 rtx mem, mem1;
187 rtx reg;
188
189 /* Try indexing by frame ptr and try by stack ptr.
190 It is known that on the Convex the stack ptr isn't a valid index.
191 With luck, one or the other is valid on any machine. */
192 mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
193 mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
194
195 /* A scratch register we can modify in-place below to avoid
196 useless RTL allocations. */
197 reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
198
199 insn = rtx_alloc (INSN);
200 pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
201 PATTERN (insn) = pat;
202
203 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
204 mode = (machine_mode) ((int) mode + 1))
205 {
206 int regno;
207
208 direct_load[(int) mode] = direct_store[(int) mode] = 0;
209 PUT_MODE (mem, mode);
210 PUT_MODE (mem1, mode);
211
212 /* See if there is some register that can be used in this mode and
213 directly loaded or stored from memory. */
214
215 if (mode != VOIDmode && mode != BLKmode)
216 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
217 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
218 regno++)
219 {
220 if (! HARD_REGNO_MODE_OK (regno, mode))
221 continue;
222
223 set_mode_and_regno (reg, mode, regno);
224
225 SET_SRC (pat) = mem;
226 SET_DEST (pat) = reg;
227 if (recog (pat, insn, &num_clobbers) >= 0)
228 direct_load[(int) mode] = 1;
229
230 SET_SRC (pat) = mem1;
231 SET_DEST (pat) = reg;
232 if (recog (pat, insn, &num_clobbers) >= 0)
233 direct_load[(int) mode] = 1;
234
235 SET_SRC (pat) = reg;
236 SET_DEST (pat) = mem;
237 if (recog (pat, insn, &num_clobbers) >= 0)
238 direct_store[(int) mode] = 1;
239
240 SET_SRC (pat) = reg;
241 SET_DEST (pat) = mem1;
242 if (recog (pat, insn, &num_clobbers) >= 0)
243 direct_store[(int) mode] = 1;
244 }
245 }
246
247 mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
248
249 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
250 mode = GET_MODE_WIDER_MODE (mode))
251 {
252 machine_mode srcmode;
253 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
254 srcmode = GET_MODE_WIDER_MODE (srcmode))
255 {
256 enum insn_code ic;
257
258 ic = can_extend_p (mode, srcmode, 0);
259 if (ic == CODE_FOR_nothing)
260 continue;
261
262 PUT_MODE (mem, srcmode);
263
264 if (insn_operand_matches (ic, 1, mem))
265 float_extend_from_mem[mode][srcmode] = true;
266 }
267 }
268 }
269
270 /* This is run at the start of compiling a function. */
271
272 void
273 init_expr (void)
274 {
275 memset (&crtl->expr, 0, sizeof (crtl->expr));
276 }
277 \f
278 /* Copy data from FROM to TO, where the machine modes are not the same.
279 Both modes may be integer, or both may be floating, or both may be
280 fixed-point.
281 UNSIGNEDP should be nonzero if FROM is an unsigned type.
282 This causes zero-extension instead of sign-extension. */
283
284 void
285 convert_move (rtx to, rtx from, int unsignedp)
286 {
287 machine_mode to_mode = GET_MODE (to);
288 machine_mode from_mode = GET_MODE (from);
289 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
290 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
291 enum insn_code code;
292 rtx libcall;
293
294 /* rtx code for making an equivalent value. */
295 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
296 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
297
298
299 gcc_assert (to_real == from_real);
300 gcc_assert (to_mode != BLKmode);
301 gcc_assert (from_mode != BLKmode);
302
303 /* If the source and destination are already the same, then there's
304 nothing to do. */
305 if (to == from)
306 return;
307
308 /* If FROM is a SUBREG that indicates that we have already done at least
309 the required extension, strip it. We don't handle such SUBREGs as
310 TO here. */
311
312 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
313 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
314 >= GET_MODE_PRECISION (to_mode))
315 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
316 from = gen_lowpart (to_mode, from), from_mode = to_mode;
317
318 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
319
320 if (to_mode == from_mode
321 || (from_mode == VOIDmode && CONSTANT_P (from)))
322 {
323 emit_move_insn (to, from);
324 return;
325 }
326
327 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
328 {
329 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
330
331 if (VECTOR_MODE_P (to_mode))
332 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
333 else
334 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
335
336 emit_move_insn (to, from);
337 return;
338 }
339
340 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
341 {
342 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
343 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
344 return;
345 }
346
347 if (to_real)
348 {
349 rtx value;
350 rtx_insn *insns;
351 convert_optab tab;
352
353 gcc_assert ((GET_MODE_PRECISION (from_mode)
354 != GET_MODE_PRECISION (to_mode))
355 || (DECIMAL_FLOAT_MODE_P (from_mode)
356 != DECIMAL_FLOAT_MODE_P (to_mode)));
357
358 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
359 /* Conversion between decimal float and binary float, same size. */
360 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
361 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
362 tab = sext_optab;
363 else
364 tab = trunc_optab;
365
366 /* Try converting directly if the insn is supported. */
367
368 code = convert_optab_handler (tab, to_mode, from_mode);
369 if (code != CODE_FOR_nothing)
370 {
371 emit_unop_insn (code, to, from,
372 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
373 return;
374 }
375
376 /* Otherwise use a libcall. */
377 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
378
379 /* Is this conversion implemented yet? */
380 gcc_assert (libcall);
381
382 start_sequence ();
383 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
384 1, from, from_mode);
385 insns = get_insns ();
386 end_sequence ();
387 emit_libcall_block (insns, to, value,
388 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
389 from)
390 : gen_rtx_FLOAT_EXTEND (to_mode, from));
391 return;
392 }
393
394 /* Handle pointer conversion. */ /* SPEE 900220. */
395 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
396 {
397 convert_optab ctab;
398
399 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
400 ctab = trunc_optab;
401 else if (unsignedp)
402 ctab = zext_optab;
403 else
404 ctab = sext_optab;
405
406 if (convert_optab_handler (ctab, to_mode, from_mode)
407 != CODE_FOR_nothing)
408 {
409 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
410 to, from, UNKNOWN);
411 return;
412 }
413 }
414
415 /* Targets are expected to provide conversion insns between PxImode and
416 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
417 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
418 {
419 machine_mode full_mode
420 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
421
422 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
423 != CODE_FOR_nothing);
424
425 if (full_mode != from_mode)
426 from = convert_to_mode (full_mode, from, unsignedp);
427 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
428 to, from, UNKNOWN);
429 return;
430 }
431 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
432 {
433 rtx new_from;
434 machine_mode full_mode
435 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
436 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
437 enum insn_code icode;
438
439 icode = convert_optab_handler (ctab, full_mode, from_mode);
440 gcc_assert (icode != CODE_FOR_nothing);
441
442 if (to_mode == full_mode)
443 {
444 emit_unop_insn (icode, to, from, UNKNOWN);
445 return;
446 }
447
448 new_from = gen_reg_rtx (full_mode);
449 emit_unop_insn (icode, new_from, from, UNKNOWN);
450
451 /* else proceed to integer conversions below. */
452 from_mode = full_mode;
453 from = new_from;
454 }
455
456 /* Make sure both are fixed-point modes or both are not. */
457 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
458 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
459 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
460 {
461 /* If we widen from_mode to to_mode and they are in the same class,
462 we won't saturate the result.
463 Otherwise, always saturate the result to play safe. */
464 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
465 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
466 expand_fixed_convert (to, from, 0, 0);
467 else
468 expand_fixed_convert (to, from, 0, 1);
469 return;
470 }
471
472 /* Now both modes are integers. */
473
474 /* Handle expanding beyond a word. */
475 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
476 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
477 {
478 rtx_insn *insns;
479 rtx lowpart;
480 rtx fill_value;
481 rtx lowfrom;
482 int i;
483 machine_mode lowpart_mode;
484 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
485
486 /* Try converting directly if the insn is supported. */
487 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
488 != CODE_FOR_nothing)
489 {
490 /* If FROM is a SUBREG, put it into a register. Do this
491 so that we always generate the same set of insns for
492 better cse'ing; if an intermediate assignment occurred,
493 we won't be doing the operation directly on the SUBREG. */
494 if (optimize > 0 && GET_CODE (from) == SUBREG)
495 from = force_reg (from_mode, from);
496 emit_unop_insn (code, to, from, equiv_code);
497 return;
498 }
499 /* Next, try converting via full word. */
500 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
501 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
502 != CODE_FOR_nothing))
503 {
504 rtx word_to = gen_reg_rtx (word_mode);
505 if (REG_P (to))
506 {
507 if (reg_overlap_mentioned_p (to, from))
508 from = force_reg (from_mode, from);
509 emit_clobber (to);
510 }
511 convert_move (word_to, from, unsignedp);
512 emit_unop_insn (code, to, word_to, equiv_code);
513 return;
514 }
515
516 /* No special multiword conversion insn; do it by hand. */
517 start_sequence ();
518
519 /* Since we will turn this into a no conflict block, we must ensure
520 that the source does not overlap the target, so force it into an
521 isolated register when it might. Likewise for any MEM input, since the
522 conversion sequence might require several references to it and we
523 must ensure we're getting the same value every time. */
524
525 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
526 from = force_reg (from_mode, from);
527
528 /* Get a copy of FROM widened to a word, if necessary. */
529 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
530 lowpart_mode = word_mode;
531 else
532 lowpart_mode = from_mode;
533
534 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
535
536 lowpart = gen_lowpart (lowpart_mode, to);
537 emit_move_insn (lowpart, lowfrom);
538
539 /* Compute the value to put in each remaining word. */
540 if (unsignedp)
541 fill_value = const0_rtx;
542 else
543 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
544 LT, lowfrom, const0_rtx,
545 lowpart_mode, 0, -1);
546
547 /* Fill the remaining words. */
548 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
549 {
550 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
551 rtx subword = operand_subword (to, index, 1, to_mode);
552
553 gcc_assert (subword);
554
555 if (fill_value != subword)
556 emit_move_insn (subword, fill_value);
557 }
558
559 insns = get_insns ();
560 end_sequence ();
561
562 emit_insn (insns);
563 return;
564 }
565
566 /* Truncating multi-word to a word or less. */
567 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
568 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
569 {
570 if (!((MEM_P (from)
571 && ! MEM_VOLATILE_P (from)
572 && direct_load[(int) to_mode]
573 && ! mode_dependent_address_p (XEXP (from, 0),
574 MEM_ADDR_SPACE (from)))
575 || REG_P (from)
576 || GET_CODE (from) == SUBREG))
577 from = force_reg (from_mode, from);
578 convert_move (to, gen_lowpart (word_mode, from), 0);
579 return;
580 }
581
582 /* Now follow all the conversions between integers
583 no more than a word long. */
584
585 /* For truncation, usually we can just refer to FROM in a narrower mode. */
586 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
587 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
588 {
589 if (!((MEM_P (from)
590 && ! MEM_VOLATILE_P (from)
591 && direct_load[(int) to_mode]
592 && ! mode_dependent_address_p (XEXP (from, 0),
593 MEM_ADDR_SPACE (from)))
594 || REG_P (from)
595 || GET_CODE (from) == SUBREG))
596 from = force_reg (from_mode, from);
597 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
598 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
599 from = copy_to_reg (from);
600 emit_move_insn (to, gen_lowpart (to_mode, from));
601 return;
602 }
603
604 /* Handle extension. */
605 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
606 {
607 /* Convert directly if that works. */
608 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
609 != CODE_FOR_nothing)
610 {
611 emit_unop_insn (code, to, from, equiv_code);
612 return;
613 }
614 else
615 {
616 machine_mode intermediate;
617 rtx tmp;
618 int shift_amount;
619
620 /* Search for a mode to convert via. */
621 for (intermediate = from_mode; intermediate != VOIDmode;
622 intermediate = GET_MODE_WIDER_MODE (intermediate))
623 if (((can_extend_p (to_mode, intermediate, unsignedp)
624 != CODE_FOR_nothing)
625 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
626 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
627 && (can_extend_p (intermediate, from_mode, unsignedp)
628 != CODE_FOR_nothing))
629 {
630 convert_move (to, convert_to_mode (intermediate, from,
631 unsignedp), unsignedp);
632 return;
633 }
634
635 /* No suitable intermediate mode.
636 Generate what we need with shifts. */
637 shift_amount = (GET_MODE_PRECISION (to_mode)
638 - GET_MODE_PRECISION (from_mode));
639 from = gen_lowpart (to_mode, force_reg (from_mode, from));
640 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
641 to, unsignedp);
642 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
643 to, unsignedp);
644 if (tmp != to)
645 emit_move_insn (to, tmp);
646 return;
647 }
648 }
649
650 /* Support special truncate insns for certain modes. */
651 if (convert_optab_handler (trunc_optab, to_mode,
652 from_mode) != CODE_FOR_nothing)
653 {
654 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
655 to, from, UNKNOWN);
656 return;
657 }
658
659 /* Handle truncation of volatile memrefs, and so on;
660 the things that couldn't be truncated directly,
661 and for which there was no special instruction.
662
663 ??? Code above formerly short-circuited this, for most integer
664 mode pairs, with a force_reg in from_mode followed by a recursive
665 call to this routine. Appears always to have been wrong. */
666 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
667 {
668 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
669 emit_move_insn (to, temp);
670 return;
671 }
672
673 /* Mode combination is not recognized. */
674 gcc_unreachable ();
675 }
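/* Illustrative sketch, not part of this file: a hypothetical caller
   widening an SImode value to DImode.  convert_move picks the best
   strategy itself (a direct extend insn, going via word_mode, or the
   multi-word expansion above).  */
#if 0
static rtx
example_zero_extend_si_to_di (rtx si_val)
{
  rtx di_val = gen_reg_rtx (DImode);
  convert_move (di_val, si_val, /*unsignedp=*/1);
  return di_val;
}
#endif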
676
677 /* Return an rtx for a value that would result
678 from converting X to mode MODE.
679 Both X and MODE may be floating, or both integer.
680 UNSIGNEDP is nonzero if X is an unsigned value.
681 This can be done by referring to a part of X in place
682 or by copying to a new temporary with conversion. */
683
684 rtx
685 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
686 {
687 return convert_modes (mode, VOIDmode, x, unsignedp);
688 }
689
690 /* Return an rtx for a value that would result
691 from converting X from mode OLDMODE to mode MODE.
692 Both modes may be floating, or both integer.
693 UNSIGNEDP is nonzero if X is an unsigned value.
694
695 This can be done by referring to a part of X in place
696 or by copying to a new temporary with conversion.
697
698 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
699
700 rtx
701 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
702 {
703 rtx temp;
704
705 /* If FROM is a SUBREG that indicates that we have already done at least
706 the required extension, strip it. */
707
708 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
709 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
710 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
711 x = gen_lowpart (mode, SUBREG_REG (x));
712
713 if (GET_MODE (x) != VOIDmode)
714 oldmode = GET_MODE (x);
715
716 if (mode == oldmode)
717 return x;
718
719 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
720 {
721 /* If the caller did not tell us the old mode, then there is not
722 much to do with respect to canonicalization. We have to
723 assume that all the bits are significant. */
724 if (GET_MODE_CLASS (oldmode) != MODE_INT)
725 oldmode = MAX_MODE_INT;
726 wide_int w = wide_int::from (std::make_pair (x, oldmode),
727 GET_MODE_PRECISION (mode),
728 unsignedp ? UNSIGNED : SIGNED);
729 return immed_wide_int_const (w, mode);
730 }
731
732 /* We can do this with a gen_lowpart if both desired and current modes
733 are integer, and this is either a constant integer, a register, or a
734 non-volatile MEM. */
735 if (GET_MODE_CLASS (mode) == MODE_INT
736 && GET_MODE_CLASS (oldmode) == MODE_INT
737 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
738 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
739 || (REG_P (x)
740 && (!HARD_REGISTER_P (x)
741 || HARD_REGNO_MODE_OK (REGNO (x), mode))
742 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
743
744 return gen_lowpart (mode, x);
745
746 /* Converting from an integer constant into mode is always equivalent to a
747 subreg operation. */
748 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
749 {
750 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
751 return simplify_gen_subreg (mode, x, oldmode, 0);
752 }
753
754 temp = gen_reg_rtx (mode);
755 convert_move (temp, x, unsignedp);
756 return temp;
757 }
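/* Illustrative sketch, not part of this file: because a CONST_INT
   carries VOIDmode, OLDMODE tells convert_modes how to reinterpret
   the constant, and the result is just a new canonicalized constant
   with no conversion insn emitted.  The helper name is hypothetical.  */
#if 0
static rtx
example_truncate_constant (void)
{
  rtx wide = GEN_INT (0x1ff);
  return convert_modes (QImode, SImode, wide, /*unsignedp=*/1);
}
#endif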
758 \f
759 /* Return the largest alignment we can use for doing a move (or store)
760 of MAX_PIECES. ALIGN is the largest alignment we could use. */
761
762 static unsigned int
763 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
764 {
765 machine_mode tmode;
766
767 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
768 if (align >= GET_MODE_ALIGNMENT (tmode))
769 align = GET_MODE_ALIGNMENT (tmode);
770 else
771 {
772 machine_mode tmode, xmode;
773
774 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
775 tmode != VOIDmode;
776 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
777 if (GET_MODE_SIZE (tmode) > max_pieces
778 || SLOW_UNALIGNED_ACCESS (tmode, align))
779 break;
780
781 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
782 }
783
784 return align;
785 }
786
787 /* Return the widest integer mode no wider than SIZE. If no such mode
788 can be found, return VOIDmode. */
789
790 static machine_mode
791 widest_int_mode_for_size (unsigned int size)
792 {
793 machine_mode tmode, mode = VOIDmode;
794
795 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
796 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
797 if (GET_MODE_SIZE (tmode) < size)
798 mode = tmode;
799
800 return mode;
801 }
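/* For illustration (assuming MOVE_MAX_PIECES == 8, as on many 64-bit
   targets): move_by_pieces below starts with max_size == 9, so the
   strict "<" above first selects DImode; the later passes with
   max_size of 8, 4 and 2 select SImode, HImode and QImode.  */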
802
803 /* Determine whether the LEN bytes can be moved by using several move
804 instructions. Return nonzero if a call to move_by_pieces should
805 succeed. */
806
807 int
808 can_move_by_pieces (unsigned HOST_WIDE_INT len,
809 unsigned int align)
810 {
811 return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
812 optimize_insn_for_speed_p ());
813 }
814
815 /* Generate several move instructions to copy LEN bytes from block FROM to
816 block TO. (These are MEM rtx's with BLKmode).
817
818 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
819 used to push FROM to the stack.
820
821 ALIGN is maximum stack alignment we can assume.
822
823 If ENDP is 0, return TO; if ENDP is 1, return the memory at the end, a la
824 mempcpy; and if ENDP is 2, return the memory at the end minus one byte, a la
825 stpcpy. */
826
827 rtx
828 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
829 unsigned int align, int endp)
830 {
831 struct move_by_pieces_d data;
832 machine_mode to_addr_mode;
833 machine_mode from_addr_mode = get_address_mode (from);
834 rtx to_addr, from_addr = XEXP (from, 0);
835 unsigned int max_size = MOVE_MAX_PIECES + 1;
836 enum insn_code icode;
837
838 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
839
840 data.offset = 0;
841 data.from_addr = from_addr;
842 if (to)
843 {
844 to_addr_mode = get_address_mode (to);
845 to_addr = XEXP (to, 0);
846 data.to = to;
847 data.autinc_to
848 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
849 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
850 data.reverse
851 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
852 }
853 else
854 {
855 to_addr_mode = VOIDmode;
856 to_addr = NULL_RTX;
857 data.to = NULL_RTX;
858 data.autinc_to = 1;
859 if (STACK_GROWS_DOWNWARD)
860 data.reverse = 1;
861 else
862 data.reverse = 0;
863 }
864 data.to_addr = to_addr;
865 data.from = from;
866 data.autinc_from
867 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
868 || GET_CODE (from_addr) == POST_INC
869 || GET_CODE (from_addr) == POST_DEC);
870
871 data.explicit_inc_from = 0;
872 data.explicit_inc_to = 0;
873 if (data.reverse) data.offset = len;
874 data.len = len;
875
876 /* If copying requires more than two move insns,
877 copy addresses to registers (to make displacements shorter)
878 and use post-increment if available. */
879 if (!(data.autinc_from && data.autinc_to)
880 && move_by_pieces_ninsns (len, align, max_size) > 2)
881 {
882 /* Find the mode of the largest move...
883 MODE might not be used depending on the definitions of the
884 USE_* macros below. */
885 machine_mode mode ATTRIBUTE_UNUSED
886 = widest_int_mode_for_size (max_size);
887
888 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
889 {
890 data.from_addr = copy_to_mode_reg (from_addr_mode,
891 plus_constant (from_addr_mode,
892 from_addr, len));
893 data.autinc_from = 1;
894 data.explicit_inc_from = -1;
895 }
896 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
897 {
898 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
899 data.autinc_from = 1;
900 data.explicit_inc_from = 1;
901 }
902 if (!data.autinc_from && CONSTANT_P (from_addr))
903 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
904 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
905 {
906 data.to_addr = copy_to_mode_reg (to_addr_mode,
907 plus_constant (to_addr_mode,
908 to_addr, len));
909 data.autinc_to = 1;
910 data.explicit_inc_to = -1;
911 }
912 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
913 {
914 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
915 data.autinc_to = 1;
916 data.explicit_inc_to = 1;
917 }
918 if (!data.autinc_to && CONSTANT_P (to_addr))
919 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
920 }
921
922 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
923
924 /* First move what we can in the largest integer mode, then go to
925 successively smaller modes. */
926
927 while (max_size > 1 && data.len > 0)
928 {
929 machine_mode mode = widest_int_mode_for_size (max_size);
930
931 if (mode == VOIDmode)
932 break;
933
934 icode = optab_handler (mov_optab, mode);
935 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
936 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
937
938 max_size = GET_MODE_SIZE (mode);
939 }
940
941 /* The code above should have handled everything. */
942 gcc_assert (!data.len);
943
944 if (endp)
945 {
946 rtx to1;
947
948 gcc_assert (!data.reverse);
949 if (data.autinc_to)
950 {
951 if (endp == 2)
952 {
953 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
954 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
955 else
956 data.to_addr = copy_to_mode_reg (to_addr_mode,
957 plus_constant (to_addr_mode,
958 data.to_addr,
959 -1));
960 }
961 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
962 data.offset);
963 }
964 else
965 {
966 if (endp == 2)
967 --data.offset;
968 to1 = adjust_address (data.to, QImode, data.offset);
969 }
970 return to1;
971 }
972 else
973 return data.to;
974 }
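/* Illustrative sketch, not part of this file: the typical pairing of
   can_move_by_pieces with move_by_pieces, falling back to the general
   block-move entry point.  The helper and its arguments are
   hypothetical.  */
#if 0
static void
example_copy_small_block (rtx dst, rtx src, unsigned HOST_WIDE_INT len)
{
  unsigned int align = MIN (MEM_ALIGN (dst), MEM_ALIGN (src));

  if (can_move_by_pieces (len, align))
    /* ENDP == 0: the updated destination address is not needed.  */
    move_by_pieces (dst, src, len, align, 0);
  else
    emit_block_move (dst, src, GEN_INT (len), BLOCK_OP_NORMAL);
}
#endif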
975
976 /* Return number of insns required to move L bytes by pieces.
977 ALIGN (in bits) is maximum alignment we can assume. */
978
979 unsigned HOST_WIDE_INT
980 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
981 unsigned int max_size)
982 {
983 unsigned HOST_WIDE_INT n_insns = 0;
984
985 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
986
987 while (max_size > 1 && l > 0)
988 {
989 machine_mode mode;
990 enum insn_code icode;
991
992 mode = widest_int_mode_for_size (max_size);
993
994 if (mode == VOIDmode)
995 break;
996
997 icode = optab_handler (mov_optab, mode);
998 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
999 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1000
1001 max_size = GET_MODE_SIZE (mode);
1002 }
1003
1004 gcc_assert (!l);
1005 return n_insns;
1006 }
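/* Worked example (not from the original file), assuming the usual
   1/2/4/8-byte integer modes, MOVE_MAX_PIECES == 8 and sufficient
   alignment: for L == 13 the loop counts one DImode move (13/8,
   leaving 5), one SImode move (5/4, leaving 1), no HImode move and
   one QImode move, so move_by_pieces_ninsns returns 3.  */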
1007
1008 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1009 with move instructions for mode MODE. GENFUN is the gen_... function
1010 to make a move insn for that mode. DATA has all the other info. */
1011
1012 static void
1013 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1014 struct move_by_pieces_d *data)
1015 {
1016 unsigned int size = GET_MODE_SIZE (mode);
1017 rtx to1 = NULL_RTX, from1;
1018
1019 while (data->len >= size)
1020 {
1021 if (data->reverse)
1022 data->offset -= size;
1023
1024 if (data->to)
1025 {
1026 if (data->autinc_to)
1027 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1028 data->offset);
1029 else
1030 to1 = adjust_address (data->to, mode, data->offset);
1031 }
1032
1033 if (data->autinc_from)
1034 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1035 data->offset);
1036 else
1037 from1 = adjust_address (data->from, mode, data->offset);
1038
1039 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1040 emit_insn (gen_add2_insn (data->to_addr,
1041 gen_int_mode (-(HOST_WIDE_INT) size,
1042 GET_MODE (data->to_addr))));
1043 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1044 emit_insn (gen_add2_insn (data->from_addr,
1045 gen_int_mode (-(HOST_WIDE_INT) size,
1046 GET_MODE (data->from_addr))));
1047
1048 if (data->to)
1049 emit_insn ((*genfun) (to1, from1));
1050 else
1051 {
1052 #ifdef PUSH_ROUNDING
1053 emit_single_push_insn (mode, from1, NULL);
1054 #else
1055 gcc_unreachable ();
1056 #endif
1057 }
1058
1059 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1060 emit_insn (gen_add2_insn (data->to_addr,
1061 gen_int_mode (size,
1062 GET_MODE (data->to_addr))));
1063 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1064 emit_insn (gen_add2_insn (data->from_addr,
1065 gen_int_mode (size,
1066 GET_MODE (data->from_addr))));
1067
1068 if (! data->reverse)
1069 data->offset += size;
1070
1071 data->len -= size;
1072 }
1073 }
1074 \f
1075 /* Emit code to move a block Y to a block X. This may be done with
1076 string-move instructions, with multiple scalar move instructions,
1077 or with a library call.
1078
1079 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1080 SIZE is an rtx that says how long they are.
1081 ALIGN is the maximum alignment we can assume they have.
1082 METHOD describes what kind of copy this is, and what mechanisms may be used.
1083 MIN_SIZE is the minimal size of block to move
1084 MAX_SIZE is the maximal size of block to move; if it cannot be represented
1085 in unsigned HOST_WIDE_INT, it is a mask of all ones.
1086
1087 Return the address of the new block, if memcpy is called and returns it,
1088 0 otherwise. */
1089
1090 rtx
1091 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1092 unsigned int expected_align, HOST_WIDE_INT expected_size,
1093 unsigned HOST_WIDE_INT min_size,
1094 unsigned HOST_WIDE_INT max_size,
1095 unsigned HOST_WIDE_INT probable_max_size)
1096 {
1097 bool may_use_call;
1098 rtx retval = 0;
1099 unsigned int align;
1100
1101 gcc_assert (size);
1102 if (CONST_INT_P (size)
1103 && INTVAL (size) == 0)
1104 return 0;
1105
1106 switch (method)
1107 {
1108 case BLOCK_OP_NORMAL:
1109 case BLOCK_OP_TAILCALL:
1110 may_use_call = true;
1111 break;
1112
1113 case BLOCK_OP_CALL_PARM:
1114 may_use_call = block_move_libcall_safe_for_call_parm ();
1115
1116 /* Make inhibit_defer_pop nonzero around the library call
1117 to force it to pop the arguments right away. */
1118 NO_DEFER_POP;
1119 break;
1120
1121 case BLOCK_OP_NO_LIBCALL:
1122 may_use_call = false;
1123 break;
1124
1125 default:
1126 gcc_unreachable ();
1127 }
1128
1129 gcc_assert (MEM_P (x) && MEM_P (y));
1130 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1131 gcc_assert (align >= BITS_PER_UNIT);
1132
1133 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1134 block copy is more efficient for other large modes, e.g. DCmode. */
1135 x = adjust_address (x, BLKmode, 0);
1136 y = adjust_address (y, BLKmode, 0);
1137
1138 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1139 can be incorrect is coming from __builtin_memcpy. */
1140 if (CONST_INT_P (size))
1141 {
1142 x = shallow_copy_rtx (x);
1143 y = shallow_copy_rtx (y);
1144 set_mem_size (x, INTVAL (size));
1145 set_mem_size (y, INTVAL (size));
1146 }
1147
1148 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1149 move_by_pieces (x, y, INTVAL (size), align, 0);
1150 else if (emit_block_move_via_movmem (x, y, size, align,
1151 expected_align, expected_size,
1152 min_size, max_size, probable_max_size))
1153 ;
1154 else if (may_use_call
1155 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1156 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1157 {
1158 /* Since x and y are passed to a libcall, mark the corresponding
1159 tree EXPR as addressable. */
1160 tree y_expr = MEM_EXPR (y);
1161 tree x_expr = MEM_EXPR (x);
1162 if (y_expr)
1163 mark_addressable (y_expr);
1164 if (x_expr)
1165 mark_addressable (x_expr);
1166 retval = emit_block_move_via_libcall (x, y, size,
1167 method == BLOCK_OP_TAILCALL);
1168 }
1169
1170 else
1171 emit_block_move_via_loop (x, y, size, align);
1172
1173 if (method == BLOCK_OP_CALL_PARM)
1174 OK_DEFER_POP;
1175
1176 return retval;
1177 }
1178
1179 rtx
1180 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1181 {
1182 unsigned HOST_WIDE_INT max, min = 0;
1183 if (GET_CODE (size) == CONST_INT)
1184 min = max = UINTVAL (size);
1185 else
1186 max = GET_MODE_MASK (GET_MODE (size));
1187 return emit_block_move_hints (x, y, size, method, 0, -1,
1188 min, max, max);
1189 }
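/* Illustrative sketch, not part of this file: when the caller knows
   bounds on a variable-length copy it can pass them through the hint
   parameters so emit_block_move_via_movmem can pick a better strategy.
   The helper and its MIN_LEN/MAX_LEN arguments are hypothetical.  */
#if 0
static void
example_copy_with_hints (rtx dst, rtx src, rtx len,
                         unsigned HOST_WIDE_INT min_len,
                         unsigned HOST_WIDE_INT max_len)
{
  emit_block_move_hints (dst, src, len, BLOCK_OP_NORMAL,
                         /*expected_align=*/0, /*expected_size=*/-1,
                         min_len, max_len, max_len);
}
#endif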
1190
1191 /* A subroutine of emit_block_move. Returns true if calling the
1192 block move libcall will not clobber any parameters which may have
1193 already been placed on the stack. */
1194
1195 static bool
1196 block_move_libcall_safe_for_call_parm (void)
1197 {
1198 #if defined (REG_PARM_STACK_SPACE)
1199 tree fn;
1200 #endif
1201
1202 /* If arguments are pushed on the stack, then they're safe. */
1203 if (PUSH_ARGS)
1204 return true;
1205
1206 /* If registers go on the stack anyway, any argument is sure to clobber
1207 an outgoing argument. */
1208 #if defined (REG_PARM_STACK_SPACE)
1209 fn = emit_block_move_libcall_fn (false);
1210 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1211 depend on its argument. */
1212 (void) fn;
1213 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1214 && REG_PARM_STACK_SPACE (fn) != 0)
1215 return false;
1216 #endif
1217
1218 /* If any argument goes in memory, then it might clobber an outgoing
1219 argument. */
1220 {
1221 CUMULATIVE_ARGS args_so_far_v;
1222 cumulative_args_t args_so_far;
1223 tree fn, arg;
1224
1225 fn = emit_block_move_libcall_fn (false);
1226 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1227 args_so_far = pack_cumulative_args (&args_so_far_v);
1228
1229 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1230 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1231 {
1232 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1233 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1234 NULL_TREE, true);
1235 if (!tmp || !REG_P (tmp))
1236 return false;
1237 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1238 return false;
1239 targetm.calls.function_arg_advance (args_so_far, mode,
1240 NULL_TREE, true);
1241 }
1242 }
1243 return true;
1244 }
1245
1246 /* A subroutine of emit_block_move. Expand a movmem pattern;
1247 return true if successful. */
1248
1249 static bool
1250 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1251 unsigned int expected_align, HOST_WIDE_INT expected_size,
1252 unsigned HOST_WIDE_INT min_size,
1253 unsigned HOST_WIDE_INT max_size,
1254 unsigned HOST_WIDE_INT probable_max_size)
1255 {
1256 int save_volatile_ok = volatile_ok;
1257 machine_mode mode;
1258
1259 if (expected_align < align)
1260 expected_align = align;
1261 if (expected_size != -1)
1262 {
1263 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1264 expected_size = probable_max_size;
1265 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1266 expected_size = min_size;
1267 }
1268
1269 /* Since this is a move insn, we don't care about volatility. */
1270 volatile_ok = 1;
1271
1272 /* Try the most limited insn first, because there's no point
1273 including more than one in the machine description unless
1274 the more limited one has some advantage. */
1275
1276 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1277 mode = GET_MODE_WIDER_MODE (mode))
1278 {
1279 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1280
1281 if (code != CODE_FOR_nothing
1282 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1283 here because if SIZE is less than the mode mask, as it is
1284 returned by the macro, it will definitely be less than the
1285 actual mode mask. Since SIZE is within the Pmode address
1286 space, we limit MODE to Pmode. */
1287 && ((CONST_INT_P (size)
1288 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1289 <= (GET_MODE_MASK (mode) >> 1)))
1290 || max_size <= (GET_MODE_MASK (mode) >> 1)
1291 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1292 {
1293 struct expand_operand ops[9];
1294 unsigned int nops;
1295
1296 /* ??? When called via emit_block_move_for_call, it'd be
1297 nice if there were some way to inform the backend, so
1298 that it doesn't fail the expansion because it thinks
1299 emitting the libcall would be more efficient. */
1300 nops = insn_data[(int) code].n_generator_args;
1301 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1302
1303 create_fixed_operand (&ops[0], x);
1304 create_fixed_operand (&ops[1], y);
1305 /* The check above guarantees that this size conversion is valid. */
1306 create_convert_operand_to (&ops[2], size, mode, true);
1307 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1308 if (nops >= 6)
1309 {
1310 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1311 create_integer_operand (&ops[5], expected_size);
1312 }
1313 if (nops >= 8)
1314 {
1315 create_integer_operand (&ops[6], min_size);
1316 /* If we cannot represent the maximal size,
1317 make the parameter NULL. */
1318 if ((HOST_WIDE_INT) max_size != -1)
1319 create_integer_operand (&ops[7], max_size);
1320 else
1321 create_fixed_operand (&ops[7], NULL);
1322 }
1323 if (nops == 9)
1324 {
1325 /* If we cannot represent the maximal size,
1326 make the parameter NULL. */
1327 if ((HOST_WIDE_INT) probable_max_size != -1)
1328 create_integer_operand (&ops[8], probable_max_size);
1329 else
1330 create_fixed_operand (&ops[8], NULL);
1331 }
1332 if (maybe_expand_insn (code, nops, ops))
1333 {
1334 volatile_ok = save_volatile_ok;
1335 return true;
1336 }
1337 }
1338 }
1339
1340 volatile_ok = save_volatile_ok;
1341 return false;
1342 }
1343
1344 /* A subroutine of emit_block_move. Expand a call to memcpy.
1345 Return the return value from memcpy, 0 otherwise. */
1346
1347 rtx
1348 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1349 {
1350 rtx dst_addr, src_addr;
1351 tree call_expr, fn, src_tree, dst_tree, size_tree;
1352 machine_mode size_mode;
1353 rtx retval;
1354
1355 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1356 pseudos. We can then place those new pseudos into a VAR_DECL and
1357 use them later. */
1358
1359 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1360 src_addr = copy_addr_to_reg (XEXP (src, 0));
1361
1362 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1363 src_addr = convert_memory_address (ptr_mode, src_addr);
1364
1365 dst_tree = make_tree (ptr_type_node, dst_addr);
1366 src_tree = make_tree (ptr_type_node, src_addr);
1367
1368 size_mode = TYPE_MODE (sizetype);
1369
1370 size = convert_to_mode (size_mode, size, 1);
1371 size = copy_to_mode_reg (size_mode, size);
1372
1373 /* It is incorrect to use the libcall calling conventions to call
1374 memcpy in this context. This could be a user call to memcpy and
1375 the user may wish to examine the return value from memcpy. For
1376 targets where libcalls and normal calls have different conventions
1377 for returning pointers, we could end up generating incorrect code. */
1378
1379 size_tree = make_tree (sizetype, size);
1380
1381 fn = emit_block_move_libcall_fn (true);
1382 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1383 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1384
1385 retval = expand_normal (call_expr);
1386
1387 return retval;
1388 }
1389
1390 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1391 for the function we use for block copies. */
1392
1393 static GTY(()) tree block_move_fn;
1394
1395 void
1396 init_block_move_fn (const char *asmspec)
1397 {
1398 if (!block_move_fn)
1399 {
1400 tree args, fn, attrs, attr_args;
1401
1402 fn = get_identifier ("memcpy");
1403 args = build_function_type_list (ptr_type_node, ptr_type_node,
1404 const_ptr_type_node, sizetype,
1405 NULL_TREE);
1406
1407 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1408 DECL_EXTERNAL (fn) = 1;
1409 TREE_PUBLIC (fn) = 1;
1410 DECL_ARTIFICIAL (fn) = 1;
1411 TREE_NOTHROW (fn) = 1;
1412 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1413 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1414
1415 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1416 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1417
1418 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1419
1420 block_move_fn = fn;
1421 }
1422
1423 if (asmspec)
1424 set_user_assembler_name (block_move_fn, asmspec);
1425 }
1426
1427 static tree
1428 emit_block_move_libcall_fn (int for_call)
1429 {
1430 static bool emitted_extern;
1431
1432 if (!block_move_fn)
1433 init_block_move_fn (NULL);
1434
1435 if (for_call && !emitted_extern)
1436 {
1437 emitted_extern = true;
1438 make_decl_rtl (block_move_fn);
1439 }
1440
1441 return block_move_fn;
1442 }
1443
1444 /* A subroutine of emit_block_move. Copy the data via an explicit
1445 loop. This is used only when libcalls are forbidden. */
1446 /* ??? It'd be nice to copy in hunks larger than QImode. */
1447
1448 static void
1449 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1450 unsigned int align ATTRIBUTE_UNUSED)
1451 {
1452 rtx_code_label *cmp_label, *top_label;
1453 rtx iter, x_addr, y_addr, tmp;
1454 machine_mode x_addr_mode = get_address_mode (x);
1455 machine_mode y_addr_mode = get_address_mode (y);
1456 machine_mode iter_mode;
1457
1458 iter_mode = GET_MODE (size);
1459 if (iter_mode == VOIDmode)
1460 iter_mode = word_mode;
1461
1462 top_label = gen_label_rtx ();
1463 cmp_label = gen_label_rtx ();
1464 iter = gen_reg_rtx (iter_mode);
1465
1466 emit_move_insn (iter, const0_rtx);
1467
1468 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1469 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1470 do_pending_stack_adjust ();
1471
1472 emit_jump (cmp_label);
1473 emit_label (top_label);
1474
1475 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1476 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1477
1478 if (x_addr_mode != y_addr_mode)
1479 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1480 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1481
1482 x = change_address (x, QImode, x_addr);
1483 y = change_address (y, QImode, y_addr);
1484
1485 emit_move_insn (x, y);
1486
1487 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1488 true, OPTAB_LIB_WIDEN);
1489 if (tmp != iter)
1490 emit_move_insn (iter, tmp);
1491
1492 emit_label (cmp_label);
1493
1494 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1495 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1496 }
1497 \f
1498 /* Copy all or part of a value X into registers starting at REGNO.
1499 The number of registers to be filled is NREGS. */
1500
1501 void
1502 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1503 {
1504 int i;
1505 rtx pat;
1506 rtx_insn *last;
1507
1508 if (nregs == 0)
1509 return;
1510
1511 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1512 x = validize_mem (force_const_mem (mode, x));
1513
1514 /* See if the machine can do this with a load multiple insn. */
1515 if (HAVE_load_multiple)
1516 {
1517 last = get_last_insn ();
1518 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1519 GEN_INT (nregs));
1520 if (pat)
1521 {
1522 emit_insn (pat);
1523 return;
1524 }
1525 else
1526 delete_insns_since (last);
1527 }
1528
1529 for (i = 0; i < nregs; i++)
1530 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1531 operand_subword_force (x, i, mode));
1532 }
1533
1534 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1535 The number of registers to be filled is NREGS. */
1536
1537 void
1538 move_block_from_reg (int regno, rtx x, int nregs)
1539 {
1540 int i;
1541
1542 if (nregs == 0)
1543 return;
1544
1545 /* See if the machine can do this with a store multiple insn. */
1546 if (HAVE_store_multiple)
1547 {
1548 rtx_insn *last = get_last_insn ();
1549 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1550 GEN_INT (nregs));
1551 if (pat)
1552 {
1553 emit_insn (pat);
1554 return;
1555 }
1556 else
1557 delete_insns_since (last);
1558 }
1559
1560 for (i = 0; i < nregs; i++)
1561 {
1562 rtx tem = operand_subword (x, i, 1, BLKmode);
1563
1564 gcc_assert (tem);
1565
1566 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1567 }
1568 }
1569
1570 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1571 ORIG, where ORIG is a non-consecutive group of registers represented by
1572 a PARALLEL. The clone is identical to the original except in that the
1573 original set of registers is replaced by a new set of pseudo registers.
1574 The new set has the same modes as the original set. */
1575
1576 rtx
1577 gen_group_rtx (rtx orig)
1578 {
1579 int i, length;
1580 rtx *tmps;
1581
1582 gcc_assert (GET_CODE (orig) == PARALLEL);
1583
1584 length = XVECLEN (orig, 0);
1585 tmps = XALLOCAVEC (rtx, length);
1586
1587 /* Skip a NULL entry in first slot. */
1588 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1589
1590 if (i)
1591 tmps[0] = 0;
1592
1593 for (; i < length; i++)
1594 {
1595 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1596 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1597
1598 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1599 }
1600
1601 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1602 }
1603
1604 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1605 except that values are placed in TMPS[i], and must later be moved
1606 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1607
1608 static void
1609 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1610 {
1611 rtx src;
1612 int start, i;
1613 machine_mode m = GET_MODE (orig_src);
1614
1615 gcc_assert (GET_CODE (dst) == PARALLEL);
1616
1617 if (m != VOIDmode
1618 && !SCALAR_INT_MODE_P (m)
1619 && !MEM_P (orig_src)
1620 && GET_CODE (orig_src) != CONCAT)
1621 {
1622 machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1623 if (imode == BLKmode)
1624 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1625 else
1626 src = gen_reg_rtx (imode);
1627 if (imode != BLKmode)
1628 src = gen_lowpart (GET_MODE (orig_src), src);
1629 emit_move_insn (src, orig_src);
1630 /* ...and back again. */
1631 if (imode != BLKmode)
1632 src = gen_lowpart (imode, src);
1633 emit_group_load_1 (tmps, dst, src, type, ssize);
1634 return;
1635 }
1636
1637 /* Check for a NULL entry, used to indicate that the parameter goes
1638 both on the stack and in registers. */
1639 if (XEXP (XVECEXP (dst, 0, 0), 0))
1640 start = 0;
1641 else
1642 start = 1;
1643
1644 /* Process the pieces. */
1645 for (i = start; i < XVECLEN (dst, 0); i++)
1646 {
1647 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1648 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1649 unsigned int bytelen = GET_MODE_SIZE (mode);
1650 int shift = 0;
1651
1652 /* Handle trailing fragments that run over the size of the struct. */
1653 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1654 {
1655 /* Arrange to shift the fragment to where it belongs.
1656 extract_bit_field loads to the lsb of the reg. */
1657 if (
1658 #ifdef BLOCK_REG_PADDING
1659 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1660 == (BYTES_BIG_ENDIAN ? upward : downward)
1661 #else
1662 BYTES_BIG_ENDIAN
1663 #endif
1664 )
1665 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1666 bytelen = ssize - bytepos;
1667 gcc_assert (bytelen > 0);
1668 }
1669
1670 /* If we won't be loading directly from memory, protect the real source
1671 from strange tricks we might play; but make sure that the source can
1672 be loaded directly into the destination. */
1673 src = orig_src;
1674 if (!MEM_P (orig_src)
1675 && (!CONSTANT_P (orig_src)
1676 || (GET_MODE (orig_src) != mode
1677 && GET_MODE (orig_src) != VOIDmode)))
1678 {
1679 if (GET_MODE (orig_src) == VOIDmode)
1680 src = gen_reg_rtx (mode);
1681 else
1682 src = gen_reg_rtx (GET_MODE (orig_src));
1683
1684 emit_move_insn (src, orig_src);
1685 }
1686
1687 /* Optimize the access just a bit. */
1688 if (MEM_P (src)
1689 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1690 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1691 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1692 && bytelen == GET_MODE_SIZE (mode))
1693 {
1694 tmps[i] = gen_reg_rtx (mode);
1695 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1696 }
1697 else if (COMPLEX_MODE_P (mode)
1698 && GET_MODE (src) == mode
1699 && bytelen == GET_MODE_SIZE (mode))
1700 /* Let emit_move_complex do the bulk of the work. */
1701 tmps[i] = src;
1702 else if (GET_CODE (src) == CONCAT)
1703 {
1704 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1705 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1706
1707 if ((bytepos == 0 && bytelen == slen0)
1708 || (bytepos != 0 && bytepos + bytelen <= slen))
1709 {
1710 /* The following assumes that the concatenated objects all
1711 have the same size. In this case, a simple calculation
1712 can be used to determine the object and the bit field
1713 to be extracted. */
1714 tmps[i] = XEXP (src, bytepos / slen0);
1715 if (! CONSTANT_P (tmps[i])
1716 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1717 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1718 (bytepos % slen0) * BITS_PER_UNIT,
1719 1, NULL_RTX, mode, mode);
1720 }
1721 else
1722 {
1723 rtx mem;
1724
1725 gcc_assert (!bytepos);
1726 mem = assign_stack_temp (GET_MODE (src), slen);
1727 emit_move_insn (mem, src);
1728 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1729 0, 1, NULL_RTX, mode, mode);
1730 }
1731 }
1732 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1733 SIMD register, which is currently broken. Until we get GCC
1734 to emit proper RTL for these cases, let's dump to memory. */
1735 else if (VECTOR_MODE_P (GET_MODE (dst))
1736 && REG_P (src))
1737 {
1738 int slen = GET_MODE_SIZE (GET_MODE (src));
1739 rtx mem;
1740
1741 mem = assign_stack_temp (GET_MODE (src), slen);
1742 emit_move_insn (mem, src);
1743 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1744 }
1745 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1746 && XVECLEN (dst, 0) > 1)
1747 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1748 else if (CONSTANT_P (src))
1749 {
1750 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1751
1752 if (len == ssize)
1753 tmps[i] = src;
1754 else
1755 {
1756 rtx first, second;
1757
1758 /* TODO: const_wide_int can have sizes other than this... */
1759 gcc_assert (2 * len == ssize);
1760 split_double (src, &first, &second);
1761 if (i)
1762 tmps[i] = second;
1763 else
1764 tmps[i] = first;
1765 }
1766 }
1767 else if (REG_P (src) && GET_MODE (src) == mode)
1768 tmps[i] = src;
1769 else
1770 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1771 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1772 mode, mode);
1773
1774 if (shift)
1775 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1776 shift, tmps[i], 0);
1777 }
1778 }
1779
1780 /* Emit code to move a block SRC of type TYPE to a block DST,
1781 where DST is non-consecutive registers represented by a PARALLEL.
1782 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1783 if not known. */
1784
1785 void
1786 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1787 {
1788 rtx *tmps;
1789 int i;
1790
1791 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1792 emit_group_load_1 (tmps, dst, src, type, ssize);
1793
1794 /* Copy the extracted pieces into the proper (probable) hard regs. */
1795 for (i = 0; i < XVECLEN (dst, 0); i++)
1796 {
1797 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1798 if (d == NULL)
1799 continue;
1800 emit_move_insn (d, tmps[i]);
1801 }
1802 }
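/* Illustrative sketch, not part of this file: a PARALLEL of
   (EXPR_LIST reg offset) pairs is how a value split across registers
   is described; emit_group_load then fills each piece from the
   BLKmode source.  The register numbers and the 16-byte layout are
   hypothetical.  */
#if 0
static void
example_load_two_word_group (rtx src, tree type)
{
  rtx piece0 = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 0),
                                  GEN_INT (0));
  rtx piece1 = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 1),
                                  GEN_INT (8));
  rtx group = gen_rtx_PARALLEL (BLKmode, gen_rtvec (2, piece0, piece1));

  emit_group_load (group, src, type, /*ssize=*/16);
}
#endif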
1803
1804 /* Similar, but load SRC into new pseudos in a format that looks like
1805 PARALLEL. This can later be fed to emit_group_move to get things
1806 in the right place. */
1807
1808 rtx
1809 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1810 {
1811 rtvec vec;
1812 int i;
1813
1814 vec = rtvec_alloc (XVECLEN (parallel, 0));
1815 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1816
1817 /* Convert the vector to look just like the original PARALLEL, except
1818 with the computed values. */
1819 for (i = 0; i < XVECLEN (parallel, 0); i++)
1820 {
1821 rtx e = XVECEXP (parallel, 0, i);
1822 rtx d = XEXP (e, 0);
1823
1824 if (d)
1825 {
1826 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1827 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1828 }
1829 RTVEC_ELT (vec, i) = e;
1830 }
1831
1832 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1833 }
1834
1835 /* Emit code to move a block SRC to block DST, where SRC and DST are
1836 non-consecutive groups of registers, each represented by a PARALLEL. */
1837
1838 void
1839 emit_group_move (rtx dst, rtx src)
1840 {
1841 int i;
1842
1843 gcc_assert (GET_CODE (src) == PARALLEL
1844 && GET_CODE (dst) == PARALLEL
1845 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1846
1847 /* Skip first entry if NULL. */
1848 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1849 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1850 XEXP (XVECEXP (src, 0, i), 0));
1851 }
1852
1853 /* Move a group of registers represented by a PARALLEL into pseudos. */
1854
1855 rtx
1856 emit_group_move_into_temps (rtx src)
1857 {
1858 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1859 int i;
1860
1861 for (i = 0; i < XVECLEN (src, 0); i++)
1862 {
1863 rtx e = XVECEXP (src, 0, i);
1864 rtx d = XEXP (e, 0);
1865
1866 if (d)
1867 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1868 RTVEC_ELT (vec, i) = e;
1869 }
1870
1871 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1872 }
1873
1874 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1875 where SRC is non-consecutive registers represented by a PARALLEL.
1876 SSIZE represents the total size of block ORIG_DST, or -1 if not
1877 known. */
1878
1879 void
1880 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1881 {
1882 rtx *tmps, dst;
1883 int start, finish, i;
1884 machine_mode m = GET_MODE (orig_dst);
1885
1886 gcc_assert (GET_CODE (src) == PARALLEL);
1887
1888 if (!SCALAR_INT_MODE_P (m)
1889 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1890 {
1891 machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1892 if (imode == BLKmode)
1893 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1894 else
1895 dst = gen_reg_rtx (imode);
1896 emit_group_store (dst, src, type, ssize);
1897 if (imode != BLKmode)
1898 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1899 emit_move_insn (orig_dst, dst);
1900 return;
1901 }
1902
1903 /* Check for a NULL entry, used to indicate that the parameter goes
1904 both on the stack and in registers. */
1905 if (XEXP (XVECEXP (src, 0, 0), 0))
1906 start = 0;
1907 else
1908 start = 1;
1909 finish = XVECLEN (src, 0);
1910
1911 tmps = XALLOCAVEC (rtx, finish);
1912
1913 /* Copy the (probable) hard regs into pseudos. */
1914 for (i = start; i < finish; i++)
1915 {
1916 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1917 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1918 {
1919 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1920 emit_move_insn (tmps[i], reg);
1921 }
1922 else
1923 tmps[i] = reg;
1924 }
1925
1926 /* If we won't be storing directly into memory, protect the real destination
1927 from strange tricks we might play. */
1928 dst = orig_dst;
1929 if (GET_CODE (dst) == PARALLEL)
1930 {
1931 rtx temp;
1932
1933 /* We can get a PARALLEL dst if there is a conditional expression in
1934 a return statement. In that case, the dst and src are the same,
1935 so no action is necessary. */
1936 if (rtx_equal_p (dst, src))
1937 return;
1938
1939 /* It is unclear if we can ever reach here, but we may as well handle
1940 it. Allocate a temporary, and split this into a store/load to/from
1941 the temporary. */
1942 temp = assign_stack_temp (GET_MODE (dst), ssize);
1943 emit_group_store (temp, src, type, ssize);
1944 emit_group_load (dst, temp, type, ssize);
1945 return;
1946 }
1947 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1948 {
1949 machine_mode outer = GET_MODE (dst);
1950 machine_mode inner;
1951 HOST_WIDE_INT bytepos;
1952 bool done = false;
1953 rtx temp;
1954
1955 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1956 dst = gen_reg_rtx (outer);
1957
1958 /* Make life a bit easier for combine. */
1959 /* If the first element of the vector is the low part
1960 of the destination mode, use a paradoxical subreg to
1961 initialize the destination. */
1962 if (start < finish)
1963 {
1964 inner = GET_MODE (tmps[start]);
1965 bytepos = subreg_lowpart_offset (inner, outer);
1966 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1967 {
1968 temp = simplify_gen_subreg (outer, tmps[start],
1969 inner, 0);
1970 if (temp)
1971 {
1972 emit_move_insn (dst, temp);
1973 done = true;
1974 start++;
1975 }
1976 }
1977 }
1978
1979 /* If the first element wasn't the low part, try the last. */
1980 if (!done
1981 && start < finish - 1)
1982 {
1983 inner = GET_MODE (tmps[finish - 1]);
1984 bytepos = subreg_lowpart_offset (inner, outer);
1985 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1986 {
1987 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1988 inner, 0);
1989 if (temp)
1990 {
1991 emit_move_insn (dst, temp);
1992 done = true;
1993 finish--;
1994 }
1995 }
1996 }
1997
1998 /* Otherwise, simply initialize the result to zero. */
1999 if (!done)
2000 emit_move_insn (dst, CONST0_RTX (outer));
2001 }
2002
2003 /* Process the pieces. */
2004 for (i = start; i < finish; i++)
2005 {
2006 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2007 machine_mode mode = GET_MODE (tmps[i]);
2008 unsigned int bytelen = GET_MODE_SIZE (mode);
2009 unsigned int adj_bytelen;
2010 rtx dest = dst;
2011
2012 /* Handle trailing fragments that run over the size of the struct. */
2013 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2014 adj_bytelen = ssize - bytepos;
2015 else
2016 adj_bytelen = bytelen;
2017
2018 if (GET_CODE (dst) == CONCAT)
2019 {
2020 if (bytepos + adj_bytelen
2021 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2022 dest = XEXP (dst, 0);
2023 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2024 {
2025 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2026 dest = XEXP (dst, 1);
2027 }
2028 else
2029 {
2030 machine_mode dest_mode = GET_MODE (dest);
2031 machine_mode tmp_mode = GET_MODE (tmps[i]);
2032
2033 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2034
2035 if (GET_MODE_ALIGNMENT (dest_mode)
2036 >= GET_MODE_ALIGNMENT (tmp_mode))
2037 {
2038 dest = assign_stack_temp (dest_mode,
2039 GET_MODE_SIZE (dest_mode));
2040 emit_move_insn (adjust_address (dest,
2041 tmp_mode,
2042 bytepos),
2043 tmps[i]);
2044 dst = dest;
2045 }
2046 else
2047 {
2048 dest = assign_stack_temp (tmp_mode,
2049 GET_MODE_SIZE (tmp_mode));
2050 emit_move_insn (dest, tmps[i]);
2051 dst = adjust_address (dest, dest_mode, bytepos);
2052 }
2053 break;
2054 }
2055 }
2056
2057 /* Handle trailing fragments that run over the size of the struct. */
2058 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2059 {
2060 /* store_bit_field always takes its value from the lsb.
2061 Move the fragment to the lsb if it's not already there. */
2062 if (
2063 #ifdef BLOCK_REG_PADDING
2064 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2065 == (BYTES_BIG_ENDIAN ? upward : downward)
2066 #else
2067 BYTES_BIG_ENDIAN
2068 #endif
2069 )
2070 {
2071 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2072 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2073 shift, tmps[i], 0);
2074 }
2075
2076 /* Make sure not to write past the end of the struct. */
2077 store_bit_field (dest,
2078 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2079 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2080 VOIDmode, tmps[i]);
2081 }
2082
2083 /* Optimize the access just a bit. */
2084 else if (MEM_P (dest)
2085 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2086 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2087 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2088 && bytelen == GET_MODE_SIZE (mode))
2089 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2090
2091 else
2092 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2093 0, 0, mode, tmps[i]);
2094 }
2095
2096 /* Copy from the pseudo into the (probable) hard reg. */
2097 if (orig_dst != dst)
2098 emit_move_insn (orig_dst, dst);
2099 }
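
/* A hedged usage sketch (not lifted from a specific caller): code that
   receives a function's return value as a PARALLEL of hard registers
   typically spills it into an ordinary object with something like

	if (GET_CODE (valreg) == PARALLEL)
	  emit_group_store (target_mem, valreg, rettype,
			    int_size_in_bytes (rettype));

   where TARGET_MEM, VALREG and RETTYPE are placeholders for the
   caller's destination, the returned PARALLEL and its tree type.  */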
2100
2101 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2102 of the value stored in X. */
2103
2104 rtx
2105 maybe_emit_group_store (rtx x, tree type)
2106 {
2107 machine_mode mode = TYPE_MODE (type);
2108 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2109 if (GET_CODE (x) == PARALLEL)
2110 {
2111 rtx result = gen_reg_rtx (mode);
2112 emit_group_store (result, x, type, int_size_in_bytes (type));
2113 return result;
2114 }
2115 return x;
2116 }
2117
2118 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2119
2120 This is used on targets that return BLKmode values in registers. */
2121
2122 void
2123 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2124 {
2125 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2126 rtx src = NULL, dst = NULL;
2127 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2128 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2129 machine_mode mode = GET_MODE (srcreg);
2130 machine_mode tmode = GET_MODE (target);
2131 machine_mode copy_mode;
2132
2133 /* BLKmode registers created in the back-end shouldn't have survived. */
2134 gcc_assert (mode != BLKmode);
2135
2136 /* If the structure doesn't take up a whole number of words, see whether
2137 SRCREG is padded on the left or on the right. If it's on the left,
2138 set PADDING_CORRECTION to the number of bits to skip.
2139
2140 In most ABIs, the structure will be returned at the least significant end of
2141 the register, which translates to right padding on little-endian
2142 targets and left padding on big-endian targets. The opposite
2143 holds if the structure is returned at the most significant
2144 end of the register. */
2145 if (bytes % UNITS_PER_WORD != 0
2146 && (targetm.calls.return_in_msb (type)
2147 ? !BYTES_BIG_ENDIAN
2148 : BYTES_BIG_ENDIAN))
2149 padding_correction
2150 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2151
2152 /* We can use a single move if we have an exact mode for the size. */
2153 else if (MEM_P (target)
2154 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2155 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2156 && bytes == GET_MODE_SIZE (mode))
2157 {
2158 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2159 return;
2160 }
2161
2162 /* And if we additionally have the same mode for a register. */
2163 else if (REG_P (target)
2164 && GET_MODE (target) == mode
2165 && bytes == GET_MODE_SIZE (mode))
2166 {
2167 emit_move_insn (target, srcreg);
2168 return;
2169 }
2170
2171 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2172 into a new pseudo which is a full word. */
2173 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2174 {
2175 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2176 mode = word_mode;
2177 }
2178
2179 /* Copy the structure BITSIZE bits at a time. If the target lives in
2180 memory, take care of not reading/writing past its end by selecting
2181 a copy mode suited to BITSIZE. This should always be possible given
2182 how it is computed.
2183
2184 If the target lives in a register, make sure not to select a copy mode
2185 larger than the mode of the register.
2186
2187 We could probably emit more efficient code for machines which do not use
2188 strict alignment, but it doesn't seem worth the effort at the current
2189 time. */
2190
2191 copy_mode = word_mode;
2192 if (MEM_P (target))
2193 {
2194 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2195 if (mem_mode != BLKmode)
2196 copy_mode = mem_mode;
2197 }
2198 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2199 copy_mode = tmode;
2200
2201 for (bitpos = 0, xbitpos = padding_correction;
2202 bitpos < bytes * BITS_PER_UNIT;
2203 bitpos += bitsize, xbitpos += bitsize)
2204 {
2205 /* We need a new source operand each time xbitpos is on a
2206 word boundary and when xbitpos == padding_correction
2207 (the first time through). */
2208 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2209 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2210
2211 /* We need a new destination operand each time bitpos is on
2212 a word boundary. */
2213 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2214 dst = target;
2215 else if (bitpos % BITS_PER_WORD == 0)
2216 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2217
2218 /* Use xbitpos for the source extraction (right justified) and
2219 bitpos for the destination store (left justified). */
2220 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2221 extract_bit_field (src, bitsize,
2222 xbitpos % BITS_PER_WORD, 1,
2223 NULL_RTX, copy_mode, copy_mode));
2224 }
2225 }
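
/* A worked reading of the padding correction above, offered only as an
   illustration: with 32-bit words on a big-endian target that returns
   aggregates at the least significant end, a 6-byte structure arrives
   right-justified in a register pair, so PADDING_CORRECTION becomes
   32 - (6 % 4) * 8 = 16 and the copy loop skips the 16 pad bits at the
   start of the first source word.  */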
2226
2227 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2228 register if it contains any data, otherwise return null.
2229
2230 This is used on targets that return BLKmode values in registers. */
2231
2232 rtx
2233 copy_blkmode_to_reg (machine_mode mode, tree src)
2234 {
2235 int i, n_regs;
2236 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2237 unsigned int bitsize;
2238 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2239 machine_mode dst_mode;
2240
2241 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2242
2243 x = expand_normal (src);
2244
2245 bytes = int_size_in_bytes (TREE_TYPE (src));
2246 if (bytes == 0)
2247 return NULL_RTX;
2248
2249 /* If the structure doesn't take up a whole number of words, see
2250 whether the register value should be padded on the left or on
2251 the right. Set PADDING_CORRECTION to the number of padding
2252 bits needed on the left side.
2253
2254 In most ABIs, the structure will be returned at the least significant end of
2255 the register, which translates to right padding on little-endian
2256 targets and left padding on big-endian targets. The opposite
2257 holds if the structure is returned at the most significant
2258 end of the register. */
2259 if (bytes % UNITS_PER_WORD != 0
2260 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2261 ? !BYTES_BIG_ENDIAN
2262 : BYTES_BIG_ENDIAN))
2263 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2264 * BITS_PER_UNIT));
2265
2266 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2267 dst_words = XALLOCAVEC (rtx, n_regs);
2268 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2269
2270 /* Copy the structure BITSIZE bits at a time. */
2271 for (bitpos = 0, xbitpos = padding_correction;
2272 bitpos < bytes * BITS_PER_UNIT;
2273 bitpos += bitsize, xbitpos += bitsize)
2274 {
2275 /* We need a new destination pseudo each time xbitpos is
2276 on a word boundary and when xbitpos == padding_correction
2277 (the first time through). */
2278 if (xbitpos % BITS_PER_WORD == 0
2279 || xbitpos == padding_correction)
2280 {
2281 /* Generate an appropriate register. */
2282 dst_word = gen_reg_rtx (word_mode);
2283 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2284
2285 /* Clear the destination before we move anything into it. */
2286 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2287 }
2288
2289 /* We need a new source operand each time bitpos is on a word
2290 boundary. */
2291 if (bitpos % BITS_PER_WORD == 0)
2292 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2293
2294 /* Use bitpos for the source extraction (left justified) and
2295 xbitpos for the destination store (right justified). */
2296 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2297 0, 0, word_mode,
2298 extract_bit_field (src_word, bitsize,
2299 bitpos % BITS_PER_WORD, 1,
2300 NULL_RTX, word_mode, word_mode));
2301 }
2302
2303 if (mode == BLKmode)
2304 {
2305 /* Find the smallest integer mode large enough to hold the
2306 entire structure. */
2307 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2308 mode != VOIDmode;
2309 mode = GET_MODE_WIDER_MODE (mode))
2310 /* Have we found a large enough mode? */
2311 if (GET_MODE_SIZE (mode) >= bytes)
2312 break;
2313
2314 /* A suitable mode should have been found. */
2315 gcc_assert (mode != VOIDmode);
2316 }
2317
2318 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2319 dst_mode = word_mode;
2320 else
2321 dst_mode = mode;
2322 dst = gen_reg_rtx (dst_mode);
2323
2324 for (i = 0; i < n_regs; i++)
2325 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2326
2327 if (mode != dst_mode)
2328 dst = gen_lowpart (mode, dst);
2329
2330 return dst;
2331 }
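
/* A hedged sketch of how the routine above is meant to be used when
   expanding a return of a BLKmode value in registers (placeholders
   only, not a quote of the real caller):

	rtx val = copy_blkmode_to_reg (GET_MODE (result_reg), retval);
	if (val)
	  {
	    /* Adopt the mode the copy actually produced.  */
	    PUT_MODE (result_reg, GET_MODE (val));
	    emit_move_insn (result_reg, val);
	  }

   RESULT_REG and RETVAL stand for the return register and the returned
   tree; the real caller-side logic lives outside this file.  */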
2332
2333 /* Add a USE expression for REG to the (possibly empty) list pointed
2334 to by CALL_FUSAGE. REG must denote a hard register. */
2335
2336 void
2337 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2338 {
2339 gcc_assert (REG_P (reg));
2340
2341 if (!HARD_REGISTER_P (reg))
2342 return;
2343
2344 *call_fusage
2345 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2346 }
2347
2348 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2349 to by CALL_FUSAGE. REG must denote a hard register. */
2350
2351 void
2352 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2353 {
2354 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2355
2356 *call_fusage
2357 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2358 }
2359
2360 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2361 starting at REGNO. All of these registers must be hard registers. */
2362
2363 void
2364 use_regs (rtx *call_fusage, int regno, int nregs)
2365 {
2366 int i;
2367
2368 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2369
2370 for (i = 0; i < nregs; i++)
2371 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2372 }
2373
2374 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2375 PARALLEL REGS. This is for calls that pass values in multiple
2376 non-contiguous locations. The Irix 6 ABI has examples of this. */
2377
2378 void
2379 use_group_regs (rtx *call_fusage, rtx regs)
2380 {
2381 int i;
2382
2383 for (i = 0; i < XVECLEN (regs, 0); i++)
2384 {
2385 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2386
2387 /* A NULL entry means the parameter goes both on the stack and in
2388 registers. This can also be a MEM for targets that pass values
2389 partially on the stack and partially in registers. */
2390 if (reg != 0 && REG_P (reg))
2391 use_reg (call_fusage, reg);
2392 }
2393 }
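
/* For orientation only (a sketch, not a quote of the call expander):
   the CALL_FUSAGE list built by the routines above is an EXPR_LIST of
   USE and CLOBBER expressions that is later attached to the call insn,
   roughly

	rtx call_fusage = NULL_RTX;
	use_reg (&call_fusage, argument_reg);
	...
	CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

   so that data-flow passes know which hard registers the call reads or
   clobbers.  ARGUMENT_REG and CALL_INSN are placeholders for the
   caller's variables.  */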
2394
2395 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2396 assignment and the code of the expression on the RHS is CODE. Return
2397 NULL otherwise. */
2398
2399 static gimple
2400 get_def_for_expr (tree name, enum tree_code code)
2401 {
2402 gimple def_stmt;
2403
2404 if (TREE_CODE (name) != SSA_NAME)
2405 return NULL;
2406
2407 def_stmt = get_gimple_for_ssa_name (name);
2408 if (!def_stmt
2409 || gimple_assign_rhs_code (def_stmt) != code)
2410 return NULL;
2411
2412 return def_stmt;
2413 }
2414
2415 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2416 assignment and the class of the expression on the RHS is CLASS. Return
2417 NULL otherwise. */
2418
2419 static gimple
2420 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2421 {
2422 gimple def_stmt;
2423
2424 if (TREE_CODE (name) != SSA_NAME)
2425 return NULL;
2426
2427 def_stmt = get_gimple_for_ssa_name (name);
2428 if (!def_stmt
2429 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2430 return NULL;
2431
2432 return def_stmt;
2433 }
2434 \f
2435
2436 /* Determine whether the LEN bytes generated by CONSTFUN can be
2437 stored to memory using several move instructions. CONSTFUNDATA is
2438 a pointer which will be passed as an argument in every CONSTFUN call.
2439 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2440 a memset operation and false if it's a copy of a constant string.
2441 Return nonzero if a call to store_by_pieces should succeed. */
2442
2443 int
2444 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2445 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2446 void *constfundata, unsigned int align, bool memsetp)
2447 {
2448 unsigned HOST_WIDE_INT l;
2449 unsigned int max_size;
2450 HOST_WIDE_INT offset = 0;
2451 machine_mode mode;
2452 enum insn_code icode;
2453 int reverse;
2454 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2455 rtx cst ATTRIBUTE_UNUSED;
2456
2457 if (len == 0)
2458 return 1;
2459
2460 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2461 memsetp
2462 ? SET_BY_PIECES
2463 : STORE_BY_PIECES,
2464 optimize_insn_for_speed_p ()))
2465 return 0;
2466
2467 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2468
2469 /* We would first store what we can in the largest integer mode, then go to
2470 successively smaller modes. */
2471
2472 for (reverse = 0;
2473 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2474 reverse++)
2475 {
2476 l = len;
2477 max_size = STORE_MAX_PIECES + 1;
2478 while (max_size > 1 && l > 0)
2479 {
2480 mode = widest_int_mode_for_size (max_size);
2481
2482 if (mode == VOIDmode)
2483 break;
2484
2485 icode = optab_handler (mov_optab, mode);
2486 if (icode != CODE_FOR_nothing
2487 && align >= GET_MODE_ALIGNMENT (mode))
2488 {
2489 unsigned int size = GET_MODE_SIZE (mode);
2490
2491 while (l >= size)
2492 {
2493 if (reverse)
2494 offset -= size;
2495
2496 cst = (*constfun) (constfundata, offset, mode);
2497 if (!targetm.legitimate_constant_p (mode, cst))
2498 return 0;
2499
2500 if (!reverse)
2501 offset += size;
2502
2503 l -= size;
2504 }
2505 }
2506
2507 max_size = GET_MODE_SIZE (mode);
2508 }
2509
2510 /* The code above should have handled everything. */
2511 gcc_assert (!l);
2512 }
2513
2514 return 1;
2515 }
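
/* An informal example of a CONSTFUN callback, assuming nothing beyond
   the interface documented above (clear_by_pieces_1 further below is
   the real in-tree analogue): a callback that fills memory with 0xff
   bytes can ignore its arguments and return an all-ones constant of
   the requested mode, e.g.

	static rtx
	all_ones_piece (void *data ATTRIBUTE_UNUSED,
			HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			machine_mode mode)
	{
	  return gen_int_mode (-1, mode);
	}

   can_store_by_pieces then checks whether each such constant is
   legitimate for the modes it intends to use.  */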
2516
2517 /* Generate several move instructions to store LEN bytes generated by
2518 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2519 pointer which will be passed as an argument in every CONSTFUN call.
2520 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2521 a memset operation and false if it's a copy of a constant string.
2522 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2523 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2524 stpcpy. */
2525
2526 rtx
2527 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2528 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2529 void *constfundata, unsigned int align, bool memsetp, int endp)
2530 {
2531 machine_mode to_addr_mode = get_address_mode (to);
2532 struct store_by_pieces_d data;
2533
2534 if (len == 0)
2535 {
2536 gcc_assert (endp != 2);
2537 return to;
2538 }
2539
2540 gcc_assert (targetm.use_by_pieces_infrastructure_p
2541 (len, align,
2542 memsetp
2543 ? SET_BY_PIECES
2544 : STORE_BY_PIECES,
2545 optimize_insn_for_speed_p ()));
2546
2547 data.constfun = constfun;
2548 data.constfundata = constfundata;
2549 data.len = len;
2550 data.to = to;
2551 store_by_pieces_1 (&data, align);
2552 if (endp)
2553 {
2554 rtx to1;
2555
2556 gcc_assert (!data.reverse);
2557 if (data.autinc_to)
2558 {
2559 if (endp == 2)
2560 {
2561 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2562 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2563 else
2564 data.to_addr = copy_to_mode_reg (to_addr_mode,
2565 plus_constant (to_addr_mode,
2566 data.to_addr,
2567 -1));
2568 }
2569 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2570 data.offset);
2571 }
2572 else
2573 {
2574 if (endp == 2)
2575 --data.offset;
2576 to1 = adjust_address (data.to, QImode, data.offset);
2577 }
2578 return to1;
2579 }
2580 else
2581 return data.to;
2582 }
2583
2584 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2585 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2586
2587 static void
2588 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2589 {
2590 struct store_by_pieces_d data;
2591
2592 if (len == 0)
2593 return;
2594
2595 data.constfun = clear_by_pieces_1;
2596 data.constfundata = NULL;
2597 data.len = len;
2598 data.to = to;
2599 store_by_pieces_1 (&data, align);
2600 }
2601
2602 /* Callback routine for clear_by_pieces.
2603 Return const0_rtx unconditionally. */
2604
2605 static rtx
2606 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2607 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2608 machine_mode mode ATTRIBUTE_UNUSED)
2609 {
2610 return const0_rtx;
2611 }
2612
2613 /* Subroutine of clear_by_pieces and store_by_pieces.
2614 Generate several move instructions to store LEN bytes of block TO. (A MEM
2615 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2616
2617 static void
2618 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2619 unsigned int align ATTRIBUTE_UNUSED)
2620 {
2621 machine_mode to_addr_mode = get_address_mode (data->to);
2622 rtx to_addr = XEXP (data->to, 0);
2623 unsigned int max_size = STORE_MAX_PIECES + 1;
2624 enum insn_code icode;
2625
2626 data->offset = 0;
2627 data->to_addr = to_addr;
2628 data->autinc_to
2629 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2630 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2631
2632 data->explicit_inc_to = 0;
2633 data->reverse
2634 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2635 if (data->reverse)
2636 data->offset = data->len;
2637
2638 /* If storing requires more than two move insns,
2639 copy addresses to registers (to make displacements shorter)
2640 and use post-increment if available. */
2641 if (!data->autinc_to
2642 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2643 {
2644 /* Determine the main mode we'll be using.
2645 MODE might not be used depending on the definitions of the
2646 USE_* macros below. */
2647 machine_mode mode ATTRIBUTE_UNUSED
2648 = widest_int_mode_for_size (max_size);
2649
2650 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2651 {
2652 data->to_addr = copy_to_mode_reg (to_addr_mode,
2653 plus_constant (to_addr_mode,
2654 to_addr,
2655 data->len));
2656 data->autinc_to = 1;
2657 data->explicit_inc_to = -1;
2658 }
2659
2660 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2661 && ! data->autinc_to)
2662 {
2663 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2664 data->autinc_to = 1;
2665 data->explicit_inc_to = 1;
2666 }
2667
2668 if ( !data->autinc_to && CONSTANT_P (to_addr))
2669 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2670 }
2671
2672 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2673
2674 /* First store what we can in the largest integer mode, then go to
2675 successively smaller modes. */
2676
2677 while (max_size > 1 && data->len > 0)
2678 {
2679 machine_mode mode = widest_int_mode_for_size (max_size);
2680
2681 if (mode == VOIDmode)
2682 break;
2683
2684 icode = optab_handler (mov_optab, mode);
2685 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2686 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2687
2688 max_size = GET_MODE_SIZE (mode);
2689 }
2690
2691 /* The code above should have handled everything. */
2692 gcc_assert (!data->len);
2693 }
2694
2695 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2696 with move instructions for mode MODE. GENFUN is the gen_... function
2697 to make a move insn for that mode. DATA has all the other info. */
2698
2699 static void
2700 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2701 struct store_by_pieces_d *data)
2702 {
2703 unsigned int size = GET_MODE_SIZE (mode);
2704 rtx to1, cst;
2705
2706 while (data->len >= size)
2707 {
2708 if (data->reverse)
2709 data->offset -= size;
2710
2711 if (data->autinc_to)
2712 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2713 data->offset);
2714 else
2715 to1 = adjust_address (data->to, mode, data->offset);
2716
2717 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2718 emit_insn (gen_add2_insn (data->to_addr,
2719 gen_int_mode (-(HOST_WIDE_INT) size,
2720 GET_MODE (data->to_addr))));
2721
2722 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2723 emit_insn ((*genfun) (to1, cst));
2724
2725 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2726 emit_insn (gen_add2_insn (data->to_addr,
2727 gen_int_mode (size,
2728 GET_MODE (data->to_addr))));
2729
2730 if (! data->reverse)
2731 data->offset += size;
2732
2733 data->len -= size;
2734 }
2735 }
2736 \f
2737 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2738 its length in bytes. */
2739
2740 rtx
2741 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2742 unsigned int expected_align, HOST_WIDE_INT expected_size,
2743 unsigned HOST_WIDE_INT min_size,
2744 unsigned HOST_WIDE_INT max_size,
2745 unsigned HOST_WIDE_INT probable_max_size)
2746 {
2747 machine_mode mode = GET_MODE (object);
2748 unsigned int align;
2749
2750 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2751
2752 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2753 just move a zero. Otherwise, do this a piece at a time. */
2754 if (mode != BLKmode
2755 && CONST_INT_P (size)
2756 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2757 {
2758 rtx zero = CONST0_RTX (mode);
2759 if (zero != NULL)
2760 {
2761 emit_move_insn (object, zero);
2762 return NULL;
2763 }
2764
2765 if (COMPLEX_MODE_P (mode))
2766 {
2767 zero = CONST0_RTX (GET_MODE_INNER (mode));
2768 if (zero != NULL)
2769 {
2770 write_complex_part (object, zero, 0);
2771 write_complex_part (object, zero, 1);
2772 return NULL;
2773 }
2774 }
2775 }
2776
2777 if (size == const0_rtx)
2778 return NULL;
2779
2780 align = MEM_ALIGN (object);
2781
2782 if (CONST_INT_P (size)
2783 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2784 CLEAR_BY_PIECES,
2785 optimize_insn_for_speed_p ()))
2786 clear_by_pieces (object, INTVAL (size), align);
2787 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2788 expected_align, expected_size,
2789 min_size, max_size, probable_max_size))
2790 ;
2791 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2792 return set_storage_via_libcall (object, size, const0_rtx,
2793 method == BLOCK_OP_TAILCALL);
2794 else
2795 gcc_unreachable ();
2796
2797 return NULL;
2798 }
2799
2800 rtx
2801 clear_storage (rtx object, rtx size, enum block_op_methods method)
2802 {
2803 unsigned HOST_WIDE_INT max, min = 0;
2804 if (GET_CODE (size) == CONST_INT)
2805 min = max = UINTVAL (size);
2806 else
2807 max = GET_MODE_MASK (GET_MODE (size));
2808 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2809 }
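
/* A minimal usage sketch (placeholder operands, not taken from a
   specific caller): zeroing a BLKmode MEM of known byte size reduces to

	clear_storage (target_mem, GEN_INT (size_in_bytes), BLOCK_OP_NORMAL);

   and clear_storage_hints then picks by-pieces code, a setmem pattern
   or a memset libcall as appropriate.  */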
2810
2811
2812 /* A subroutine of clear_storage. Expand a call to memset.
2813 Return the return value of memset, 0 otherwise. */
2814
2815 rtx
2816 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2817 {
2818 tree call_expr, fn, object_tree, size_tree, val_tree;
2819 machine_mode size_mode;
2820 rtx retval;
2821
2822 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2823 place those new pseudos into a VAR_DECL and use them later. */
2824
2825 object = copy_addr_to_reg (XEXP (object, 0));
2826
2827 size_mode = TYPE_MODE (sizetype);
2828 size = convert_to_mode (size_mode, size, 1);
2829 size = copy_to_mode_reg (size_mode, size);
2830
2831 /* It is incorrect to use the libcall calling conventions to call
2832 memset in this context. This could be a user call to memset and
2833 the user may wish to examine the return value from memset. For
2834 targets where libcalls and normal calls have different conventions
2835 for returning pointers, we could end up generating incorrect code. */
2836
2837 object_tree = make_tree (ptr_type_node, object);
2838 if (!CONST_INT_P (val))
2839 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2840 size_tree = make_tree (sizetype, size);
2841 val_tree = make_tree (integer_type_node, val);
2842
2843 fn = clear_storage_libcall_fn (true);
2844 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2845 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2846
2847 retval = expand_normal (call_expr);
2848
2849 return retval;
2850 }
2851
2852 /* A subroutine of set_storage_via_libcall. Create the tree node
2853 for the function we use for block clears. */
2854
2855 tree block_clear_fn;
2856
2857 void
2858 init_block_clear_fn (const char *asmspec)
2859 {
2860 if (!block_clear_fn)
2861 {
2862 tree fn, args;
2863
2864 fn = get_identifier ("memset");
2865 args = build_function_type_list (ptr_type_node, ptr_type_node,
2866 integer_type_node, sizetype,
2867 NULL_TREE);
2868
2869 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2870 DECL_EXTERNAL (fn) = 1;
2871 TREE_PUBLIC (fn) = 1;
2872 DECL_ARTIFICIAL (fn) = 1;
2873 TREE_NOTHROW (fn) = 1;
2874 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2875 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2876
2877 block_clear_fn = fn;
2878 }
2879
2880 if (asmspec)
2881 set_user_assembler_name (block_clear_fn, asmspec);
2882 }
2883
2884 static tree
2885 clear_storage_libcall_fn (int for_call)
2886 {
2887 static bool emitted_extern;
2888
2889 if (!block_clear_fn)
2890 init_block_clear_fn (NULL);
2891
2892 if (for_call && !emitted_extern)
2893 {
2894 emitted_extern = true;
2895 make_decl_rtl (block_clear_fn);
2896 }
2897
2898 return block_clear_fn;
2899 }
2900 \f
2901 /* Expand a setmem pattern; return true if successful. */
2902
2903 bool
2904 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2905 unsigned int expected_align, HOST_WIDE_INT expected_size,
2906 unsigned HOST_WIDE_INT min_size,
2907 unsigned HOST_WIDE_INT max_size,
2908 unsigned HOST_WIDE_INT probable_max_size)
2909 {
2910 /* Try the most limited insn first, because there's no point
2911 including more than one in the machine description unless
2912 the more limited one has some advantage. */
2913
2914 machine_mode mode;
2915
2916 if (expected_align < align)
2917 expected_align = align;
2918 if (expected_size != -1)
2919 {
2920 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2921 expected_size = max_size;
2922 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2923 expected_size = min_size;
2924 }
2925
2926 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2927 mode = GET_MODE_WIDER_MODE (mode))
2928 {
2929 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2930
2931 if (code != CODE_FOR_nothing
2932 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2933 here because if SIZE is less than the mode mask, as it is
2934 returned by the macro, it will definitely be less than the
2935 actual mode mask. Since SIZE is within the Pmode address
2936 space, we limit MODE to Pmode. */
2937 && ((CONST_INT_P (size)
2938 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2939 <= (GET_MODE_MASK (mode) >> 1)))
2940 || max_size <= (GET_MODE_MASK (mode) >> 1)
2941 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2942 {
2943 struct expand_operand ops[9];
2944 unsigned int nops;
2945
2946 nops = insn_data[(int) code].n_generator_args;
2947 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2948
2949 create_fixed_operand (&ops[0], object);
2950 /* The check above guarantees that this size conversion is valid. */
2951 create_convert_operand_to (&ops[1], size, mode, true);
2952 create_convert_operand_from (&ops[2], val, byte_mode, true);
2953 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2954 if (nops >= 6)
2955 {
2956 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2957 create_integer_operand (&ops[5], expected_size);
2958 }
2959 if (nops >= 8)
2960 {
2961 create_integer_operand (&ops[6], min_size);
2962 /* If we cannot represent the maximal size,
2963 pass NULL for the parameter. */
2964 if ((HOST_WIDE_INT) max_size != -1)
2965 create_integer_operand (&ops[7], max_size);
2966 else
2967 create_fixed_operand (&ops[7], NULL);
2968 }
2969 if (nops == 9)
2970 {
2971 /* If we cannot represent the maximal size,
2972 pass NULL for the parameter. */
2973 if ((HOST_WIDE_INT) probable_max_size != -1)
2974 create_integer_operand (&ops[8], probable_max_size);
2975 else
2976 create_fixed_operand (&ops[8], NULL);
2977 }
2978 if (maybe_expand_insn (code, nops, ops))
2979 return true;
2980 }
2981 }
2982
2983 return false;
2984 }
2985
2986 \f
2987 /* Write to one of the components of the complex value CPLX. Write VAL to
2988 the real part if IMAG_P is false, and the imaginary part if it's true. */
2989
2990 void
2991 write_complex_part (rtx cplx, rtx val, bool imag_p)
2992 {
2993 machine_mode cmode;
2994 machine_mode imode;
2995 unsigned ibitsize;
2996
2997 if (GET_CODE (cplx) == CONCAT)
2998 {
2999 emit_move_insn (XEXP (cplx, imag_p), val);
3000 return;
3001 }
3002
3003 cmode = GET_MODE (cplx);
3004 imode = GET_MODE_INNER (cmode);
3005 ibitsize = GET_MODE_BITSIZE (imode);
3006
3007 /* For MEMs simplify_gen_subreg may generate an invalid new address
3008 because, e.g., the original address is considered mode-dependent
3009 by the target, which restricts simplify_subreg from invoking
3010 adjust_address_nv. Instead of preparing fallback support for an
3011 invalid address, we call adjust_address_nv directly. */
3012 if (MEM_P (cplx))
3013 {
3014 emit_move_insn (adjust_address_nv (cplx, imode,
3015 imag_p ? GET_MODE_SIZE (imode) : 0),
3016 val);
3017 return;
3018 }
3019
3020 /* If the sub-object is at least word sized, then we know that subregging
3021 will work. This special case is important, since store_bit_field
3022 wants to operate on integer modes, and there's rarely an OImode to
3023 correspond to TCmode. */
3024 if (ibitsize >= BITS_PER_WORD
3025 /* For hard regs we have exact predicates. Assume we can split
3026 the original object if it spans an even number of hard regs.
3027 This special case is important for SCmode on 64-bit platforms
3028 where the natural size of floating-point regs is 32-bit. */
3029 || (REG_P (cplx)
3030 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3031 && REG_NREGS (cplx) % 2 == 0))
3032 {
3033 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3034 imag_p ? GET_MODE_SIZE (imode) : 0);
3035 if (part)
3036 {
3037 emit_move_insn (part, val);
3038 return;
3039 }
3040 else
3041 /* simplify_gen_subreg may fail for sub-word MEMs. */
3042 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3043 }
3044
3045 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3046 }
3047
3048 /* Extract one of the components of the complex value CPLX. Extract the
3049 real part if IMAG_P is false, and the imaginary part if it's true. */
3050
3051 static rtx
3052 read_complex_part (rtx cplx, bool imag_p)
3053 {
3054 machine_mode cmode, imode;
3055 unsigned ibitsize;
3056
3057 if (GET_CODE (cplx) == CONCAT)
3058 return XEXP (cplx, imag_p);
3059
3060 cmode = GET_MODE (cplx);
3061 imode = GET_MODE_INNER (cmode);
3062 ibitsize = GET_MODE_BITSIZE (imode);
3063
3064 /* Special case reads from complex constants that got spilled to memory. */
3065 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3066 {
3067 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3068 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3069 {
3070 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3071 if (CONSTANT_CLASS_P (part))
3072 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3073 }
3074 }
3075
3076 /* For MEMs simplify_gen_subreg may generate an invalid new address
3077 because, e.g., the original address is considered mode-dependent
3078 by the target, which restricts simplify_subreg from invoking
3079 adjust_address_nv. Instead of preparing fallback support for an
3080 invalid address, we call adjust_address_nv directly. */
3081 if (MEM_P (cplx))
3082 return adjust_address_nv (cplx, imode,
3083 imag_p ? GET_MODE_SIZE (imode) : 0);
3084
3085 /* If the sub-object is at least word sized, then we know that subregging
3086 will work. This special case is important, since extract_bit_field
3087 wants to operate on integer modes, and there's rarely an OImode to
3088 correspond to TCmode. */
3089 if (ibitsize >= BITS_PER_WORD
3090 /* For hard regs we have exact predicates. Assume we can split
3091 the original object if it spans an even number of hard regs.
3092 This special case is important for SCmode on 64-bit platforms
3093 where the natural size of floating-point regs is 32-bit. */
3094 || (REG_P (cplx)
3095 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3096 && REG_NREGS (cplx) % 2 == 0))
3097 {
3098 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3099 imag_p ? GET_MODE_SIZE (imode) : 0);
3100 if (ret)
3101 return ret;
3102 else
3103 /* simplify_gen_subreg may fail for sub-word MEMs. */
3104 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3105 }
3106
3107 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3108 true, NULL_RTX, imode, imode);
3109 }
3110 \f
3111 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3112 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3113 represented in NEW_MODE. If FORCE is true, this will never happen, as
3114 we'll force-create a SUBREG if needed. */
3115
3116 static rtx
3117 emit_move_change_mode (machine_mode new_mode,
3118 machine_mode old_mode, rtx x, bool force)
3119 {
3120 rtx ret;
3121
3122 if (push_operand (x, GET_MODE (x)))
3123 {
3124 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3125 MEM_COPY_ATTRIBUTES (ret, x);
3126 }
3127 else if (MEM_P (x))
3128 {
3129 /* We don't have to worry about changing the address since the
3130 size in bytes is supposed to be the same. */
3131 if (reload_in_progress)
3132 {
3133 /* Copy the MEM to change the mode and move any
3134 substitutions from the old MEM to the new one. */
3135 ret = adjust_address_nv (x, new_mode, 0);
3136 copy_replacements (x, ret);
3137 }
3138 else
3139 ret = adjust_address (x, new_mode, 0);
3140 }
3141 else
3142 {
3143 /* Note that we do want simplify_subreg's behavior of validating
3144 that the new mode is ok for a hard register. If we were to use
3145 simplify_gen_subreg, we would create the subreg, but would
3146 probably run into the target not being able to implement it. */
3147 /* Except, of course, when FORCE is true, when this is exactly what
3148 we want, which is needed for CCmodes on some targets. */
3149 if (force)
3150 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3151 else
3152 ret = simplify_subreg (new_mode, x, old_mode, 0);
3153 }
3154
3155 return ret;
3156 }
3157
3158 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3159 an integer mode of the same size as MODE. Returns the instruction
3160 emitted, or NULL if such a move could not be generated. */
3161
3162 static rtx_insn *
3163 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3164 {
3165 machine_mode imode;
3166 enum insn_code code;
3167
3168 /* There must exist a mode of the exact size we require. */
3169 imode = int_mode_for_mode (mode);
3170 if (imode == BLKmode)
3171 return NULL;
3172
3173 /* The target must support moves in this mode. */
3174 code = optab_handler (mov_optab, imode);
3175 if (code == CODE_FOR_nothing)
3176 return NULL;
3177
3178 x = emit_move_change_mode (imode, mode, x, force);
3179 if (x == NULL_RTX)
3180 return NULL;
3181 y = emit_move_change_mode (imode, mode, y, force);
3182 if (y == NULL_RTX)
3183 return NULL;
3184 return emit_insn (GEN_FCN (code) (x, y));
3185 }
3186
3187 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3188 Return an equivalent MEM that does not use an auto-increment. */
3189
3190 rtx
3191 emit_move_resolve_push (machine_mode mode, rtx x)
3192 {
3193 enum rtx_code code = GET_CODE (XEXP (x, 0));
3194 HOST_WIDE_INT adjust;
3195 rtx temp;
3196
3197 adjust = GET_MODE_SIZE (mode);
3198 #ifdef PUSH_ROUNDING
3199 adjust = PUSH_ROUNDING (adjust);
3200 #endif
3201 if (code == PRE_DEC || code == POST_DEC)
3202 adjust = -adjust;
3203 else if (code == PRE_MODIFY || code == POST_MODIFY)
3204 {
3205 rtx expr = XEXP (XEXP (x, 0), 1);
3206 HOST_WIDE_INT val;
3207
3208 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3209 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3210 val = INTVAL (XEXP (expr, 1));
3211 if (GET_CODE (expr) == MINUS)
3212 val = -val;
3213 gcc_assert (adjust == val || adjust == -val);
3214 adjust = val;
3215 }
3216
3217 /* Do not use anti_adjust_stack, since we don't want to update
3218 stack_pointer_delta. */
3219 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3220 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3221 0, OPTAB_LIB_WIDEN);
3222 if (temp != stack_pointer_rtx)
3223 emit_move_insn (stack_pointer_rtx, temp);
3224
3225 switch (code)
3226 {
3227 case PRE_INC:
3228 case PRE_DEC:
3229 case PRE_MODIFY:
3230 temp = stack_pointer_rtx;
3231 break;
3232 case POST_INC:
3233 case POST_DEC:
3234 case POST_MODIFY:
3235 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3236 break;
3237 default:
3238 gcc_unreachable ();
3239 }
3240
3241 return replace_equiv_address (x, temp);
3242 }
3243
3244 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3245 X is known to satisfy push_operand, and MODE is known to be complex.
3246 Returns the last instruction emitted. */
3247
3248 rtx_insn *
3249 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3250 {
3251 machine_mode submode = GET_MODE_INNER (mode);
3252 bool imag_first;
3253
3254 #ifdef PUSH_ROUNDING
3255 unsigned int submodesize = GET_MODE_SIZE (submode);
3256
3257 /* In case we output to the stack, but the size is smaller than the
3258 machine can push exactly, we need to use move instructions. */
3259 if (PUSH_ROUNDING (submodesize) != submodesize)
3260 {
3261 x = emit_move_resolve_push (mode, x);
3262 return emit_move_insn (x, y);
3263 }
3264 #endif
3265
3266 /* Note that the real part always precedes the imag part in memory
3267 regardless of the machine's endianness. */
3268 switch (GET_CODE (XEXP (x, 0)))
3269 {
3270 case PRE_DEC:
3271 case POST_DEC:
3272 imag_first = true;
3273 break;
3274 case PRE_INC:
3275 case POST_INC:
3276 imag_first = false;
3277 break;
3278 default:
3279 gcc_unreachable ();
3280 }
3281
3282 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3283 read_complex_part (y, imag_first));
3284 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3285 read_complex_part (y, !imag_first));
3286 }
3287
3288 /* A subroutine of emit_move_complex. Perform the move from Y to X
3289 via two moves of the parts. Returns the last instruction emitted. */
3290
3291 rtx_insn *
3292 emit_move_complex_parts (rtx x, rtx y)
3293 {
3294 /* Show the output dies here. This is necessary for SUBREGs
3295 of pseudos since we cannot track their lifetimes correctly;
3296 hard regs shouldn't appear here except as return values. */
3297 if (!reload_completed && !reload_in_progress
3298 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3299 emit_clobber (x);
3300
3301 write_complex_part (x, read_complex_part (y, false), false);
3302 write_complex_part (x, read_complex_part (y, true), true);
3303
3304 return get_last_insn ();
3305 }
3306
3307 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3308 MODE is known to be complex. Returns the last instruction emitted. */
3309
3310 static rtx_insn *
3311 emit_move_complex (machine_mode mode, rtx x, rtx y)
3312 {
3313 bool try_int;
3314
3315 /* Need to take special care for pushes, to maintain proper ordering
3316 of the data, and possibly extra padding. */
3317 if (push_operand (x, mode))
3318 return emit_move_complex_push (mode, x, y);
3319
3320 /* See if we can coerce the target into moving both values at once, except
3321 for floating point where we favor moving as parts if this is easy. */
3322 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3323 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3324 && !(REG_P (x)
3325 && HARD_REGISTER_P (x)
3326 && REG_NREGS (x) == 1)
3327 && !(REG_P (y)
3328 && HARD_REGISTER_P (y)
3329 && REG_NREGS (y) == 1))
3330 try_int = false;
3331 /* Not possible if the values are inherently not adjacent. */
3332 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3333 try_int = false;
3334 /* It is possible if both are registers (or subregs of registers). */
3335 else if (register_operand (x, mode) && register_operand (y, mode))
3336 try_int = true;
3337 /* If one of the operands is a memory, and alignment constraints
3338 are friendly enough, we may be able to do combined memory operations.
3339 We do not attempt this if Y is a constant because that combination is
3340 usually better with the by-parts copy below. */
3341 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3342 && (!STRICT_ALIGNMENT
3343 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3344 try_int = true;
3345 else
3346 try_int = false;
3347
3348 if (try_int)
3349 {
3350 rtx_insn *ret;
3351
3352 /* For memory to memory moves, optimal behavior can be had with the
3353 existing block move logic. */
3354 if (MEM_P (x) && MEM_P (y))
3355 {
3356 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3357 BLOCK_OP_NO_LIBCALL);
3358 return get_last_insn ();
3359 }
3360
3361 ret = emit_move_via_integer (mode, x, y, true);
3362 if (ret)
3363 return ret;
3364 }
3365
3366 return emit_move_complex_parts (x, y);
3367 }
3368
3369 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3370 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3371
3372 static rtx_insn *
3373 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3374 {
3375 rtx_insn *ret;
3376
3377 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3378 if (mode != CCmode)
3379 {
3380 enum insn_code code = optab_handler (mov_optab, CCmode);
3381 if (code != CODE_FOR_nothing)
3382 {
3383 x = emit_move_change_mode (CCmode, mode, x, true);
3384 y = emit_move_change_mode (CCmode, mode, y, true);
3385 return emit_insn (GEN_FCN (code) (x, y));
3386 }
3387 }
3388
3389 /* Otherwise, find the MODE_INT mode of the same width. */
3390 ret = emit_move_via_integer (mode, x, y, false);
3391 gcc_assert (ret != NULL);
3392 return ret;
3393 }
3394
3395 /* Return true if word I of OP lies entirely in the
3396 undefined bits of a paradoxical subreg. */
3397
3398 static bool
3399 undefined_operand_subword_p (const_rtx op, int i)
3400 {
3401 machine_mode innermode, innermostmode;
3402 int offset;
3403 if (GET_CODE (op) != SUBREG)
3404 return false;
3405 innermode = GET_MODE (op);
3406 innermostmode = GET_MODE (SUBREG_REG (op));
3407 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3408 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3409 memory, except for a paradoxical subreg where we define
3410 SUBREG_BYTE to be 0; undo this exception as in
3411 simplify_subreg. */
3412 if (SUBREG_BYTE (op) == 0
3413 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3414 {
3415 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3416 if (WORDS_BIG_ENDIAN)
3417 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3418 if (BYTES_BIG_ENDIAN)
3419 offset += difference % UNITS_PER_WORD;
3420 }
3421 if (offset >= GET_MODE_SIZE (innermostmode)
3422 || offset <= -GET_MODE_SIZE (word_mode))
3423 return true;
3424 return false;
3425 }
3426
3427 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3428 MODE is any multi-word or full-word mode that lacks a move_insn
3429 pattern. Note that you will get better code if you define such
3430 patterns, even if they must turn into multiple assembler instructions. */
3431
3432 static rtx_insn *
3433 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3434 {
3435 rtx_insn *last_insn = 0;
3436 rtx_insn *seq;
3437 rtx inner;
3438 bool need_clobber;
3439 int i;
3440
3441 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3442
3443 /* If X is a push on the stack, do the push now and replace
3444 X with a reference to the stack pointer. */
3445 if (push_operand (x, mode))
3446 x = emit_move_resolve_push (mode, x);
3447
3448 /* If we are in reload, see if either operand is a MEM whose address
3449 is scheduled for replacement. */
3450 if (reload_in_progress && MEM_P (x)
3451 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3452 x = replace_equiv_address_nv (x, inner);
3453 if (reload_in_progress && MEM_P (y)
3454 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3455 y = replace_equiv_address_nv (y, inner);
3456
3457 start_sequence ();
3458
3459 need_clobber = false;
3460 for (i = 0;
3461 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3462 i++)
3463 {
3464 rtx xpart = operand_subword (x, i, 1, mode);
3465 rtx ypart;
3466
3467 /* Do not generate code for a move if it would come entirely
3468 from the undefined bits of a paradoxical subreg. */
3469 if (undefined_operand_subword_p (y, i))
3470 continue;
3471
3472 ypart = operand_subword (y, i, 1, mode);
3473
3474 /* If we can't get a part of Y, put Y into memory if it is a
3475 constant. Otherwise, force it into a register. Then we must
3476 be able to get a part of Y. */
3477 if (ypart == 0 && CONSTANT_P (y))
3478 {
3479 y = use_anchored_address (force_const_mem (mode, y));
3480 ypart = operand_subword (y, i, 1, mode);
3481 }
3482 else if (ypart == 0)
3483 ypart = operand_subword_force (y, i, mode);
3484
3485 gcc_assert (xpart && ypart);
3486
3487 need_clobber |= (GET_CODE (xpart) == SUBREG);
3488
3489 last_insn = emit_move_insn (xpart, ypart);
3490 }
3491
3492 seq = get_insns ();
3493 end_sequence ();
3494
3495 /* Show the output dies here. This is necessary for SUBREGs
3496 of pseudos since we cannot track their lifetimes correctly;
3497 hard regs shouldn't appear here except as return values.
3498 We never want to emit such a clobber after reload. */
3499 if (x != y
3500 && ! (reload_in_progress || reload_completed)
3501 && need_clobber != 0)
3502 emit_clobber (x);
3503
3504 emit_insn (seq);
3505
3506 return last_insn;
3507 }
3508
3509 /* Low level part of emit_move_insn.
3510 Called just like emit_move_insn, but assumes X and Y
3511 are basically valid. */
3512
3513 rtx_insn *
3514 emit_move_insn_1 (rtx x, rtx y)
3515 {
3516 machine_mode mode = GET_MODE (x);
3517 enum insn_code code;
3518
3519 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3520
3521 code = optab_handler (mov_optab, mode);
3522 if (code != CODE_FOR_nothing)
3523 return emit_insn (GEN_FCN (code) (x, y));
3524
3525 /* Expand complex moves by moving real part and imag part. */
3526 if (COMPLEX_MODE_P (mode))
3527 return emit_move_complex (mode, x, y);
3528
3529 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3530 || ALL_FIXED_POINT_MODE_P (mode))
3531 {
3532 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3533
3534 /* If we can't find an integer mode, use multi words. */
3535 if (result)
3536 return result;
3537 else
3538 return emit_move_multi_word (mode, x, y);
3539 }
3540
3541 if (GET_MODE_CLASS (mode) == MODE_CC)
3542 return emit_move_ccmode (mode, x, y);
3543
3544 /* Try using a move pattern for the corresponding integer mode. This is
3545 only safe when simplify_subreg can convert MODE constants into integer
3546 constants. At present, it can only do this reliably if the value
3547 fits within a HOST_WIDE_INT. */
3548 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3549 {
3550 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3551
3552 if (ret)
3553 {
3554 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3555 return ret;
3556 }
3557 }
3558
3559 return emit_move_multi_word (mode, x, y);
3560 }
3561
3562 /* Generate code to copy Y into X.
3563 Both Y and X must have the same mode, except that
3564 Y can be a constant with VOIDmode.
3565 This mode cannot be BLKmode; use emit_block_move for that.
3566
3567 Return the last instruction emitted. */
3568
3569 rtx_insn *
3570 emit_move_insn (rtx x, rtx y)
3571 {
3572 machine_mode mode = GET_MODE (x);
3573 rtx y_cst = NULL_RTX;
3574 rtx_insn *last_insn;
3575 rtx set;
3576
3577 gcc_assert (mode != BLKmode
3578 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3579
3580 if (CONSTANT_P (y))
3581 {
3582 if (optimize
3583 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3584 && (last_insn = compress_float_constant (x, y)))
3585 return last_insn;
3586
3587 y_cst = y;
3588
3589 if (!targetm.legitimate_constant_p (mode, y))
3590 {
3591 y = force_const_mem (mode, y);
3592
3593 /* If the target's cannot_force_const_mem prevented the spill,
3594 assume that the target's move expanders will also take care
3595 of the non-legitimate constant. */
3596 if (!y)
3597 y = y_cst;
3598 else
3599 y = use_anchored_address (y);
3600 }
3601 }
3602
3603 /* If X or Y are memory references, verify that their addresses are valid
3604 for the machine. */
3605 if (MEM_P (x)
3606 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3607 MEM_ADDR_SPACE (x))
3608 && ! push_operand (x, GET_MODE (x))))
3609 x = validize_mem (x);
3610
3611 if (MEM_P (y)
3612 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3613 MEM_ADDR_SPACE (y)))
3614 y = validize_mem (y);
3615
3616 gcc_assert (mode != BLKmode);
3617
3618 last_insn = emit_move_insn_1 (x, y);
3619
3620 if (y_cst && REG_P (x)
3621 && (set = single_set (last_insn)) != NULL_RTX
3622 && SET_DEST (set) == x
3623 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3624 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3625
3626 return last_insn;
3627 }
3628
3629 /* Generate the body of an instruction to copy Y into X.
3630 It may be a list of insns, if one insn isn't enough. */
3631
3632 rtx_insn *
3633 gen_move_insn (rtx x, rtx y)
3634 {
3635 rtx_insn *seq;
3636
3637 start_sequence ();
3638 emit_move_insn_1 (x, y);
3639 seq = get_insns ();
3640 end_sequence ();
3641 return seq;
3642 }
3643
3644 /* Same as above, but return rtx (used as a callback, which must have
3645 prototype compatible with other functions returning rtx). */
3646
3647 rtx
3648 gen_move_insn_uncast (rtx x, rtx y)
3649 {
3650 return gen_move_insn (x, y);
3651 }
3652
3653 /* If Y is representable exactly in a narrower mode, and the target can
3654 perform the extension directly from constant or memory, then emit the
3655 move as an extension. */
3656
3657 static rtx_insn *
3658 compress_float_constant (rtx x, rtx y)
3659 {
3660 machine_mode dstmode = GET_MODE (x);
3661 machine_mode orig_srcmode = GET_MODE (y);
3662 machine_mode srcmode;
3663 REAL_VALUE_TYPE r;
3664 int oldcost, newcost;
3665 bool speed = optimize_insn_for_speed_p ();
3666
3667 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3668
3669 if (targetm.legitimate_constant_p (dstmode, y))
3670 oldcost = set_src_cost (y, speed);
3671 else
3672 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3673
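/* Walk the float modes of this class from the narrowest upward,
   stopping before the original source mode; the first mode whose
   truncated value is exact, which the target can extend from, and
   which is no more expensive than the original constant is used.  */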
3674 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3675 srcmode != orig_srcmode;
3676 srcmode = GET_MODE_WIDER_MODE (srcmode))
3677 {
3678 enum insn_code ic;
3679 rtx trunc_y;
3680 rtx_insn *last_insn;
3681
3682 /* Skip if the target can't extend this way. */
3683 ic = can_extend_p (dstmode, srcmode, 0);
3684 if (ic == CODE_FOR_nothing)
3685 continue;
3686
3687 /* Skip if the narrowed value isn't exact. */
3688 if (! exact_real_truncate (srcmode, &r))
3689 continue;
3690
3691 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3692
3693 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3694 {
3695 /* Skip if the target needs extra instructions to perform
3696 the extension. */
3697 if (!insn_operand_matches (ic, 1, trunc_y))
3698 continue;
3699 /* This is valid, but may not be cheaper than the original. */
3700 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3701 speed);
3702 if (oldcost < newcost)
3703 continue;
3704 }
3705 else if (float_extend_from_mem[dstmode][srcmode])
3706 {
3707 trunc_y = force_const_mem (srcmode, trunc_y);
3708 /* This is valid, but may not be cheaper than the original. */
3709 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3710 speed);
3711 if (oldcost < newcost)
3712 continue;
3713 trunc_y = validize_mem (trunc_y);
3714 }
3715 else
3716 continue;
3717
3718 /* For CSE's benefit, force the compressed constant pool entry
3719 into a new pseudo. This constant may be used in different modes,
3720 and if not, combine will put things back together for us. */
3721 trunc_y = force_reg (srcmode, trunc_y);
3722
3723 /* If x is a hard register, perform the extension into a pseudo,
3724 so that e.g. stack realignment code is aware of it. */
3725 rtx target = x;
3726 if (REG_P (x) && HARD_REGISTER_P (x))
3727 target = gen_reg_rtx (dstmode);
3728
3729 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3730 last_insn = get_last_insn ();
3731
3732 if (REG_P (target))
3733 set_unique_reg_note (last_insn, REG_EQUAL, y);
3734
3735 if (target != x)
3736 return emit_move_insn (x, target);
3737 return last_insn;
3738 }
3739
3740 return NULL;
3741 }
3742 \f
3743 /* Pushing data onto the stack. */
3744
3745 /* Push a block of length SIZE (perhaps variable)
3746 and return an rtx to address the beginning of the block.
3747 The value may be virtual_outgoing_args_rtx.
3748
3749 EXTRA is the number of bytes of padding to push in addition to SIZE.
3750 BELOW nonzero means this padding comes at low addresses;
3751 otherwise, the padding comes at high addresses. */
3752
3753 rtx
3754 push_block (rtx size, int extra, int below)
3755 {
3756 rtx temp;
3757
3758 size = convert_modes (Pmode, ptr_mode, size, 1);
3759 if (CONSTANT_P (size))
3760 anti_adjust_stack (plus_constant (Pmode, size, extra));
3761 else if (REG_P (size) && extra == 0)
3762 anti_adjust_stack (size);
3763 else
3764 {
3765 temp = copy_to_mode_reg (Pmode, size);
3766 if (extra != 0)
3767 temp = expand_binop (Pmode, add_optab, temp,
3768 gen_int_mode (extra, Pmode),
3769 temp, 0, OPTAB_LIB_WIDEN);
3770 anti_adjust_stack (temp);
3771 }
3772
3773 if (STACK_GROWS_DOWNWARD)
3774 {
3775 temp = virtual_outgoing_args_rtx;
3776 if (extra != 0 && below)
3777 temp = plus_constant (Pmode, temp, extra);
3778 }
3779 else
3780 {
3781 if (CONST_INT_P (size))
3782 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3783 -INTVAL (size) - (below ? 0 : extra));
3784 else if (extra != 0 && !below)
3785 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3786 negate_rtx (Pmode, plus_constant (Pmode, size,
3787 extra)));
3788 else
3789 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3790 negate_rtx (Pmode, size));
3791 }
3792
3793 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3794 }
3795
3796 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3797
3798 static rtx
3799 mem_autoinc_base (rtx mem)
3800 {
3801 if (MEM_P (mem))
3802 {
3803 rtx addr = XEXP (mem, 0);
3804 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3805 return XEXP (addr, 0);
3806 }
3807 return NULL;
3808 }
3809
3810 /* A utility routine used here, in reload, and in try_split. The insns
3811 after PREV up to and including LAST are known to adjust the stack,
3812 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3813 placing notes as appropriate. PREV may be NULL, indicating the
3814 entire insn sequence prior to LAST should be scanned.
3815
3816 The set of allowed stack pointer modifications is small:
3817 (1) One or more auto-inc style memory references (aka pushes),
3818 (2) One or more addition/subtraction with the SP as destination,
3819 (3) A single move insn with the SP as destination,
3820 (4) A call_pop insn,
3821 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3822
3823 Insns in the sequence that do not modify the SP are ignored,
3824 except for noreturn calls.
3825
3826 The return value is the amount of adjustment that can be trivially
3827 verified, via immediate operand or auto-inc. If the adjustment
3828 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
3829
3830 HOST_WIDE_INT
3831 find_args_size_adjust (rtx_insn *insn)
3832 {
3833 rtx dest, set, pat;
3834 int i;
3835
3836 pat = PATTERN (insn);
3837 set = NULL;
3838
3839 /* Look for a call_pop pattern. */
3840 if (CALL_P (insn))
3841 {
3842 /* We have to allow non-call_pop patterns for the case
3843 of emit_single_push_insn of a TLS address. */
3844 if (GET_CODE (pat) != PARALLEL)
3845 return 0;
3846
3847 /* All call_pop patterns have a stack pointer adjust in the parallel.
3848 The call itself is always first, and the stack adjust is
3849 usually last, so search from the end. */
3850 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3851 {
3852 set = XVECEXP (pat, 0, i);
3853 if (GET_CODE (set) != SET)
3854 continue;
3855 dest = SET_DEST (set);
3856 if (dest == stack_pointer_rtx)
3857 break;
3858 }
3859 /* We'd better have found the stack pointer adjust. */
3860 if (i == 0)
3861 return 0;
3862 /* Fall through to process the extracted SET and DEST
3863 as if it was a standalone insn. */
3864 }
3865 else if (GET_CODE (pat) == SET)
3866 set = pat;
3867 else if ((set = single_set (insn)) != NULL)
3868 ;
3869 else if (GET_CODE (pat) == PARALLEL)
3870 {
3871 /* ??? Some older ports use a parallel with a stack adjust
3872 and a store for a PUSH_ROUNDING pattern, rather than a
3873 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3874 /* ??? See h8300 and m68k, pushqi1. */
3875 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3876 {
3877 set = XVECEXP (pat, 0, i);
3878 if (GET_CODE (set) != SET)
3879 continue;
3880 dest = SET_DEST (set);
3881 if (dest == stack_pointer_rtx)
3882 break;
3883
3884 /* We do not expect an auto-inc of the sp in the parallel. */
3885 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3886 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3887 != stack_pointer_rtx);
3888 }
3889 if (i < 0)
3890 return 0;
3891 }
3892 else
3893 return 0;
3894
3895 dest = SET_DEST (set);
3896
3897 /* Look for direct modifications of the stack pointer. */
3898 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3899 {
3900 /* Look for a trivial adjustment, otherwise assume nothing. */
3901 /* Note that the SPU restore_stack_block pattern refers to
3902 the stack pointer in V4SImode. Consider that non-trivial. */
3903 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3904 && GET_CODE (SET_SRC (set)) == PLUS
3905 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3906 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3907 return INTVAL (XEXP (SET_SRC (set), 1));
3908 /* ??? Reload can generate no-op moves, which will be cleaned
3909 up later. Recognize it and continue searching. */
3910 else if (rtx_equal_p (dest, SET_SRC (set)))
3911 return 0;
3912 else
3913 return HOST_WIDE_INT_MIN;
3914 }
3915 else
3916 {
3917 rtx mem, addr;
3918
3919 /* Otherwise only think about autoinc patterns. */
3920 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3921 {
3922 mem = dest;
3923 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3924 != stack_pointer_rtx);
3925 }
3926 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3927 mem = SET_SRC (set);
3928 else
3929 return 0;
3930
3931 addr = XEXP (mem, 0);
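/* For example, a push of a 4-byte SImode value through
   (pre_dec (reg sp)) yields -4 here, while a pop-style load through
   (post_inc (reg sp)) of the same mode yields +4.  */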
3932 switch (GET_CODE (addr))
3933 {
3934 case PRE_INC:
3935 case POST_INC:
3936 return GET_MODE_SIZE (GET_MODE (mem));
3937 case PRE_DEC:
3938 case POST_DEC:
3939 return -GET_MODE_SIZE (GET_MODE (mem));
3940 case PRE_MODIFY:
3941 case POST_MODIFY:
3942 addr = XEXP (addr, 1);
3943 gcc_assert (GET_CODE (addr) == PLUS);
3944 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3945 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3946 return INTVAL (XEXP (addr, 1));
3947 default:
3948 gcc_unreachable ();
3949 }
3950 }
3951 }
3952
3953 int
3954 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3955 {
3956 int args_size = end_args_size;
3957 bool saw_unknown = false;
3958 rtx_insn *insn;
3959
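/* Walk backward from LAST: the REG_ARGS_SIZE note added to each insn
   records the args-size value in effect after that insn, and
   ARGS_SIZE is then stepped back past the insn's own stack
   adjustment to get the value in effect before it.  */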
3960 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3961 {
3962 HOST_WIDE_INT this_delta;
3963
3964 if (!NONDEBUG_INSN_P (insn))
3965 continue;
3966
3967 this_delta = find_args_size_adjust (insn);
3968 if (this_delta == 0)
3969 {
3970 if (!CALL_P (insn)
3971 || ACCUMULATE_OUTGOING_ARGS
3972 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3973 continue;
3974 }
3975
3976 gcc_assert (!saw_unknown);
3977 if (this_delta == HOST_WIDE_INT_MIN)
3978 saw_unknown = true;
3979
3980 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3981 if (STACK_GROWS_DOWNWARD)
3982 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3983
3984 args_size -= this_delta;
3985 }
3986
3987 return saw_unknown ? INT_MIN : args_size;
3988 }
3989
3990 #ifdef PUSH_ROUNDING
3991 /* Emit single push insn. */
3992
3993 static void
3994 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
3995 {
3996 rtx dest_addr;
3997 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3998 rtx dest;
3999 enum insn_code icode;
4000
4001 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4002 /* If there is a push pattern, use it. Otherwise try the old way of
4003 throwing a MEM representing the push operation to the move expander. */
4004 icode = optab_handler (push_optab, mode);
4005 if (icode != CODE_FOR_nothing)
4006 {
4007 struct expand_operand ops[1];
4008
4009 create_input_operand (&ops[0], x, mode);
4010 if (maybe_expand_insn (icode, 1, ops))
4011 return;
4012 }
4013 if (GET_MODE_SIZE (mode) == rounded_size)
4014 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4015 /* If we are to pad downward, adjust the stack pointer first and
4016 then store X into the stack location using an offset. This is
4017 because emit_move_insn does not know how to pad; it does not have
4018 access to type. */
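/* For instance, on a downward-growing stack with pre-decrement
   pushes, a single QImode byte whose slot PUSH_ROUNDING rounds up to
   4 bytes is handled by moving the stack pointer down 4 bytes and
   then storing the byte 3 bytes into the slot, so the padding sits
   below the value.  */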
4019 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4020 {
4021 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4022 HOST_WIDE_INT offset;
4023
4024 emit_move_insn (stack_pointer_rtx,
4025 expand_binop (Pmode,
4026 STACK_GROWS_DOWNWARD ? sub_optab
4027 : add_optab,
4028 stack_pointer_rtx,
4029 gen_int_mode (rounded_size, Pmode),
4030 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4031
4032 offset = (HOST_WIDE_INT) padding_size;
4033 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
4034 /* We have already decremented the stack pointer, so get the
4035 previous value. */
4036 offset += (HOST_WIDE_INT) rounded_size;
4037
4038 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
4039 /* We have already incremented the stack pointer, so get the
4040 previous value. */
4041 offset -= (HOST_WIDE_INT) rounded_size;
4042
4043 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4044 gen_int_mode (offset, Pmode));
4045 }
4046 else
4047 {
4048 if (STACK_GROWS_DOWNWARD)
4049 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4050 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4051 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4052 Pmode));
4053 else
4054 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4055 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4056 gen_int_mode (rounded_size, Pmode));
4057
4058 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4059 }
4060
4061 dest = gen_rtx_MEM (mode, dest_addr);
4062
4063 if (type != 0)
4064 {
4065 set_mem_attributes (dest, type, 1);
4066
4067 if (cfun->tail_call_marked)
4068 /* Function incoming arguments may overlap with sibling call
4069 outgoing arguments and we cannot allow reordering of reads
4070 from function arguments with stores to outgoing arguments
4071 of sibling calls. */
4072 set_mem_alias_set (dest, 0);
4073 }
4074 emit_move_insn (dest, x);
4075 }
4076
4077 /* Emit and annotate a single push insn. */
4078
4079 static void
4080 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4081 {
4082 int delta, old_delta = stack_pointer_delta;
4083 rtx_insn *prev = get_last_insn ();
4084 rtx_insn *last;
4085
4086 emit_single_push_insn_1 (mode, x, type);
4087
4088 last = get_last_insn ();
4089
4090 /* Notice the common case where we emitted exactly one insn. */
4091 if (PREV_INSN (last) == prev)
4092 {
4093 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4094 return;
4095 }
4096
4097 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4098 gcc_assert (delta == INT_MIN || delta == old_delta);
4099 }
4100 #endif
4101
4102 /* If reading SIZE bytes from X will end up reading from
4103 Y, return the number of bytes that overlap. Return -1
4104 if there is no overlap, or -2 if we can't determine
4105 (for example when X and Y have different base registers). */
4106
4107 static int
4108 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4109 {
4110 rtx tmp = plus_constant (Pmode, x, size);
4111 rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4112
4113 if (!CONST_INT_P (sub))
4114 return -2;
4115
4116 HOST_WIDE_INT val = INTVAL (sub);
4117
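/* For instance, if X is the stack pointer, Y is SP + 8 and SIZE is
   16, VAL simplifies to 8: the last 8 bytes of the read from X come
   from Y.  */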
4118 return IN_RANGE (val, 1, size) ? val : -1;
4119 }
4120
4121 /* Generate code to push X onto the stack, assuming it has mode MODE and
4122 type TYPE.
4123 MODE is redundant except when X is a CONST_INT (since they don't
4124 carry mode info).
4125 SIZE is an rtx for the size of data to be copied (in bytes),
4126 needed only if X is BLKmode.
4127 Return true if successful. May return false if asked to push a
4128 partial argument during a sibcall optimization (as specified by
4129 SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4130 to not overlap.
4131
4132 ALIGN (in bits) is maximum alignment we can assume.
4133
4134 If PARTIAL and REG are both nonzero, then copy that many of the first
4135 bytes of X into registers starting with REG, and push the rest of X.
4136 The amount of space pushed is decreased by PARTIAL bytes.
4137 REG must be a hard register in this case.
4138 If REG is zero but PARTIAL is not, take all the other actions for an
4139 argument partially in registers, but do not actually load any
4140 registers.
4141
4142 EXTRA is the amount in bytes of extra space to leave next to this arg.
4143 This is ignored if an argument block has already been allocated.
4144
4145 On a machine that lacks real push insns, ARGS_ADDR is the address of
4146 the bottom of the argument block for this call. We use indexing off there
4147 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4148 argument block has not been preallocated.
4149
4150 ARGS_SO_FAR is the size of args previously pushed for this call.
4151
4152 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4153 for arguments passed in registers. If nonzero, it will be the number
4154 of bytes required. */
4155
4156 bool
4157 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4158 unsigned int align, int partial, rtx reg, int extra,
4159 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4160 rtx alignment_pad, bool sibcall_p)
4161 {
4162 rtx xinner;
4163 enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
4164
4165 /* Decide where to pad the argument: `downward' for below,
4166 `upward' for above, or `none' for don't pad it.
4167 Default is below for small data on big-endian machines; else above. */
4168 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4169
4170 /* Invert direction if stack is post-decrement.
4171 FIXME: why? */
4172 if (STACK_PUSH_CODE == POST_DEC)
4173 if (where_pad != none)
4174 where_pad = (where_pad == downward ? upward : downward);
4175
4176 xinner = x;
4177
4178 int nregs = partial / UNITS_PER_WORD;
4179 rtx *tmp_regs = NULL;
4180 int overlapping = 0;
4181
4182 if (mode == BLKmode
4183 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4184 {
4185 /* Copy a block into the stack, entirely or partially. */
4186
4187 rtx temp;
4188 int used;
4189 int offset;
4190 int skip;
4191
4192 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4193 used = partial - offset;
4194
4195 if (mode != BLKmode)
4196 {
4197 /* A value is to be stored in an insufficiently aligned
4198 stack slot; copy via a suitably aligned slot if
4199 necessary. */
4200 size = GEN_INT (GET_MODE_SIZE (mode));
4201 if (!MEM_P (xinner))
4202 {
4203 temp = assign_temp (type, 1, 1);
4204 emit_move_insn (temp, xinner);
4205 xinner = temp;
4206 }
4207 }
4208
4209 gcc_assert (size);
4210
4211 /* USED is now the # of bytes we need not copy to the stack
4212 because registers will take care of them. */
4213
4214 if (partial != 0)
4215 xinner = adjust_address (xinner, BLKmode, used);
4216
4217 /* If the partial register-part of the arg counts in its stack size,
4218 skip the part of stack space corresponding to the registers.
4219 Otherwise, start copying to the beginning of the stack space,
4220 by setting SKIP to 0. */
4221 skip = (reg_parm_stack_space == 0) ? 0 : used;
4222
4223 #ifdef PUSH_ROUNDING
4224 /* Do it with several push insns if that doesn't take lots of insns
4225 and if there is no difficulty with push insns that skip bytes
4226 on the stack for alignment purposes. */
4227 if (args_addr == 0
4228 && PUSH_ARGS
4229 && CONST_INT_P (size)
4230 && skip == 0
4231 && MEM_ALIGN (xinner) >= align
4232 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4233 /* Here we avoid the case of a structure whose weak alignment
4234 forces many pushes of a small amount of data,
4235 and such small pushes do rounding that causes trouble. */
4236 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4237 || align >= BIGGEST_ALIGNMENT
4238 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4239 == (align / BITS_PER_UNIT)))
4240 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4241 {
4242 /* Push padding now if padding above and stack grows down,
4243 or if padding below and stack grows up.
4244 But if space already allocated, this has already been done. */
4245 if (extra && args_addr == 0
4246 && where_pad != none && where_pad != stack_direction)
4247 anti_adjust_stack (GEN_INT (extra));
4248
4249 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4250 }
4251 else
4252 #endif /* PUSH_ROUNDING */
4253 {
4254 rtx target;
4255
4256 /* Otherwise make space on the stack and copy the data
4257 to the address of that space. */
4258
4259 /* Deduct words put into registers from the size we must copy. */
4260 if (partial != 0)
4261 {
4262 if (CONST_INT_P (size))
4263 size = GEN_INT (INTVAL (size) - used);
4264 else
4265 size = expand_binop (GET_MODE (size), sub_optab, size,
4266 gen_int_mode (used, GET_MODE (size)),
4267 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4268 }
4269
4270 /* Get the address of the stack space.
4271 In this case, we do not deal with EXTRA separately.
4272 A single stack adjust will do. */
4273 if (! args_addr)
4274 {
4275 temp = push_block (size, extra, where_pad == downward);
4276 extra = 0;
4277 }
4278 else if (CONST_INT_P (args_so_far))
4279 temp = memory_address (BLKmode,
4280 plus_constant (Pmode, args_addr,
4281 skip + INTVAL (args_so_far)));
4282 else
4283 temp = memory_address (BLKmode,
4284 plus_constant (Pmode,
4285 gen_rtx_PLUS (Pmode,
4286 args_addr,
4287 args_so_far),
4288 skip));
4289
4290 if (!ACCUMULATE_OUTGOING_ARGS)
4291 {
4292 /* If the source is referenced relative to the stack pointer,
4293 copy it to another register to stabilize it. We do not need
4294 to do this if we know that we won't be changing sp. */
4295
4296 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4297 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4298 temp = copy_to_reg (temp);
4299 }
4300
4301 target = gen_rtx_MEM (BLKmode, temp);
4302
4303 /* We do *not* set_mem_attributes here, because incoming arguments
4304 may overlap with sibling call outgoing arguments and we cannot
4305 allow reordering of reads from function arguments with stores
4306 to outgoing arguments of sibling calls. We do, however, want
4307 to record the alignment of the stack slot. */
4308 /* ALIGN may well be better aligned than TYPE, e.g. due to
4309 PARM_BOUNDARY. Assume the caller isn't lying. */
4310 set_mem_align (target, align);
4311
4312 /* If part should go in registers and pushing to that part would
4313 overwrite some of the values that need to go into regs, load the
4314 overlapping values into temporary pseudos to be moved into the hard
4315 regs at the end after the stack pushing has completed.
4316 We cannot load them directly into the hard regs here because
4317 they can be clobbered by the block move expansions.
4318 See PR 65358. */
4319
4320 if (partial > 0 && reg != 0 && mode == BLKmode
4321 && GET_CODE (reg) != PARALLEL)
4322 {
4323 overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4324 if (overlapping > 0)
4325 {
4326 gcc_assert (overlapping % UNITS_PER_WORD == 0);
4327 overlapping /= UNITS_PER_WORD;
4328
4329 tmp_regs = XALLOCAVEC (rtx, overlapping);
4330
4331 for (int i = 0; i < overlapping; i++)
4332 tmp_regs[i] = gen_reg_rtx (word_mode);
4333
4334 for (int i = 0; i < overlapping; i++)
4335 emit_move_insn (tmp_regs[i],
4336 operand_subword_force (target, i, mode));
4337 }
4338 else if (overlapping == -1)
4339 overlapping = 0;
4340 /* Could not determine whether there is overlap.
4341 Fail the sibcall. */
4342 else
4343 {
4344 overlapping = 0;
4345 if (sibcall_p)
4346 return false;
4347 }
4348 }
4349 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4350 }
4351 }
4352 else if (partial > 0)
4353 {
4354 /* Scalar partly in registers. */
4355
4356 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4357 int i;
4358 int not_stack;
4359 /* # bytes of start of argument
4360 that we must make space for but need not store. */
4361 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4362 int args_offset = INTVAL (args_so_far);
4363 int skip;
4364
4365 /* Push padding now if padding above and stack grows down,
4366 or if padding below and stack grows up.
4367 But if space already allocated, this has already been done. */
4368 if (extra && args_addr == 0
4369 && where_pad != none && where_pad != stack_direction)
4370 anti_adjust_stack (GEN_INT (extra));
4371
4372 /* If we make space by pushing it, we might as well push
4373 the real data. Otherwise, we can leave OFFSET nonzero
4374 and leave the space uninitialized. */
4375 if (args_addr == 0)
4376 offset = 0;
4377
4378 /* Now NOT_STACK gets the number of words that we don't need to
4379 allocate on the stack. Convert OFFSET to words too. */
4380 not_stack = (partial - offset) / UNITS_PER_WORD;
4381 offset /= UNITS_PER_WORD;
4382
4383 /* If the partial register-part of the arg counts in its stack size,
4384 skip the part of stack space corresponding to the registers.
4385 Otherwise, start copying to the beginning of the stack space,
4386 by setting SKIP to 0. */
4387 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4388
4389 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4390 x = validize_mem (force_const_mem (mode, x));
4391
4392 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4393 SUBREGs of such registers are not allowed. */
4394 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4395 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4396 x = copy_to_reg (x);
4397
4398 /* Loop over all the words allocated on the stack for this arg. */
4399 /* We can do it by words, because any scalar bigger than a word
4400 has a size that is a multiple of a word. */
4401 for (i = size - 1; i >= not_stack; i--)
4402 if (i >= not_stack + offset)
4403 if (!emit_push_insn (operand_subword_force (x, i, mode),
4404 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4405 0, args_addr,
4406 GEN_INT (args_offset + ((i - not_stack + skip)
4407 * UNITS_PER_WORD)),
4408 reg_parm_stack_space, alignment_pad, sibcall_p))
4409 return false;
4410 }
4411 else
4412 {
4413 rtx addr;
4414 rtx dest;
4415
4416 /* Push padding now if padding above and stack grows down,
4417 or if padding below and stack grows up.
4418 But if space already allocated, this has already been done. */
4419 if (extra && args_addr == 0
4420 && where_pad != none && where_pad != stack_direction)
4421 anti_adjust_stack (GEN_INT (extra));
4422
4423 #ifdef PUSH_ROUNDING
4424 if (args_addr == 0 && PUSH_ARGS)
4425 emit_single_push_insn (mode, x, type);
4426 else
4427 #endif
4428 {
4429 if (CONST_INT_P (args_so_far))
4430 addr
4431 = memory_address (mode,
4432 plus_constant (Pmode, args_addr,
4433 INTVAL (args_so_far)));
4434 else
4435 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4436 args_so_far));
4437 dest = gen_rtx_MEM (mode, addr);
4438
4439 /* We do *not* set_mem_attributes here, because incoming arguments
4440 may overlap with sibling call outgoing arguments and we cannot
4441 allow reordering of reads from function arguments with stores
4442 to outgoing arguments of sibling calls. We do, however, want
4443 to record the alignment of the stack slot. */
4444 /* ALIGN may well be better aligned than TYPE, e.g. due to
4445 PARM_BOUNDARY. Assume the caller isn't lying. */
4446 set_mem_align (dest, align);
4447
4448 emit_move_insn (dest, x);
4449 }
4450 }
4451
4452 /* Move the partial arguments into the registers and any overlapping
4453 values that we moved into the pseudos in tmp_regs. */
4454 if (partial > 0 && reg != 0)
4455 {
4456 /* Handle calls that pass values in multiple non-contiguous locations.
4457 The Irix 6 ABI has examples of this. */
4458 if (GET_CODE (reg) == PARALLEL)
4459 emit_group_load (reg, x, type, -1);
4460 else
4461 {
4462 gcc_assert (partial % UNITS_PER_WORD == 0);
4463 move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4464
4465 for (int i = 0; i < overlapping; i++)
4466 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4467 + nregs - overlapping + i),
4468 tmp_regs[i]);
4469
4470 }
4471 }
4472
4473 if (extra && args_addr == 0 && where_pad == stack_direction)
4474 anti_adjust_stack (GEN_INT (extra));
4475
4476 if (alignment_pad && args_addr == 0)
4477 anti_adjust_stack (alignment_pad);
4478
4479 return true;
4480 }
4481 \f
4482 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4483 operations. */
4484
4485 static rtx
4486 get_subtarget (rtx x)
4487 {
4488 return (optimize
4489 || x == 0
4490 /* Only registers can be subtargets. */
4491 || !REG_P (x)
4492 /* Don't use hard regs to avoid extending their life. */
4493 || REGNO (x) < FIRST_PSEUDO_REGISTER
4494 ? 0 : x);
4495 }
4496
4497 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4498 FIELD is a bitfield. Returns true if the optimization was successful,
4499 and there's nothing else to do. */
4500
4501 static bool
4502 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4503 unsigned HOST_WIDE_INT bitpos,
4504 unsigned HOST_WIDE_INT bitregion_start,
4505 unsigned HOST_WIDE_INT bitregion_end,
4506 machine_mode mode1, rtx str_rtx,
4507 tree to, tree src)
4508 {
4509 machine_mode str_mode = GET_MODE (str_rtx);
4510 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4511 tree op0, op1;
4512 rtx value, result;
4513 optab binop;
4514 gimple srcstmt;
4515 enum tree_code code;
4516
4517 if (mode1 != VOIDmode
4518 || bitsize >= BITS_PER_WORD
4519 || str_bitsize > BITS_PER_WORD
4520 || TREE_SIDE_EFFECTS (to)
4521 || TREE_THIS_VOLATILE (to))
4522 return false;
4523
4524 STRIP_NOPS (src);
4525 if (TREE_CODE (src) != SSA_NAME)
4526 return false;
4527 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4528 return false;
4529
4530 srcstmt = get_gimple_for_ssa_name (src);
4531 if (!srcstmt
4532 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4533 return false;
4534
4535 code = gimple_assign_rhs_code (srcstmt);
4536
4537 op0 = gimple_assign_rhs1 (srcstmt);
4538
4539 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4540 to find its initialization. Hopefully the initialization will
4541 be from a bitfield load. */
4542 if (TREE_CODE (op0) == SSA_NAME)
4543 {
4544 gimple op0stmt = get_gimple_for_ssa_name (op0);
4545
4546 /* We want to eventually have OP0 be the same as TO, which
4547 should be a bitfield. */
4548 if (!op0stmt
4549 || !is_gimple_assign (op0stmt)
4550 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4551 return false;
4552 op0 = gimple_assign_rhs1 (op0stmt);
4553 }
4554
4555 op1 = gimple_assign_rhs2 (srcstmt);
4556
4557 if (!operand_equal_p (to, op0, 0))
4558 return false;
4559
4560 if (MEM_P (str_rtx))
4561 {
4562 unsigned HOST_WIDE_INT offset1;
4563
4564 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4565 str_mode = word_mode;
4566 str_mode = get_best_mode (bitsize, bitpos,
4567 bitregion_start, bitregion_end,
4568 MEM_ALIGN (str_rtx), str_mode, 0);
4569 if (str_mode == VOIDmode)
4570 return false;
4571 str_bitsize = GET_MODE_BITSIZE (str_mode);
4572
4573 offset1 = bitpos;
4574 bitpos %= str_bitsize;
4575 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4576 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4577 }
4578 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4579 return false;
4580
4581 /* If the bit field covers the whole REG/MEM, store_field
4582 will likely generate better code. */
4583 if (bitsize >= str_bitsize)
4584 return false;
4585
4586 /* We can't handle fields split across multiple entities. */
4587 if (bitpos + bitsize > str_bitsize)
4588 return false;
4589
4590 if (BYTES_BIG_ENDIAN)
4591 bitpos = str_bitsize - bitpos - bitsize;
4592
4593 switch (code)
4594 {
4595 case PLUS_EXPR:
4596 case MINUS_EXPR:
4597 /* For now, just optimize the case of the topmost bitfield
4598 where we don't need to do any masking and also
4599 1 bit bitfields where xor can be used.
4600 We might win by one instruction for the other bitfields
4601 too if insv/extv instructions aren't used, so that
4602 can be added later. */
4603 if (bitpos + bitsize != str_bitsize
4604 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4605 break;
4606
4607 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4608 value = convert_modes (str_mode,
4609 TYPE_MODE (TREE_TYPE (op1)), value,
4610 TYPE_UNSIGNED (TREE_TYPE (op1)));
4611
4612 /* We may be accessing data outside the field, which means
4613 we can alias adjacent data. */
4614 if (MEM_P (str_rtx))
4615 {
4616 str_rtx = shallow_copy_rtx (str_rtx);
4617 set_mem_alias_set (str_rtx, 0);
4618 set_mem_expr (str_rtx, 0);
4619 }
4620
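/* No masking of VALUE is needed for the topmost bitfield, since any
   carry out of the field simply falls off the top of the word.  For
   a 1-bit field elsewhere, adding or subtracting a constant modulo 2
   is the same as conditionally flipping the bit, so mask the addend
   down to one bit and use XOR instead.  */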
4621 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4622 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4623 {
4624 value = expand_and (str_mode, value, const1_rtx, NULL);
4625 binop = xor_optab;
4626 }
4627 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4628 result = expand_binop (str_mode, binop, str_rtx,
4629 value, str_rtx, 1, OPTAB_WIDEN);
4630 if (result != str_rtx)
4631 emit_move_insn (str_rtx, result);
4632 return true;
4633
4634 case BIT_IOR_EXPR:
4635 case BIT_XOR_EXPR:
4636 if (TREE_CODE (op1) != INTEGER_CST)
4637 break;
4638 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4639 value = convert_modes (str_mode,
4640 TYPE_MODE (TREE_TYPE (op1)), value,
4641 TYPE_UNSIGNED (TREE_TYPE (op1)));
4642
4643 /* We may be accessing data outside the field, which means
4644 we can alias adjacent data. */
4645 if (MEM_P (str_rtx))
4646 {
4647 str_rtx = shallow_copy_rtx (str_rtx);
4648 set_mem_alias_set (str_rtx, 0);
4649 set_mem_expr (str_rtx, 0);
4650 }
4651
4652 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4653 if (bitpos + bitsize != str_bitsize)
4654 {
4655 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4656 str_mode);
4657 value = expand_and (str_mode, value, mask, NULL_RTX);
4658 }
4659 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4660 result = expand_binop (str_mode, binop, str_rtx,
4661 value, str_rtx, 1, OPTAB_WIDEN);
4662 if (result != str_rtx)
4663 emit_move_insn (str_rtx, result);
4664 return true;
4665
4666 default:
4667 break;
4668 }
4669
4670 return false;
4671 }
4672
4673 /* In the C++ memory model, consecutive bit fields in a structure are
4674 considered one memory location.
4675
4676 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4677 returns the bit range of consecutive bits in which this COMPONENT_REF
4678 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4679 and *OFFSET may be adjusted in the process.
4680
4681 If the access does not need to be restricted, 0 is returned in both
4682 *BITSTART and *BITEND. */
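/* For example, with struct S { int a : 3; int b : 5; }, the fields A
   and B normally share one DECL_BIT_FIELD_REPRESENTATIVE, so a store
   to either of them is given a bit range covering both, and must not
   touch bits outside that representative.  */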
4683
4684 static void
4685 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4686 unsigned HOST_WIDE_INT *bitend,
4687 tree exp,
4688 HOST_WIDE_INT *bitpos,
4689 tree *offset)
4690 {
4691 HOST_WIDE_INT bitoffset;
4692 tree field, repr;
4693
4694 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4695
4696 field = TREE_OPERAND (exp, 1);
4697 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4698 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4699 need to limit the range we can access. */
4700 if (!repr)
4701 {
4702 *bitstart = *bitend = 0;
4703 return;
4704 }
4705
4706 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4707 part of a larger bit field, then the representative does not serve any
4708 useful purpose. This can occur in Ada. */
4709 if (handled_component_p (TREE_OPERAND (exp, 0)))
4710 {
4711 machine_mode rmode;
4712 HOST_WIDE_INT rbitsize, rbitpos;
4713 tree roffset;
4714 int unsignedp;
4715 int volatilep = 0;
4716 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4717 &roffset, &rmode, &unsignedp, &volatilep, false);
4718 if ((rbitpos % BITS_PER_UNIT) != 0)
4719 {
4720 *bitstart = *bitend = 0;
4721 return;
4722 }
4723 }
4724
4725 /* Compute the adjustment to bitpos from the offset of the field
4726 relative to the representative. DECL_FIELD_OFFSET of field and
4727 repr are the same by construction if they are not constants,
4728 see finish_bitfield_layout. */
4729 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4730 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4731 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4732 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4733 else
4734 bitoffset = 0;
4735 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4736 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4737
4738 /* If the adjustment is larger than bitpos, we would have a negative bit
4739 position for the lower bound and this may wreak havoc later. Adjust
4740 offset and bitpos to make the lower bound non-negative in that case. */
4741 if (bitoffset > *bitpos)
4742 {
4743 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4744 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4745
4746 *bitpos += adjust;
4747 if (*offset == NULL_TREE)
4748 *offset = size_int (-adjust / BITS_PER_UNIT);
4749 else
4750 *offset
4751 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4752 *bitstart = 0;
4753 }
4754 else
4755 *bitstart = *bitpos - bitoffset;
4756
4757 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4758 }
4759
4760 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4761 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4762 DECL_RTL was not set yet, return NORTL. */
4763
4764 static inline bool
4765 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4766 {
4767 if (TREE_CODE (addr) != ADDR_EXPR)
4768 return false;
4769
4770 tree base = TREE_OPERAND (addr, 0);
4771
4772 if (!DECL_P (base)
4773 || TREE_ADDRESSABLE (base)
4774 || DECL_MODE (base) == BLKmode)
4775 return false;
4776
4777 if (!DECL_RTL_SET_P (base))
4778 return nortl;
4779
4780 return (!MEM_P (DECL_RTL (base)));
4781 }
4782
4783 /* Returns true if the MEM_REF REF refers to an object that does not
4784 reside in memory and has non-BLKmode. */
4785
4786 static inline bool
4787 mem_ref_refers_to_non_mem_p (tree ref)
4788 {
4789 tree base = TREE_OPERAND (ref, 0);
4790 return addr_expr_of_non_mem_decl_p_1 (base, false);
4791 }
4792
4793 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4794 is true, try generating a nontemporal store. */
4795
4796 void
4797 expand_assignment (tree to, tree from, bool nontemporal)
4798 {
4799 rtx to_rtx = 0;
4800 rtx result;
4801 machine_mode mode;
4802 unsigned int align;
4803 enum insn_code icode;
4804
4805 /* Don't crash if the lhs of the assignment was erroneous. */
4806 if (TREE_CODE (to) == ERROR_MARK)
4807 {
4808 expand_normal (from);
4809 return;
4810 }
4811
4812 /* Optimize away no-op moves without side-effects. */
4813 if (operand_equal_p (to, from, 0))
4814 return;
4815
4816 /* Handle misaligned stores. */
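/* That is, the destination is a MEM whose known alignment is below
   the natural alignment of its mode, and either the target provides
   a movmisalign<mode> pattern or a plain unaligned access would be
   slow; in the latter case the value is stored via store_bit_field
   instead.  */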
4817 mode = TYPE_MODE (TREE_TYPE (to));
4818 if ((TREE_CODE (to) == MEM_REF
4819 || TREE_CODE (to) == TARGET_MEM_REF)
4820 && mode != BLKmode
4821 && !mem_ref_refers_to_non_mem_p (to)
4822 && ((align = get_object_alignment (to))
4823 < GET_MODE_ALIGNMENT (mode))
4824 && (((icode = optab_handler (movmisalign_optab, mode))
4825 != CODE_FOR_nothing)
4826 || SLOW_UNALIGNED_ACCESS (mode, align)))
4827 {
4828 rtx reg, mem;
4829
4830 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4831 reg = force_not_mem (reg);
4832 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4833
4834 if (icode != CODE_FOR_nothing)
4835 {
4836 struct expand_operand ops[2];
4837
4838 create_fixed_operand (&ops[0], mem);
4839 create_input_operand (&ops[1], reg, mode);
4840 /* The movmisalign<mode> pattern cannot fail, else the assignment
4841 would silently be omitted. */
4842 expand_insn (icode, 2, ops);
4843 }
4844 else
4845 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4846 return;
4847 }
4848
4849 /* Assignment of a structure component needs special treatment
4850 if the structure component's rtx is not simply a MEM.
4851 Assignment of an array element at a constant index, and assignment of
4852 an array element in an unaligned packed structure field, has the same
4853 problem. Same for (partially) storing into a non-memory object. */
4854 if (handled_component_p (to)
4855 || (TREE_CODE (to) == MEM_REF
4856 && mem_ref_refers_to_non_mem_p (to))
4857 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4858 {
4859 machine_mode mode1;
4860 HOST_WIDE_INT bitsize, bitpos;
4861 unsigned HOST_WIDE_INT bitregion_start = 0;
4862 unsigned HOST_WIDE_INT bitregion_end = 0;
4863 tree offset;
4864 int unsignedp;
4865 int volatilep = 0;
4866 tree tem;
4867
4868 push_temp_slots ();
4869 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4870 &unsignedp, &volatilep, true);
4871
4872 /* Make sure bitpos is not negative, it can wreak havoc later. */
4873 if (bitpos < 0)
4874 {
4875 gcc_assert (offset == NULL_TREE);
4876 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4877 ? 3 : exact_log2 (BITS_PER_UNIT)));
4878 bitpos &= BITS_PER_UNIT - 1;
4879 }
4880
4881 if (TREE_CODE (to) == COMPONENT_REF
4882 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4883 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4884 /* The C++ memory model naturally applies to byte-aligned fields.
4885 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4886 BITSIZE are not byte-aligned, there is no need to limit the range
4887 we can access. This can occur with packed structures in Ada. */
4888 else if (bitsize > 0
4889 && bitsize % BITS_PER_UNIT == 0
4890 && bitpos % BITS_PER_UNIT == 0)
4891 {
4892 bitregion_start = bitpos;
4893 bitregion_end = bitpos + bitsize - 1;
4894 }
4895
4896 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4897
4898 /* If the field has a mode, we want to access it in the
4899 field's mode, not the computed mode.
4900 If a MEM has VOIDmode (external with incomplete type),
4901 use BLKmode for it instead. */
4902 if (MEM_P (to_rtx))
4903 {
4904 if (mode1 != VOIDmode)
4905 to_rtx = adjust_address (to_rtx, mode1, 0);
4906 else if (GET_MODE (to_rtx) == VOIDmode)
4907 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4908 }
4909
4910 if (offset != 0)
4911 {
4912 machine_mode address_mode;
4913 rtx offset_rtx;
4914
4915 if (!MEM_P (to_rtx))
4916 {
4917 /* We can get constant negative offsets into arrays with broken
4918 user code. Translate this to a trap instead of ICEing. */
4919 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4920 expand_builtin_trap ();
4921 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4922 }
4923
4924 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4925 address_mode = get_address_mode (to_rtx);
4926 if (GET_MODE (offset_rtx) != address_mode)
4927 {
4928 /* We cannot be sure that the RTL in offset_rtx is valid outside
4929 of a memory address context, so force it into a register
4930 before attempting to convert it to the desired mode. */
4931 offset_rtx = force_operand (offset_rtx, NULL_RTX);
4932 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4933 }
4934
4935 /* If we have an expression in OFFSET_RTX and a non-zero
4936 byte offset in BITPOS, adding the byte offset before the
4937 OFFSET_RTX results in better intermediate code, which makes
4938 later rtl optimization passes perform better.
4939
4940 We prefer intermediate code like this:
4941
4942 r124:DI=r123:DI+0x18
4943 [r124:DI]=r121:DI
4944
4945 ... instead of ...
4946
4947 r124:DI=r123:DI+0x10
4948 [r124:DI+0x8]=r121:DI
4949
4950 This is only done for aligned data values, as these can
4951 be expected to result in single move instructions. */
4952 if (mode1 != VOIDmode
4953 && bitpos != 0
4954 && bitsize > 0
4955 && (bitpos % bitsize) == 0
4956 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4957 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4958 {
4959 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4960 bitregion_start = 0;
4961 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4962 bitregion_end -= bitpos;
4963 bitpos = 0;
4964 }
4965
4966 to_rtx = offset_address (to_rtx, offset_rtx,
4967 highest_pow2_factor_for_target (to,
4968 offset));
4969 }
4970
4971 /* No action is needed if the target is not a memory and the field
4972 lies completely outside that target. This can occur if the source
4973 code contains an out-of-bounds access to a small array. */
4974 if (!MEM_P (to_rtx)
4975 && GET_MODE (to_rtx) != BLKmode
4976 && (unsigned HOST_WIDE_INT) bitpos
4977 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4978 {
4979 expand_normal (from);
4980 result = NULL;
4981 }
4982 /* Handle expand_expr of a complex value returning a CONCAT. */
4983 else if (GET_CODE (to_rtx) == CONCAT)
4984 {
4985 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4986 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4987 && bitpos == 0
4988 && bitsize == mode_bitsize)
4989 result = store_expr (from, to_rtx, false, nontemporal);
4990 else if (bitsize == mode_bitsize / 2
4991 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4992 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4993 nontemporal);
4994 else if (bitpos + bitsize <= mode_bitsize / 2)
4995 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4996 bitregion_start, bitregion_end,
4997 mode1, from,
4998 get_alias_set (to), nontemporal);
4999 else if (bitpos >= mode_bitsize / 2)
5000 result = store_field (XEXP (to_rtx, 1), bitsize,
5001 bitpos - mode_bitsize / 2,
5002 bitregion_start, bitregion_end,
5003 mode1, from,
5004 get_alias_set (to), nontemporal);
5005 else if (bitpos == 0 && bitsize == mode_bitsize)
5006 {
5007 rtx from_rtx;
5008 result = expand_normal (from);
5009 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
5010 TYPE_MODE (TREE_TYPE (from)), 0);
5011 emit_move_insn (XEXP (to_rtx, 0),
5012 read_complex_part (from_rtx, false));
5013 emit_move_insn (XEXP (to_rtx, 1),
5014 read_complex_part (from_rtx, true));
5015 }
5016 else
5017 {
5018 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
5019 GET_MODE_SIZE (GET_MODE (to_rtx)));
5020 write_complex_part (temp, XEXP (to_rtx, 0), false);
5021 write_complex_part (temp, XEXP (to_rtx, 1), true);
5022 result = store_field (temp, bitsize, bitpos,
5023 bitregion_start, bitregion_end,
5024 mode1, from,
5025 get_alias_set (to), nontemporal);
5026 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
5027 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
5028 }
5029 }
5030 else
5031 {
5032 if (MEM_P (to_rtx))
5033 {
5034 /* If the field is at offset zero, we could have been given the
5035 DECL_RTX of the parent struct. Don't munge it. */
5036 to_rtx = shallow_copy_rtx (to_rtx);
5037 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5038 if (volatilep)
5039 MEM_VOLATILE_P (to_rtx) = 1;
5040 }
5041
5042 if (optimize_bitfield_assignment_op (bitsize, bitpos,
5043 bitregion_start, bitregion_end,
5044 mode1,
5045 to_rtx, to, from))
5046 result = NULL;
5047 else
5048 result = store_field (to_rtx, bitsize, bitpos,
5049 bitregion_start, bitregion_end,
5050 mode1, from,
5051 get_alias_set (to), nontemporal);
5052 }
5053
5054 if (result)
5055 preserve_temp_slots (result);
5056 pop_temp_slots ();
5057 return;
5058 }
5059
5060 /* If the rhs is a function call and its value is not an aggregate,
5061 call the function before we start to compute the lhs.
5062 This is needed for correct code for cases such as
5063 val = setjmp (buf) on machines where reference to val
5064 requires loading up part of an address in a separate insn.
5065
5066 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5067 since it might be a promoted variable where the zero- or sign-extension
5068 needs to be done. Handling this in the normal way is safe because no
5069 computation is done before the call. The same is true for SSA names. */
5070 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5071 && COMPLETE_TYPE_P (TREE_TYPE (from))
5072 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5073 && ! (((TREE_CODE (to) == VAR_DECL
5074 || TREE_CODE (to) == PARM_DECL
5075 || TREE_CODE (to) == RESULT_DECL)
5076 && REG_P (DECL_RTL (to)))
5077 || TREE_CODE (to) == SSA_NAME))
5078 {
5079 rtx value;
5080 rtx bounds;
5081
5082 push_temp_slots ();
5083 value = expand_normal (from);
5084
5085 /* Split value and bounds to store them separately. */
5086 chkp_split_slot (value, &value, &bounds);
5087
5088 if (to_rtx == 0)
5089 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5090
5091 /* Handle calls that return values in multiple non-contiguous locations.
5092 The Irix 6 ABI has examples of this. */
5093 if (GET_CODE (to_rtx) == PARALLEL)
5094 {
5095 if (GET_CODE (value) == PARALLEL)
5096 emit_group_move (to_rtx, value);
5097 else
5098 emit_group_load (to_rtx, value, TREE_TYPE (from),
5099 int_size_in_bytes (TREE_TYPE (from)));
5100 }
5101 else if (GET_CODE (value) == PARALLEL)
5102 emit_group_store (to_rtx, value, TREE_TYPE (from),
5103 int_size_in_bytes (TREE_TYPE (from)));
5104 else if (GET_MODE (to_rtx) == BLKmode)
5105 {
5106 /* Handle calls that return BLKmode values in registers. */
5107 if (REG_P (value))
5108 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5109 else
5110 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5111 }
5112 else
5113 {
5114 if (POINTER_TYPE_P (TREE_TYPE (to)))
5115 value = convert_memory_address_addr_space
5116 (GET_MODE (to_rtx), value,
5117 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5118
5119 emit_move_insn (to_rtx, value);
5120 }
5121
5122 /* Store bounds if required. */
5123 if (bounds
5124 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5125 {
5126 gcc_assert (MEM_P (to_rtx));
5127 chkp_emit_bounds_store (bounds, value, to_rtx);
5128 }
5129
5130 preserve_temp_slots (to_rtx);
5131 pop_temp_slots ();
5132 return;
5133 }
5134
5135 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5136 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5137
5138 /* Don't move directly into a return register. */
5139 if (TREE_CODE (to) == RESULT_DECL
5140 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5141 {
5142 rtx temp;
5143
5144 push_temp_slots ();
5145
5146 /* If the source is itself a return value, it still is in a pseudo at
5147 this point so we can move it back to the return register directly. */
5148 if (REG_P (to_rtx)
5149 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5150 && TREE_CODE (from) != CALL_EXPR)
5151 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5152 else
5153 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5154
5155 /* Handle calls that return values in multiple non-contiguous locations.
5156 The Irix 6 ABI has examples of this. */
5157 if (GET_CODE (to_rtx) == PARALLEL)
5158 {
5159 if (GET_CODE (temp) == PARALLEL)
5160 emit_group_move (to_rtx, temp);
5161 else
5162 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5163 int_size_in_bytes (TREE_TYPE (from)));
5164 }
5165 else if (temp)
5166 emit_move_insn (to_rtx, temp);
5167
5168 preserve_temp_slots (to_rtx);
5169 pop_temp_slots ();
5170 return;
5171 }
5172
5173 /* In case we are returning the contents of an object which overlaps
5174 the place the value is being stored, use a safe function when copying
5175 a value through a pointer into a structure value return block. */
5176 if (TREE_CODE (to) == RESULT_DECL
5177 && TREE_CODE (from) == INDIRECT_REF
5178 && ADDR_SPACE_GENERIC_P
5179 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5180 && refs_may_alias_p (to, from)
5181 && cfun->returns_struct
5182 && !cfun->returns_pcc_struct)
5183 {
5184 rtx from_rtx, size;
5185
5186 push_temp_slots ();
5187 size = expr_size (from);
5188 from_rtx = expand_normal (from);
5189
5190 emit_library_call (memmove_libfunc, LCT_NORMAL,
5191 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5192 XEXP (from_rtx, 0), Pmode,
5193 convert_to_mode (TYPE_MODE (sizetype),
5194 size, TYPE_UNSIGNED (sizetype)),
5195 TYPE_MODE (sizetype));
5196
5197 preserve_temp_slots (to_rtx);
5198 pop_temp_slots ();
5199 return;
5200 }
5201
5202 /* Compute FROM and store the value in the rtx we got. */
5203
5204 push_temp_slots ();
5205 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, to);
5206 preserve_temp_slots (result);
5207 pop_temp_slots ();
5208 return;
5209 }
5210
5211 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5212 succeeded, false otherwise. */
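/* Nontemporal stores typically bypass the cache hierarchy on targets
   that provide a storent<mode> pattern (e.g. the x86 movnt*
   instructions); when the optab is not available we return false and
   the caller falls back to an ordinary store.  */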
5213
5214 bool
5215 emit_storent_insn (rtx to, rtx from)
5216 {
5217 struct expand_operand ops[2];
5218 machine_mode mode = GET_MODE (to);
5219 enum insn_code code = optab_handler (storent_optab, mode);
5220
5221 if (code == CODE_FOR_nothing)
5222 return false;
5223
5224 create_fixed_operand (&ops[0], to);
5225 create_input_operand (&ops[1], from, mode);
5226 return maybe_expand_insn (code, 2, ops);
5227 }
5228
5229 /* Generate code for computing expression EXP,
5230 and storing the value into TARGET.
5231
5232 If the mode is BLKmode then we may return TARGET itself.
5233 It turns out that in BLKmode it doesn't cause a problem,
5234 because C has no operators that could combine two different
5235 assignments into the same BLKmode object with different values
5236 with no sequence point. Will other languages need this to
5237 be more thorough?
5238
5239 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5240 stack, and block moves may need to be treated specially.
5241
5242 If NONTEMPORAL is true, try using a nontemporal store instruction.
5243
5244 If BTARGET is not NULL then computed bounds of EXP are
5245 associated with BTARGET. */
5246
5247 rtx
5248 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5249 bool nontemporal, tree btarget)
5250 {
5251 rtx temp;
5252 rtx alt_rtl = NULL_RTX;
5253 location_t loc = curr_insn_location ();
5254
5255 if (VOID_TYPE_P (TREE_TYPE (exp)))
5256 {
5257 /* C++ can generate ?: expressions with a throw expression in one
5258 branch and an rvalue in the other. Here, we resolve attempts to
5259 store the throw expression's nonexistent result. */
5260 gcc_assert (!call_param_p);
5261 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5262 return NULL_RTX;
5263 }
5264 if (TREE_CODE (exp) == COMPOUND_EXPR)
5265 {
5266 /* Perform first part of compound expression, then assign from second
5267 part. */
5268 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5269 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5270 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5271 call_param_p, nontemporal, btarget);
5272 }
5273 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5274 {
5275 /* For conditional expression, get safe form of the target. Then
5276 test the condition, doing the appropriate assignment on either
5277 side. This avoids the creation of unnecessary temporaries.
5278 For non-BLKmode, it is more efficient not to do this. */
5279
5280 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5281
5282 do_pending_stack_adjust ();
5283 NO_DEFER_POP;
5284 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5285 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5286 nontemporal, btarget);
5287 emit_jump_insn (gen_jump (lab2));
5288 emit_barrier ();
5289 emit_label (lab1);
5290 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5291 nontemporal, btarget);
5292 emit_label (lab2);
5293 OK_DEFER_POP;
5294
5295 return NULL_RTX;
5296 }
5297 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5298 /* If this is a scalar in a register that is stored in a wider mode
5299 than the declared mode, compute the result into its declared mode
5300 and then convert to the wider mode. Our value is the computed
5301 expression. */
5302 {
5303 rtx inner_target = 0;
5304
5305 /* We can do the conversion inside EXP, which will often result
5306 in some optimizations. Do the conversion in two steps: first
5307 change the signedness, if needed, then the extend. But don't
5308 do this if the type of EXP is a subtype of something else
5309 since then the conversion might involve more than just
5310 converting modes. */
5311 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5312 && TREE_TYPE (TREE_TYPE (exp)) == 0
5313 && GET_MODE_PRECISION (GET_MODE (target))
5314 == TYPE_PRECISION (TREE_TYPE (exp)))
5315 {
5316 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5317 TYPE_UNSIGNED (TREE_TYPE (exp))))
5318 {
5319 /* Some types, e.g. Fortran's logical*4, won't have a signed
5320 version, so use the mode instead. */
5321 tree ntype
5322 = (signed_or_unsigned_type_for
5323 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5324 if (ntype == NULL)
5325 ntype = lang_hooks.types.type_for_mode
5326 (TYPE_MODE (TREE_TYPE (exp)),
5327 SUBREG_PROMOTED_SIGN (target));
5328
5329 exp = fold_convert_loc (loc, ntype, exp);
5330 }
5331
5332 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5333 (GET_MODE (SUBREG_REG (target)),
5334 SUBREG_PROMOTED_SIGN (target)),
5335 exp);
5336
5337 inner_target = SUBREG_REG (target);
5338 }
5339
5340 temp = expand_expr (exp, inner_target, VOIDmode,
5341 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5342
5343 /* Handle bounds returned by call. */
5344 if (TREE_CODE (exp) == CALL_EXPR)
5345 {
5346 rtx bounds;
5347 chkp_split_slot (temp, &temp, &bounds);
5348 if (bounds && btarget)
5349 {
5350 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5351 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5352 chkp_set_rtl_bounds (btarget, tmp);
5353 }
5354 }
5355
5356 /* If TEMP is a VOIDmode constant, use convert_modes to make
5357 sure that we properly convert it. */
5358 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5359 {
5360 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5361 temp, SUBREG_PROMOTED_SIGN (target));
5362 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5363 GET_MODE (target), temp,
5364 SUBREG_PROMOTED_SIGN (target));
5365 }
5366
5367 convert_move (SUBREG_REG (target), temp,
5368 SUBREG_PROMOTED_SIGN (target));
5369
5370 return NULL_RTX;
5371 }
5372 else if ((TREE_CODE (exp) == STRING_CST
5373 || (TREE_CODE (exp) == MEM_REF
5374 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5375 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5376 == STRING_CST
5377 && integer_zerop (TREE_OPERAND (exp, 1))))
5378 && !nontemporal && !call_param_p
5379 && MEM_P (target))
5380 {
5381 /* Optimize initialization of an array with a STRING_CST. */
5382 HOST_WIDE_INT exp_len, str_copy_len;
5383 rtx dest_mem;
5384 tree str = TREE_CODE (exp) == STRING_CST
5385 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5386
5387 exp_len = int_expr_size (exp);
5388 if (exp_len <= 0)
5389 goto normal_expr;
5390
5391 if (TREE_STRING_LENGTH (str) <= 0)
5392 goto normal_expr;
5393
5394 str_copy_len = strlen (TREE_STRING_POINTER (str));
5395 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5396 goto normal_expr;
5397
5398 str_copy_len = TREE_STRING_LENGTH (str);
5399 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5400 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5401 {
5402 str_copy_len += STORE_MAX_PIECES - 1;
5403 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5404 }
5405 str_copy_len = MIN (str_copy_len, exp_len);
5406 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5407 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5408 MEM_ALIGN (target), false))
5409 goto normal_expr;
5410
5411 dest_mem = target;
5412
5413 dest_mem = store_by_pieces (dest_mem,
5414 str_copy_len, builtin_strncpy_read_str,
5415 CONST_CAST (char *,
5416 TREE_STRING_POINTER (str)),
5417 MEM_ALIGN (target), false,
5418 exp_len > str_copy_len ? 1 : 0);
5419 if (exp_len > str_copy_len)
5420 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5421 GEN_INT (exp_len - str_copy_len),
5422 BLOCK_OP_NORMAL);
5423 return NULL_RTX;
5424 }
5425 else
5426 {
5427 rtx tmp_target;
5428
5429 normal_expr:
5430 /* If we want to use a nontemporal store, force the value into a
5431 register first. */
5432 tmp_target = nontemporal ? NULL_RTX : target;
5433 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5434 (call_param_p
5435 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5436 &alt_rtl, false);
5437
5438 /* Handle bounds returned by call. */
5439 if (TREE_CODE (exp) == CALL_EXPR)
5440 {
5441 rtx bounds;
5442 chkp_split_slot (temp, &temp, &bounds);
5443 if (bounds && btarget)
5444 {
5445 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5446 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5447 chkp_set_rtl_bounds (btarget, tmp);
5448 }
5449 }
5450 }
5451
5452 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5453 the same as that of TARGET, adjust the constant. This is needed, for
5454 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5455 only a word-sized value. */
5456 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5457 && TREE_CODE (exp) != ERROR_MARK
5458 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5459 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5460 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5461
5462 /* If value was not generated in the target, store it there.
5463 Convert the value to TARGET's type first if necessary and emit the
5464 pending increments that have been queued when expanding EXP.
5465 Note that we cannot emit the whole queue blindly because this will
5466 effectively disable the POST_INC optimization later.
5467
5468 If TEMP and TARGET compare equal according to rtx_equal_p, but
5469 one or both of them are volatile memory refs, we have to distinguish
5470 two cases:
5471 - expand_expr has used TARGET. In this case, we must not generate
5472 another copy. This can be detected by TEMP being equal to TARGET
5473 according to ==.
5474 - expand_expr has not used TARGET - that means that the source just
5475 happens to have the same RTX form. Since TEMP will have been created
5476 by expand_expr, it will compare unequal to TARGET according to ==.
5477 We must generate a copy in this case, to reach the correct number
5478 of volatile memory references. */
5479
5480 if ((! rtx_equal_p (temp, target)
5481 || (temp != target && (side_effects_p (temp)
5482 || side_effects_p (target))))
5483 && TREE_CODE (exp) != ERROR_MARK
5484 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5485 but TARGET is not valid memory reference, TEMP will differ
5486 from TARGET although it is really the same location. */
5487 && !(alt_rtl
5488 && rtx_equal_p (alt_rtl, target)
5489 && !side_effects_p (alt_rtl)
5490 && !side_effects_p (target))
5491 /* If there's nothing to copy, don't bother. Don't call
5492 expr_size unless necessary, because the expr_size hook of some
5493 front-ends (e.g. C++) must not be given objects that are not
5494 supposed to be bit-copied or bit-initialized. */
5495 && expr_size (exp) != const0_rtx)
5496 {
5497 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5498 {
5499 if (GET_MODE (target) == BLKmode)
5500 {
5501 /* Handle calls that return BLKmode values in registers. */
5502 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5503 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5504 else
5505 store_bit_field (target,
5506 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5507 0, 0, 0, GET_MODE (temp), temp);
5508 }
5509 else
5510 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5511 }
5512
5513 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5514 {
5515 /* Handle copying a string constant into an array. The string
5516 constant may be shorter than the array. So copy just the string's
5517 actual length, and clear the rest. First get the size of the data
5518 type of the string, which is actually the size of the target. */
5519 rtx size = expr_size (exp);
5520
5521 if (CONST_INT_P (size)
5522 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5523 emit_block_move (target, temp, size,
5524 (call_param_p
5525 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5526 else
5527 {
5528 machine_mode pointer_mode
5529 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5530 machine_mode address_mode = get_address_mode (target);
5531
5532 /* Compute the size of the data to copy from the string. */
5533 tree copy_size
5534 = size_binop_loc (loc, MIN_EXPR,
5535 make_tree (sizetype, size),
5536 size_int (TREE_STRING_LENGTH (exp)));
5537 rtx copy_size_rtx
5538 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5539 (call_param_p
5540 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5541 rtx_code_label *label = 0;
5542
5543 /* Copy that much. */
5544 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5545 TYPE_UNSIGNED (sizetype));
5546 emit_block_move (target, temp, copy_size_rtx,
5547 (call_param_p
5548 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5549
5550 /* Figure out how much is left in TARGET that we have to clear.
5551 Do all calculations in pointer_mode. */
5552 if (CONST_INT_P (copy_size_rtx))
5553 {
5554 size = plus_constant (address_mode, size,
5555 -INTVAL (copy_size_rtx));
5556 target = adjust_address (target, BLKmode,
5557 INTVAL (copy_size_rtx));
5558 }
5559 else
5560 {
5561 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5562 copy_size_rtx, NULL_RTX, 0,
5563 OPTAB_LIB_WIDEN);
5564
5565 if (GET_MODE (copy_size_rtx) != address_mode)
5566 copy_size_rtx = convert_to_mode (address_mode,
5567 copy_size_rtx,
5568 TYPE_UNSIGNED (sizetype));
5569
5570 target = offset_address (target, copy_size_rtx,
5571 highest_pow2_factor (copy_size));
5572 label = gen_label_rtx ();
5573 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5574 GET_MODE (size), 0, label);
5575 }
5576
5577 if (size != const0_rtx)
5578 clear_storage (target, size, BLOCK_OP_NORMAL);
5579
5580 if (label)
5581 emit_label (label);
5582 }
5583 }
5584 /* Handle calls that return values in multiple non-contiguous locations.
5585 The Irix 6 ABI has examples of this. */
5586 else if (GET_CODE (target) == PARALLEL)
5587 {
5588 if (GET_CODE (temp) == PARALLEL)
5589 emit_group_move (target, temp);
5590 else
5591 emit_group_load (target, temp, TREE_TYPE (exp),
5592 int_size_in_bytes (TREE_TYPE (exp)));
5593 }
5594 else if (GET_CODE (temp) == PARALLEL)
5595 emit_group_store (target, temp, TREE_TYPE (exp),
5596 int_size_in_bytes (TREE_TYPE (exp)));
5597 else if (GET_MODE (temp) == BLKmode)
5598 emit_block_move (target, temp, expr_size (exp),
5599 (call_param_p
5600 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5601 /* If we emit a nontemporal store, there is nothing else to do. */
5602 else if (nontemporal && emit_storent_insn (target, temp))
5603 ;
5604 else
5605 {
5606 temp = force_operand (temp, target);
5607 if (temp != target)
5608 emit_move_insn (target, temp);
5609 }
5610 }
5611
5612 return NULL_RTX;
5613 }
5614
5615 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5616 rtx
5617 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5618 {
5619 return store_expr_with_bounds (exp, target, call_param_p, nontemporal, NULL);
5620 }
5621 \f
5622 /* Return true if field F of structure TYPE is a flexible array. */
5623
5624 static bool
5625 flexible_array_member_p (const_tree f, const_tree type)
5626 {
5627 const_tree tf;
5628
5629 tf = TREE_TYPE (f);
5630 return (DECL_CHAIN (f) == NULL
5631 && TREE_CODE (tf) == ARRAY_TYPE
5632 && TYPE_DOMAIN (tf)
5633 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5634 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5635 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5636 && int_size_in_bytes (type) >= 0);
5637 }
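
/* For example, given a C declaration along the lines of

     struct s { int len; char data[]; };

   the trailing DATA field is a flexible array member: it is the last
   field, its type is an ARRAY_TYPE whose domain has a zero lower bound
   and no upper bound, and the enclosing struct still has a known size.  */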
5638
5639 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5640 must have in order for it to completely initialize a value of type TYPE.
5641 Return -1 if the number isn't known.
5642
5643 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5644
5645 static HOST_WIDE_INT
5646 count_type_elements (const_tree type, bool for_ctor_p)
5647 {
5648 switch (TREE_CODE (type))
5649 {
5650 case ARRAY_TYPE:
5651 {
5652 tree nelts;
5653
5654 nelts = array_type_nelts (type);
5655 if (nelts && tree_fits_uhwi_p (nelts))
5656 {
5657 unsigned HOST_WIDE_INT n;
5658
5659 n = tree_to_uhwi (nelts) + 1;
5660 if (n == 0 || for_ctor_p)
5661 return n;
5662 else
5663 return n * count_type_elements (TREE_TYPE (type), false);
5664 }
5665 return for_ctor_p ? -1 : 1;
5666 }
5667
5668 case RECORD_TYPE:
5669 {
5670 unsigned HOST_WIDE_INT n;
5671 tree f;
5672
5673 n = 0;
5674 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5675 if (TREE_CODE (f) == FIELD_DECL)
5676 {
5677 if (!for_ctor_p)
5678 n += count_type_elements (TREE_TYPE (f), false);
5679 else if (!flexible_array_member_p (f, type))
5680 /* Don't count flexible arrays, which are not supposed
5681 to be initialized. */
5682 n += 1;
5683 }
5684
5685 return n;
5686 }
5687
5688 case UNION_TYPE:
5689 case QUAL_UNION_TYPE:
5690 {
5691 tree f;
5692 HOST_WIDE_INT n, m;
5693
5694 gcc_assert (!for_ctor_p);
5695 /* Estimate the number of scalars in each field and pick the
5696 maximum. Other estimates would do instead; the idea is simply
5697 to make sure that the estimate is not sensitive to the ordering
5698 of the fields. */
5699 n = 1;
5700 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5701 if (TREE_CODE (f) == FIELD_DECL)
5702 {
5703 m = count_type_elements (TREE_TYPE (f), false);
5704 /* If the field doesn't span the whole union, add an extra
5705 scalar for the rest. */
5706 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5707 TYPE_SIZE (type)) != 1)
5708 m++;
5709 if (n < m)
5710 n = m;
5711 }
5712 return n;
5713 }
5714
5715 case COMPLEX_TYPE:
5716 return 2;
5717
5718 case VECTOR_TYPE:
5719 return TYPE_VECTOR_SUBPARTS (type);
5720
5721 case INTEGER_TYPE:
5722 case REAL_TYPE:
5723 case FIXED_POINT_TYPE:
5724 case ENUMERAL_TYPE:
5725 case BOOLEAN_TYPE:
5726 case POINTER_TYPE:
5727 case OFFSET_TYPE:
5728 case REFERENCE_TYPE:
5729 case NULLPTR_TYPE:
5730 return 1;
5731
5732 case ERROR_MARK:
5733 return 0;
5734
5735 case VOID_TYPE:
5736 case METHOD_TYPE:
5737 case FUNCTION_TYPE:
5738 case LANG_TYPE:
5739 default:
5740 gcc_unreachable ();
5741 }
5742 }
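
/* As a rough illustration, for a type such as

     struct t { int a; float b[3]; };

   this returns 2 when FOR_CTOR_P (two top-level fields) and 4 otherwise
   (one scalar for A plus three for the elements of B).  */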
5743
5744 /* Helper for categorize_ctor_elements. Identical interface. */
5745
5746 static bool
5747 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5748 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5749 {
5750 unsigned HOST_WIDE_INT idx;
5751 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5752 tree value, purpose, elt_type;
5753
5754 /* Whether CTOR is a valid constant initializer, in accordance with what
5755 initializer_constant_valid_p does. If inferred from the constructor
5756 elements, true until proven otherwise. */
5757 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5758 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5759
5760 nz_elts = 0;
5761 init_elts = 0;
5762 num_fields = 0;
5763 elt_type = NULL_TREE;
5764
5765 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5766 {
5767 HOST_WIDE_INT mult = 1;
5768
5769 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5770 {
5771 tree lo_index = TREE_OPERAND (purpose, 0);
5772 tree hi_index = TREE_OPERAND (purpose, 1);
5773
5774 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5775 mult = (tree_to_uhwi (hi_index)
5776 - tree_to_uhwi (lo_index) + 1);
5777 }
5778 num_fields += mult;
5779 elt_type = TREE_TYPE (value);
5780
5781 switch (TREE_CODE (value))
5782 {
5783 case CONSTRUCTOR:
5784 {
5785 HOST_WIDE_INT nz = 0, ic = 0;
5786
5787 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5788 p_complete);
5789
5790 nz_elts += mult * nz;
5791 init_elts += mult * ic;
5792
5793 if (const_from_elts_p && const_p)
5794 const_p = const_elt_p;
5795 }
5796 break;
5797
5798 case INTEGER_CST:
5799 case REAL_CST:
5800 case FIXED_CST:
5801 if (!initializer_zerop (value))
5802 nz_elts += mult;
5803 init_elts += mult;
5804 break;
5805
5806 case STRING_CST:
5807 nz_elts += mult * TREE_STRING_LENGTH (value);
5808 init_elts += mult * TREE_STRING_LENGTH (value);
5809 break;
5810
5811 case COMPLEX_CST:
5812 if (!initializer_zerop (TREE_REALPART (value)))
5813 nz_elts += mult;
5814 if (!initializer_zerop (TREE_IMAGPART (value)))
5815 nz_elts += mult;
5816 init_elts += mult;
5817 break;
5818
5819 case VECTOR_CST:
5820 {
5821 unsigned i;
5822 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5823 {
5824 tree v = VECTOR_CST_ELT (value, i);
5825 if (!initializer_zerop (v))
5826 nz_elts += mult;
5827 init_elts += mult;
5828 }
5829 }
5830 break;
5831
5832 default:
5833 {
5834 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5835 nz_elts += mult * tc;
5836 init_elts += mult * tc;
5837
5838 if (const_from_elts_p && const_p)
5839 const_p = initializer_constant_valid_p (value, elt_type)
5840 != NULL_TREE;
5841 }
5842 break;
5843 }
5844 }
5845
5846 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5847 num_fields, elt_type))
5848 *p_complete = false;
5849
5850 *p_nz_elts += nz_elts;
5851 *p_init_elts += init_elts;
5852
5853 return const_p;
5854 }
5855
5856 /* Examine CTOR to discover:
5857 * how many scalar fields are set to nonzero values,
5858 and place it in *P_NZ_ELTS;
5859 * how many scalar fields in total are in CTOR,
5860 and place it in *P_ELT_COUNT.
5861 * whether the constructor is complete -- in the sense that every
5862 meaningful byte is explicitly given a value --
5863 and place it in *P_COMPLETE.
5864
5865 Return whether or not CTOR is a valid static constant initializer, the same
5866 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5867
5868 bool
5869 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5870 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5871 {
5872 *p_nz_elts = 0;
5873 *p_init_elts = 0;
5874 *p_complete = true;
5875
5876 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5877 }
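
/* For instance, for a constructor recording both initializers of

     struct { int x; int y; } s = { 0, 5 };

   *P_NZ_ELTS becomes 1, *P_INIT_ELTS becomes 2, *P_COMPLETE stays true,
   and the function returns true since both elements are valid constant
   initializers.  (Illustrative only; the exact counts depend on which
   elements the front end records in the CONSTRUCTOR.)  */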
5878
5879 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5880 of which had type LAST_TYPE. Each element was itself a complete
5881 initializer, in the sense that every meaningful byte was explicitly
5882 given a value. Return true if the same is true for the constructor
5883 as a whole. */
5884
5885 bool
5886 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5887 const_tree last_type)
5888 {
5889 if (TREE_CODE (type) == UNION_TYPE
5890 || TREE_CODE (type) == QUAL_UNION_TYPE)
5891 {
5892 if (num_elts == 0)
5893 return false;
5894
5895 gcc_assert (num_elts == 1 && last_type);
5896
5897 /* ??? We could look at each element of the union, and find the
5898 largest element, which would avoid comparing the size of the
5899 initialized element against any tail padding in the union.
5900 Doesn't seem worth the effort... */
5901 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5902 }
5903
5904 return count_type_elements (type, true) == num_elts;
5905 }
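
/* In other words, a union constructor is complete only when its single
   initialized member has the same size as the union itself, so that no
   tail padding is left unwritten.  E.g. for union u { char c; int i; },
   initializing through I is complete while initializing through C is
   not, on typical targets where int is wider than char.  */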
5906
5907 /* Return 1 if EXP contains mostly (more than 3/4) zeros. */
5908
5909 static int
5910 mostly_zeros_p (const_tree exp)
5911 {
5912 if (TREE_CODE (exp) == CONSTRUCTOR)
5913 {
5914 HOST_WIDE_INT nz_elts, init_elts;
5915 bool complete_p;
5916
5917 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5918 return !complete_p || nz_elts < init_elts / 4;
5919 }
5920
5921 return initializer_zerop (exp);
5922 }
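
/* E.g. a complete constructor with 16 recorded elements of which 3 are
   nonzero counts as mostly zero (3 < 16 / 4), whereas one with 4 nonzero
   elements does not.  An incomplete constructor is always treated as
   mostly zero.  */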
5923
5924 /* Return 1 if EXP contains all zeros. */
5925
5926 static int
5927 all_zeros_p (const_tree exp)
5928 {
5929 if (TREE_CODE (exp) == CONSTRUCTOR)
5930 {
5931 HOST_WIDE_INT nz_elts, init_elts;
5932 bool complete_p;
5933
5934 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5935 return nz_elts == 0;
5936 }
5937
5938 return initializer_zerop (exp);
5939 }
5940 \f
5941 /* Helper function for store_constructor.
5942 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5943 CLEARED is as for store_constructor.
5944 ALIAS_SET is the alias set to use for any stores.
5945
5946 This provides a recursive shortcut back to store_constructor when it isn't
5947 necessary to go through store_field. This is so that we can pass through
5948 the cleared field to let store_constructor know that we may not have to
5949 clear a substructure if the outer structure has already been cleared. */
5950
5951 static void
5952 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5953 HOST_WIDE_INT bitpos, machine_mode mode,
5954 tree exp, int cleared, alias_set_type alias_set)
5955 {
5956 if (TREE_CODE (exp) == CONSTRUCTOR
5957 /* We can only call store_constructor recursively if the size and
5958 bit position are on a byte boundary. */
5959 && bitpos % BITS_PER_UNIT == 0
5960 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5961 /* If we have a nonzero bitpos for a register target, then we just
5962 let store_field do the bitfield handling. This is unlikely to
5963 generate unnecessary clear instructions anyway. */
5964 && (bitpos == 0 || MEM_P (target)))
5965 {
5966 if (MEM_P (target))
5967 target
5968 = adjust_address (target,
5969 GET_MODE (target) == BLKmode
5970 || 0 != (bitpos
5971 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5972 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5973
5974
5975 /* Update the alias set, if required. */
5976 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5977 && MEM_ALIAS_SET (target) != 0)
5978 {
5979 target = copy_rtx (target);
5980 set_mem_alias_set (target, alias_set);
5981 }
5982
5983 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5984 }
5985 else
5986 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5987 }
5988
5989
5990 /* Returns the number of FIELD_DECLs in TYPE. */
5991
5992 static int
5993 fields_length (const_tree type)
5994 {
5995 tree t = TYPE_FIELDS (type);
5996 int count = 0;
5997
5998 for (; t; t = DECL_CHAIN (t))
5999 if (TREE_CODE (t) == FIELD_DECL)
6000 ++count;
6001
6002 return count;
6003 }
6004
6005
6006 /* Store the value of constructor EXP into the rtx TARGET.
6007 TARGET is either a REG or a MEM; we know it cannot conflict, since
6008 safe_from_p has been called.
6009 CLEARED is true if TARGET is known to have been zeroed.
6010 SIZE is the number of bytes of TARGET we are allowed to modify: this
6011 may not be the same as the size of EXP if we are assigning to a field
6012 which has been packed to exclude padding bits. */
6013
6014 static void
6015 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
6016 {
6017 tree type = TREE_TYPE (exp);
6018 #ifdef WORD_REGISTER_OPERATIONS
6019 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
6020 #endif
6021
6022 switch (TREE_CODE (type))
6023 {
6024 case RECORD_TYPE:
6025 case UNION_TYPE:
6026 case QUAL_UNION_TYPE:
6027 {
6028 unsigned HOST_WIDE_INT idx;
6029 tree field, value;
6030
6031 /* If size is zero or the target is already cleared, do nothing. */
6032 if (size == 0 || cleared)
6033 cleared = 1;
6034 /* We either clear the aggregate or indicate the value is dead. */
6035 else if ((TREE_CODE (type) == UNION_TYPE
6036 || TREE_CODE (type) == QUAL_UNION_TYPE)
6037 && ! CONSTRUCTOR_ELTS (exp))
6038 /* If the constructor is empty, clear the union. */
6039 {
6040 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6041 cleared = 1;
6042 }
6043
6044 /* If we are building a static constructor into a register,
6045 set the initial value to zero so we can fold the value into
6046 a constant. But if more than one register is involved,
6047 this probably loses. */
6048 else if (REG_P (target) && TREE_STATIC (exp)
6049 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
6050 {
6051 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6052 cleared = 1;
6053 }
6054
6055 /* If the constructor has fewer fields than the structure or
6056 if we are initializing the structure to mostly zeros, clear
6057 the whole structure first. Don't do this if TARGET is a
6058 register whose mode size isn't equal to SIZE since
6059 clear_storage can't handle this case. */
6060 else if (size > 0
6061 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
6062 != fields_length (type))
6063 || mostly_zeros_p (exp))
6064 && (!REG_P (target)
6065 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6066 == size)))
6067 {
6068 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6069 cleared = 1;
6070 }
6071
6072 if (REG_P (target) && !cleared)
6073 emit_clobber (target);
6074
6075 /* Store each element of the constructor into the
6076 corresponding field of TARGET. */
6077 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6078 {
6079 machine_mode mode;
6080 HOST_WIDE_INT bitsize;
6081 HOST_WIDE_INT bitpos = 0;
6082 tree offset;
6083 rtx to_rtx = target;
6084
6085 /* Just ignore missing fields. We cleared the whole
6086 structure, above, if any fields are missing. */
6087 if (field == 0)
6088 continue;
6089
6090 if (cleared && initializer_zerop (value))
6091 continue;
6092
6093 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6094 bitsize = tree_to_uhwi (DECL_SIZE (field));
6095 else
6096 bitsize = -1;
6097
6098 mode = DECL_MODE (field);
6099 if (DECL_BIT_FIELD (field))
6100 mode = VOIDmode;
6101
6102 offset = DECL_FIELD_OFFSET (field);
6103 if (tree_fits_shwi_p (offset)
6104 && tree_fits_shwi_p (bit_position (field)))
6105 {
6106 bitpos = int_bit_position (field);
6107 offset = 0;
6108 }
6109 else
6110 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6111
6112 if (offset)
6113 {
6114 machine_mode address_mode;
6115 rtx offset_rtx;
6116
6117 offset
6118 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6119 make_tree (TREE_TYPE (exp),
6120 target));
6121
6122 offset_rtx = expand_normal (offset);
6123 gcc_assert (MEM_P (to_rtx));
6124
6125 address_mode = get_address_mode (to_rtx);
6126 if (GET_MODE (offset_rtx) != address_mode)
6127 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6128
6129 to_rtx = offset_address (to_rtx, offset_rtx,
6130 highest_pow2_factor (offset));
6131 }
6132
6133 #ifdef WORD_REGISTER_OPERATIONS
6134 /* If this initializes a field that is smaller than a
6135 word, at the start of a word, try to widen it to a full
6136 word. This special case allows us to output C++ member
6137 function initializations in a form that the optimizers
6138 can understand. */
6139 if (REG_P (target)
6140 && bitsize < BITS_PER_WORD
6141 && bitpos % BITS_PER_WORD == 0
6142 && GET_MODE_CLASS (mode) == MODE_INT
6143 && TREE_CODE (value) == INTEGER_CST
6144 && exp_size >= 0
6145 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6146 {
6147 tree type = TREE_TYPE (value);
6148
6149 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6150 {
6151 type = lang_hooks.types.type_for_mode
6152 (word_mode, TYPE_UNSIGNED (type));
6153 value = fold_convert (type, value);
6154 }
6155
6156 if (BYTES_BIG_ENDIAN)
6157 value
6158 = fold_build2 (LSHIFT_EXPR, type, value,
6159 build_int_cst (type,
6160 BITS_PER_WORD - bitsize));
6161 bitsize = BITS_PER_WORD;
6162 mode = word_mode;
6163 }
6164 #endif
6165
6166 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6167 && DECL_NONADDRESSABLE_P (field))
6168 {
6169 to_rtx = copy_rtx (to_rtx);
6170 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6171 }
6172
6173 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6174 value, cleared,
6175 get_alias_set (TREE_TYPE (field)));
6176 }
6177 break;
6178 }
6179 case ARRAY_TYPE:
6180 {
6181 tree value, index;
6182 unsigned HOST_WIDE_INT i;
6183 int need_to_clear;
6184 tree domain;
6185 tree elttype = TREE_TYPE (type);
6186 int const_bounds_p;
6187 HOST_WIDE_INT minelt = 0;
6188 HOST_WIDE_INT maxelt = 0;
6189
6190 domain = TYPE_DOMAIN (type);
6191 const_bounds_p = (TYPE_MIN_VALUE (domain)
6192 && TYPE_MAX_VALUE (domain)
6193 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6194 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6195
6196 /* If we have constant bounds for the range of the type, get them. */
6197 if (const_bounds_p)
6198 {
6199 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6200 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6201 }
6202
6203 /* If the constructor has fewer elements than the array, clear
6204 the whole array first. Similarly if this is a static
6205 constructor of a non-BLKmode object. */
6206 if (cleared)
6207 need_to_clear = 0;
6208 else if (REG_P (target) && TREE_STATIC (exp))
6209 need_to_clear = 1;
6210 else
6211 {
6212 unsigned HOST_WIDE_INT idx;
6213 tree index, value;
6214 HOST_WIDE_INT count = 0, zero_count = 0;
6215 need_to_clear = ! const_bounds_p;
6216
6217 /* This loop is a more accurate version of the loop in
6218 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6219 is also needed to check for missing elements. */
6220 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6221 {
6222 HOST_WIDE_INT this_node_count;
6223
6224 if (need_to_clear)
6225 break;
6226
6227 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6228 {
6229 tree lo_index = TREE_OPERAND (index, 0);
6230 tree hi_index = TREE_OPERAND (index, 1);
6231
6232 if (! tree_fits_uhwi_p (lo_index)
6233 || ! tree_fits_uhwi_p (hi_index))
6234 {
6235 need_to_clear = 1;
6236 break;
6237 }
6238
6239 this_node_count = (tree_to_uhwi (hi_index)
6240 - tree_to_uhwi (lo_index) + 1);
6241 }
6242 else
6243 this_node_count = 1;
6244
6245 count += this_node_count;
6246 if (mostly_zeros_p (value))
6247 zero_count += this_node_count;
6248 }
6249
6250 /* Clear the entire array first if there are any missing
6251 elements, or if the incidence of zero elements is >=
6252 75%. */
6253 if (! need_to_clear
6254 && (count < maxelt - minelt + 1
6255 || 4 * zero_count >= 3 * count))
6256 need_to_clear = 1;
6257 }
6258
6259 if (need_to_clear && size > 0)
6260 {
6261 if (REG_P (target))
6262 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6263 else
6264 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6265 cleared = 1;
6266 }
6267
6268 if (!cleared && REG_P (target))
6269 /* Inform later passes that the old value is dead. */
6270 emit_clobber (target);
6271
6272 /* Store each element of the constructor into the
6273 corresponding element of TARGET, determined by counting the
6274 elements. */
6275 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6276 {
6277 machine_mode mode;
6278 HOST_WIDE_INT bitsize;
6279 HOST_WIDE_INT bitpos;
6280 rtx xtarget = target;
6281
6282 if (cleared && initializer_zerop (value))
6283 continue;
6284
6285 mode = TYPE_MODE (elttype);
6286 if (mode == BLKmode)
6287 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6288 ? tree_to_uhwi (TYPE_SIZE (elttype))
6289 : -1);
6290 else
6291 bitsize = GET_MODE_BITSIZE (mode);
6292
6293 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6294 {
6295 tree lo_index = TREE_OPERAND (index, 0);
6296 tree hi_index = TREE_OPERAND (index, 1);
6297 rtx index_r, pos_rtx;
6298 HOST_WIDE_INT lo, hi, count;
6299 tree position;
6300
6301 /* If the range is constant and "small", unroll the loop. */
6302 if (const_bounds_p
6303 && tree_fits_shwi_p (lo_index)
6304 && tree_fits_shwi_p (hi_index)
6305 && (lo = tree_to_shwi (lo_index),
6306 hi = tree_to_shwi (hi_index),
6307 count = hi - lo + 1,
6308 (!MEM_P (target)
6309 || count <= 2
6310 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6311 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6312 <= 40 * 8)))))
6313 {
6314 lo -= minelt; hi -= minelt;
6315 for (; lo <= hi; lo++)
6316 {
6317 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6318
6319 if (MEM_P (target)
6320 && !MEM_KEEP_ALIAS_SET_P (target)
6321 && TREE_CODE (type) == ARRAY_TYPE
6322 && TYPE_NONALIASED_COMPONENT (type))
6323 {
6324 target = copy_rtx (target);
6325 MEM_KEEP_ALIAS_SET_P (target) = 1;
6326 }
6327
6328 store_constructor_field
6329 (target, bitsize, bitpos, mode, value, cleared,
6330 get_alias_set (elttype));
6331 }
6332 }
6333 else
6334 {
6335 rtx_code_label *loop_start = gen_label_rtx ();
6336 rtx_code_label *loop_end = gen_label_rtx ();
6337 tree exit_cond;
6338
6339 expand_normal (hi_index);
6340
6341 index = build_decl (EXPR_LOCATION (exp),
6342 VAR_DECL, NULL_TREE, domain);
6343 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6344 SET_DECL_RTL (index, index_r);
6345 store_expr (lo_index, index_r, 0, false);
6346
6347 /* Build the head of the loop. */
6348 do_pending_stack_adjust ();
6349 emit_label (loop_start);
6350
6351 /* Assign value to element index. */
6352 position =
6353 fold_convert (ssizetype,
6354 fold_build2 (MINUS_EXPR,
6355 TREE_TYPE (index),
6356 index,
6357 TYPE_MIN_VALUE (domain)));
6358
6359 position =
6360 size_binop (MULT_EXPR, position,
6361 fold_convert (ssizetype,
6362 TYPE_SIZE_UNIT (elttype)));
6363
6364 pos_rtx = expand_normal (position);
6365 xtarget = offset_address (target, pos_rtx,
6366 highest_pow2_factor (position));
6367 xtarget = adjust_address (xtarget, mode, 0);
6368 if (TREE_CODE (value) == CONSTRUCTOR)
6369 store_constructor (value, xtarget, cleared,
6370 bitsize / BITS_PER_UNIT);
6371 else
6372 store_expr (value, xtarget, 0, false);
6373
6374 /* Generate a conditional jump to exit the loop. */
6375 exit_cond = build2 (LT_EXPR, integer_type_node,
6376 index, hi_index);
6377 jumpif (exit_cond, loop_end, -1);
6378
6379 /* Update the loop counter, and jump to the head of
6380 the loop. */
6381 expand_assignment (index,
6382 build2 (PLUS_EXPR, TREE_TYPE (index),
6383 index, integer_one_node),
6384 false);
6385
6386 emit_jump (loop_start);
6387
6388 /* Build the end of the loop. */
6389 emit_label (loop_end);
6390 }
6391 }
6392 else if ((index != 0 && ! tree_fits_shwi_p (index))
6393 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6394 {
6395 tree position;
6396
6397 if (index == 0)
6398 index = ssize_int (1);
6399
6400 if (minelt)
6401 index = fold_convert (ssizetype,
6402 fold_build2 (MINUS_EXPR,
6403 TREE_TYPE (index),
6404 index,
6405 TYPE_MIN_VALUE (domain)));
6406
6407 position =
6408 size_binop (MULT_EXPR, index,
6409 fold_convert (ssizetype,
6410 TYPE_SIZE_UNIT (elttype)));
6411 xtarget = offset_address (target,
6412 expand_normal (position),
6413 highest_pow2_factor (position));
6414 xtarget = adjust_address (xtarget, mode, 0);
6415 store_expr (value, xtarget, 0, false);
6416 }
6417 else
6418 {
6419 if (index != 0)
6420 bitpos = ((tree_to_shwi (index) - minelt)
6421 * tree_to_uhwi (TYPE_SIZE (elttype)));
6422 else
6423 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6424
6425 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6426 && TREE_CODE (type) == ARRAY_TYPE
6427 && TYPE_NONALIASED_COMPONENT (type))
6428 {
6429 target = copy_rtx (target);
6430 MEM_KEEP_ALIAS_SET_P (target) = 1;
6431 }
6432 store_constructor_field (target, bitsize, bitpos, mode, value,
6433 cleared, get_alias_set (elttype));
6434 }
6435 }
6436 break;
6437 }
6438
6439 case VECTOR_TYPE:
6440 {
6441 unsigned HOST_WIDE_INT idx;
6442 constructor_elt *ce;
6443 int i;
6444 int need_to_clear;
6445 int icode = CODE_FOR_nothing;
6446 tree elttype = TREE_TYPE (type);
6447 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6448 machine_mode eltmode = TYPE_MODE (elttype);
6449 HOST_WIDE_INT bitsize;
6450 HOST_WIDE_INT bitpos;
6451 rtvec vector = NULL;
6452 unsigned n_elts;
6453 alias_set_type alias;
6454
6455 gcc_assert (eltmode != BLKmode);
6456
6457 n_elts = TYPE_VECTOR_SUBPARTS (type);
6458 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6459 {
6460 machine_mode mode = GET_MODE (target);
6461
6462 icode = (int) optab_handler (vec_init_optab, mode);
6463 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6464 if (icode != CODE_FOR_nothing)
6465 {
6466 tree value;
6467
6468 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6469 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6470 {
6471 icode = CODE_FOR_nothing;
6472 break;
6473 }
6474 }
6475 if (icode != CODE_FOR_nothing)
6476 {
6477 unsigned int i;
6478
6479 vector = rtvec_alloc (n_elts);
6480 for (i = 0; i < n_elts; i++)
6481 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6482 }
6483 }
6484
6485 /* If the constructor has fewer elements than the vector,
6486 clear the whole vector first. Similarly if this is a static
6487 constructor of a non-BLKmode object. */
6488 if (cleared)
6489 need_to_clear = 0;
6490 else if (REG_P (target) && TREE_STATIC (exp))
6491 need_to_clear = 1;
6492 else
6493 {
6494 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6495 tree value;
6496
6497 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6498 {
6499 int n_elts_here = tree_to_uhwi
6500 (int_const_binop (TRUNC_DIV_EXPR,
6501 TYPE_SIZE (TREE_TYPE (value)),
6502 TYPE_SIZE (elttype)));
6503
6504 count += n_elts_here;
6505 if (mostly_zeros_p (value))
6506 zero_count += n_elts_here;
6507 }
6508
6509 /* Clear the entire vector first if there are any missing elements,
6510 or if the incidence of zero elements is >= 75%. */
6511 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6512 }
6513
6514 if (need_to_clear && size > 0 && !vector)
6515 {
6516 if (REG_P (target))
6517 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6518 else
6519 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6520 cleared = 1;
6521 }
6522
6523 /* Inform later passes that the old value is dead. */
6524 if (!cleared && !vector && REG_P (target))
6525 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6526
6527 if (MEM_P (target))
6528 alias = MEM_ALIAS_SET (target);
6529 else
6530 alias = get_alias_set (elttype);
6531
6532 /* Store each element of the constructor into the corresponding
6533 element of TARGET, determined by counting the elements. */
6534 for (idx = 0, i = 0;
6535 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6536 idx++, i += bitsize / elt_size)
6537 {
6538 HOST_WIDE_INT eltpos;
6539 tree value = ce->value;
6540
6541 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6542 if (cleared && initializer_zerop (value))
6543 continue;
6544
6545 if (ce->index)
6546 eltpos = tree_to_uhwi (ce->index);
6547 else
6548 eltpos = i;
6549
6550 if (vector)
6551 {
6552 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6553 elements. */
6554 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6555 RTVEC_ELT (vector, eltpos)
6556 = expand_normal (value);
6557 }
6558 else
6559 {
6560 machine_mode value_mode =
6561 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6562 ? TYPE_MODE (TREE_TYPE (value))
6563 : eltmode;
6564 bitpos = eltpos * elt_size;
6565 store_constructor_field (target, bitsize, bitpos, value_mode,
6566 value, cleared, alias);
6567 }
6568 }
6569
6570 if (vector)
6571 emit_insn (GEN_FCN (icode)
6572 (target,
6573 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6574 break;
6575 }
6576
6577 default:
6578 gcc_unreachable ();
6579 }
6580 }
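
/* A sketch of the overall strategy, for a C initializer such as

     int v[8] = { [0] = 1, [7] = 2 };

   store_constructor first clears the whole of TARGET (most elements are
   missing or zero) and then stores only the remaining nonzero elements
   through store_constructor_field, skipping initializer_zerop values
   once the target is known to be cleared.  */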
6581
6582 /* Store the value of EXP (an expression tree)
6583 into a subfield of TARGET which has mode MODE and occupies
6584 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6585 If MODE is VOIDmode, it means that we are storing into a bit-field.
6586
6587 BITREGION_START is bitpos of the first bitfield in this region.
6588 BITREGION_END is the bitpos of the ending bitfield in this region.
6589 These two fields are 0, if the C++ memory model does not apply,
6590 or we are not interested in keeping track of bitfield regions.
6591
6592 Always return const0_rtx unless we have something particular to
6593 return.
6594
6595 ALIAS_SET is the alias set for the destination. This value will
6596 (in general) be different from that for TARGET, since TARGET is a
6597 reference to the containing structure.
6598
6599 If NONTEMPORAL is true, try generating a nontemporal store. */
6600
6601 static rtx
6602 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6603 unsigned HOST_WIDE_INT bitregion_start,
6604 unsigned HOST_WIDE_INT bitregion_end,
6605 machine_mode mode, tree exp,
6606 alias_set_type alias_set, bool nontemporal)
6607 {
6608 if (TREE_CODE (exp) == ERROR_MARK)
6609 return const0_rtx;
6610
6611 /* If we have nothing to store, do nothing unless the expression has
6612 side-effects. */
6613 if (bitsize == 0)
6614 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6615
6616 if (GET_CODE (target) == CONCAT)
6617 {
6618 /* We're storing into a struct containing a single __complex. */
6619
6620 gcc_assert (!bitpos);
6621 return store_expr (exp, target, 0, nontemporal);
6622 }
6623
6624 /* If the structure is in a register or if the component
6625 is a bit field, we cannot use addressing to access it.
6626 Use bit-field techniques or SUBREG to store in it. */
6627
6628 if (mode == VOIDmode
6629 || (mode != BLKmode && ! direct_store[(int) mode]
6630 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6631 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6632 || REG_P (target)
6633 || GET_CODE (target) == SUBREG
6634 /* If the field isn't aligned enough to store as an ordinary memref,
6635 store it as a bit field. */
6636 || (mode != BLKmode
6637 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6638 || bitpos % GET_MODE_ALIGNMENT (mode))
6639 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6640 || (bitpos % BITS_PER_UNIT != 0)))
6641 || (bitsize >= 0 && mode != BLKmode
6642 && GET_MODE_BITSIZE (mode) > bitsize)
6643 /* If the RHS and field are a constant size and the size of the
6644 RHS isn't the same size as the bitfield, we must use bitfield
6645 operations. */
6646 || (bitsize >= 0
6647 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6648 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6649 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6650 decl we must use bitfield operations. */
6651 || (bitsize >= 0
6652 && TREE_CODE (exp) == MEM_REF
6653 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6654 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6655 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6656 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6657 {
6658 rtx temp;
6659 gimple nop_def;
6660
6661 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6662 implies a mask operation. If the precision is the same size as
6663 the field we're storing into, that mask is redundant. This is
6664 particularly common with bit field assignments generated by the
6665 C front end. */
6666 nop_def = get_def_for_expr (exp, NOP_EXPR);
6667 if (nop_def)
6668 {
6669 tree type = TREE_TYPE (exp);
6670 if (INTEGRAL_TYPE_P (type)
6671 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6672 && bitsize == TYPE_PRECISION (type))
6673 {
6674 tree op = gimple_assign_rhs1 (nop_def);
6675 type = TREE_TYPE (op);
6676 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6677 exp = op;
6678 }
6679 }
6680
6681 temp = expand_normal (exp);
6682
6683 /* If BITSIZE is narrower than the size of the type of EXP
6684 we will be narrowing TEMP. Normally, what's wanted are the
6685 low-order bits. However, if EXP's type is a record and this is
6686 a big-endian machine, we want the upper BITSIZE bits.
6687 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6688 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6689 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6690 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6691 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6692 NULL_RTX, 1);
6693
6694 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6695 if (mode != VOIDmode && mode != BLKmode
6696 && mode != TYPE_MODE (TREE_TYPE (exp)))
6697 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6698
6699 /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6700 are both BLKmode, both must be in memory and BITPOS must be aligned
6701 on a byte boundary. If so, we simply do a block copy. Likewise for
6702 a BLKmode-like TARGET. */
6703 if (GET_CODE (temp) != PARALLEL
6704 && GET_MODE (temp) == BLKmode
6705 && (GET_MODE (target) == BLKmode
6706 || (MEM_P (target)
6707 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6708 && (bitpos % BITS_PER_UNIT) == 0
6709 && (bitsize % BITS_PER_UNIT) == 0)))
6710 {
6711 gcc_assert (MEM_P (target) && MEM_P (temp)
6712 && (bitpos % BITS_PER_UNIT) == 0);
6713
6714 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6715 emit_block_move (target, temp,
6716 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6717 / BITS_PER_UNIT),
6718 BLOCK_OP_NORMAL);
6719
6720 return const0_rtx;
6721 }
6722
6723 /* Handle calls that return values in multiple non-contiguous locations.
6724 The Irix 6 ABI has examples of this. */
6725 if (GET_CODE (temp) == PARALLEL)
6726 {
6727 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6728 rtx temp_target;
6729 if (mode == BLKmode || mode == VOIDmode)
6730 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6731 temp_target = gen_reg_rtx (mode);
6732 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6733 temp = temp_target;
6734 }
6735 else if (mode == BLKmode)
6736 {
6737 /* Handle calls that return BLKmode values in registers. */
6738 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6739 {
6740 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6741 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6742 temp = temp_target;
6743 }
6744 else
6745 {
6746 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6747 rtx temp_target;
6748 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6749 temp_target = gen_reg_rtx (mode);
6750 temp_target
6751 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6752 temp_target, mode, mode);
6753 temp = temp_target;
6754 }
6755 }
6756
6757 /* Store the value in the bitfield. */
6758 store_bit_field (target, bitsize, bitpos,
6759 bitregion_start, bitregion_end,
6760 mode, temp);
6761
6762 return const0_rtx;
6763 }
6764 else
6765 {
6766 /* Now build a reference to just the desired component. */
6767 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6768
6769 if (to_rtx == target)
6770 to_rtx = copy_rtx (to_rtx);
6771
6772 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6773 set_mem_alias_set (to_rtx, alias_set);
6774
6775 return store_expr (exp, to_rtx, 0, nontemporal);
6776 }
6777 }
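
/* For instance, storing into a 5-bit bit-field of a struct in memory
   takes the first branch above and ends in store_bit_field, while
   storing into a suitably aligned, naturally sized field of an object
   in memory builds a narrower MEM with adjust_address and reuses
   store_expr.  (Illustrative summary of the two main paths.)  */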
6778 \f
6779 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6780 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6781 codes and find the ultimate containing object, which we return.
6782
6783 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6784 bit position, and *PUNSIGNEDP to the signedness of the field.
6785 If the position of the field is variable, we store a tree
6786 giving the variable offset (in units) in *POFFSET.
6787 This offset is in addition to the bit position.
6788 If the position is not variable, we store 0 in *POFFSET.
6789
6790 If any of the extraction expressions is volatile,
6791 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6792
6793 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6794 Otherwise, it is a mode that can be used to access the field.
6795
6796 If the field describes a variable-sized object, *PMODE is set to
6797 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6798 this case, but the address of the object can be found.
6799
6800 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6801 look through nodes that serve as markers of a greater alignment than
6802 the one that can be deduced from the expression. These nodes make it
6803 possible for front-ends to prevent temporaries from being created by
6804 the middle-end on alignment considerations. For that purpose, the
6805 normal operating mode at high-level is to always pass FALSE so that
6806 the ultimate containing object is really returned; moreover, the
6807 associated predicate handled_component_p will always return TRUE
6808 on these nodes, thus indicating that they are essentially handled
6809 by get_inner_reference. TRUE should only be passed when the caller
6810 is scanning the expression in order to build another representation
6811 and specifically knows how to handle these nodes; as such, this is
6812 the normal operating mode in the RTL expanders. */
6813
6814 tree
6815 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6816 HOST_WIDE_INT *pbitpos, tree *poffset,
6817 machine_mode *pmode, int *punsignedp,
6818 int *pvolatilep, bool keep_aligning)
6819 {
6820 tree size_tree = 0;
6821 machine_mode mode = VOIDmode;
6822 bool blkmode_bitfield = false;
6823 tree offset = size_zero_node;
6824 offset_int bit_offset = 0;
6825
6826 /* First get the mode, signedness, and size. We do this from just the
6827 outermost expression. */
6828 *pbitsize = -1;
6829 if (TREE_CODE (exp) == COMPONENT_REF)
6830 {
6831 tree field = TREE_OPERAND (exp, 1);
6832 size_tree = DECL_SIZE (field);
6833 if (flag_strict_volatile_bitfields > 0
6834 && TREE_THIS_VOLATILE (exp)
6835 && DECL_BIT_FIELD_TYPE (field)
6836 && DECL_MODE (field) != BLKmode)
6837 /* Volatile bitfields should be accessed in the mode of the
6838 field's type, not the mode computed based on the bit
6839 size. */
6840 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6841 else if (!DECL_BIT_FIELD (field))
6842 mode = DECL_MODE (field);
6843 else if (DECL_MODE (field) == BLKmode)
6844 blkmode_bitfield = true;
6845
6846 *punsignedp = DECL_UNSIGNED (field);
6847 }
6848 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6849 {
6850 size_tree = TREE_OPERAND (exp, 1);
6851 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6852 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6853
6854 /* For vector types, with the correct size of access, use the mode of
6855 inner type. */
6856 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6857 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6858 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6859 mode = TYPE_MODE (TREE_TYPE (exp));
6860 }
6861 else
6862 {
6863 mode = TYPE_MODE (TREE_TYPE (exp));
6864 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6865
6866 if (mode == BLKmode)
6867 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6868 else
6869 *pbitsize = GET_MODE_BITSIZE (mode);
6870 }
6871
6872 if (size_tree != 0)
6873 {
6874 if (! tree_fits_uhwi_p (size_tree))
6875 mode = BLKmode, *pbitsize = -1;
6876 else
6877 *pbitsize = tree_to_uhwi (size_tree);
6878 }
6879
6880 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6881 and find the ultimate containing object. */
6882 while (1)
6883 {
6884 switch (TREE_CODE (exp))
6885 {
6886 case BIT_FIELD_REF:
6887 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6888 break;
6889
6890 case COMPONENT_REF:
6891 {
6892 tree field = TREE_OPERAND (exp, 1);
6893 tree this_offset = component_ref_field_offset (exp);
6894
6895 /* If this field hasn't been filled in yet, don't go past it.
6896 This should only happen when folding expressions made during
6897 type construction. */
6898 if (this_offset == 0)
6899 break;
6900
6901 offset = size_binop (PLUS_EXPR, offset, this_offset);
6902 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6903
6904 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6905 }
6906 break;
6907
6908 case ARRAY_REF:
6909 case ARRAY_RANGE_REF:
6910 {
6911 tree index = TREE_OPERAND (exp, 1);
6912 tree low_bound = array_ref_low_bound (exp);
6913 tree unit_size = array_ref_element_size (exp);
6914
6915 /* We assume all arrays have sizes that are a multiple of a byte.
6916 First subtract the lower bound, if any, in the type of the
6917 index, then convert to sizetype and multiply by the size of
6918 the array element. */
6919 if (! integer_zerop (low_bound))
6920 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6921 index, low_bound);
6922
6923 offset = size_binop (PLUS_EXPR, offset,
6924 size_binop (MULT_EXPR,
6925 fold_convert (sizetype, index),
6926 unit_size));
6927 }
6928 break;
6929
6930 case REALPART_EXPR:
6931 break;
6932
6933 case IMAGPART_EXPR:
6934 bit_offset += *pbitsize;
6935 break;
6936
6937 case VIEW_CONVERT_EXPR:
6938 if (keep_aligning && STRICT_ALIGNMENT
6939 && (TYPE_ALIGN (TREE_TYPE (exp))
6940 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6941 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6942 < BIGGEST_ALIGNMENT)
6943 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6944 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6945 goto done;
6946 break;
6947
6948 case MEM_REF:
6949 /* Hand back the decl for MEM[&decl, off]. */
6950 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6951 {
6952 tree off = TREE_OPERAND (exp, 1);
6953 if (!integer_zerop (off))
6954 {
6955 offset_int boff, coff = mem_ref_offset (exp);
6956 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6957 bit_offset += boff;
6958 }
6959 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6960 }
6961 goto done;
6962
6963 default:
6964 goto done;
6965 }
6966
6967 /* If any reference in the chain is volatile, the effect is volatile. */
6968 if (TREE_THIS_VOLATILE (exp))
6969 *pvolatilep = 1;
6970
6971 exp = TREE_OPERAND (exp, 0);
6972 }
6973 done:
6974
6975 /* If OFFSET is constant, see if we can return the whole thing as a
6976 constant bit position. Make sure to handle overflow during
6977 this conversion. */
6978 if (TREE_CODE (offset) == INTEGER_CST)
6979 {
6980 offset_int tem = wi::sext (wi::to_offset (offset),
6981 TYPE_PRECISION (sizetype));
6982 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6983 tem += bit_offset;
6984 if (wi::fits_shwi_p (tem))
6985 {
6986 *pbitpos = tem.to_shwi ();
6987 *poffset = offset = NULL_TREE;
6988 }
6989 }
6990
6991 /* Otherwise, split it up. */
6992 if (offset)
6993 {
6994 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6995 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
6996 {
6997 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6998 offset_int tem = bit_offset.and_not (mask);
6999 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
7000 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
7001 bit_offset -= tem;
7002 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
7003 offset = size_binop (PLUS_EXPR, offset,
7004 wide_int_to_tree (sizetype, tem));
7005 }
7006
7007 *pbitpos = bit_offset.to_shwi ();
7008 *poffset = offset;
7009 }
7010
7011 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
7012 if (mode == VOIDmode
7013 && blkmode_bitfield
7014 && (*pbitpos % BITS_PER_UNIT) == 0
7015 && (*pbitsize % BITS_PER_UNIT) == 0)
7016 *pmode = BLKmode;
7017 else
7018 *pmode = mode;
7019
7020 return exp;
7021 }
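
/* As an example, for an access such as S.F where F is a bit-field of
   width 5 starting at bit 3 of S, this returns S with *PBITSIZE == 5,
   *PBITPOS == 3, *POFFSET == NULL_TREE and *PMODE == VOIDmode (or
   BLKmode for a byte-aligned BLKmode bit-field).  */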
7022
7023 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7024
7025 static unsigned HOST_WIDE_INT
7026 target_align (const_tree target)
7027 {
7028 /* We might have a chain of nested references with intermediate misaligning
7029 bitfield components, so we need to recurse to find out. */
7030
7031 unsigned HOST_WIDE_INT this_align, outer_align;
7032
7033 switch (TREE_CODE (target))
7034 {
7035 case BIT_FIELD_REF:
7036 return 1;
7037
7038 case COMPONENT_REF:
7039 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7040 outer_align = target_align (TREE_OPERAND (target, 0));
7041 return MIN (this_align, outer_align);
7042
7043 case ARRAY_REF:
7044 case ARRAY_RANGE_REF:
7045 this_align = TYPE_ALIGN (TREE_TYPE (target));
7046 outer_align = target_align (TREE_OPERAND (target, 0));
7047 return MIN (this_align, outer_align);
7048
7049 CASE_CONVERT:
7050 case NON_LVALUE_EXPR:
7051 case VIEW_CONVERT_EXPR:
7052 this_align = TYPE_ALIGN (TREE_TYPE (target));
7053 outer_align = target_align (TREE_OPERAND (target, 0));
7054 return MAX (this_align, outer_align);
7055
7056 default:
7057 return TYPE_ALIGN (TREE_TYPE (target));
7058 }
7059 }
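
/* E.g. for an assignment target like S.C, where C is a char field of a
   4-byte-aligned struct S, this returns MIN (DECL_ALIGN (C),
   TYPE_ALIGN of the struct), i.e. typically just the byte alignment
   of C itself.  */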
7060
7061 \f
7062 /* Given an rtx VALUE that may contain additions and multiplications, return
7063 an equivalent value that just refers to a register, memory, or constant.
7064 This is done by generating instructions to perform the arithmetic and
7065 returning a pseudo-register containing the value.
7066
7067 The returned value may be a REG, SUBREG, MEM or constant. */
7068
7069 rtx
7070 force_operand (rtx value, rtx target)
7071 {
7072 rtx op1, op2;
7073 /* Use subtarget as the target for operand 0 of a binary operation. */
7074 rtx subtarget = get_subtarget (target);
7075 enum rtx_code code = GET_CODE (value);
7076
7077 /* Check for a subreg applied to an expression produced by the loop optimizer. */
7078 if (code == SUBREG
7079 && !REG_P (SUBREG_REG (value))
7080 && !MEM_P (SUBREG_REG (value)))
7081 {
7082 value
7083 = simplify_gen_subreg (GET_MODE (value),
7084 force_reg (GET_MODE (SUBREG_REG (value)),
7085 force_operand (SUBREG_REG (value),
7086 NULL_RTX)),
7087 GET_MODE (SUBREG_REG (value)),
7088 SUBREG_BYTE (value));
7089 code = GET_CODE (value);
7090 }
7091
7092 /* Check for a PIC address load. */
7093 if ((code == PLUS || code == MINUS)
7094 && XEXP (value, 0) == pic_offset_table_rtx
7095 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7096 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7097 || GET_CODE (XEXP (value, 1)) == CONST))
7098 {
7099 if (!subtarget)
7100 subtarget = gen_reg_rtx (GET_MODE (value));
7101 emit_move_insn (subtarget, value);
7102 return subtarget;
7103 }
7104
7105 if (ARITHMETIC_P (value))
7106 {
7107 op2 = XEXP (value, 1);
7108 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7109 subtarget = 0;
7110 if (code == MINUS && CONST_INT_P (op2))
7111 {
7112 code = PLUS;
7113 op2 = negate_rtx (GET_MODE (value), op2);
7114 }
7115
7116 /* Check for an addition with OP2 a constant integer and our first
7117 operand a PLUS of a virtual register and something else. In that
7118 case, we want to emit the sum of the virtual register and the
7119 constant first and then add the other value. This allows virtual
7120 register instantiation to simply modify the constant rather than
7121 creating another one around this addition. */
7122 if (code == PLUS && CONST_INT_P (op2)
7123 && GET_CODE (XEXP (value, 0)) == PLUS
7124 && REG_P (XEXP (XEXP (value, 0), 0))
7125 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7126 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7127 {
7128 rtx temp = expand_simple_binop (GET_MODE (value), code,
7129 XEXP (XEXP (value, 0), 0), op2,
7130 subtarget, 0, OPTAB_LIB_WIDEN);
7131 return expand_simple_binop (GET_MODE (value), code, temp,
7132 force_operand (XEXP (XEXP (value,
7133 0), 1), 0),
7134 target, 0, OPTAB_LIB_WIDEN);
7135 }
7136
7137 op1 = force_operand (XEXP (value, 0), subtarget);
7138 op2 = force_operand (op2, NULL_RTX);
7139 switch (code)
7140 {
7141 case MULT:
7142 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7143 case DIV:
7144 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7145 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7146 target, 1, OPTAB_LIB_WIDEN);
7147 else
7148 return expand_divmod (0,
7149 FLOAT_MODE_P (GET_MODE (value))
7150 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7151 GET_MODE (value), op1, op2, target, 0);
7152 case MOD:
7153 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7154 target, 0);
7155 case UDIV:
7156 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7157 target, 1);
7158 case UMOD:
7159 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7160 target, 1);
7161 case ASHIFTRT:
7162 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7163 target, 0, OPTAB_LIB_WIDEN);
7164 default:
7165 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7166 target, 1, OPTAB_LIB_WIDEN);
7167 }
7168 }
7169 if (UNARY_P (value))
7170 {
7171 if (!target)
7172 target = gen_reg_rtx (GET_MODE (value));
7173 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7174 switch (code)
7175 {
7176 case ZERO_EXTEND:
7177 case SIGN_EXTEND:
7178 case TRUNCATE:
7179 case FLOAT_EXTEND:
7180 case FLOAT_TRUNCATE:
7181 convert_move (target, op1, code == ZERO_EXTEND);
7182 return target;
7183
7184 case FIX:
7185 case UNSIGNED_FIX:
7186 expand_fix (target, op1, code == UNSIGNED_FIX);
7187 return target;
7188
7189 case FLOAT:
7190 case UNSIGNED_FLOAT:
7191 expand_float (target, op1, code == UNSIGNED_FLOAT);
7192 return target;
7193
7194 default:
7195 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7196 }
7197 }
7198
7199 #ifdef INSN_SCHEDULING
7200 /* On machines that have insn scheduling, we want all memory references to be
7201 explicit, so we need to deal with such paradoxical SUBREGs. */
7202 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7203 value
7204 = simplify_gen_subreg (GET_MODE (value),
7205 force_reg (GET_MODE (SUBREG_REG (value)),
7206 force_operand (SUBREG_REG (value),
7207 NULL_RTX)),
7208 GET_MODE (SUBREG_REG (value)),
7209 SUBREG_BYTE (value));
7210 #endif
7211
7212 return value;
7213 }
7214 \f
7215 /* Subroutine of expand_expr: return nonzero iff there is no way that
7216 EXP can reference X, which is being modified. TOP_P is nonzero if this
7217 call is going to be used to determine whether we need a temporary
7218 for EXP, as opposed to a recursive call to this function.
7219
7220 It is always safe for this routine to return zero since it merely
7221 searches for optimization opportunities. */
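/* For example, if X is a MEM and EXP contains a CALL_EXPR we return 0,
   since a call is assumed to clobber all of memory; if EXP is a
   constant we return 1, since nothing it references can overlap X.
   Both cases are handled explicitly below.  */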
7222
7223 int
7224 safe_from_p (const_rtx x, tree exp, int top_p)
7225 {
7226 rtx exp_rtl = 0;
7227 int i, nops;
7228
7229 if (x == 0
7230 /* If EXP has varying size, we MUST use a target since we currently
7231 have no way of allocating temporaries of variable size
7232 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7233 So we assume here that something at a higher level has prevented a
7234 clash. This is somewhat bogus, but the best we can do. Only
7235 do this when X is BLKmode and when we are at the top level. */
7236 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7237 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7238 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7239 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7240 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7241 != INTEGER_CST)
7242 && GET_MODE (x) == BLKmode)
7243 /* If X is in the outgoing argument area, it is always safe. */
7244 || (MEM_P (x)
7245 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7246 || (GET_CODE (XEXP (x, 0)) == PLUS
7247 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7248 return 1;
7249
7250 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7251 find the underlying pseudo. */
7252 if (GET_CODE (x) == SUBREG)
7253 {
7254 x = SUBREG_REG (x);
7255 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7256 return 0;
7257 }
7258
7259 /* Now look at our tree code and possibly recurse. */
7260 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7261 {
7262 case tcc_declaration:
7263 exp_rtl = DECL_RTL_IF_SET (exp);
7264 break;
7265
7266 case tcc_constant:
7267 return 1;
7268
7269 case tcc_exceptional:
7270 if (TREE_CODE (exp) == TREE_LIST)
7271 {
7272 while (1)
7273 {
7274 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7275 return 0;
7276 exp = TREE_CHAIN (exp);
7277 if (!exp)
7278 return 1;
7279 if (TREE_CODE (exp) != TREE_LIST)
7280 return safe_from_p (x, exp, 0);
7281 }
7282 }
7283 else if (TREE_CODE (exp) == CONSTRUCTOR)
7284 {
7285 constructor_elt *ce;
7286 unsigned HOST_WIDE_INT idx;
7287
7288 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7289 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7290 || !safe_from_p (x, ce->value, 0))
7291 return 0;
7292 return 1;
7293 }
7294 else if (TREE_CODE (exp) == ERROR_MARK)
7295 return 1; /* An already-visited SAVE_EXPR? */
7296 else
7297 return 0;
7298
7299 case tcc_statement:
7300 /* The only case we look at here is the DECL_INITIAL inside a
7301 DECL_EXPR. */
7302 return (TREE_CODE (exp) != DECL_EXPR
7303 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7304 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7305 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7306
7307 case tcc_binary:
7308 case tcc_comparison:
7309 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7310 return 0;
7311 /* Fall through. */
7312
7313 case tcc_unary:
7314 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7315
7316 case tcc_expression:
7317 case tcc_reference:
7318 case tcc_vl_exp:
7319 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7320 the expression. If it is set, we conflict iff we are that rtx or
7321 both are in memory. Otherwise, we check all operands of the
7322 expression recursively. */
7323
7324 switch (TREE_CODE (exp))
7325 {
7326 case ADDR_EXPR:
7327 /* If the operand is static or we are static, we can't conflict.
7328 Likewise if we don't conflict with the operand at all. */
7329 if (staticp (TREE_OPERAND (exp, 0))
7330 || TREE_STATIC (exp)
7331 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7332 return 1;
7333
7334 /* Otherwise, the only way this can conflict is if we are taking
7335 the address of a DECL whose address is part of X, which is
7336 very rare. */
7337 exp = TREE_OPERAND (exp, 0);
7338 if (DECL_P (exp))
7339 {
7340 if (!DECL_RTL_SET_P (exp)
7341 || !MEM_P (DECL_RTL (exp)))
7342 return 0;
7343 else
7344 exp_rtl = XEXP (DECL_RTL (exp), 0);
7345 }
7346 break;
7347
7348 case MEM_REF:
7349 if (MEM_P (x)
7350 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7351 get_alias_set (exp)))
7352 return 0;
7353 break;
7354
7355 case CALL_EXPR:
7356 /* Assume that the call will clobber all hard registers and
7357 all of memory. */
7358 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7359 || MEM_P (x))
7360 return 0;
7361 break;
7362
7363 case WITH_CLEANUP_EXPR:
7364 case CLEANUP_POINT_EXPR:
7365 /* Lowered by gimplify.c. */
7366 gcc_unreachable ();
7367
7368 case SAVE_EXPR:
7369 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7370
7371 default:
7372 break;
7373 }
7374
7375 /* If we have an rtx, we do not need to scan our operands. */
7376 if (exp_rtl)
7377 break;
7378
7379 nops = TREE_OPERAND_LENGTH (exp);
7380 for (i = 0; i < nops; i++)
7381 if (TREE_OPERAND (exp, i) != 0
7382 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7383 return 0;
7384
7385 break;
7386
7387 case tcc_type:
7388 /* Should never get a type here. */
7389 gcc_unreachable ();
7390 }
7391
7392 /* If we have an rtl, find any enclosed object. Then see if we conflict
7393 with it. */
7394 if (exp_rtl)
7395 {
7396 if (GET_CODE (exp_rtl) == SUBREG)
7397 {
7398 exp_rtl = SUBREG_REG (exp_rtl);
7399 if (REG_P (exp_rtl)
7400 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7401 return 0;
7402 }
7403
7404 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7405 are memory and they conflict. */
7406 return ! (rtx_equal_p (x, exp_rtl)
7407 || (MEM_P (x) && MEM_P (exp_rtl)
7408 && true_dependence (exp_rtl, VOIDmode, x)));
7409 }
7410
7411 /* If we reach here, it is safe. */
7412 return 1;
7413 }
7414
7415 \f
7416 /* Return the highest power of two that EXP is known to be a multiple of.
7417 This is used in updating alignment of MEMs in array references. */
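/* For instance, for the integer constant 24 tree_ctz returns 3 and the
   result is 1 << 3 == 8 (assuming BIGGEST_ALIGNMENT is at least 8);
   for an expression with no known trailing zero bits the result is 1.  */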
7418
7419 unsigned HOST_WIDE_INT
7420 highest_pow2_factor (const_tree exp)
7421 {
7422 unsigned HOST_WIDE_INT ret;
7423 int trailing_zeros = tree_ctz (exp);
7424 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7425 return BIGGEST_ALIGNMENT;
7426 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7427 if (ret > BIGGEST_ALIGNMENT)
7428 return BIGGEST_ALIGNMENT;
7429 return ret;
7430 }
7431
7432 /* Similar, except that the alignment requirements of TARGET are
7433 taken into account. Assume it is at least as aligned as its
7434 type, unless it is a COMPONENT_REF in which case the layout of
7435 the structure gives the alignment. */
7436
7437 static unsigned HOST_WIDE_INT
7438 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7439 {
7440 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7441 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7442
7443 return MAX (factor, talign);
7444 }
7445 \f
7446 /* Convert the tree comparison code TCODE to the rtl one where the
7447 signedness is UNSIGNEDP. */
7448
7449 static enum rtx_code
7450 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7451 {
7452 enum rtx_code code;
7453 switch (tcode)
7454 {
7455 case EQ_EXPR:
7456 code = EQ;
7457 break;
7458 case NE_EXPR:
7459 code = NE;
7460 break;
7461 case LT_EXPR:
7462 code = unsignedp ? LTU : LT;
7463 break;
7464 case LE_EXPR:
7465 code = unsignedp ? LEU : LE;
7466 break;
7467 case GT_EXPR:
7468 code = unsignedp ? GTU : GT;
7469 break;
7470 case GE_EXPR:
7471 code = unsignedp ? GEU : GE;
7472 break;
7473 case UNORDERED_EXPR:
7474 code = UNORDERED;
7475 break;
7476 case ORDERED_EXPR:
7477 code = ORDERED;
7478 break;
7479 case UNLT_EXPR:
7480 code = UNLT;
7481 break;
7482 case UNLE_EXPR:
7483 code = UNLE;
7484 break;
7485 case UNGT_EXPR:
7486 code = UNGT;
7487 break;
7488 case UNGE_EXPR:
7489 code = UNGE;
7490 break;
7491 case UNEQ_EXPR:
7492 code = UNEQ;
7493 break;
7494 case LTGT_EXPR:
7495 code = LTGT;
7496 break;
7497
7498 default:
7499 gcc_unreachable ();
7500 }
7501 return code;
7502 }
7503
7504 /* Subroutine of expand_expr. Expand the two operands of a binary
7505 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7506 The value may be stored in TARGET if TARGET is nonzero. The
7507 MODIFIER argument is as documented by expand_expr. */
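/* For instance, when expanding X * X the operand trees are
   operand_equal_p, so the operand is expanded only once and *OP1 is
   simply a copy_rtx of *OP0.  */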
7508
7509 void
7510 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7511 enum expand_modifier modifier)
7512 {
7513 if (! safe_from_p (target, exp1, 1))
7514 target = 0;
7515 if (operand_equal_p (exp0, exp1, 0))
7516 {
7517 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7518 *op1 = copy_rtx (*op0);
7519 }
7520 else
7521 {
7522 /* If we need to preserve evaluation order, copy exp0 into its own
7523 temporary variable so that it can't be clobbered by exp1. */
7524 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7525 exp0 = save_expr (exp0);
7526 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7527 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7528 }
7529 }
7530
7531 \f
7532 /* Return a MEM that contains constant EXP. DEFER is as for
7533 output_constant_def and MODIFIER is as for expand_expr. */
7534
7535 static rtx
7536 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7537 {
7538 rtx mem;
7539
7540 mem = output_constant_def (exp, defer);
7541 if (modifier != EXPAND_INITIALIZER)
7542 mem = use_anchored_address (mem);
7543 return mem;
7544 }
7545
7546 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7547 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
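/* Sketch of the common case: for &S.F, get_inner_reference peels the
   COMPONENT_REF, the address of S is computed recursively, and the byte
   offset of F (BITPOS / BITS_PER_UNIT, plus any variable OFFSET) is
   added to that address.  */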
7548
7549 static rtx
7550 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7551 enum expand_modifier modifier, addr_space_t as)
7552 {
7553 rtx result, subtarget;
7554 tree inner, offset;
7555 HOST_WIDE_INT bitsize, bitpos;
7556 int volatilep, unsignedp;
7557 machine_mode mode1;
7558
7559 /* If we are taking the address of a constant and are at the top level,
7560 we have to use output_constant_def since we can't call force_const_mem
7561 at top level. */
7562 /* ??? This should be considered a front-end bug. We should not be
7563 generating ADDR_EXPR of something that isn't an LVALUE. The only
7564 exception here is STRING_CST. */
7565 if (CONSTANT_CLASS_P (exp))
7566 {
7567 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7568 if (modifier < EXPAND_SUM)
7569 result = force_operand (result, target);
7570 return result;
7571 }
7572
7573 /* Everything must be something allowed by is_gimple_addressable. */
7574 switch (TREE_CODE (exp))
7575 {
7576 case INDIRECT_REF:
7577 /* This case will happen via recursion for &a->b. */
7578 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7579
7580 case MEM_REF:
7581 {
7582 tree tem = TREE_OPERAND (exp, 0);
7583 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7584 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7585 return expand_expr (tem, target, tmode, modifier);
7586 }
7587
7588 case CONST_DECL:
7589 /* Expand the initializer like constants above. */
7590 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7591 0, modifier), 0);
7592 if (modifier < EXPAND_SUM)
7593 result = force_operand (result, target);
7594 return result;
7595
7596 case REALPART_EXPR:
7597 /* The real part of the complex number is always first, therefore
7598 the address is the same as the address of the parent object. */
7599 offset = 0;
7600 bitpos = 0;
7601 inner = TREE_OPERAND (exp, 0);
7602 break;
7603
7604 case IMAGPART_EXPR:
7605 /* The imaginary part of the complex number is always second.
7606 The expression is therefore always offset by the size of the
7607 scalar type. */
7608 offset = 0;
7609 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7610 inner = TREE_OPERAND (exp, 0);
7611 break;
7612
7613 case COMPOUND_LITERAL_EXPR:
7614 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7615 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7616 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7617 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7618 the initializers aren't gimplified. */
7619 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7620 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7621 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7622 target, tmode, modifier, as);
7623 /* FALLTHRU */
7624 default:
7625 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7626 expand_expr, as that can have various side effects; LABEL_DECLs for
7627 example, may not have their DECL_RTL set yet. Expand the rtl of
7628 CONSTRUCTORs too, which should yield a memory reference for the
7629 constructor's contents. Assume language specific tree nodes can
7630 be expanded in some interesting way. */
7631 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7632 if (DECL_P (exp)
7633 || TREE_CODE (exp) == CONSTRUCTOR
7634 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7635 {
7636 result = expand_expr (exp, target, tmode,
7637 modifier == EXPAND_INITIALIZER
7638 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7639
7640 /* If the DECL isn't in memory, then the DECL wasn't properly
7641 marked TREE_ADDRESSABLE, which will be either a front-end
7642 or a tree optimizer bug. */
7643
7644 if (TREE_ADDRESSABLE (exp)
7645 && ! MEM_P (result)
7646 && ! targetm.calls.allocate_stack_slots_for_args ())
7647 {
7648 error ("local frame unavailable (naked function?)");
7649 return result;
7650 }
7651 else
7652 gcc_assert (MEM_P (result));
7653 result = XEXP (result, 0);
7654
7655 /* ??? Is this needed anymore? */
7656 if (DECL_P (exp))
7657 TREE_USED (exp) = 1;
7658
7659 if (modifier != EXPAND_INITIALIZER
7660 && modifier != EXPAND_CONST_ADDRESS
7661 && modifier != EXPAND_SUM)
7662 result = force_operand (result, target);
7663 return result;
7664 }
7665
7666 /* Pass FALSE as the last argument to get_inner_reference although
7667 we are expanding to RTL. The rationale is that we know how to
7668 handle "aligning nodes" here: we can just bypass them because
7669 they won't change the final object whose address will be returned
7670 (they actually exist only for that purpose). */
7671 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7672 &mode1, &unsignedp, &volatilep, false);
7673 break;
7674 }
7675
7676 /* We must have made progress. */
7677 gcc_assert (inner != exp);
7678
7679 subtarget = offset || bitpos ? NULL_RTX : target;
7680 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7681 inner alignment, force the inner to be sufficiently aligned. */
7682 if (CONSTANT_CLASS_P (inner)
7683 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7684 {
7685 inner = copy_node (inner);
7686 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7687 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7688 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7689 }
7690 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7691
7692 if (offset)
7693 {
7694 rtx tmp;
7695
7696 if (modifier != EXPAND_NORMAL)
7697 result = force_operand (result, NULL);
7698 tmp = expand_expr (offset, NULL_RTX, tmode,
7699 modifier == EXPAND_INITIALIZER
7700 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7701
7702 /* expand_expr is allowed to return an object in a mode other
7703 than TMODE. If it did, we need to convert. */
7704 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7705 tmp = convert_modes (tmode, GET_MODE (tmp),
7706 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7707 result = convert_memory_address_addr_space (tmode, result, as);
7708 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7709
7710 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7711 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7712 else
7713 {
7714 subtarget = bitpos ? NULL_RTX : target;
7715 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7716 1, OPTAB_LIB_WIDEN);
7717 }
7718 }
7719
7720 if (bitpos)
7721 {
7722 /* Someone beforehand should have rejected taking the address
7723 of such an object. */
7724 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7725
7726 result = convert_memory_address_addr_space (tmode, result, as);
7727 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7728 if (modifier < EXPAND_SUM)
7729 result = force_operand (result, target);
7730 }
7731
7732 return result;
7733 }
7734
7735 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7736 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7737
7738 static rtx
7739 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7740 enum expand_modifier modifier)
7741 {
7742 addr_space_t as = ADDR_SPACE_GENERIC;
7743 machine_mode address_mode = Pmode;
7744 machine_mode pointer_mode = ptr_mode;
7745 machine_mode rmode;
7746 rtx result;
7747
7748 /* Target mode of VOIDmode says "whatever's natural". */
7749 if (tmode == VOIDmode)
7750 tmode = TYPE_MODE (TREE_TYPE (exp));
7751
7752 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7753 {
7754 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7755 address_mode = targetm.addr_space.address_mode (as);
7756 pointer_mode = targetm.addr_space.pointer_mode (as);
7757 }
7758
7759 /* We can get called with some Weird Things if the user does silliness
7760 like "(short) &a". In that case, convert_memory_address won't do
7761 the right thing, so ignore the given target mode. */
7762 if (tmode != address_mode && tmode != pointer_mode)
7763 tmode = address_mode;
7764
7765 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7766 tmode, modifier, as);
7767
7768 /* Despite expand_expr's claims concerning ignoring TMODE when not
7769 strictly convenient, stuff breaks if we don't honor it. Note
7770 that combined with the above, we only do this for pointer modes. */
7771 rmode = GET_MODE (result);
7772 if (rmode == VOIDmode)
7773 rmode = tmode;
7774 if (rmode != tmode)
7775 result = convert_memory_address_addr_space (tmode, result, as);
7776
7777 return result;
7778 }
7779
7780 /* Generate code for computing CONSTRUCTOR EXP.
7781 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7782 is TRUE, instead of creating a temporary variable in memory,
7783 NULL is returned and the caller needs to handle it differently. */
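/* For example, a TREE_STATIC constructor that is known to be all zeros
   and has BLKmode is not materialized element by element; when a target
   is given, it is simply cleared with clear_storage.  */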
7784
7785 static rtx
7786 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7787 bool avoid_temp_mem)
7788 {
7789 tree type = TREE_TYPE (exp);
7790 machine_mode mode = TYPE_MODE (type);
7791
7792 /* Try to avoid creating a temporary at all. This is possible
7793 if all of the initializer is zero.
7794 FIXME: try to handle all [0..255] initializers we can handle
7795 with memset. */
7796 if (TREE_STATIC (exp)
7797 && !TREE_ADDRESSABLE (exp)
7798 && target != 0 && mode == BLKmode
7799 && all_zeros_p (exp))
7800 {
7801 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7802 return target;
7803 }
7804
7805 /* All elts simple constants => refer to a constant in memory. But
7806 if this is a non-BLKmode mode, let it store a field at a time
7807 since that should make a CONST_INT, CONST_WIDE_INT or
7808 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7809 use, it is best to store directly into the target unless the type
7810 is large enough that memcpy will be used. If we are making an
7811 initializer and all operands are constant, put it in memory as
7812 well.
7813
7814 FIXME: Avoid trying to fill vector constructors piecemeal.
7815 Output them with output_constant_def below unless we're sure
7816 they're zeros. This should go away when vector initializers
7817 are treated like VECTOR_CST instead of arrays. */
7818 if ((TREE_STATIC (exp)
7819 && ((mode == BLKmode
7820 && ! (target != 0 && safe_from_p (target, exp, 1)))
7821 || TREE_ADDRESSABLE (exp)
7822 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7823 && (! can_move_by_pieces
7824 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7825 TYPE_ALIGN (type)))
7826 && ! mostly_zeros_p (exp))))
7827 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7828 && TREE_CONSTANT (exp)))
7829 {
7830 rtx constructor;
7831
7832 if (avoid_temp_mem)
7833 return NULL_RTX;
7834
7835 constructor = expand_expr_constant (exp, 1, modifier);
7836
7837 if (modifier != EXPAND_CONST_ADDRESS
7838 && modifier != EXPAND_INITIALIZER
7839 && modifier != EXPAND_SUM)
7840 constructor = validize_mem (constructor);
7841
7842 return constructor;
7843 }
7844
7845 /* Handle calls that pass values in multiple non-contiguous
7846 locations. The Irix 6 ABI has examples of this. */
7847 if (target == 0 || ! safe_from_p (target, exp, 1)
7848 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7849 {
7850 if (avoid_temp_mem)
7851 return NULL_RTX;
7852
7853 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7854 }
7855
7856 store_constructor (exp, target, 0, int_expr_size (exp));
7857 return target;
7858 }
7859
7860
7861 /* expand_expr: generate code for computing expression EXP.
7862 An rtx for the computed value is returned. The value is never null.
7863 In the case of a void EXP, const0_rtx is returned.
7864
7865 The value may be stored in TARGET if TARGET is nonzero.
7866 TARGET is just a suggestion; callers must assume that
7867 the rtx returned may not be the same as TARGET.
7868
7869 If TARGET is CONST0_RTX, it means that the value will be ignored.
7870
7871 If TMODE is not VOIDmode, it suggests generating the
7872 result in mode TMODE. But this is done only when convenient.
7873 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7874 TMODE is just a suggestion; callers must assume that
7875 the rtx returned may not have mode TMODE.
7876
7877 Note that TARGET may have neither TMODE nor MODE. In that case, it
7878 probably will not be used.
7879
7880 If MODIFIER is EXPAND_SUM then when EXP is an addition
7881 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7882 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7883 products as above, or REG or MEM, or constant.
7884 Ordinarily in such cases we would output mul or add instructions
7885 and then return a pseudo reg containing the sum.
7886
7887 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7888 it also marks a label as absolutely required (it can't be dead).
7889 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7890 This is used for outputting expressions used in initializers.
7891
7892 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7893 with a constant address even if that address is not normally legitimate.
7894 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7895
7896 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7897 a call parameter. Such targets require special care as we haven't yet
7898 marked TARGET so that it's safe from being trashed by libcalls. We
7899 don't want to use TARGET for anything but the final result;
7900 intermediate values must go elsewhere. Additionally, calls to
7901 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7902
7903 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7904 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7905 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7906 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7907 recursively.
7908
7909 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7910 In this case, we don't adjust a returned MEM rtx that wouldn't be
7911 sufficiently aligned for its mode; instead, it's up to the caller
7912 to deal with it afterwards. This is used to make sure that unaligned
7913 base objects for which out-of-bounds accesses are supported, for
7914 example record types with trailing arrays, aren't realigned behind
7915 the back of the caller.
7916 The normal operating mode is to pass FALSE for this parameter. */
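/* As an illustration of EXPAND_SUM: when expanding address arithmetic
   such as the offset computation for A[I], the expander may return an
   unreduced (plus (reg) (const_int N)) so that the caller can fold it
   directly into a memory address instead of first committing the sum
   to a pseudo.  */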
7917
7918 rtx
7919 expand_expr_real (tree exp, rtx target, machine_mode tmode,
7920 enum expand_modifier modifier, rtx *alt_rtl,
7921 bool inner_reference_p)
7922 {
7923 rtx ret;
7924
7925 /* Handle ERROR_MARK before anybody tries to access its type. */
7926 if (TREE_CODE (exp) == ERROR_MARK
7927 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7928 {
7929 ret = CONST0_RTX (tmode);
7930 return ret ? ret : const0_rtx;
7931 }
7932
7933 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7934 inner_reference_p);
7935 return ret;
7936 }
7937
7938 /* Try to expand the conditional expression which is represented by
7939 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7940 return the rtl reg which represents the result. Otherwise return
7941 NULL_RTX. */
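/* Roughly: for X = A < B ? C : D, when the target can do a conditional
   move in the relevant mode, this expands the comparison A < B and
   emits a single conditional move selecting between C and D instead of
   a branch sequence.  */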
7942
7943 static rtx
7944 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7945 tree treeop1 ATTRIBUTE_UNUSED,
7946 tree treeop2 ATTRIBUTE_UNUSED)
7947 {
7948 rtx insn;
7949 rtx op00, op01, op1, op2;
7950 enum rtx_code comparison_code;
7951 machine_mode comparison_mode;
7952 gimple srcstmt;
7953 rtx temp;
7954 tree type = TREE_TYPE (treeop1);
7955 int unsignedp = TYPE_UNSIGNED (type);
7956 machine_mode mode = TYPE_MODE (type);
7957 machine_mode orig_mode = mode;
7958
7959 /* If we cannot do a conditional move on the mode, try doing it
7960 with the promoted mode. */
7961 if (!can_conditionally_move_p (mode))
7962 {
7963 mode = promote_mode (type, mode, &unsignedp);
7964 if (!can_conditionally_move_p (mode))
7965 return NULL_RTX;
7966 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7967 }
7968 else
7969 temp = assign_temp (type, 0, 1);
7970
7971 start_sequence ();
7972 expand_operands (treeop1, treeop2,
7973 temp, &op1, &op2, EXPAND_NORMAL);
7974
7975 if (TREE_CODE (treeop0) == SSA_NAME
7976 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7977 {
7978 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7979 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7980 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7981 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7982 comparison_mode = TYPE_MODE (type);
7983 unsignedp = TYPE_UNSIGNED (type);
7984 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7985 }
7986 else if (COMPARISON_CLASS_P (treeop0))
7987 {
7988 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7989 enum tree_code cmpcode = TREE_CODE (treeop0);
7990 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7991 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7992 unsignedp = TYPE_UNSIGNED (type);
7993 comparison_mode = TYPE_MODE (type);
7994 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7995 }
7996 else
7997 {
7998 op00 = expand_normal (treeop0);
7999 op01 = const0_rtx;
8000 comparison_code = NE;
8001 comparison_mode = GET_MODE (op00);
8002 if (comparison_mode == VOIDmode)
8003 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8004 }
8005
8006 if (GET_MODE (op1) != mode)
8007 op1 = gen_lowpart (mode, op1);
8008
8009 if (GET_MODE (op2) != mode)
8010 op2 = gen_lowpart (mode, op2);
8011
8012 /* Try to emit the conditional move. */
8013 insn = emit_conditional_move (temp, comparison_code,
8014 op00, op01, comparison_mode,
8015 op1, op2, mode,
8016 unsignedp);
8017
8018 /* If we could do the conditional move, emit the sequence,
8019 and return. */
8020 if (insn)
8021 {
8022 rtx_insn *seq = get_insns ();
8023 end_sequence ();
8024 emit_insn (seq);
8025 return convert_modes (orig_mode, mode, temp, 0);
8026 }
8027
8028 /* Otherwise discard the sequence and fall back to code with
8029 branches. */
8030 end_sequence ();
8031 return NULL_RTX;
8032 }
8033
8034 rtx
8035 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8036 enum expand_modifier modifier)
8037 {
8038 rtx op0, op1, op2, temp;
8039 rtx_code_label *lab;
8040 tree type;
8041 int unsignedp;
8042 machine_mode mode;
8043 enum tree_code code = ops->code;
8044 optab this_optab;
8045 rtx subtarget, original_target;
8046 int ignore;
8047 bool reduce_bit_field;
8048 location_t loc = ops->location;
8049 tree treeop0, treeop1, treeop2;
8050 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8051 ? reduce_to_bit_field_precision ((expr), \
8052 target, \
8053 type) \
8054 : (expr))
8055
8056 type = ops->type;
8057 mode = TYPE_MODE (type);
8058 unsignedp = TYPE_UNSIGNED (type);
8059
8060 treeop0 = ops->op0;
8061 treeop1 = ops->op1;
8062 treeop2 = ops->op2;
8063
8064 /* We should be called only on simple (binary or unary) expressions,
8065 exactly those that are valid in gimple expressions that aren't
8066 GIMPLE_SINGLE_RHS (or invalid). */
8067 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8068 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8069 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8070
8071 ignore = (target == const0_rtx
8072 || ((CONVERT_EXPR_CODE_P (code)
8073 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8074 && TREE_CODE (type) == VOID_TYPE));
8075
8076 /* We should be called only if we need the result. */
8077 gcc_assert (!ignore);
8078
8079 /* An operation in what may be a bit-field type needs the
8080 result to be reduced to the precision of the bit-field type,
8081 which is narrower than that of the type's mode. */
8082 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8083 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
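/* For example, adding two values of a 5-bit bit-field type is carried
   out in the containing mode (say QImode); REDUCE_BIT_FIELD then
   truncates or extends the result back to 5 bits of precision.  */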
8084
8085 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8086 target = 0;
8087
8088 /* Use subtarget as the target for operand 0 of a binary operation. */
8089 subtarget = get_subtarget (target);
8090 original_target = target;
8091
8092 switch (code)
8093 {
8094 case NON_LVALUE_EXPR:
8095 case PAREN_EXPR:
8096 CASE_CONVERT:
8097 if (treeop0 == error_mark_node)
8098 return const0_rtx;
8099
8100 if (TREE_CODE (type) == UNION_TYPE)
8101 {
8102 tree valtype = TREE_TYPE (treeop0);
8103
8104 /* If both input and output are BLKmode, this conversion isn't doing
8105 anything except possibly changing memory attributes. */
8106 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8107 {
8108 rtx result = expand_expr (treeop0, target, tmode,
8109 modifier);
8110
8111 result = copy_rtx (result);
8112 set_mem_attributes (result, type, 0);
8113 return result;
8114 }
8115
8116 if (target == 0)
8117 {
8118 if (TYPE_MODE (type) != BLKmode)
8119 target = gen_reg_rtx (TYPE_MODE (type));
8120 else
8121 target = assign_temp (type, 1, 1);
8122 }
8123
8124 if (MEM_P (target))
8125 /* Store data into beginning of memory target. */
8126 store_expr (treeop0,
8127 adjust_address (target, TYPE_MODE (valtype), 0),
8128 modifier == EXPAND_STACK_PARM,
8129 false);
8130
8131 else
8132 {
8133 gcc_assert (REG_P (target));
8134
8135 /* Store this field into a union of the proper type. */
8136 store_field (target,
8137 MIN ((int_size_in_bytes (TREE_TYPE
8138 (treeop0))
8139 * BITS_PER_UNIT),
8140 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8141 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8142 }
8143
8144 /* Return the entire union. */
8145 return target;
8146 }
8147
8148 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8149 {
8150 op0 = expand_expr (treeop0, target, VOIDmode,
8151 modifier);
8152
8153 /* If the signedness of the conversion differs and OP0 is
8154 a promoted SUBREG, clear that indication since we now
8155 have to do the proper extension. */
8156 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8157 && GET_CODE (op0) == SUBREG)
8158 SUBREG_PROMOTED_VAR_P (op0) = 0;
8159
8160 return REDUCE_BIT_FIELD (op0);
8161 }
8162
8163 op0 = expand_expr (treeop0, NULL_RTX, mode,
8164 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8165 if (GET_MODE (op0) == mode)
8166 ;
8167
8168 /* If OP0 is a constant, just convert it into the proper mode. */
8169 else if (CONSTANT_P (op0))
8170 {
8171 tree inner_type = TREE_TYPE (treeop0);
8172 machine_mode inner_mode = GET_MODE (op0);
8173
8174 if (inner_mode == VOIDmode)
8175 inner_mode = TYPE_MODE (inner_type);
8176
8177 if (modifier == EXPAND_INITIALIZER)
8178 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8179 subreg_lowpart_offset (mode,
8180 inner_mode));
8181 else
8182 op0 = convert_modes (mode, inner_mode, op0,
8183 TYPE_UNSIGNED (inner_type));
8184 }
8185
8186 else if (modifier == EXPAND_INITIALIZER)
8187 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8188
8189 else if (target == 0)
8190 op0 = convert_to_mode (mode, op0,
8191 TYPE_UNSIGNED (TREE_TYPE
8192 (treeop0)));
8193 else
8194 {
8195 convert_move (target, op0,
8196 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8197 op0 = target;
8198 }
8199
8200 return REDUCE_BIT_FIELD (op0);
8201
8202 case ADDR_SPACE_CONVERT_EXPR:
8203 {
8204 tree treeop0_type = TREE_TYPE (treeop0);
8205 addr_space_t as_to;
8206 addr_space_t as_from;
8207
8208 gcc_assert (POINTER_TYPE_P (type));
8209 gcc_assert (POINTER_TYPE_P (treeop0_type));
8210
8211 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8212 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8213
8214 /* Conversions between pointers to the same address space should
8215 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8216 gcc_assert (as_to != as_from);
8217
8218 /* Ask target code to handle conversion between pointers
8219 to overlapping address spaces. */
8220 if (targetm.addr_space.subset_p (as_to, as_from)
8221 || targetm.addr_space.subset_p (as_from, as_to))
8222 {
8223 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8224 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8225 gcc_assert (op0);
8226 return op0;
8227 }
8228
8229 /* For disjoint address spaces, converting anything but
8230 a null pointer invokes undefined behaviour. We simply
8231 always return a null pointer here. */
8232 return CONST0_RTX (mode);
8233 }
8234
8235 case POINTER_PLUS_EXPR:
8236 /* Even though the sizetype mode and the pointer's mode can be different,
8237 expand is able to handle this correctly and get the correct result out
8238 of the PLUS_EXPR code. */
8239 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8240 if sizetype precision is smaller than pointer precision. */
8241 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8242 treeop1 = fold_convert_loc (loc, type,
8243 fold_convert_loc (loc, ssizetype,
8244 treeop1));
8245 /* If sizetype precision is larger than pointer precision, truncate the
8246 offset to have matching modes. */
8247 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8248 treeop1 = fold_convert_loc (loc, type, treeop1);
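/* Fall through: with the offset adjusted to the pointer's precision,
   a POINTER_PLUS_EXPR is expanded exactly like a PLUS_EXPR.  */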
8249
8250 case PLUS_EXPR:
8251 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8252 something else, make sure we add the register to the constant and
8253 then to the other thing. This case can occur during strength
8254 reduction and doing it this way will produce better code if the
8255 frame pointer or argument pointer is eliminated.
8256
8257 fold-const.c will ensure that the constant is always in the inner
8258 PLUS_EXPR, so the only case we need to do anything about is if
8259 sp, ap, or fp is our second argument, in which case we must swap
8260 the innermost first argument and our second argument. */
8261
8262 if (TREE_CODE (treeop0) == PLUS_EXPR
8263 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8264 && TREE_CODE (treeop1) == VAR_DECL
8265 && (DECL_RTL (treeop1) == frame_pointer_rtx
8266 || DECL_RTL (treeop1) == stack_pointer_rtx
8267 || DECL_RTL (treeop1) == arg_pointer_rtx))
8268 {
8269 gcc_unreachable ();
8270 }
8271
8272 /* If the result is to be ptr_mode and we are adding an integer to
8273 something, we might be forming a constant. So try to use
8274 plus_constant. If it produces a sum and we can't accept it,
8275 use force_operand. This allows P = &ARR[const] to generate
8276 efficient code on machines where a SYMBOL_REF is not a valid
8277 address.
8278
8279 If this is an EXPAND_SUM call, always return the sum. */
8280 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8281 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8282 {
8283 if (modifier == EXPAND_STACK_PARM)
8284 target = 0;
8285 if (TREE_CODE (treeop0) == INTEGER_CST
8286 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8287 && TREE_CONSTANT (treeop1))
8288 {
8289 rtx constant_part;
8290 HOST_WIDE_INT wc;
8291 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8292
8293 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8294 EXPAND_SUM);
8295 /* Use wi::shwi to ensure that the constant is
8296 truncated according to the mode of OP1, then sign extended
8297 to a HOST_WIDE_INT. Using the constant directly can result
8298 in non-canonical RTL in a 64x32 cross compile. */
8299 wc = TREE_INT_CST_LOW (treeop0);
8300 constant_part =
8301 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8302 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8303 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8304 op1 = force_operand (op1, target);
8305 return REDUCE_BIT_FIELD (op1);
8306 }
8307
8308 else if (TREE_CODE (treeop1) == INTEGER_CST
8309 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8310 && TREE_CONSTANT (treeop0))
8311 {
8312 rtx constant_part;
8313 HOST_WIDE_INT wc;
8314 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8315
8316 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8317 (modifier == EXPAND_INITIALIZER
8318 ? EXPAND_INITIALIZER : EXPAND_SUM));
8319 if (! CONSTANT_P (op0))
8320 {
8321 op1 = expand_expr (treeop1, NULL_RTX,
8322 VOIDmode, modifier);
8323 /* Return a PLUS if modifier says it's OK. */
8324 if (modifier == EXPAND_SUM
8325 || modifier == EXPAND_INITIALIZER)
8326 return simplify_gen_binary (PLUS, mode, op0, op1);
8327 goto binop2;
8328 }
8329 /* Use wi::shwi to ensure that the constant is
8330 truncated according to the mode of OP1, then sign extended
8331 to a HOST_WIDE_INT. Using the constant directly can result
8332 in non-canonical RTL in a 64x32 cross compile. */
8333 wc = TREE_INT_CST_LOW (treeop1);
8334 constant_part
8335 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8336 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8337 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8338 op0 = force_operand (op0, target);
8339 return REDUCE_BIT_FIELD (op0);
8340 }
8341 }
8342
8343 /* Use TER to expand pointer addition of a negated value
8344 as pointer subtraction. */
8345 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8346 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8347 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8348 && TREE_CODE (treeop1) == SSA_NAME
8349 && TYPE_MODE (TREE_TYPE (treeop0))
8350 == TYPE_MODE (TREE_TYPE (treeop1)))
8351 {
8352 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8353 if (def)
8354 {
8355 treeop1 = gimple_assign_rhs1 (def);
8356 code = MINUS_EXPR;
8357 goto do_minus;
8358 }
8359 }
8360
8361 /* No sense saving up arithmetic to be done
8362 if it's all in the wrong mode to form part of an address.
8363 And force_operand won't know whether to sign-extend or
8364 zero-extend. */
8365 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8366 || mode != ptr_mode)
8367 {
8368 expand_operands (treeop0, treeop1,
8369 subtarget, &op0, &op1, EXPAND_NORMAL);
8370 if (op0 == const0_rtx)
8371 return op1;
8372 if (op1 == const0_rtx)
8373 return op0;
8374 goto binop2;
8375 }
8376
8377 expand_operands (treeop0, treeop1,
8378 subtarget, &op0, &op1, modifier);
8379 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8380
8381 case MINUS_EXPR:
8382 do_minus:
8383 /* For initializers, we are allowed to return a MINUS of two
8384 symbolic constants. Here we handle all cases when both operands
8385 are constant. */
8386 /* Handle difference of two symbolic constants,
8387 for the sake of an initializer. */
8388 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8389 && really_constant_p (treeop0)
8390 && really_constant_p (treeop1))
8391 {
8392 expand_operands (treeop0, treeop1,
8393 NULL_RTX, &op0, &op1, modifier);
8394
8395 /* If the last operand is a CONST_INT, use plus_constant of
8396 the negated constant. Else make the MINUS. */
8397 if (CONST_INT_P (op1))
8398 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8399 -INTVAL (op1)));
8400 else
8401 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8402 }
8403
8404 /* No sense saving up arithmetic to be done
8405 if it's all in the wrong mode to form part of an address.
8406 And force_operand won't know whether to sign-extend or
8407 zero-extend. */
8408 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8409 || mode != ptr_mode)
8410 goto binop;
8411
8412 expand_operands (treeop0, treeop1,
8413 subtarget, &op0, &op1, modifier);
8414
8415 /* Convert A - const to A + (-const). */
8416 if (CONST_INT_P (op1))
8417 {
8418 op1 = negate_rtx (mode, op1);
8419 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8420 }
8421
8422 goto binop2;
8423
8424 case WIDEN_MULT_PLUS_EXPR:
8425 case WIDEN_MULT_MINUS_EXPR:
8426 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8427 op2 = expand_normal (treeop2);
8428 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8429 target, unsignedp);
8430 return target;
8431
8432 case WIDEN_MULT_EXPR:
8433 /* If first operand is constant, swap them.
8434 Thus the following special case checks need only
8435 check the second operand. */
8436 if (TREE_CODE (treeop0) == INTEGER_CST)
8437 {
8438 tree t1 = treeop0;
8439 treeop0 = treeop1;
8440 treeop1 = t1;
8441 }
8442
8443 /* First, check if we have a multiplication of one signed and one
8444 unsigned operand. */
8445 if (TREE_CODE (treeop1) != INTEGER_CST
8446 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8447 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8448 {
8449 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8450 this_optab = usmul_widen_optab;
8451 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8452 != CODE_FOR_nothing)
8453 {
8454 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8455 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8456 EXPAND_NORMAL);
8457 else
8458 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8459 EXPAND_NORMAL);
8460 /* op0 and op1 might still be constant, despite the above
8461 != INTEGER_CST check. Handle it. */
8462 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8463 {
8464 op0 = convert_modes (innermode, mode, op0, true);
8465 op1 = convert_modes (innermode, mode, op1, false);
8466 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8467 target, unsignedp));
8468 }
8469 goto binop3;
8470 }
8471 }
8472 /* Check for a multiplication with matching signedness. */
8473 else if ((TREE_CODE (treeop1) == INTEGER_CST
8474 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8475 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8476 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8477 {
8478 tree op0type = TREE_TYPE (treeop0);
8479 machine_mode innermode = TYPE_MODE (op0type);
8480 bool zextend_p = TYPE_UNSIGNED (op0type);
8481 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8482 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8483
8484 if (TREE_CODE (treeop0) != INTEGER_CST)
8485 {
8486 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8487 != CODE_FOR_nothing)
8488 {
8489 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8490 EXPAND_NORMAL);
8491 /* op0 and op1 might still be constant, despite the above
8492 != INTEGER_CST check. Handle it. */
8493 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8494 {
8495 widen_mult_const:
8496 op0 = convert_modes (innermode, mode, op0, zextend_p);
8497 op1
8498 = convert_modes (innermode, mode, op1,
8499 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8500 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8501 target,
8502 unsignedp));
8503 }
8504 temp = expand_widening_mult (mode, op0, op1, target,
8505 unsignedp, this_optab);
8506 return REDUCE_BIT_FIELD (temp);
8507 }
8508 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8509 != CODE_FOR_nothing
8510 && innermode == word_mode)
8511 {
8512 rtx htem, hipart;
8513 op0 = expand_normal (treeop0);
8514 if (TREE_CODE (treeop1) == INTEGER_CST)
8515 op1 = convert_modes (innermode, mode,
8516 expand_normal (treeop1),
8517 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8518 else
8519 op1 = expand_normal (treeop1);
8520 /* op0 and op1 might still be constant, despite the above
8521 != INTEGER_CST check. Handle it. */
8522 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8523 goto widen_mult_const;
8524 temp = expand_binop (mode, other_optab, op0, op1, target,
8525 unsignedp, OPTAB_LIB_WIDEN);
8526 hipart = gen_highpart (innermode, temp);
8527 htem = expand_mult_highpart_adjust (innermode, hipart,
8528 op0, op1, hipart,
8529 zextend_p);
8530 if (htem != hipart)
8531 emit_move_insn (hipart, htem);
8532 return REDUCE_BIT_FIELD (temp);
8533 }
8534 }
8535 }
8536 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8537 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8538 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8539 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8540
8541 case FMA_EXPR:
8542 {
8543 optab opt = fma_optab;
8544 gimple def0, def2;
8545
8546 /* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
8547 call. */
8548 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8549 {
8550 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8551 tree call_expr;
8552
8553 gcc_assert (fn != NULL_TREE);
8554 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8555 return expand_builtin (call_expr, target, subtarget, mode, false);
8556 }
8557
8558 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8559 /* The multiplication is commutative - look at its 2nd operand
8560 if the first isn't fed by a negate. */
8561 if (!def0)
8562 {
8563 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8564 /* Swap operands if the 2nd operand is fed by a negate. */
8565 if (def0)
8566 {
8567 tree tem = treeop0;
8568 treeop0 = treeop1;
8569 treeop1 = tem;
8570 }
8571 }
8572 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
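/* The optab choice below follows the usual FMA variants: if both a
   multiplication operand and the addend are fed by negations we can use
   fnms (-a*b - c); a negated multiplication operand alone maps to fnma
   (-a*b + c); a negated addend alone maps to fms (a*b - c).  */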
8573
8574 op0 = op2 = NULL;
8575
8576 if (def0 && def2
8577 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8578 {
8579 opt = fnms_optab;
8580 op0 = expand_normal (gimple_assign_rhs1 (def0));
8581 op2 = expand_normal (gimple_assign_rhs1 (def2));
8582 }
8583 else if (def0
8584 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8585 {
8586 opt = fnma_optab;
8587 op0 = expand_normal (gimple_assign_rhs1 (def0));
8588 }
8589 else if (def2
8590 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8591 {
8592 opt = fms_optab;
8593 op2 = expand_normal (gimple_assign_rhs1 (def2));
8594 }
8595
8596 if (op0 == NULL)
8597 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8598 if (op2 == NULL)
8599 op2 = expand_normal (treeop2);
8600 op1 = expand_normal (treeop1);
8601
8602 return expand_ternary_op (TYPE_MODE (type), opt,
8603 op0, op1, op2, target, 0);
8604 }
8605
8606 case MULT_EXPR:
8607 /* If this is a fixed-point operation, then we cannot use the code
8608 below because "expand_mult" doesn't support sat/no-sat fixed-point
8609 multiplications. */
8610 if (ALL_FIXED_POINT_MODE_P (mode))
8611 goto binop;
8612
8613 /* If first operand is constant, swap them.
8614 Thus the following special case checks need only
8615 check the second operand. */
8616 if (TREE_CODE (treeop0) == INTEGER_CST)
8617 {
8618 tree t1 = treeop0;
8619 treeop0 = treeop1;
8620 treeop1 = t1;
8621 }
8622
8623 /* Attempt to return something suitable for generating an
8624 indexed address, for machines that support that. */
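/* For example, under EXPAND_SUM an index expression such as I * 4 can
   be returned as (mult (reg) (const_int 4)) so that the caller may fold
   it into an indexed memory address rather than forcing the product
   into a register first.  */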
8625
8626 if (modifier == EXPAND_SUM && mode == ptr_mode
8627 && tree_fits_shwi_p (treeop1))
8628 {
8629 tree exp1 = treeop1;
8630
8631 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8632 EXPAND_SUM);
8633
8634 if (!REG_P (op0))
8635 op0 = force_operand (op0, NULL_RTX);
8636 if (!REG_P (op0))
8637 op0 = copy_to_mode_reg (mode, op0);
8638
8639 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8640 gen_int_mode (tree_to_shwi (exp1),
8641 TYPE_MODE (TREE_TYPE (exp1)))));
8642 }
8643
8644 if (modifier == EXPAND_STACK_PARM)
8645 target = 0;
8646
8647 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8648 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8649
8650 case TRUNC_DIV_EXPR:
8651 case FLOOR_DIV_EXPR:
8652 case CEIL_DIV_EXPR:
8653 case ROUND_DIV_EXPR:
8654 case EXACT_DIV_EXPR:
8655 /* If this is a fixed-point operation, then we cannot use the code
8656 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8657 divisions. */
8658 if (ALL_FIXED_POINT_MODE_P (mode))
8659 goto binop;
8660
8661 if (modifier == EXPAND_STACK_PARM)
8662 target = 0;
8663 /* Possible optimization: compute the dividend with EXPAND_SUM
8664 then if the divisor is constant can optimize the case
8665 where some terms of the dividend have coeffs divisible by it. */
8666 expand_operands (treeop0, treeop1,
8667 subtarget, &op0, &op1, EXPAND_NORMAL);
8668 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8669
8670 case RDIV_EXPR:
8671 goto binop;
8672
8673 case MULT_HIGHPART_EXPR:
8674 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8675 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8676 gcc_assert (temp);
8677 return temp;
8678
8679 case TRUNC_MOD_EXPR:
8680 case FLOOR_MOD_EXPR:
8681 case CEIL_MOD_EXPR:
8682 case ROUND_MOD_EXPR:
8683 if (modifier == EXPAND_STACK_PARM)
8684 target = 0;
8685 expand_operands (treeop0, treeop1,
8686 subtarget, &op0, &op1, EXPAND_NORMAL);
8687 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8688
8689 case FIXED_CONVERT_EXPR:
8690 op0 = expand_normal (treeop0);
8691 if (target == 0 || modifier == EXPAND_STACK_PARM)
8692 target = gen_reg_rtx (mode);
8693
8694 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8695 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8696 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8697 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8698 else
8699 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8700 return target;
8701
8702 case FIX_TRUNC_EXPR:
8703 op0 = expand_normal (treeop0);
8704 if (target == 0 || modifier == EXPAND_STACK_PARM)
8705 target = gen_reg_rtx (mode);
8706 expand_fix (target, op0, unsignedp);
8707 return target;
8708
8709 case FLOAT_EXPR:
8710 op0 = expand_normal (treeop0);
8711 if (target == 0 || modifier == EXPAND_STACK_PARM)
8712 target = gen_reg_rtx (mode);
8713 /* expand_float can't figure out what to do if FROM has VOIDmode.
8714 So give it the correct mode. With -O, cse will optimize this. */
8715 if (GET_MODE (op0) == VOIDmode)
8716 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8717 op0);
8718 expand_float (target, op0,
8719 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8720 return target;
8721
8722 case NEGATE_EXPR:
8723 op0 = expand_expr (treeop0, subtarget,
8724 VOIDmode, EXPAND_NORMAL);
8725 if (modifier == EXPAND_STACK_PARM)
8726 target = 0;
8727 temp = expand_unop (mode,
8728 optab_for_tree_code (NEGATE_EXPR, type,
8729 optab_default),
8730 op0, target, 0);
8731 gcc_assert (temp);
8732 return REDUCE_BIT_FIELD (temp);
8733
8734 case ABS_EXPR:
8735 op0 = expand_expr (treeop0, subtarget,
8736 VOIDmode, EXPAND_NORMAL);
8737 if (modifier == EXPAND_STACK_PARM)
8738 target = 0;
8739
8740 /* ABS_EXPR is not valid for complex arguments. */
8741 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8742 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8743
8744 /* Unsigned abs is simply the operand. Testing here means we don't
8745 risk generating incorrect code below. */
8746 if (TYPE_UNSIGNED (type))
8747 return op0;
8748
8749 return expand_abs (mode, op0, target, unsignedp,
8750 safe_from_p (target, treeop0, 1));
8751
8752 case MAX_EXPR:
8753 case MIN_EXPR:
8754 target = original_target;
8755 if (target == 0
8756 || modifier == EXPAND_STACK_PARM
8757 || (MEM_P (target) && MEM_VOLATILE_P (target))
8758 || GET_MODE (target) != mode
8759 || (REG_P (target)
8760 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8761 target = gen_reg_rtx (mode);
8762 expand_operands (treeop0, treeop1,
8763 target, &op0, &op1, EXPAND_NORMAL);
8764
8765 /* First try to do it with a special MIN or MAX instruction.
8766 If that does not win, use a conditional jump to select the proper
8767 value. */
8768 this_optab = optab_for_tree_code (code, type, optab_default);
8769 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8770 OPTAB_WIDEN);
8771 if (temp != 0)
8772 return temp;
8773
8774 /* At this point, a MEM target is no longer useful; we will get better
8775 code without it. */
8776
8777 if (! REG_P (target))
8778 target = gen_reg_rtx (mode);
8779
8780 /* If op1 was placed in target, swap op0 and op1. */
8781 if (target != op0 && target == op1)
8782 std::swap (op0, op1);
8783
8784 /* We generate better code and avoid problems with op1 mentioning
8785 target by forcing op1 into a pseudo if it isn't a constant. */
8786 if (! CONSTANT_P (op1))
8787 op1 = force_reg (mode, op1);
8788
8789 {
8790 enum rtx_code comparison_code;
8791 rtx cmpop1 = op1;
8792
8793 if (code == MAX_EXPR)
8794 comparison_code = unsignedp ? GEU : GE;
8795 else
8796 comparison_code = unsignedp ? LEU : LE;
8797
8798 /* Canonicalize to comparisons against 0. */
8799 if (op1 == const1_rtx)
8800 {
8801 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8802 or (a != 0 ? a : 1) for unsigned.
8803 For MIN we are safe converting (a <= 1 ? a : 1)
8804 into (a <= 0 ? a : 1) */
8805 cmpop1 = const0_rtx;
8806 if (code == MAX_EXPR)
8807 comparison_code = unsignedp ? NE : GT;
8808 }
8809 if (op1 == constm1_rtx && !unsignedp)
8810 {
8811 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8812 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8813 cmpop1 = const0_rtx;
8814 if (code == MIN_EXPR)
8815 comparison_code = LT;
8816 }
8817
8818 /* Use a conditional move if possible. */
8819 if (can_conditionally_move_p (mode))
8820 {
8821 rtx insn;
8822
8823 start_sequence ();
8824
8825 /* Try to emit the conditional move. */
8826 insn = emit_conditional_move (target, comparison_code,
8827 op0, cmpop1, mode,
8828 op0, op1, mode,
8829 unsignedp);
8830
8831 /* If we could do the conditional move, emit the sequence,
8832 and return. */
8833 if (insn)
8834 {
8835 rtx_insn *seq = get_insns ();
8836 end_sequence ();
8837 emit_insn (seq);
8838 return target;
8839 }
8840
8841 /* Otherwise discard the sequence and fall back to code with
8842 branches. */
8843 end_sequence ();
8844 }
8845
8846 if (target != op0)
8847 emit_move_insn (target, op0);
8848
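/* TARGET now holds OP0.  Branch over the store of OP1 below when the
   comparison already selects OP0 (OP0 >= OP1 for MAX, OP0 <= OP1 for
   MIN); otherwise fall through and overwrite TARGET with OP1.  */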
8849 lab = gen_label_rtx ();
8850 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8851 unsignedp, mode, NULL_RTX, NULL, lab,
8852 -1);
8853 }
8854 emit_move_insn (target, op1);
8855 emit_label (lab);
8856 return target;
8857
8858 case BIT_NOT_EXPR:
8859 op0 = expand_expr (treeop0, subtarget,
8860 VOIDmode, EXPAND_NORMAL);
8861 if (modifier == EXPAND_STACK_PARM)
8862 target = 0;
8863 /* In case we have to reduce the result to bitfield precision
8864 for an unsigned bitfield, expand this as an XOR with a proper constant
8865 instead. */
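/* For instance (illustrative only), an unsigned 3-bit field whose mode
   is QImode gets mask == 0x7, so ~X is emitted as X ^ 7 and the result
   stays within the field's precision.  */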
8866 if (reduce_bit_field && TYPE_UNSIGNED (type))
8867 {
8868 wide_int mask = wi::mask (TYPE_PRECISION (type),
8869 false, GET_MODE_PRECISION (mode));
8870
8871 temp = expand_binop (mode, xor_optab, op0,
8872 immed_wide_int_const (mask, mode),
8873 target, 1, OPTAB_LIB_WIDEN);
8874 }
8875 else
8876 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8877 gcc_assert (temp);
8878 return temp;
8879
8880 /* ??? Can optimize bitwise operations with one constant argument.
8881 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8882 and (a bitwise1 b) bitwise2 b (etc)
8883 but that is probably not worthwhile. */
8884
8885 case BIT_AND_EXPR:
8886 case BIT_IOR_EXPR:
8887 case BIT_XOR_EXPR:
8888 goto binop;
8889
8890 case LROTATE_EXPR:
8891 case RROTATE_EXPR:
8892 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8893 || (GET_MODE_PRECISION (TYPE_MODE (type))
8894 == TYPE_PRECISION (type)));
8895 /* fall through */
8896
8897 case LSHIFT_EXPR:
8898 case RSHIFT_EXPR:
8899 /* If this is a fixed-point operation, then we cannot use the code
8900 below because "expand_shift" doesn't support sat/no-sat fixed-point
8901 shifts. */
8902 if (ALL_FIXED_POINT_MODE_P (mode))
8903 goto binop;
8904
8905 if (! safe_from_p (subtarget, treeop1, 1))
8906 subtarget = 0;
8907 if (modifier == EXPAND_STACK_PARM)
8908 target = 0;
8909 op0 = expand_expr (treeop0, subtarget,
8910 VOIDmode, EXPAND_NORMAL);
8911 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8912 unsignedp);
8913 if (code == LSHIFT_EXPR)
8914 temp = REDUCE_BIT_FIELD (temp);
8915 return temp;
8916
8917 /* Could determine the answer when only additive constants differ. Also,
8918 the addition of one can be handled by changing the condition. */
8919 case LT_EXPR:
8920 case LE_EXPR:
8921 case GT_EXPR:
8922 case GE_EXPR:
8923 case EQ_EXPR:
8924 case NE_EXPR:
8925 case UNORDERED_EXPR:
8926 case ORDERED_EXPR:
8927 case UNLT_EXPR:
8928 case UNLE_EXPR:
8929 case UNGT_EXPR:
8930 case UNGE_EXPR:
8931 case UNEQ_EXPR:
8932 case LTGT_EXPR:
8933 {
8934 temp = do_store_flag (ops,
8935 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8936 tmode != VOIDmode ? tmode : mode);
8937 if (temp)
8938 return temp;
8939
8940 /* Use a compare and a jump for BLKmode comparisons, or for function
8941 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8942
8943 if ((target == 0
8944 || modifier == EXPAND_STACK_PARM
8945 || ! safe_from_p (target, treeop0, 1)
8946 || ! safe_from_p (target, treeop1, 1)
8947 /* Make sure we don't have a hard reg (such as function's return
8948 value) live across basic blocks, if not optimizing. */
8949 || (!optimize && REG_P (target)
8950 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8951 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8952
8953 emit_move_insn (target, const0_rtx);
8954
8955 rtx_code_label *lab1 = gen_label_rtx ();
8956 jumpifnot_1 (code, treeop0, treeop1, lab1, -1);
8957
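/* A signed 1-bit type can only represent 0 and -1, so use -1 as the
   "true" value for it; otherwise use 1.  */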
8958 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8959 emit_move_insn (target, constm1_rtx);
8960 else
8961 emit_move_insn (target, const1_rtx);
8962
8963 emit_label (lab1);
8964 return target;
8965 }
8966 case COMPLEX_EXPR:
8967 /* Get the rtx code of the operands. */
8968 op0 = expand_normal (treeop0);
8969 op1 = expand_normal (treeop1);
8970
8971 if (!target)
8972 target = gen_reg_rtx (TYPE_MODE (type));
8973 else
8974 /* If target overlaps with op1, then either we need to force
8975 op1 into a pseudo (if target also overlaps with op0),
8976 or write the complex parts in reverse order. */
8977 switch (GET_CODE (target))
8978 {
8979 case CONCAT:
8980 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8981 {
8982 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8983 {
8984 complex_expr_force_op1:
8985 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8986 emit_move_insn (temp, op1);
8987 op1 = temp;
8988 break;
8989 }
8990 complex_expr_swap_order:
8991 /* Move the imaginary (op1) and real (op0) parts to their
8992 location. */
8993 write_complex_part (target, op1, true);
8994 write_complex_part (target, op0, false);
8995
8996 return target;
8997 }
8998 break;
8999 case MEM:
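/* For a MEM target the real part sits at byte offset 0 and the
   imaginary part at GET_MODE_SIZE of the inner mode; check each half
   for overlap with the operands.  */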
9000 temp = adjust_address_nv (target,
9001 GET_MODE_INNER (GET_MODE (target)), 0);
9002 if (reg_overlap_mentioned_p (temp, op1))
9003 {
9004 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9005 temp = adjust_address_nv (target, imode,
9006 GET_MODE_SIZE (imode));
9007 if (reg_overlap_mentioned_p (temp, op0))
9008 goto complex_expr_force_op1;
9009 goto complex_expr_swap_order;
9010 }
9011 break;
9012 default:
9013 if (reg_overlap_mentioned_p (target, op1))
9014 {
9015 if (reg_overlap_mentioned_p (target, op0))
9016 goto complex_expr_force_op1;
9017 goto complex_expr_swap_order;
9018 }
9019 break;
9020 }
9021
9022 /* Move the real (op0) and imaginary (op1) parts to their location. */
9023 write_complex_part (target, op0, false);
9024 write_complex_part (target, op1, true);
9025
9026 return target;
9027
9028 case WIDEN_SUM_EXPR:
9029 {
9030 tree oprnd0 = treeop0;
9031 tree oprnd1 = treeop1;
9032
9033 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9034 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9035 target, unsignedp);
9036 return target;
9037 }
9038
9039 case REDUC_MAX_EXPR:
9040 case REDUC_MIN_EXPR:
9041 case REDUC_PLUS_EXPR:
9042 {
9043 op0 = expand_normal (treeop0);
9044 this_optab = optab_for_tree_code (code, type, optab_default);
9045 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9046
9047 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9048 {
9049 struct expand_operand ops[2];
9050 enum insn_code icode = optab_handler (this_optab, vec_mode);
9051
9052 create_output_operand (&ops[0], target, mode);
9053 create_input_operand (&ops[1], op0, vec_mode);
9054 if (maybe_expand_insn (icode, 2, ops))
9055 {
9056 target = ops[0].value;
9057 if (GET_MODE (target) != mode)
9058 return gen_lowpart (tmode, target);
9059 return target;
9060 }
9061 }
9062 /* Fall back to optab with vector result, and then extract scalar. */
9063 this_optab = scalar_reduc_to_vector (this_optab, type);
9064 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9065 gcc_assert (temp);
9066 /* The tree code produces a scalar result, but (somewhat by convention)
9067 the optab produces a vector with the result in element 0 if
9068 little-endian, or element N-1 if big-endian. So pull the scalar
9069 result out of that element. */
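/* For instance (purely illustrative), a V4SImode reduction on a
   big-endian target gives index == 3 and bitsize == 32, so the
   scalar is pulled from bit position 96 of TEMP.  */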
9070 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9071 int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9072 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9073 target, mode, mode);
9074 gcc_assert (temp);
9075 return temp;
9076 }
9077
9078 case VEC_UNPACK_HI_EXPR:
9079 case VEC_UNPACK_LO_EXPR:
9080 {
9081 op0 = expand_normal (treeop0);
9082 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9083 target, unsignedp);
9084 gcc_assert (temp);
9085 return temp;
9086 }
9087
9088 case VEC_UNPACK_FLOAT_HI_EXPR:
9089 case VEC_UNPACK_FLOAT_LO_EXPR:
9090 {
9091 op0 = expand_normal (treeop0);
9092 /* The signedness is determined from the input operand. */
9093 temp = expand_widen_pattern_expr
9094 (ops, op0, NULL_RTX, NULL_RTX,
9095 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9096
9097 gcc_assert (temp);
9098 return temp;
9099 }
9100
9101 case VEC_WIDEN_MULT_HI_EXPR:
9102 case VEC_WIDEN_MULT_LO_EXPR:
9103 case VEC_WIDEN_MULT_EVEN_EXPR:
9104 case VEC_WIDEN_MULT_ODD_EXPR:
9105 case VEC_WIDEN_LSHIFT_HI_EXPR:
9106 case VEC_WIDEN_LSHIFT_LO_EXPR:
9107 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9108 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9109 target, unsignedp);
9110 gcc_assert (target);
9111 return target;
9112
9113 case VEC_PACK_TRUNC_EXPR:
9114 case VEC_PACK_SAT_EXPR:
9115 case VEC_PACK_FIX_TRUNC_EXPR:
9116 mode = TYPE_MODE (TREE_TYPE (treeop0));
9117 goto binop;
9118
9119 case VEC_PERM_EXPR:
9120 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9121 op2 = expand_normal (treeop2);
9122
9123 /* Careful here: if the target doesn't support integral vector modes,
9124 a constant selection vector could wind up smooshed into a normal
9125 integral constant. */
9126 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9127 {
9128 tree sel_type = TREE_TYPE (treeop2);
9129 machine_mode vmode
9130 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9131 TYPE_VECTOR_SUBPARTS (sel_type));
9132 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9133 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9134 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9135 }
9136 else
9137 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9138
9139 temp = expand_vec_perm (mode, op0, op1, op2, target);
9140 gcc_assert (temp);
9141 return temp;
9142
9143 case DOT_PROD_EXPR:
9144 {
9145 tree oprnd0 = treeop0;
9146 tree oprnd1 = treeop1;
9147 tree oprnd2 = treeop2;
9148 rtx op2;
9149
9150 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9151 op2 = expand_normal (oprnd2);
9152 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9153 target, unsignedp);
9154 return target;
9155 }
9156
9157 case SAD_EXPR:
9158 {
9159 tree oprnd0 = treeop0;
9160 tree oprnd1 = treeop1;
9161 tree oprnd2 = treeop2;
9162 rtx op2;
9163
9164 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9165 op2 = expand_normal (oprnd2);
9166 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9167 target, unsignedp);
9168 return target;
9169 }
9170
9171 case REALIGN_LOAD_EXPR:
9172 {
9173 tree oprnd0 = treeop0;
9174 tree oprnd1 = treeop1;
9175 tree oprnd2 = treeop2;
9176 rtx op2;
9177
9178 this_optab = optab_for_tree_code (code, type, optab_default);
9179 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9180 op2 = expand_normal (oprnd2);
9181 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9182 target, unsignedp);
9183 gcc_assert (temp);
9184 return temp;
9185 }
9186
9187 case COND_EXPR:
9188 {
9189 /* A COND_EXPR with its type being VOID_TYPE represents a
9190 conditional jump and is handled in
9191 expand_gimple_cond_expr. */
9192 gcc_assert (!VOID_TYPE_P (type));
9193
9194 /* Note that COND_EXPRs whose type is a structure or union
9195 are required to be constructed to contain assignments of
9196 a temporary variable, so that we can evaluate them here
9197 for side effect only. If type is void, we must do likewise. */
9198
9199 gcc_assert (!TREE_ADDRESSABLE (type)
9200 && !ignore
9201 && TREE_TYPE (treeop1) != void_type_node
9202 && TREE_TYPE (treeop2) != void_type_node);
9203
9204 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9205 if (temp)
9206 return temp;
9207
9208 /* If we are not to produce a result, we have no target. Otherwise,
9209 if a target was specified use it; it will not be used as an
9210 intermediate target unless it is safe. If no target, use a
9211 temporary. */
9212
9213 if (modifier != EXPAND_STACK_PARM
9214 && original_target
9215 && safe_from_p (original_target, treeop0, 1)
9216 && GET_MODE (original_target) == mode
9217 && !MEM_P (original_target))
9218 temp = original_target;
9219 else
9220 temp = assign_temp (type, 0, 1);
9221
9222 do_pending_stack_adjust ();
9223 NO_DEFER_POP;
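/* Emit the classic diamond:
     if (!TREEOP0) goto lab0;  TEMP = TREEOP1;  goto lab1;
     lab0:  TEMP = TREEOP2;  lab1:  */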
9224 rtx_code_label *lab0 = gen_label_rtx ();
9225 rtx_code_label *lab1 = gen_label_rtx ();
9226 jumpifnot (treeop0, lab0, -1);
9227 store_expr (treeop1, temp,
9228 modifier == EXPAND_STACK_PARM,
9229 false);
9230
9231 emit_jump_insn (gen_jump (lab1));
9232 emit_barrier ();
9233 emit_label (lab0);
9234 store_expr (treeop2, temp,
9235 modifier == EXPAND_STACK_PARM,
9236 false);
9237
9238 emit_label (lab1);
9239 OK_DEFER_POP;
9240 return temp;
9241 }
9242
9243 case VEC_COND_EXPR:
9244 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9245 return target;
9246
9247 default:
9248 gcc_unreachable ();
9249 }
9250
9251 /* Here to do an ordinary binary operator. */
9252 binop:
9253 expand_operands (treeop0, treeop1,
9254 subtarget, &op0, &op1, EXPAND_NORMAL);
9255 binop2:
9256 this_optab = optab_for_tree_code (code, type, optab_default);
9257 binop3:
9258 if (modifier == EXPAND_STACK_PARM)
9259 target = 0;
9260 temp = expand_binop (mode, this_optab, op0, op1, target,
9261 unsignedp, OPTAB_LIB_WIDEN);
9262 gcc_assert (temp);
9263 /* Bitwise operations do not need bitfield reduction as we expect their
9264 operands to be properly truncated. */
9265 if (code == BIT_XOR_EXPR
9266 || code == BIT_AND_EXPR
9267 || code == BIT_IOR_EXPR)
9268 return temp;
9269 return REDUCE_BIT_FIELD (temp);
9270 }
9271 #undef REDUCE_BIT_FIELD
9272
9273
9274 /* Return TRUE if the statement STMT is suitable for replacement.
9275 Never consider memory loads as replaceable, because those never lead
9276 to constant expressions. */
9277
9278 static bool
9279 stmt_is_replaceable_p (gimple stmt)
9280 {
9281 if (ssa_is_replaceable_p (stmt))
9282 {
9283 /* Don't move around loads. */
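/* A single-rhs assignment whose rhs is not a gimple value is a memory
   (or similar) reference; anything else is fine to forward.  */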
9284 if (!gimple_assign_single_p (stmt)
9285 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9286 return true;
9287 }
9288 return false;
9289 }
9290
9291 rtx
9292 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9293 enum expand_modifier modifier, rtx *alt_rtl,
9294 bool inner_reference_p)
9295 {
9296 rtx op0, op1, temp, decl_rtl;
9297 tree type;
9298 int unsignedp;
9299 machine_mode mode;
9300 enum tree_code code = TREE_CODE (exp);
9301 rtx subtarget, original_target;
9302 int ignore;
9303 tree context;
9304 bool reduce_bit_field;
9305 location_t loc = EXPR_LOCATION (exp);
9306 struct separate_ops ops;
9307 tree treeop0, treeop1, treeop2;
9308 tree ssa_name = NULL_TREE;
9309 gimple g;
9310
9311 type = TREE_TYPE (exp);
9312 mode = TYPE_MODE (type);
9313 unsignedp = TYPE_UNSIGNED (type);
9314
9315 treeop0 = treeop1 = treeop2 = NULL_TREE;
9316 if (!VL_EXP_CLASS_P (exp))
9317 switch (TREE_CODE_LENGTH (code))
9318 {
9319 default:
9320 case 3: treeop2 = TREE_OPERAND (exp, 2);
9321 case 2: treeop1 = TREE_OPERAND (exp, 1);
9322 case 1: treeop0 = TREE_OPERAND (exp, 0);
9323 case 0: break;
9324 }
9325 ops.code = code;
9326 ops.type = type;
9327 ops.op0 = treeop0;
9328 ops.op1 = treeop1;
9329 ops.op2 = treeop2;
9330 ops.location = loc;
9331
9332 ignore = (target == const0_rtx
9333 || ((CONVERT_EXPR_CODE_P (code)
9334 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9335 && TREE_CODE (type) == VOID_TYPE));
9336
9337 /* An operation in what may be a bit-field type needs the
9338 result to be reduced to the precision of the bit-field type,
9339 which is narrower than that of the type's mode. */
9340 reduce_bit_field = (!ignore
9341 && INTEGRAL_TYPE_P (type)
9342 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
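/* For instance (illustrative only), a 5-bit bit-field type laid out in
   QImode: QImode arithmetic carries 8 significant bits, so results must
   be reduced back to 5 bits of precision.  */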
9343
9344 /* If we are going to ignore this result, we need only do something
9345 if there is a side-effect somewhere in the expression. If there
9346 is, short-circuit the most common cases here. Note that we must
9347 not call expand_expr with anything but const0_rtx in case this
9348 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9349
9350 if (ignore)
9351 {
9352 if (! TREE_SIDE_EFFECTS (exp))
9353 return const0_rtx;
9354
9355 /* Ensure we reference a volatile object even if value is ignored, but
9356 don't do this if all we are doing is taking its address. */
9357 if (TREE_THIS_VOLATILE (exp)
9358 && TREE_CODE (exp) != FUNCTION_DECL
9359 && mode != VOIDmode && mode != BLKmode
9360 && modifier != EXPAND_CONST_ADDRESS)
9361 {
9362 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9363 if (MEM_P (temp))
9364 copy_to_reg (temp);
9365 return const0_rtx;
9366 }
9367
9368 if (TREE_CODE_CLASS (code) == tcc_unary
9369 || code == BIT_FIELD_REF
9370 || code == COMPONENT_REF
9371 || code == INDIRECT_REF)
9372 return expand_expr (treeop0, const0_rtx, VOIDmode,
9373 modifier);
9374
9375 else if (TREE_CODE_CLASS (code) == tcc_binary
9376 || TREE_CODE_CLASS (code) == tcc_comparison
9377 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9378 {
9379 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9380 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9381 return const0_rtx;
9382 }
9383
9384 target = 0;
9385 }
9386
9387 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9388 target = 0;
9389
9390 /* Use subtarget as the target for operand 0 of a binary operation. */
9391 subtarget = get_subtarget (target);
9392 original_target = target;
9393
9394 switch (code)
9395 {
9396 case LABEL_DECL:
9397 {
9398 tree function = decl_function_context (exp);
9399
9400 temp = label_rtx (exp);
9401 temp = gen_rtx_LABEL_REF (Pmode, temp);
9402
9403 if (function != current_function_decl
9404 && function != 0)
9405 LABEL_REF_NONLOCAL_P (temp) = 1;
9406
9407 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9408 return temp;
9409 }
9410
9411 case SSA_NAME:
9412 /* ??? ivopts calls the expander without any preparation from
9413 out-of-ssa, so fake instructions as if this were an access to the
9414 base variable. This unnecessarily allocates a pseudo; see whether we
9415 can reuse it if partition base vars have it set already. */
9416 if (!currently_expanding_to_rtl)
9417 {
9418 tree var = SSA_NAME_VAR (exp);
9419 if (var && DECL_RTL_SET_P (var))
9420 return DECL_RTL (var);
9421 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9422 LAST_VIRTUAL_REGISTER + 1);
9423 }
9424
9425 g = get_gimple_for_ssa_name (exp);
9426 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9427 if (g == NULL
9428 && modifier == EXPAND_INITIALIZER
9429 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9430 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9431 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9432 g = SSA_NAME_DEF_STMT (exp);
9433 if (g)
9434 {
9435 rtx r;
9436 ops.code = gimple_assign_rhs_code (g);
9437 switch (get_gimple_rhs_class (ops.code))
9438 {
9439 case GIMPLE_TERNARY_RHS:
9440 ops.op2 = gimple_assign_rhs3 (g);
9441 /* Fallthru */
9442 case GIMPLE_BINARY_RHS:
9443 ops.op1 = gimple_assign_rhs2 (g);
9444
9445 /* Try to expand a conditional compare. */
9446 if (targetm.gen_ccmp_first)
9447 {
9448 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9449 r = expand_ccmp_expr (g);
9450 if (r)
9451 break;
9452 }
9453 /* Fallthru */
9454 case GIMPLE_UNARY_RHS:
9455 ops.op0 = gimple_assign_rhs1 (g);
9456 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9457 ops.location = gimple_location (g);
9458 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9459 break;
9460 case GIMPLE_SINGLE_RHS:
9461 {
9462 location_t saved_loc = curr_insn_location ();
9463 set_curr_insn_location (gimple_location (g));
9464 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9465 tmode, modifier, NULL, inner_reference_p);
9466 set_curr_insn_location (saved_loc);
9467 break;
9468 }
9469 default:
9470 gcc_unreachable ();
9471 }
9472 if (REG_P (r) && !REG_EXPR (r))
9473 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9474 return r;
9475 }
9476
9477 ssa_name = exp;
9478 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9479 exp = SSA_NAME_VAR (ssa_name);
9480 goto expand_decl_rtl;
9481
9482 case PARM_DECL:
9483 case VAR_DECL:
9484 /* If a static var's type was incomplete when the decl was written,
9485 but the type is complete now, lay out the decl now. */
9486 if (DECL_SIZE (exp) == 0
9487 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9488 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9489 layout_decl (exp, 0);
9490
9491 /* ... fall through ... */
9492
9493 case FUNCTION_DECL:
9494 case RESULT_DECL:
9495 decl_rtl = DECL_RTL (exp);
9496 expand_decl_rtl:
9497 gcc_assert (decl_rtl);
9498 decl_rtl = copy_rtx (decl_rtl);
9499 /* Record writes to register variables. */
9500 if (modifier == EXPAND_WRITE
9501 && REG_P (decl_rtl)
9502 && HARD_REGISTER_P (decl_rtl))
9503 add_to_hard_reg_set (&crtl->asm_clobbers,
9504 GET_MODE (decl_rtl), REGNO (decl_rtl));
9505
9506 /* Ensure the variable is marked as used even if it doesn't go through
9507 a parser. If it hasn't been used yet, write out an external
9508 definition. */
9509 TREE_USED (exp) = 1;
9510
9511 /* Show we haven't gotten RTL for this yet. */
9512 temp = 0;
9513
9514 /* Variables inherited from containing functions should have
9515 been lowered by this point. */
9516 context = decl_function_context (exp);
9517 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9518 || context == current_function_decl
9519 || TREE_STATIC (exp)
9520 || DECL_EXTERNAL (exp)
9521 /* ??? C++ creates functions that are not TREE_STATIC. */
9522 || TREE_CODE (exp) == FUNCTION_DECL);
9523
9524 /* This is the case of an array whose size is to be determined
9525 from its initializer, while the initializer is still being parsed.
9526 ??? We aren't parsing while expanding anymore. */
9527
9528 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9529 temp = validize_mem (decl_rtl);
9530
9531 /* If DECL_RTL is memory, we are in the normal case and the
9532 address is not valid, get the address into a register. */
9533
9534 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9535 {
9536 if (alt_rtl)
9537 *alt_rtl = decl_rtl;
9538 decl_rtl = use_anchored_address (decl_rtl);
9539 if (modifier != EXPAND_CONST_ADDRESS
9540 && modifier != EXPAND_SUM
9541 && !memory_address_addr_space_p (DECL_MODE (exp),
9542 XEXP (decl_rtl, 0),
9543 MEM_ADDR_SPACE (decl_rtl)))
9544 temp = replace_equiv_address (decl_rtl,
9545 copy_rtx (XEXP (decl_rtl, 0)));
9546 }
9547
9548 /* If we got something, return it. But first, set the alignment
9549 if the address is a register. */
9550 if (temp != 0)
9551 {
9552 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9553 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9554
9555 return temp;
9556 }
9557
9558 /* If the mode of DECL_RTL does not match that of the decl,
9559 there are two cases: we are dealing with a BLKmode value
9560 that is returned in a register, or we are dealing with
9561 a promoted value. In the latter case, return a SUBREG
9562 of the wanted mode, but mark it so that we know that it
9563 was already extended. */
9564 if (REG_P (decl_rtl)
9565 && DECL_MODE (exp) != BLKmode
9566 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9567 {
9568 machine_mode pmode;
9569
9570 /* Get the signedness to be used for this variable. Ensure we get
9571 the same mode we got when the variable was declared. */
9572 if (code == SSA_NAME
9573 && (g = SSA_NAME_DEF_STMT (ssa_name))
9574 && gimple_code (g) == GIMPLE_CALL
9575 && !gimple_call_internal_p (g))
9576 pmode = promote_function_mode (type, mode, &unsignedp,
9577 gimple_call_fntype (g),
9578 2);
9579 else
9580 pmode = promote_decl_mode (exp, &unsignedp);
9581 gcc_assert (GET_MODE (decl_rtl) == pmode);
9582
9583 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9584 SUBREG_PROMOTED_VAR_P (temp) = 1;
9585 SUBREG_PROMOTED_SET (temp, unsignedp);
9586 return temp;
9587 }
9588
9589 return decl_rtl;
9590
9591 case INTEGER_CST:
9592 /* Given that TYPE_PRECISION (type) is not always equal to
9593 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9594 the former to the latter according to the signedness of the
9595 type. */
9596 temp = immed_wide_int_const (wide_int::from
9597 (exp,
9598 GET_MODE_PRECISION (TYPE_MODE (type)),
9599 TYPE_SIGN (type)),
9600 TYPE_MODE (type));
9601 return temp;
9602
9603 case VECTOR_CST:
9604 {
9605 tree tmp = NULL_TREE;
9606 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9607 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9608 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9609 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9610 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9611 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9612 return const_vector_from_tree (exp);
9613 if (GET_MODE_CLASS (mode) == MODE_INT)
9614 {
9615 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9616 if (type_for_mode)
9617 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9618 }
9619 if (!tmp)
9620 {
9621 vec<constructor_elt, va_gc> *v;
9622 unsigned i;
9623 vec_alloc (v, VECTOR_CST_NELTS (exp));
9624 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9625 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9626 tmp = build_constructor (type, v);
9627 }
9628 return expand_expr (tmp, ignore ? const0_rtx : target,
9629 tmode, modifier);
9630 }
9631
9632 case CONST_DECL:
9633 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9634
9635 case REAL_CST:
9636 /* If optimized, generate immediate CONST_DOUBLE
9637 which will be turned into memory by reload if necessary.
9638
9639 We used to force a register so that loop.c could see it. But
9640 this does not allow gen_* patterns to perform optimizations with
9641 the constants. It also produces two insns in cases like "x = 1.0;".
9642 On most machines, floating-point constants are not permitted in
9643 many insns, so we'd end up copying it to a register in any case.
9644
9645 Now, we do the copying in expand_binop, if appropriate. */
9646 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9647 TYPE_MODE (TREE_TYPE (exp)));
9648
9649 case FIXED_CST:
9650 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9651 TYPE_MODE (TREE_TYPE (exp)));
9652
9653 case COMPLEX_CST:
9654 /* Handle evaluating a complex constant in a CONCAT target. */
9655 if (original_target && GET_CODE (original_target) == CONCAT)
9656 {
9657 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9658 rtx rtarg, itarg;
9659
9660 rtarg = XEXP (original_target, 0);
9661 itarg = XEXP (original_target, 1);
9662
9663 /* Move the real and imaginary parts separately. */
9664 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9665 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9666
9667 if (op0 != rtarg)
9668 emit_move_insn (rtarg, op0);
9669 if (op1 != itarg)
9670 emit_move_insn (itarg, op1);
9671
9672 return original_target;
9673 }
9674
9675 /* ... fall through ... */
9676
9677 case STRING_CST:
9678 temp = expand_expr_constant (exp, 1, modifier);
9679
9680 /* temp contains a constant address.
9681 On RISC machines where a constant address isn't valid,
9682 make some insns to get that address into a register. */
9683 if (modifier != EXPAND_CONST_ADDRESS
9684 && modifier != EXPAND_INITIALIZER
9685 && modifier != EXPAND_SUM
9686 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9687 MEM_ADDR_SPACE (temp)))
9688 return replace_equiv_address (temp,
9689 copy_rtx (XEXP (temp, 0)));
9690 return temp;
9691
9692 case SAVE_EXPR:
9693 {
9694 tree val = treeop0;
9695 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9696 inner_reference_p);
9697
9698 if (!SAVE_EXPR_RESOLVED_P (exp))
9699 {
9700 /* We can indeed still hit this case, typically via builtin
9701 expanders calling save_expr immediately before expanding
9702 something. Assume this means that we only have to deal
9703 with non-BLKmode values. */
9704 gcc_assert (GET_MODE (ret) != BLKmode);
9705
9706 val = build_decl (curr_insn_location (),
9707 VAR_DECL, NULL, TREE_TYPE (exp));
9708 DECL_ARTIFICIAL (val) = 1;
9709 DECL_IGNORED_P (val) = 1;
9710 treeop0 = val;
9711 TREE_OPERAND (exp, 0) = treeop0;
9712 SAVE_EXPR_RESOLVED_P (exp) = 1;
9713
9714 if (!CONSTANT_P (ret))
9715 ret = copy_to_reg (ret);
9716 SET_DECL_RTL (val, ret);
9717 }
9718
9719 return ret;
9720 }
9721
9722
9723 case CONSTRUCTOR:
9724 /* If we don't need the result, just ensure we evaluate any
9725 subexpressions. */
9726 if (ignore)
9727 {
9728 unsigned HOST_WIDE_INT idx;
9729 tree value;
9730
9731 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9732 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9733
9734 return const0_rtx;
9735 }
9736
9737 return expand_constructor (exp, target, modifier, false);
9738
9739 case TARGET_MEM_REF:
9740 {
9741 addr_space_t as
9742 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9743 enum insn_code icode;
9744 unsigned int align;
9745
9746 op0 = addr_for_mem_ref (exp, as, true);
9747 op0 = memory_address_addr_space (mode, op0, as);
9748 temp = gen_rtx_MEM (mode, op0);
9749 set_mem_attributes (temp, exp, 0);
9750 set_mem_addr_space (temp, as);
9751 align = get_object_alignment (exp);
9752 if (modifier != EXPAND_WRITE
9753 && modifier != EXPAND_MEMORY
9754 && mode != BLKmode
9755 && align < GET_MODE_ALIGNMENT (mode)
9756 /* If the target does not have special handling for unaligned
9757 loads of this mode, then it can use regular moves for them. */
9758 && ((icode = optab_handler (movmisalign_optab, mode))
9759 != CODE_FOR_nothing))
9760 {
9761 struct expand_operand ops[2];
9762
9763 /* We've already validated the memory, and we're creating a
9764 new pseudo destination. The predicates really can't fail,
9765 nor can the generator. */
9766 create_output_operand (&ops[0], NULL_RTX, mode);
9767 create_fixed_operand (&ops[1], temp);
9768 expand_insn (icode, 2, ops);
9769 temp = ops[0].value;
9770 }
9771 return temp;
9772 }
9773
9774 case MEM_REF:
9775 {
9776 addr_space_t as
9777 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9778 machine_mode address_mode;
9779 tree base = TREE_OPERAND (exp, 0);
9780 gimple def_stmt;
9781 enum insn_code icode;
9782 unsigned align;
9783 /* Handle expansion of non-aliased memory with non-BLKmode. That
9784 might end up in a register. */
9785 if (mem_ref_refers_to_non_mem_p (exp))
9786 {
9787 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9788 base = TREE_OPERAND (base, 0);
9789 if (offset == 0
9790 && tree_fits_uhwi_p (TYPE_SIZE (type))
9791 && (GET_MODE_BITSIZE (DECL_MODE (base))
9792 == tree_to_uhwi (TYPE_SIZE (type))))
9793 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9794 target, tmode, modifier);
9795 if (TYPE_MODE (type) == BLKmode)
9796 {
9797 temp = assign_stack_temp (DECL_MODE (base),
9798 GET_MODE_SIZE (DECL_MODE (base)));
9799 store_expr (base, temp, 0, false);
9800 temp = adjust_address (temp, BLKmode, offset);
9801 set_mem_size (temp, int_size_in_bytes (type));
9802 return temp;
9803 }
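/* Otherwise read the requested piece as a bit-field of the
   register-backed base at the given byte offset.  */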
9804 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9805 bitsize_int (offset * BITS_PER_UNIT));
9806 return expand_expr (exp, target, tmode, modifier);
9807 }
9808 address_mode = targetm.addr_space.address_mode (as);
9809 base = TREE_OPERAND (exp, 0);
9810 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9811 {
9812 tree mask = gimple_assign_rhs2 (def_stmt);
9813 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9814 gimple_assign_rhs1 (def_stmt), mask);
9815 TREE_OPERAND (exp, 0) = base;
9816 }
9817 align = get_object_alignment (exp);
9818 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9819 op0 = memory_address_addr_space (mode, op0, as);
9820 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9821 {
9822 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9823 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9824 op0 = memory_address_addr_space (mode, op0, as);
9825 }
9826 temp = gen_rtx_MEM (mode, op0);
9827 set_mem_attributes (temp, exp, 0);
9828 set_mem_addr_space (temp, as);
9829 if (TREE_THIS_VOLATILE (exp))
9830 MEM_VOLATILE_P (temp) = 1;
9831 if (modifier != EXPAND_WRITE
9832 && modifier != EXPAND_MEMORY
9833 && !inner_reference_p
9834 && mode != BLKmode
9835 && align < GET_MODE_ALIGNMENT (mode))
9836 {
9837 if ((icode = optab_handler (movmisalign_optab, mode))
9838 != CODE_FOR_nothing)
9839 {
9840 struct expand_operand ops[2];
9841
9842 /* We've already validated the memory, and we're creating a
9843 new pseudo destination. The predicates really can't fail,
9844 nor can the generator. */
9845 create_output_operand (&ops[0], NULL_RTX, mode);
9846 create_fixed_operand (&ops[1], temp);
9847 expand_insn (icode, 2, ops);
9848 temp = ops[0].value;
9849 }
9850 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9851 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9852 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9853 (modifier == EXPAND_STACK_PARM
9854 ? NULL_RTX : target),
9855 mode, mode);
9856 }
9857 return temp;
9858 }
9859
9860 case ARRAY_REF:
9861
9862 {
9863 tree array = treeop0;
9864 tree index = treeop1;
9865 tree init;
9866
9867 /* Fold an expression like: "foo"[2].
9868 This is not done in fold so it won't happen inside &.
9869 Don't fold if this is for wide characters since it's too
9870 difficult to do correctly and this is a very rare case. */
9871
9872 if (modifier != EXPAND_CONST_ADDRESS
9873 && modifier != EXPAND_INITIALIZER
9874 && modifier != EXPAND_MEMORY)
9875 {
9876 tree t = fold_read_from_constant_string (exp);
9877
9878 if (t)
9879 return expand_expr (t, target, tmode, modifier);
9880 }
9881
9882 /* If this is a constant index into a constant array,
9883 just get the value from the array. Handle both the cases when
9884 we have an explicit constructor and when our operand is a variable
9885 that was declared const. */
9886
9887 if (modifier != EXPAND_CONST_ADDRESS
9888 && modifier != EXPAND_INITIALIZER
9889 && modifier != EXPAND_MEMORY
9890 && TREE_CODE (array) == CONSTRUCTOR
9891 && ! TREE_SIDE_EFFECTS (array)
9892 && TREE_CODE (index) == INTEGER_CST)
9893 {
9894 unsigned HOST_WIDE_INT ix;
9895 tree field, value;
9896
9897 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9898 field, value)
9899 if (tree_int_cst_equal (field, index))
9900 {
9901 if (!TREE_SIDE_EFFECTS (value))
9902 return expand_expr (fold (value), target, tmode, modifier);
9903 break;
9904 }
9905 }
9906
9907 else if (optimize >= 1
9908 && modifier != EXPAND_CONST_ADDRESS
9909 && modifier != EXPAND_INITIALIZER
9910 && modifier != EXPAND_MEMORY
9911 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9912 && TREE_CODE (index) == INTEGER_CST
9913 && (TREE_CODE (array) == VAR_DECL
9914 || TREE_CODE (array) == CONST_DECL)
9915 && (init = ctor_for_folding (array)) != error_mark_node)
9916 {
9917 if (init == NULL_TREE)
9918 {
9919 tree value = build_zero_cst (type);
9920 if (TREE_CODE (value) == CONSTRUCTOR)
9921 {
9922 /* If VALUE is a CONSTRUCTOR, this optimization is only
9923 useful if this doesn't store the CONSTRUCTOR into
9924 memory. If it does, it is more efficient to just
9925 load the data from the array directly. */
9926 rtx ret = expand_constructor (value, target,
9927 modifier, true);
9928 if (ret == NULL_RTX)
9929 value = NULL_TREE;
9930 }
9931
9932 if (value)
9933 return expand_expr (value, target, tmode, modifier);
9934 }
9935 else if (TREE_CODE (init) == CONSTRUCTOR)
9936 {
9937 unsigned HOST_WIDE_INT ix;
9938 tree field, value;
9939
9940 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9941 field, value)
9942 if (tree_int_cst_equal (field, index))
9943 {
9944 if (TREE_SIDE_EFFECTS (value))
9945 break;
9946
9947 if (TREE_CODE (value) == CONSTRUCTOR)
9948 {
9949 /* If VALUE is a CONSTRUCTOR, this
9950 optimization is only useful if
9951 this doesn't store the CONSTRUCTOR
9952 into memory. If it does, it is more
9953 efficient to just load the data from
9954 the array directly. */
9955 rtx ret = expand_constructor (value, target,
9956 modifier, true);
9957 if (ret == NULL_RTX)
9958 break;
9959 }
9960
9961 return
9962 expand_expr (fold (value), target, tmode, modifier);
9963 }
9964 }
9965 else if (TREE_CODE (init) == STRING_CST)
9966 {
9967 tree low_bound = array_ref_low_bound (exp);
9968 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9969
9970 /* Optimize the special case of a zero lower bound.
9971
9972 We convert the lower bound to sizetype to avoid problems
9973 with constant folding. E.g. suppose the lower bound is
9974 1 and its mode is QI. Without the conversion
9975 (ARRAY + (INDEX - (unsigned char)1))
9976 becomes
9977 (ARRAY + (-(unsigned char)1) + INDEX)
9978 which becomes
9979 (ARRAY + 255 + INDEX). Oops! */
9980 if (!integer_zerop (low_bound))
9981 index1 = size_diffop_loc (loc, index1,
9982 fold_convert_loc (loc, sizetype,
9983 low_bound));
9984
9985 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9986 {
9987 tree type = TREE_TYPE (TREE_TYPE (init));
9988 machine_mode mode = TYPE_MODE (type);
9989
9990 if (GET_MODE_CLASS (mode) == MODE_INT
9991 && GET_MODE_SIZE (mode) == 1)
9992 return gen_int_mode (TREE_STRING_POINTER (init)
9993 [TREE_INT_CST_LOW (index1)],
9994 mode);
9995 }
9996 }
9997 }
9998 }
9999 goto normal_inner_ref;
10000
10001 case COMPONENT_REF:
10002 /* If the operand is a CONSTRUCTOR, we can just extract the
10003 appropriate field if it is present. */
10004 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10005 {
10006 unsigned HOST_WIDE_INT idx;
10007 tree field, value;
10008
10009 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10010 idx, field, value)
10011 if (field == treeop1
10012 /* We can normally use the value of the field in the
10013 CONSTRUCTOR. However, if this is a bitfield in
10014 an integral mode that we can fit in a HOST_WIDE_INT,
10015 we must mask only the number of bits in the bitfield,
10016 since this is done implicitly by the constructor. If
10017 the bitfield does not meet either of those conditions,
10018 we can't do this optimization. */
10019 && (! DECL_BIT_FIELD (field)
10020 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10021 && (GET_MODE_PRECISION (DECL_MODE (field))
10022 <= HOST_BITS_PER_WIDE_INT))))
10023 {
10024 if (DECL_BIT_FIELD (field)
10025 && modifier == EXPAND_STACK_PARM)
10026 target = 0;
10027 op0 = expand_expr (value, target, tmode, modifier);
10028 if (DECL_BIT_FIELD (field))
10029 {
10030 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10031 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10032
10033 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10034 {
10035 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10036 imode);
10037 op0 = expand_and (imode, op0, op1, target);
10038 }
10039 else
10040 {
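/* Sign-extend the low BITSIZE bits: shift them up to the top of IMODE,
   then arithmetically shift back down.  */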
10041 int count = GET_MODE_PRECISION (imode) - bitsize;
10042
10043 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10044 target, 0);
10045 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10046 target, 0);
10047 }
10048 }
10049
10050 return op0;
10051 }
10052 }
10053 goto normal_inner_ref;
10054
10055 case BIT_FIELD_REF:
10056 case ARRAY_RANGE_REF:
10057 normal_inner_ref:
10058 {
10059 machine_mode mode1, mode2;
10060 HOST_WIDE_INT bitsize, bitpos;
10061 tree offset;
10062 int volatilep = 0, must_force_mem;
10063 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10064 &mode1, &unsignedp, &volatilep, true);
10065 rtx orig_op0, memloc;
10066 bool clear_mem_expr = false;
10067
10068 /* If we got back the original object, something is wrong. Perhaps
10069 we are evaluating an expression too early. In any event, don't
10070 infinitely recurse. */
10071 gcc_assert (tem != exp);
10072
10073 /* If TEM's type is a union of variable size, pass TARGET to the inner
10074 computation, since it will need a temporary and TARGET is known
10075 to suffice. This occurs in unchecked conversion in Ada. */
10076 orig_op0 = op0
10077 = expand_expr_real (tem,
10078 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10079 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10080 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10081 != INTEGER_CST)
10082 && modifier != EXPAND_STACK_PARM
10083 ? target : NULL_RTX),
10084 VOIDmode,
10085 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10086 NULL, true);
10087
10088 /* If the field has a mode, we want to access it in the
10089 field's mode, not the computed mode.
10090 If a MEM has VOIDmode (external with incomplete type),
10091 use BLKmode for it instead. */
10092 if (MEM_P (op0))
10093 {
10094 if (mode1 != VOIDmode)
10095 op0 = adjust_address (op0, mode1, 0);
10096 else if (GET_MODE (op0) == VOIDmode)
10097 op0 = adjust_address (op0, BLKmode, 0);
10098 }
10099
10100 mode2
10101 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10102
10103 /* If we have either an offset, a BLKmode result, or a reference
10104 outside the underlying object, we must force it to memory.
10105 Such a case can occur in Ada if we have unchecked conversion
10106 of an expression from a scalar type to an aggregate type or
10107 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10108 passed a partially uninitialized object or a view-conversion
10109 to a larger size. */
10110 must_force_mem = (offset
10111 || mode1 == BLKmode
10112 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10113
10114 /* Handle CONCAT first. */
10115 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10116 {
10117 if (bitpos == 0
10118 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10119 return op0;
10120 if (bitpos == 0
10121 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10122 && bitsize)
10123 {
10124 op0 = XEXP (op0, 0);
10125 mode2 = GET_MODE (op0);
10126 }
10127 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10128 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10129 && bitpos
10130 && bitsize)
10131 {
10132 op0 = XEXP (op0, 1);
10133 bitpos = 0;
10134 mode2 = GET_MODE (op0);
10135 }
10136 else
10137 /* Otherwise force into memory. */
10138 must_force_mem = 1;
10139 }
10140
10141 /* If this is a constant, put it in a register if it is a legitimate
10142 constant and we don't need a memory reference. */
10143 if (CONSTANT_P (op0)
10144 && mode2 != BLKmode
10145 && targetm.legitimate_constant_p (mode2, op0)
10146 && !must_force_mem)
10147 op0 = force_reg (mode2, op0);
10148
10149 /* Otherwise, if this is a constant, try to force it to the constant
10150 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10151 is a legitimate constant. */
10152 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10153 op0 = validize_mem (memloc);
10154
10155 /* Otherwise, if this is a constant or the object is not in memory
10156 and need be, put it there. */
10157 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10158 {
10159 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10160 emit_move_insn (memloc, op0);
10161 op0 = memloc;
10162 clear_mem_expr = true;
10163 }
10164
10165 if (offset)
10166 {
10167 machine_mode address_mode;
10168 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10169 EXPAND_SUM);
10170
10171 gcc_assert (MEM_P (op0));
10172
10173 address_mode = get_address_mode (op0);
10174 if (GET_MODE (offset_rtx) != address_mode)
10175 {
10176 /* We cannot be sure that the RTL in offset_rtx is valid outside
10177 of a memory address context, so force it into a register
10178 before attempting to convert it to the desired mode. */
10179 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10180 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10181 }
10182
10183 /* See the comment in expand_assignment for the rationale. */
10184 if (mode1 != VOIDmode
10185 && bitpos != 0
10186 && bitsize > 0
10187 && (bitpos % bitsize) == 0
10188 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10189 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10190 {
10191 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10192 bitpos = 0;
10193 }
10194
10195 op0 = offset_address (op0, offset_rtx,
10196 highest_pow2_factor (offset));
10197 }
10198
10199 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10200 record its alignment as BIGGEST_ALIGNMENT. */
10201 if (MEM_P (op0) && bitpos == 0 && offset != 0
10202 && is_aligning_offset (offset, tem))
10203 set_mem_align (op0, BIGGEST_ALIGNMENT);
10204
10205 /* Don't forget about volatility even if this is a bitfield. */
10206 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10207 {
10208 if (op0 == orig_op0)
10209 op0 = copy_rtx (op0);
10210
10211 MEM_VOLATILE_P (op0) = 1;
10212 }
10213
10214 /* In cases where an aligned union has an unaligned object
10215 as a field, we might be extracting a BLKmode value from
10216 an integer-mode (e.g., SImode) object. Handle this case
10217 by doing the extract into an object as wide as the field
10218 (which we know to be the width of a basic mode), then
10219 storing into memory, and changing the mode to BLKmode. */
10220 if (mode1 == VOIDmode
10221 || REG_P (op0) || GET_CODE (op0) == SUBREG
10222 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10223 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10224 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10225 && modifier != EXPAND_CONST_ADDRESS
10226 && modifier != EXPAND_INITIALIZER
10227 && modifier != EXPAND_MEMORY)
10228 /* If the bitfield is volatile and the bitsize
10229 is narrower than the access size of the bitfield,
10230 we need to extract bitfields from the access. */
10231 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10232 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10233 && mode1 != BLKmode
10234 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10235 /* If the field isn't aligned enough to fetch as a memref,
10236 fetch it as a bit field. */
10237 || (mode1 != BLKmode
10238 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10239 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10240 || (MEM_P (op0)
10241 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10242 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10243 && modifier != EXPAND_MEMORY
10244 && ((modifier == EXPAND_CONST_ADDRESS
10245 || modifier == EXPAND_INITIALIZER)
10246 ? STRICT_ALIGNMENT
10247 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10248 || (bitpos % BITS_PER_UNIT != 0)))
10249 /* If the type and the field are a constant size and the
10250 size of the type isn't the same size as the bitfield,
10251 we must use bitfield operations. */
10252 || (bitsize >= 0
10253 && TYPE_SIZE (TREE_TYPE (exp))
10254 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10255 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10256 bitsize)))
10257 {
10258 machine_mode ext_mode = mode;
10259
10260 if (ext_mode == BLKmode
10261 && ! (target != 0 && MEM_P (op0)
10262 && MEM_P (target)
10263 && bitpos % BITS_PER_UNIT == 0))
10264 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10265
10266 if (ext_mode == BLKmode)
10267 {
10268 if (target == 0)
10269 target = assign_temp (type, 1, 1);
10270
10271 /* ??? Unlike the similar test a few lines below, this one is
10272 very likely obsolete. */
10273 if (bitsize == 0)
10274 return target;
10275
10276 /* In this case, BITPOS must start at a byte boundary and
10277 TARGET, if specified, must be a MEM. */
10278 gcc_assert (MEM_P (op0)
10279 && (!target || MEM_P (target))
10280 && !(bitpos % BITS_PER_UNIT));
10281
10282 emit_block_move (target,
10283 adjust_address (op0, VOIDmode,
10284 bitpos / BITS_PER_UNIT),
10285 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10286 / BITS_PER_UNIT),
10287 (modifier == EXPAND_STACK_PARM
10288 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10289
10290 return target;
10291 }
10292
10293 /* If we have nothing to extract, the result will be 0 for targets
10294 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10295 return 0 for the sake of consistency, as reading a zero-sized
10296 bitfield is valid in Ada and the value is fully specified. */
10297 if (bitsize == 0)
10298 return const0_rtx;
10299
10300 op0 = validize_mem (op0);
10301
10302 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10303 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10304
10305 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10306 (modifier == EXPAND_STACK_PARM
10307 ? NULL_RTX : target),
10308 ext_mode, ext_mode);
10309
10310 /* If the result is a record type and BITSIZE is narrower than
10311 the mode of OP0, an integral mode, and this is a big endian
10312 machine, we must put the field into the high-order bits. */
10313 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10314 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10315 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10316 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10317 GET_MODE_BITSIZE (GET_MODE (op0))
10318 - bitsize, op0, 1);
10319
10320 /* If the result type is BLKmode, store the data into a temporary
10321 of the appropriate type, but with the mode corresponding to the
10322 mode for the data we have (op0's mode). */
10323 if (mode == BLKmode)
10324 {
10325 rtx new_rtx
10326 = assign_stack_temp_for_type (ext_mode,
10327 GET_MODE_BITSIZE (ext_mode),
10328 type);
10329 emit_move_insn (new_rtx, op0);
10330 op0 = copy_rtx (new_rtx);
10331 PUT_MODE (op0, BLKmode);
10332 }
10333
10334 return op0;
10335 }
10336
10337 /* If the result is BLKmode, use that to access the object
10338 now as well. */
10339 if (mode == BLKmode)
10340 mode1 = BLKmode;
10341
10342 /* Get a reference to just this component. */
10343 if (modifier == EXPAND_CONST_ADDRESS
10344 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10345 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10346 else
10347 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10348
10349 if (op0 == orig_op0)
10350 op0 = copy_rtx (op0);
10351
10352 set_mem_attributes (op0, exp, 0);
10353
10354 if (REG_P (XEXP (op0, 0)))
10355 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10356
10357 /* If op0 is a temporary because the original expression was forced
10358 to memory, clear MEM_EXPR so that the original expression cannot
10359 be marked as addressable through MEM_EXPR of the temporary. */
10360 if (clear_mem_expr)
10361 set_mem_expr (op0, NULL_TREE);
10362
10363 MEM_VOLATILE_P (op0) |= volatilep;
10364 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10365 || modifier == EXPAND_CONST_ADDRESS
10366 || modifier == EXPAND_INITIALIZER)
10367 return op0;
10368
10369 if (target == 0)
10370 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10371
10372 convert_move (target, op0, unsignedp);
10373 return target;
10374 }
10375
10376 case OBJ_TYPE_REF:
10377 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10378
10379 case CALL_EXPR:
10380 /* All valid uses of __builtin_va_arg_pack () are removed during
10381 inlining. */
10382 if (CALL_EXPR_VA_ARG_PACK (exp))
10383 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10384 {
10385 tree fndecl = get_callee_fndecl (exp), attr;
10386
10387 if (fndecl
10388 && (attr = lookup_attribute ("error",
10389 DECL_ATTRIBUTES (fndecl))) != NULL)
10390 error ("%Kcall to %qs declared with attribute error: %s",
10391 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10392 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10393 if (fndecl
10394 && (attr = lookup_attribute ("warning",
10395 DECL_ATTRIBUTES (fndecl))) != NULL)
10396 warning_at (tree_nonartificial_location (exp),
10397 0, "%Kcall to %qs declared with attribute warning: %s",
10398 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10399 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10400
10401 /* Check for a built-in function. */
10402 if (fndecl && DECL_BUILT_IN (fndecl))
10403 {
10404 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10405 if (CALL_WITH_BOUNDS_P (exp))
10406 return expand_builtin_with_bounds (exp, target, subtarget,
10407 tmode, ignore);
10408 else
10409 return expand_builtin (exp, target, subtarget, tmode, ignore);
10410 }
10411 }
10412 return expand_call (exp, target, ignore);
10413
10414 case VIEW_CONVERT_EXPR:
10415 op0 = NULL_RTX;
10416
10417 /* If we are converting to BLKmode, try to avoid an intermediate
10418 temporary by fetching an inner memory reference. */
10419 if (mode == BLKmode
10420 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10421 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10422 && handled_component_p (treeop0))
10423 {
10424 machine_mode mode1;
10425 HOST_WIDE_INT bitsize, bitpos;
10426 tree offset;
10427 int unsignedp;
10428 int volatilep = 0;
10429 tree tem
10430 = get_inner_reference (treeop0, &bitsize, &bitpos,
10431 &offset, &mode1, &unsignedp, &volatilep,
10432 true);
10433 rtx orig_op0;
10434
10435 /* ??? We should work harder and deal with non-zero offsets. */
10436 if (!offset
10437 && (bitpos % BITS_PER_UNIT) == 0
10438 && bitsize >= 0
10439 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10440 {
10441 /* See the normal_inner_ref case for the rationale. */
10442 orig_op0
10443 = expand_expr_real (tem,
10444 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10445 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10446 != INTEGER_CST)
10447 && modifier != EXPAND_STACK_PARM
10448 ? target : NULL_RTX),
10449 VOIDmode,
10450 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10451 NULL, true);
10452
10453 if (MEM_P (orig_op0))
10454 {
10455 op0 = orig_op0;
10456
10457 /* Get a reference to just this component. */
10458 if (modifier == EXPAND_CONST_ADDRESS
10459 || modifier == EXPAND_SUM
10460 || modifier == EXPAND_INITIALIZER)
10461 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10462 else
10463 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10464
10465 if (op0 == orig_op0)
10466 op0 = copy_rtx (op0);
10467
10468 set_mem_attributes (op0, treeop0, 0);
10469 if (REG_P (XEXP (op0, 0)))
10470 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10471
10472 MEM_VOLATILE_P (op0) |= volatilep;
10473 }
10474 }
10475 }
10476
10477 if (!op0)
10478 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10479 NULL, inner_reference_p);
10480
10481 /* If the input and output modes are both the same, we are done. */
10482 if (mode == GET_MODE (op0))
10483 ;
10484 /* If neither mode is BLKmode, and both modes are the same size
10485 then we can use gen_lowpart. */
10486 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10487 && (GET_MODE_PRECISION (mode)
10488 == GET_MODE_PRECISION (GET_MODE (op0)))
10489 && !COMPLEX_MODE_P (GET_MODE (op0)))
10490 {
10491 if (GET_CODE (op0) == SUBREG)
10492 op0 = force_reg (GET_MODE (op0), op0);
10493 temp = gen_lowpart_common (mode, op0);
10494 if (temp)
10495 op0 = temp;
10496 else
10497 {
10498 if (!REG_P (op0) && !MEM_P (op0))
10499 op0 = force_reg (GET_MODE (op0), op0);
10500 op0 = gen_lowpart (mode, op0);
10501 }
10502 }
10503 /* If both types are integral, convert from one mode to the other. */
10504 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10505 op0 = convert_modes (mode, GET_MODE (op0), op0,
10506 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10507 /* If the output type is a bit-field type, do an extraction. */
10508 else if (reduce_bit_field)
10509 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10510 TYPE_UNSIGNED (type), NULL_RTX,
10511 mode, mode);
10512 /* As a last resort, spill op0 to memory, and reload it in a
10513 different mode. */
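/* Illustrative source-level analogue of this last resort:
     double d = ...;
     unsigned char bytes[sizeof d];
     memcpy (bytes, &d, sizeof d);
   i.e. store the value in one mode and reload it in another.  */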
10514 else if (!MEM_P (op0))
10515 {
10516 /* If the operand is not a MEM, force it into memory. Since we
10517 are going to be changing the mode of the MEM, don't call
10518 force_const_mem for constants because we don't allow pool
10519 constants to change mode. */
10520 tree inner_type = TREE_TYPE (treeop0);
10521
10522 gcc_assert (!TREE_ADDRESSABLE (exp));
10523
10524 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10525 target
10526 = assign_stack_temp_for_type
10527 (TYPE_MODE (inner_type),
10528 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10529
10530 emit_move_insn (target, op0);
10531 op0 = target;
10532 }
10533
10534 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10535 output type is such that the operand is known to be aligned, indicate
10536 that it is. Otherwise, we need only be concerned about alignment for
10537 non-BLKmode results. */
10538 if (MEM_P (op0))
10539 {
10540 enum insn_code icode;
10541
10542 if (TYPE_ALIGN_OK (type))
10543 {
10544 /* ??? Copying the MEM without substantially changing it might
10545 run afoul of the code handling volatile memory references in
10546 store_expr, which assumes that TARGET is returned unmodified
10547 if it has been used. */
10548 op0 = copy_rtx (op0);
10549 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10550 }
10551 else if (modifier != EXPAND_WRITE
10552 && modifier != EXPAND_MEMORY
10553 && !inner_reference_p
10554 && mode != BLKmode
10555 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10556 {
10557 /* If the target has special handling for unaligned loads of this
10558 mode, use it. */
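/* For illustration, an access such as P->I below is not naturally
   aligned:
     struct __attribute__ ((packed)) P { char c; int i; };
     int load (struct P *p) { return p->i; }
   In general such loads are emitted either through a movmisalign<mode>
   pattern when the target provides one, or, on STRICT_ALIGNMENT
   targets, by copying through a suitably aligned temporary as in the
   code below.  */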
10559 if ((icode = optab_handler (movmisalign_optab, mode))
10560 != CODE_FOR_nothing)
10561 {
10562 rtx reg, insn;
10563
10564 op0 = adjust_address (op0, mode, 0);
10565 /* We've already validated the memory, and we're creating a
10566 new pseudo destination. The predicates really can't
10567 fail. */
10568 reg = gen_reg_rtx (mode);
10569
10570 /* Nor can the insn generator. */
10571 insn = GEN_FCN (icode) (reg, op0);
10572 emit_insn (insn);
10573 return reg;
10574 }
10575 else if (STRICT_ALIGNMENT)
10576 {
10577 tree inner_type = TREE_TYPE (treeop0);
10578 HOST_WIDE_INT temp_size
10579 = MAX (int_size_in_bytes (inner_type),
10580 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10581 rtx new_rtx
10582 = assign_stack_temp_for_type (mode, temp_size, type);
10583 rtx new_with_op0_mode
10584 = adjust_address (new_rtx, GET_MODE (op0), 0);
10585
10586 gcc_assert (!TREE_ADDRESSABLE (exp));
10587
10588 if (GET_MODE (op0) == BLKmode)
10589 emit_block_move (new_with_op0_mode, op0,
10590 GEN_INT (GET_MODE_SIZE (mode)),
10591 (modifier == EXPAND_STACK_PARM
10592 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10593 else
10594 emit_move_insn (new_with_op0_mode, op0);
10595
10596 op0 = new_rtx;
10597 }
10598 }
10599
10600 op0 = adjust_address (op0, mode, 0);
10601 }
10602
10603 return op0;
10604
10605 case MODIFY_EXPR:
10606 {
10607 tree lhs = treeop0;
10608 tree rhs = treeop1;
10609 gcc_assert (ignore);
10610
10611 /* Check for |= or &= of a bitfield of size one into another bitfield
10612 of size one. In this case, unless we need the result of the
10613 assignment, we can do this more efficiently with a test followed by
10614 an assignment, if necessary.
10615
10616 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10617 things change so we do, this code should be enhanced to
10618 support it. */
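/* E.g. with one-bit bitfields A and B in S, "s.a |= s.b" expands as
     if (s.b) s.a = 1;
   and "s.a &= s.b" as
     if (!s.b) s.a = 0;
   avoiding a read-modify-write of s.a (illustrative).  */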
10619 if (TREE_CODE (lhs) == COMPONENT_REF
10620 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10621 || TREE_CODE (rhs) == BIT_AND_EXPR)
10622 && TREE_OPERAND (rhs, 0) == lhs
10623 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10624 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10625 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10626 {
10627 rtx_code_label *label = gen_label_rtx ();
10628 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10629 do_jump (TREE_OPERAND (rhs, 1),
10630 value ? label : 0,
10631 value ? 0 : label, -1);
10632 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10633 false);
10634 do_pending_stack_adjust ();
10635 emit_label (label);
10636 return const0_rtx;
10637 }
10638
10639 expand_assignment (lhs, rhs, false);
10640 return const0_rtx;
10641 }
10642
10643 case ADDR_EXPR:
10644 return expand_expr_addr_expr (exp, target, tmode, modifier);
10645
10646 case REALPART_EXPR:
10647 op0 = expand_normal (treeop0);
10648 return read_complex_part (op0, false);
10649
10650 case IMAGPART_EXPR:
10651 op0 = expand_normal (treeop0);
10652 return read_complex_part (op0, true);
10653
10654 case RETURN_EXPR:
10655 case LABEL_EXPR:
10656 case GOTO_EXPR:
10657 case SWITCH_EXPR:
10658 case ASM_EXPR:
10659 /* Expanded in cfgexpand.c. */
10660 gcc_unreachable ();
10661
10662 case TRY_CATCH_EXPR:
10663 case CATCH_EXPR:
10664 case EH_FILTER_EXPR:
10665 case TRY_FINALLY_EXPR:
10666 /* Lowered by tree-eh.c. */
10667 gcc_unreachable ();
10668
10669 case WITH_CLEANUP_EXPR:
10670 case CLEANUP_POINT_EXPR:
10671 case TARGET_EXPR:
10672 case CASE_LABEL_EXPR:
10673 case VA_ARG_EXPR:
10674 case BIND_EXPR:
10675 case INIT_EXPR:
10676 case CONJ_EXPR:
10677 case COMPOUND_EXPR:
10678 case PREINCREMENT_EXPR:
10679 case PREDECREMENT_EXPR:
10680 case POSTINCREMENT_EXPR:
10681 case POSTDECREMENT_EXPR:
10682 case LOOP_EXPR:
10683 case EXIT_EXPR:
10684 case COMPOUND_LITERAL_EXPR:
10685 /* Lowered by gimplify.c. */
10686 gcc_unreachable ();
10687
10688 case FDESC_EXPR:
10689 /* Function descriptors are not valid except as initialization
10690 constants, and should not be expanded. */
10691 gcc_unreachable ();
10692
10693 case WITH_SIZE_EXPR:
10694 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10695 have pulled out the size to use in whatever context it needed. */
10696 return expand_expr_real (treeop0, original_target, tmode,
10697 modifier, alt_rtl, inner_reference_p);
10698
10699 default:
10700 return expand_expr_real_2 (&ops, target, tmode, modifier);
10701 }
10702 }
10703 \f
10704 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10705 signedness of TYPE), possibly returning the result in TARGET. */
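/* E.g. for a 5-bit bit-field type in a 32-bit mode this computes,
   roughly, EXP & 0x1f when TYPE is unsigned, and (EXP << 27) >> 27
   (with an arithmetic right shift) when TYPE is signed, so that bit 4
   is sign-extended (illustrative).  */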
10706 static rtx
10707 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10708 {
10709 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10710 if (target && GET_MODE (target) != GET_MODE (exp))
10711 target = 0;
10712 /* For constant values, reduce using build_int_cst_type. */
10713 if (CONST_INT_P (exp))
10714 {
10715 HOST_WIDE_INT value = INTVAL (exp);
10716 tree t = build_int_cst_type (type, value);
10717 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10718 }
10719 else if (TYPE_UNSIGNED (type))
10720 {
10721 machine_mode mode = GET_MODE (exp);
10722 rtx mask = immed_wide_int_const
10723 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10724 return expand_and (mode, exp, mask, target);
10725 }
10726 else
10727 {
10728 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10729 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10730 exp, count, target, 0);
10731 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10732 exp, count, target, 0);
10733 }
10734 }
10735 \f
10736 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
10737 when applied to the address of EXP, produces an address known to be
10738 aligned to more than BIGGEST_ALIGNMENT. */
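/* The offset recognized here is the usual manual-alignment idiom,
   e.g. (- (uintptr_t) &EXP) & (ALIGN - 1), which, when added to the
   address of EXP, rounds it up to an ALIGN-byte boundary
   (illustrative; ALIGN - 1 is the one-less-than-a-power-of-2 constant
   checked below).  */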
10739
10740 static int
10741 is_aligning_offset (const_tree offset, const_tree exp)
10742 {
10743 /* Strip off any conversions. */
10744 while (CONVERT_EXPR_P (offset))
10745 offset = TREE_OPERAND (offset, 0);
10746
10747 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10748 a power of 2 and larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT. */
10749 if (TREE_CODE (offset) != BIT_AND_EXPR
10750 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10751 || compare_tree_int (TREE_OPERAND (offset, 1),
10752 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10753 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10754 return 0;
10755
10756 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10757 It must be NEGATE_EXPR. Then strip any more conversions. */
10758 offset = TREE_OPERAND (offset, 0);
10759 while (CONVERT_EXPR_P (offset))
10760 offset = TREE_OPERAND (offset, 0);
10761
10762 if (TREE_CODE (offset) != NEGATE_EXPR)
10763 return 0;
10764
10765 offset = TREE_OPERAND (offset, 0);
10766 while (CONVERT_EXPR_P (offset))
10767 offset = TREE_OPERAND (offset, 0);
10768
10769 /* This must now be the address of EXP. */
10770 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10771 }
10772 \f
10773 /* Return the tree node if ARG corresponds to a string constant, or zero
10774 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10775 in bytes within the string that ARG is accessing. The type of the
10776 offset will be `sizetype'. */
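/* For example (the exact trees depend on earlier folding), arguments
   such as the following are recognized:
     "abcdef" + 2    -- returns the STRING_CST, *PTR_OFFSET = 2
     &buf[3]         -- with  static const char buf[] = "abcdef";
                        returns the initializer, *PTR_OFFSET = 3.  */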
10777
10778 tree
10779 string_constant (tree arg, tree *ptr_offset)
10780 {
10781 tree array, offset, lower_bound;
10782 STRIP_NOPS (arg);
10783
10784 if (TREE_CODE (arg) == ADDR_EXPR)
10785 {
10786 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10787 {
10788 *ptr_offset = size_zero_node;
10789 return TREE_OPERAND (arg, 0);
10790 }
10791 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10792 {
10793 array = TREE_OPERAND (arg, 0);
10794 offset = size_zero_node;
10795 }
10796 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10797 {
10798 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10799 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10800 if (TREE_CODE (array) != STRING_CST
10801 && TREE_CODE (array) != VAR_DECL)
10802 return 0;
10803
10804 /* Check if the array has a nonzero lower bound. */
10805 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10806 if (!integer_zerop (lower_bound))
10807 {
10808 /* If the offset and base aren't both constants, return 0. */
10809 if (TREE_CODE (lower_bound) != INTEGER_CST)
10810 return 0;
10811 if (TREE_CODE (offset) != INTEGER_CST)
10812 return 0;
10813 /* Adjust offset by the lower bound. */
10814 offset = size_diffop (fold_convert (sizetype, offset),
10815 fold_convert (sizetype, lower_bound));
10816 }
10817 }
10818 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10819 {
10820 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10821 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10822 if (TREE_CODE (array) != ADDR_EXPR)
10823 return 0;
10824 array = TREE_OPERAND (array, 0);
10825 if (TREE_CODE (array) != STRING_CST
10826 && TREE_CODE (array) != VAR_DECL)
10827 return 0;
10828 }
10829 else
10830 return 0;
10831 }
10832 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10833 {
10834 tree arg0 = TREE_OPERAND (arg, 0);
10835 tree arg1 = TREE_OPERAND (arg, 1);
10836
10837 STRIP_NOPS (arg0);
10838 STRIP_NOPS (arg1);
10839
10840 if (TREE_CODE (arg0) == ADDR_EXPR
10841 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10842 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10843 {
10844 array = TREE_OPERAND (arg0, 0);
10845 offset = arg1;
10846 }
10847 else if (TREE_CODE (arg1) == ADDR_EXPR
10848 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10849 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10850 {
10851 array = TREE_OPERAND (arg1, 0);
10852 offset = arg0;
10853 }
10854 else
10855 return 0;
10856 }
10857 else
10858 return 0;
10859
10860 if (TREE_CODE (array) == STRING_CST)
10861 {
10862 *ptr_offset = fold_convert (sizetype, offset);
10863 return array;
10864 }
10865 else if (TREE_CODE (array) == VAR_DECL
10866 || TREE_CODE (array) == CONST_DECL)
10867 {
10868 int length;
10869 tree init = ctor_for_folding (array);
10870
10871 /* Variables initialized to string literals can be handled too. */
10872 if (init == error_mark_node
10873 || !init
10874 || TREE_CODE (init) != STRING_CST)
10875 return 0;
10876
10877 /* Avoid const char foo[4] = "abcde"; */
10878 if (DECL_SIZE_UNIT (array) == NULL_TREE
10879 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10880 || (length = TREE_STRING_LENGTH (init)) <= 0
10881 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10882 return 0;
10883
10884 /* If the variable is bigger than the string literal, OFFSET must be
10885 constant and within the bounds of the string literal. */
10886 offset = fold_convert (sizetype, offset);
10887 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10888 && (! tree_fits_uhwi_p (offset)
10889 || compare_tree_int (offset, length) >= 0))
10890 return 0;
10891
10892 *ptr_offset = offset;
10893 return init;
10894 }
10895
10896 return 0;
10897 }
10898 \f
10899 /* Generate code to calculate the value of OPS, an exploded expression
10900 reflecting a comparison, using a store-flag instruction, and return
10901 an rtx for the result.
10902
10903 If TARGET is nonzero, store the result there if convenient.
10904
10905 Return zero if there is no suitable set-flag instruction
10906 available on this machine.
10907
10908 Once expand_expr has been called on the arguments of the comparison,
10909 we are committed to doing the store flag, since it is not safe to
10910 re-evaluate the expression. We emit the store-flag insn by calling
10911 emit_store_flag, but only expand the arguments if we have a reason
10912 to believe that emit_store_flag will be successful. If we think that
10913 it will, but it isn't, we have to simulate the store-flag with a
10914 set/jump/set sequence. */
10915
10916 static rtx
10917 do_store_flag (sepops ops, rtx target, machine_mode mode)
10918 {
10919 enum rtx_code code;
10920 tree arg0, arg1, type;
10921 tree tem;
10922 machine_mode operand_mode;
10923 int unsignedp;
10924 rtx op0, op1;
10925 rtx subtarget = target;
10926 location_t loc = ops->location;
10927
10928 arg0 = ops->op0;
10929 arg1 = ops->op1;
10930
10931 /* Don't crash if the comparison was erroneous. */
10932 if (arg0 == error_mark_node || arg1 == error_mark_node)
10933 return const0_rtx;
10934
10935 type = TREE_TYPE (arg0);
10936 operand_mode = TYPE_MODE (type);
10937 unsignedp = TYPE_UNSIGNED (type);
10938
10939 /* We won't bother with BLKmode store-flag operations because it would mean
10940 passing a lot of information to emit_store_flag. */
10941 if (operand_mode == BLKmode)
10942 return 0;
10943
10944 /* We won't bother with store-flag operations involving function pointers
10945 when function pointers must be canonicalized before comparisons. */
10946 #ifdef HAVE_canonicalize_funcptr_for_compare
10947 if (HAVE_canonicalize_funcptr_for_compare
10948 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10949 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10950 == FUNCTION_TYPE))
10951 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10952 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10953 == FUNCTION_TYPE))))
10954 return 0;
10955 #endif
10956
10957 STRIP_NOPS (arg0);
10958 STRIP_NOPS (arg1);
10959
10960 /* For vector typed comparisons emit code to generate the desired
10961 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10962 expander for this. */
10963 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10964 {
10965 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10966 tree if_true = constant_boolean_node (true, ops->type);
10967 tree if_false = constant_boolean_node (false, ops->type);
10968 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10969 }
10970
10971 /* Get the rtx comparison code to use. We know that EXP is a comparison
10972 operation of some type. Some comparisons against 1 and -1 can be
10973 converted to comparisons with zero. Do so here so that the tests
10974 below will be aware that we have a comparison with zero. These
10975 tests will not catch constants in the first operand, but constants
10976 are rarely passed as the first operand. */
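/* E.g. X < 1 becomes X <= 0, X >= 1 becomes X > 0, and, for signed X,
   X <= -1 becomes X < 0 and X > -1 becomes X >= 0 (illustrative).  */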
10977
10978 switch (ops->code)
10979 {
10980 case EQ_EXPR:
10981 code = EQ;
10982 break;
10983 case NE_EXPR:
10984 code = NE;
10985 break;
10986 case LT_EXPR:
10987 if (integer_onep (arg1))
10988 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10989 else
10990 code = unsignedp ? LTU : LT;
10991 break;
10992 case LE_EXPR:
10993 if (! unsignedp && integer_all_onesp (arg1))
10994 arg1 = integer_zero_node, code = LT;
10995 else
10996 code = unsignedp ? LEU : LE;
10997 break;
10998 case GT_EXPR:
10999 if (! unsignedp && integer_all_onesp (arg1))
11000 arg1 = integer_zero_node, code = GE;
11001 else
11002 code = unsignedp ? GTU : GT;
11003 break;
11004 case GE_EXPR:
11005 if (integer_onep (arg1))
11006 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11007 else
11008 code = unsignedp ? GEU : GE;
11009 break;
11010
11011 case UNORDERED_EXPR:
11012 code = UNORDERED;
11013 break;
11014 case ORDERED_EXPR:
11015 code = ORDERED;
11016 break;
11017 case UNLT_EXPR:
11018 code = UNLT;
11019 break;
11020 case UNLE_EXPR:
11021 code = UNLE;
11022 break;
11023 case UNGT_EXPR:
11024 code = UNGT;
11025 break;
11026 case UNGE_EXPR:
11027 code = UNGE;
11028 break;
11029 case UNEQ_EXPR:
11030 code = UNEQ;
11031 break;
11032 case LTGT_EXPR:
11033 code = LTGT;
11034 break;
11035
11036 default:
11037 gcc_unreachable ();
11038 }
11039
11040 /* Put a constant second. */
11041 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11042 || TREE_CODE (arg0) == FIXED_CST)
11043 {
11044 tem = arg0; arg0 = arg1; arg1 = tem;
11045 code = swap_condition (code);
11046 }
11047
11048 /* If this is an equality or inequality test of a single bit, we can
11049 do this by shifting the bit being tested to the low-order bit and
11050 masking the result with the constant 1. If the condition was EQ,
11051 we xor it with 1. This does not require an scc insn and is faster
11052 than an scc insn even if we have it.
11053
11054 The code to make this transformation was moved into fold_single_bit_test,
11055 so we just call into the folder and expand its result. */
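/* E.g. (X & 0x10) != 0 becomes (X >> 4) & 1, and (X & 0x10) == 0
   becomes ((X >> 4) & 1) ^ 1 (illustrative).  */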
11056
11057 if ((code == NE || code == EQ)
11058 && integer_zerop (arg1)
11059 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11060 {
11061 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11062 if (srcstmt
11063 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11064 {
11065 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11066 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11067 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11068 gimple_assign_rhs1 (srcstmt),
11069 gimple_assign_rhs2 (srcstmt));
11070 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11071 if (temp)
11072 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11073 }
11074 }
11075
11076 if (! get_subtarget (target)
11077 || GET_MODE (subtarget) != operand_mode)
11078 subtarget = 0;
11079
11080 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11081
11082 if (target == 0)
11083 target = gen_reg_rtx (mode);
11084
11085 /* Try a cstore if possible. */
11086 return emit_store_flag_force (target, code, op0, op1,
11087 operand_mode, unsignedp,
11088 (TYPE_PRECISION (ops->type) == 1
11089 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11090 }
11091 \f
11092
11093 /* Stubs in case we haven't got a casesi insn. */
11094 #ifndef HAVE_casesi
11095 # define HAVE_casesi 0
11096 # define gen_casesi(a, b, c, d, e) (0)
11097 # define CODE_FOR_casesi CODE_FOR_nothing
11098 #endif
11099
11100 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11101 0 otherwise (i.e. if there is no casesi instruction).
11102
11103 DEFAULT_PROBABILITY is the probability of jumping to the default
11104 label. */
11105 int
11106 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11107 rtx table_label, rtx default_label, rtx fallback_label,
11108 int default_probability)
11109 {
11110 struct expand_operand ops[5];
11111 machine_mode index_mode = SImode;
11112 rtx op1, op2, index;
11113
11114 if (! HAVE_casesi)
11115 return 0;
11116
11117 /* Convert the index to SImode. */
11118 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11119 {
11120 machine_mode omode = TYPE_MODE (index_type);
11121 rtx rangertx = expand_normal (range);
11122
11123 /* We must handle the endpoints in the original mode. */
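/* For instance, with a DImode index and an SImode casesi, an index
   value of 0x100000005 must be range-checked in the original mode
   first; truncating it to SImode beforehand would wrongly map it to 5
   (illustrative).  */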
11124 index_expr = build2 (MINUS_EXPR, index_type,
11125 index_expr, minval);
11126 minval = integer_zero_node;
11127 index = expand_normal (index_expr);
11128 if (default_label)
11129 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11130 omode, 1, default_label,
11131 default_probability);
11132 /* Now we can safely truncate. */
11133 index = convert_to_mode (index_mode, index, 0);
11134 }
11135 else
11136 {
11137 if (TYPE_MODE (index_type) != index_mode)
11138 {
11139 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11140 index_expr = fold_convert (index_type, index_expr);
11141 }
11142
11143 index = expand_normal (index_expr);
11144 }
11145
11146 do_pending_stack_adjust ();
11147
11148 op1 = expand_normal (minval);
11149 op2 = expand_normal (range);
11150
11151 create_input_operand (&ops[0], index, index_mode);
11152 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11153 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11154 create_fixed_operand (&ops[3], table_label);
11155 create_fixed_operand (&ops[4], (default_label
11156 ? default_label
11157 : fallback_label));
11158 expand_jump_insn (CODE_FOR_casesi, 5, ops);
11159 return 1;
11160 }
11161
11162 /* Attempt to generate a tablejump instruction; same concept as try_casesi above. */
11163 /* Subroutine of the next function.
11164
11165 INDEX is the value being switched on, with the lowest value
11166 in the table already subtracted.
11167 MODE is its expected mode (needed if INDEX is constant).
11168 RANGE is the length of the jump table.
11169 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11170
11171 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11172 index value is out of range.
11173 DEFAULT_PROBABILITY is the probability of jumping to
11174 the default label. */
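/* The emitted sequence is roughly (illustrative):
     addr  = TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE);
     entry = *addr;
     jump to entry   -- or to TABLE_LABEL + entry for PC-relative
                        case vectors.  */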
11175
11176 static void
11177 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11178 rtx default_label, int default_probability)
11179 {
11180 rtx temp, vector;
11181
11182 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11183 cfun->cfg->max_jumptable_ents = INTVAL (range);
11184
11185 /* Do an unsigned comparison (in the proper mode) between the index
11186 expression and the value which represents the length of the range.
11187 Since we just finished subtracting the lower bound of the range
11188 from the index expression, this comparison allows us to simultaneously
11189 check that the original index expression value is both greater than
11190 or equal to the minimum value of the range and less than or equal to
11191 the maximum value of the range. */
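/* I.e. after T = INDEX - LOW, the single unsigned test
   (unsigned) T > HIGH - LOW rejects both INDEX < LOW and INDEX > HIGH
   (illustrative).  */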
11192
11193 if (default_label)
11194 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11195 default_label, default_probability);
11196
11197
11198 /* If index is in range, it must fit in Pmode.
11199 Convert to Pmode so we can index with it. */
11200 if (mode != Pmode)
11201 index = convert_to_mode (Pmode, index, 1);
11202
11203 /* Don't let a MEM slip through, because then INDEX that comes
11204 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11205 and break_out_memory_refs will go to work on it and mess it up. */
11206 #ifdef PIC_CASE_VECTOR_ADDRESS
11207 if (flag_pic && !REG_P (index))
11208 index = copy_to_mode_reg (Pmode, index);
11209 #endif
11210
11211 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11212 GET_MODE_SIZE, because this indicates how large insns are. The other
11213 uses should all be Pmode, because they are addresses. This code
11214 could fail if addresses and insns are not the same size. */
11215 index = simplify_gen_binary (MULT, Pmode, index,
11216 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11217 Pmode));
11218 index = simplify_gen_binary (PLUS, Pmode, index,
11219 gen_rtx_LABEL_REF (Pmode, table_label));
11220
11221 #ifdef PIC_CASE_VECTOR_ADDRESS
11222 if (flag_pic)
11223 index = PIC_CASE_VECTOR_ADDRESS (index);
11224 else
11225 #endif
11226 index = memory_address (CASE_VECTOR_MODE, index);
11227 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11228 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11229 convert_move (temp, vector, 0);
11230
11231 emit_jump_insn (gen_tablejump (temp, table_label));
11232
11233 /* If we are generating PIC code or if the table is PC-relative, the
11234 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11235 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11236 emit_barrier ();
11237 }
11238
11239 int
11240 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11241 rtx table_label, rtx default_label, int default_probability)
11242 {
11243 rtx index;
11244
11245 if (! HAVE_tablejump)
11246 return 0;
11247
11248 index_expr = fold_build2 (MINUS_EXPR, index_type,
11249 fold_convert (index_type, index_expr),
11250 fold_convert (index_type, minval));
11251 index = expand_normal (index_expr);
11252 do_pending_stack_adjust ();
11253
11254 do_tablejump (index, TYPE_MODE (index_type),
11255 convert_modes (TYPE_MODE (index_type),
11256 TYPE_MODE (TREE_TYPE (range)),
11257 expand_normal (range),
11258 TYPE_UNSIGNED (TREE_TYPE (range))),
11259 table_label, default_label, default_probability);
11260 return 1;
11261 }
11262
11263 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
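/* E.g. the V4SImode VECTOR_CST { 1, 2, 3, 4 } becomes
   (const_vector:V4SI [(const_int 1) (const_int 2)
                       (const_int 3) (const_int 4)])  (illustrative).  */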
11264 static rtx
11265 const_vector_from_tree (tree exp)
11266 {
11267 rtvec v;
11268 unsigned i;
11269 int units;
11270 tree elt;
11271 machine_mode inner, mode;
11272
11273 mode = TYPE_MODE (TREE_TYPE (exp));
11274
11275 if (initializer_zerop (exp))
11276 return CONST0_RTX (mode);
11277
11278 units = GET_MODE_NUNITS (mode);
11279 inner = GET_MODE_INNER (mode);
11280
11281 v = rtvec_alloc (units);
11282
11283 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11284 {
11285 elt = VECTOR_CST_ELT (exp, i);
11286
11287 if (TREE_CODE (elt) == REAL_CST)
11288 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11289 inner);
11290 else if (TREE_CODE (elt) == FIXED_CST)
11291 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11292 inner);
11293 else
11294 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11295 }
11296
11297 return gen_rtx_CONST_VECTOR (mode, v);
11298 }
11299
11300 /* Build a decl for a personality function given a language prefix. */
11301
11302 tree
11303 build_personality_function (const char *lang)
11304 {
11305 const char *unwind_and_version;
11306 tree decl, type;
11307 char *name;
11308
11309 switch (targetm_common.except_unwind_info (&global_options))
11310 {
11311 case UI_NONE:
11312 return NULL;
11313 case UI_SJLJ:
11314 unwind_and_version = "_sj0";
11315 break;
11316 case UI_DWARF2:
11317 case UI_TARGET:
11318 unwind_and_version = "_v0";
11319 break;
11320 case UI_SEH:
11321 unwind_and_version = "_seh0";
11322 break;
11323 default:
11324 gcc_unreachable ();
11325 }
11326
11327 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
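/* E.g. LANG "gxx" with DWARF2 unwind info yields "__gxx_personality_v0";
   with SJLJ unwinding it would be "__gxx_personality_sj0".  */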
11328
11329 type = build_function_type_list (integer_type_node, integer_type_node,
11330 long_long_unsigned_type_node,
11331 ptr_type_node, ptr_type_node, NULL_TREE);
11332 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11333 get_identifier (name), type);
11334 DECL_ARTIFICIAL (decl) = 1;
11335 DECL_EXTERNAL (decl) = 1;
11336 TREE_PUBLIC (decl) = 1;
11337
11338 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11339 are the flags assigned by targetm.encode_section_info. */
11340 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11341
11342 return decl;
11343 }
11344
11345 /* Extracts the personality function of DECL and returns the corresponding
11346 libfunc. */
11347
11348 rtx
11349 get_personality_function (tree decl)
11350 {
11351 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11352 enum eh_personality_kind pk;
11353
11354 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11355 if (pk == eh_personality_none)
11356 return NULL;
11357
11358 if (!personality
11359 && pk == eh_personality_any)
11360 personality = lang_hooks.eh_personality ();
11361
11362 if (pk == eh_personality_lang)
11363 gcc_assert (personality != NULL_TREE);
11364
11365 return XEXP (DECL_RTL (personality), 0);
11366 }
11367
11368 /* Returns a tree for the size of EXP in bytes. */
11369
11370 static tree
11371 tree_expr_size (const_tree exp)
11372 {
11373 if (DECL_P (exp)
11374 && DECL_SIZE_UNIT (exp) != 0)
11375 return DECL_SIZE_UNIT (exp);
11376 else
11377 return size_in_bytes (TREE_TYPE (exp));
11378 }
11379
11380 /* Return an rtx for the size in bytes of the value of EXP. */
11381
11382 rtx
11383 expr_size (tree exp)
11384 {
11385 tree size;
11386
11387 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11388 size = TREE_OPERAND (exp, 1);
11389 else
11390 {
11391 size = tree_expr_size (exp);
11392 gcc_assert (size);
11393 gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11394 }
11395
11396 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11397 }
11398
11399 /* Return a wide integer for the size in bytes of the value of EXP, or -1
11400 if the size can vary or is larger than an integer. */
11401
11402 static HOST_WIDE_INT
11403 int_expr_size (tree exp)
11404 {
11405 tree size;
11406
11407 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11408 size = TREE_OPERAND (exp, 1);
11409 else
11410 {
11411 size = tree_expr_size (exp);
11412 gcc_assert (size);
11413 }
11414
11415 if (size == 0 || !tree_fits_shwi_p (size))
11416 return -1;
11417
11418 return tree_to_shwi (size);
11419 }
11420
11421 #include "gt-expr.h"