1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "hash-set.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stringpool.h"
37 #include "stor-layout.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "flags.h"
41 #include "regs.h"
42 #include "hard-reg-set.h"
43 #include "except.h"
44 #include "input.h"
45 #include "function.h"
46 #include "insn-config.h"
47 #include "insn-attr.h"
48 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
49 #include "expr.h"
50 #include "insn-codes.h"
51 #include "optabs.h"
52 #include "libfuncs.h"
53 #include "recog.h"
54 #include "reload.h"
55 #include "typeclass.h"
56 #include "toplev.h"
57 #include "langhooks.h"
58 #include "intl.h"
59 #include "tm_p.h"
60 #include "tree-iterator.h"
61 #include "predict.h"
62 #include "dominance.h"
63 #include "cfg.h"
64 #include "basic-block.h"
65 #include "tree-ssa-alias.h"
66 #include "internal-fn.h"
67 #include "gimple-expr.h"
68 #include "is-a.h"
69 #include "gimple.h"
70 #include "gimple-ssa.h"
71 #include "hash-map.h"
72 #include "plugin-api.h"
73 #include "ipa-ref.h"
74 #include "cgraph.h"
75 #include "tree-ssanames.h"
76 #include "target.h"
77 #include "common/common-target.h"
78 #include "timevar.h"
79 #include "df.h"
80 #include "diagnostic.h"
81 #include "tree-ssa-live.h"
82 #include "tree-outof-ssa.h"
83 #include "target-globals.h"
84 #include "params.h"
85 #include "tree-ssa-address.h"
86 #include "cfgexpand.h"
87 #include "builtins.h"
88 #include "tree-chkp.h"
89 #include "rtl-chkp.h"
90 #include "ccmp.h"
91
92 #ifndef STACK_PUSH_CODE
93 #ifdef STACK_GROWS_DOWNWARD
94 #define STACK_PUSH_CODE PRE_DEC
95 #else
96 #define STACK_PUSH_CODE PRE_INC
97 #endif
98 #endif
99
100
101 /* If this is nonzero, we do not bother generating VOLATILE
102 around volatile memory references, and we are willing to
103 output indirect addresses. If cse is to follow, we reject
104 indirect addresses so a useful potential cse is generated;
105 if it is used only once, instruction combination will produce
106 the same indirect address eventually. */
107 int cse_not_expected;
108
109 /* This structure is used by move_by_pieces to describe the move to
110 be performed. */
111 struct move_by_pieces_d
112 {
113 rtx to;
114 rtx to_addr;
115 int autinc_to;
116 int explicit_inc_to;
117 rtx from;
118 rtx from_addr;
119 int autinc_from;
120 int explicit_inc_from;
121 unsigned HOST_WIDE_INT len;
122 HOST_WIDE_INT offset;
123 int reverse;
124 };
125
126 /* This structure is used by store_by_pieces to describe the clear to
127 be performed. */
128
129 struct store_by_pieces_d
130 {
131 rtx to;
132 rtx to_addr;
133 int autinc_to;
134 int explicit_inc_to;
135 unsigned HOST_WIDE_INT len;
136 HOST_WIDE_INT offset;
137 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
138 void *constfundata;
139 int reverse;
140 };
141
142 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
143 struct move_by_pieces_d *);
144 static bool block_move_libcall_safe_for_call_parm (void);
145 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
146 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
147 unsigned HOST_WIDE_INT);
148 static tree emit_block_move_libcall_fn (int);
149 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
150 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
151 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
152 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
153 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
154 struct store_by_pieces_d *);
155 static tree clear_storage_libcall_fn (int);
156 static rtx_insn *compress_float_constant (rtx, rtx);
157 static rtx get_subtarget (rtx);
158 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
159 HOST_WIDE_INT, machine_mode,
160 tree, int, alias_set_type);
161 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
162 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
163 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
164 machine_mode, tree, alias_set_type, bool);
165
166 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
167
168 static int is_aligning_offset (const_tree, const_tree);
169 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
170 static rtx do_store_flag (sepops, rtx, machine_mode);
171 #ifdef PUSH_ROUNDING
172 static void emit_single_push_insn (machine_mode, rtx, tree);
173 #endif
174 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
175 static rtx const_vector_from_tree (tree);
176
177 \f
178 /* This is run to set up which modes can be used
179 directly in memory and to initialize the block move optab. It is run
180 at the beginning of compilation and when the target is reinitialized. */
181
182 void
183 init_expr_target (void)
184 {
185 rtx insn, pat;
186 machine_mode mode;
187 int num_clobbers;
188 rtx mem, mem1;
189 rtx reg;
190
191 /* Try indexing by frame ptr and try by stack ptr.
192 It is known that on the Convex the stack ptr isn't a valid index.
193 With luck, one or the other is valid on any machine. */
194 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
195 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
196
197 /* A scratch register we can modify in-place below to avoid
198 useless RTL allocations. */
199 reg = gen_rtx_REG (VOIDmode, -1);
200
201 insn = rtx_alloc (INSN);
202 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
203 PATTERN (insn) = pat;
204
205 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
206 mode = (machine_mode) ((int) mode + 1))
207 {
208 int regno;
209
210 direct_load[(int) mode] = direct_store[(int) mode] = 0;
211 PUT_MODE (mem, mode);
212 PUT_MODE (mem1, mode);
213 PUT_MODE (reg, mode);
214
215 /* See if there is some register that can be used in this mode and
216 directly loaded or stored from memory. */
217
218 if (mode != VOIDmode && mode != BLKmode)
219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
220 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
221 regno++)
222 {
223 if (! HARD_REGNO_MODE_OK (regno, mode))
224 continue;
225
226 SET_REGNO (reg, regno);
227
228 SET_SRC (pat) = mem;
229 SET_DEST (pat) = reg;
230 if (recog (pat, insn, &num_clobbers) >= 0)
231 direct_load[(int) mode] = 1;
232
233 SET_SRC (pat) = mem1;
234 SET_DEST (pat) = reg;
235 if (recog (pat, insn, &num_clobbers) >= 0)
236 direct_load[(int) mode] = 1;
237
238 SET_SRC (pat) = reg;
239 SET_DEST (pat) = mem;
240 if (recog (pat, insn, &num_clobbers) >= 0)
241 direct_store[(int) mode] = 1;
242
243 SET_SRC (pat) = reg;
244 SET_DEST (pat) = mem1;
245 if (recog (pat, insn, &num_clobbers) >= 0)
246 direct_store[(int) mode] = 1;
247 }
248 }
249
250 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
251
252 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
253 mode = GET_MODE_WIDER_MODE (mode))
254 {
255 machine_mode srcmode;
256 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
257 srcmode = GET_MODE_WIDER_MODE (srcmode))
258 {
259 enum insn_code ic;
260
261 ic = can_extend_p (mode, srcmode, 0);
262 if (ic == CODE_FOR_nothing)
263 continue;
264
265 PUT_MODE (mem, srcmode);
266
267 if (insn_operand_matches (ic, 1, mem))
268 float_extend_from_mem[mode][srcmode] = true;
269 }
270 }
271 }
272
273 /* This is run at the start of compiling a function. */
274
275 void
276 init_expr (void)
277 {
278 memset (&crtl->expr, 0, sizeof (crtl->expr));
279 }
280 \f
281 /* Copy data from FROM to TO, where the machine modes are not the same.
282 Both modes may be integer, or both may be floating, or both may be
283 fixed-point.
284 UNSIGNEDP should be nonzero if FROM is an unsigned type.
285 This causes zero-extension instead of sign-extension. */
286
287 void
288 convert_move (rtx to, rtx from, int unsignedp)
289 {
290 machine_mode to_mode = GET_MODE (to);
291 machine_mode from_mode = GET_MODE (from);
292 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
293 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
294 enum insn_code code;
295 rtx libcall;
296
297 /* rtx code for making an equivalent value. */
298 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
299 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
300
301
302 gcc_assert (to_real == from_real);
303 gcc_assert (to_mode != BLKmode);
304 gcc_assert (from_mode != BLKmode);
305
306 /* If the source and destination are already the same, then there's
307 nothing to do. */
308 if (to == from)
309 return;
310
311 /* If FROM is a SUBREG that indicates that we have already done at least
312 the required extension, strip it. We don't handle such SUBREGs as
313 TO here. */
314
315 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
316 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
317 >= GET_MODE_PRECISION (to_mode))
318 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
319 from = gen_lowpart (to_mode, from), from_mode = to_mode;
320
321 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
322
323 if (to_mode == from_mode
324 || (from_mode == VOIDmode && CONSTANT_P (from)))
325 {
326 emit_move_insn (to, from);
327 return;
328 }
329
330 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
331 {
332 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
333
334 if (VECTOR_MODE_P (to_mode))
335 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
336 else
337 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
338
339 emit_move_insn (to, from);
340 return;
341 }
342
343 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
344 {
345 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
346 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
347 return;
348 }
349
350 if (to_real)
351 {
352 rtx value;
353 rtx_insn *insns;
354 convert_optab tab;
355
356 gcc_assert ((GET_MODE_PRECISION (from_mode)
357 != GET_MODE_PRECISION (to_mode))
358 || (DECIMAL_FLOAT_MODE_P (from_mode)
359 != DECIMAL_FLOAT_MODE_P (to_mode)));
360
361 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
362 /* Conversion between decimal float and binary float, same size. */
363 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
364 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
365 tab = sext_optab;
366 else
367 tab = trunc_optab;
368
369 /* Try converting directly if the insn is supported. */
370
371 code = convert_optab_handler (tab, to_mode, from_mode);
372 if (code != CODE_FOR_nothing)
373 {
374 emit_unop_insn (code, to, from,
375 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
376 return;
377 }
378
379 /* Otherwise use a libcall. */
380 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
381
382 /* Is this conversion implemented yet? */
383 gcc_assert (libcall);
384
385 start_sequence ();
386 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
387 1, from, from_mode);
388 insns = get_insns ();
389 end_sequence ();
390 emit_libcall_block (insns, to, value,
391 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
392 from)
393 : gen_rtx_FLOAT_EXTEND (to_mode, from));
394 return;
395 }
396
397 /* Handle pointer conversion. */ /* SPEE 900220. */
398 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
399 {
400 convert_optab ctab;
401
402 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
403 ctab = trunc_optab;
404 else if (unsignedp)
405 ctab = zext_optab;
406 else
407 ctab = sext_optab;
408
409 if (convert_optab_handler (ctab, to_mode, from_mode)
410 != CODE_FOR_nothing)
411 {
412 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
413 to, from, UNKNOWN);
414 return;
415 }
416 }
417
418 /* Targets are expected to provide conversion insns between PxImode and
419 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
420 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
421 {
422 machine_mode full_mode
423 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
424
425 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
426 != CODE_FOR_nothing);
427
428 if (full_mode != from_mode)
429 from = convert_to_mode (full_mode, from, unsignedp);
430 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
431 to, from, UNKNOWN);
432 return;
433 }
434 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
435 {
436 rtx new_from;
437 machine_mode full_mode
438 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
439 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
440 enum insn_code icode;
441
442 icode = convert_optab_handler (ctab, full_mode, from_mode);
443 gcc_assert (icode != CODE_FOR_nothing);
444
445 if (to_mode == full_mode)
446 {
447 emit_unop_insn (icode, to, from, UNKNOWN);
448 return;
449 }
450
451 new_from = gen_reg_rtx (full_mode);
452 emit_unop_insn (icode, new_from, from, UNKNOWN);
453
454 /* else proceed to integer conversions below. */
455 from_mode = full_mode;
456 from = new_from;
457 }
458
459 /* Make sure both are fixed-point modes or both are not. */
460 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
461 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
462 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
463 {
464 /* If we widen from_mode to to_mode and they are in the same class,
465 we won't saturate the result.
466 Otherwise, always saturate the result to play it safe. */
467 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
468 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
469 expand_fixed_convert (to, from, 0, 0);
470 else
471 expand_fixed_convert (to, from, 0, 1);
472 return;
473 }
474
475 /* Now both modes are integers. */
476
477 /* Handle expanding beyond a word. */
478 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
479 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
480 {
481 rtx_insn *insns;
482 rtx lowpart;
483 rtx fill_value;
484 rtx lowfrom;
485 int i;
486 machine_mode lowpart_mode;
487 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
488
489 /* Try converting directly if the insn is supported. */
490 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
491 != CODE_FOR_nothing)
492 {
493 /* If FROM is a SUBREG, put it into a register. Do this
494 so that we always generate the same set of insns for
495 better cse'ing; if an intermediate assignment occurred,
496 we won't be doing the operation directly on the SUBREG. */
497 if (optimize > 0 && GET_CODE (from) == SUBREG)
498 from = force_reg (from_mode, from);
499 emit_unop_insn (code, to, from, equiv_code);
500 return;
501 }
502 /* Next, try converting via full word. */
503 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
504 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
505 != CODE_FOR_nothing))
506 {
507 rtx word_to = gen_reg_rtx (word_mode);
508 if (REG_P (to))
509 {
510 if (reg_overlap_mentioned_p (to, from))
511 from = force_reg (from_mode, from);
512 emit_clobber (to);
513 }
514 convert_move (word_to, from, unsignedp);
515 emit_unop_insn (code, to, word_to, equiv_code);
516 return;
517 }
518
519 /* No special multiword conversion insn; do it by hand. */
520 start_sequence ();
521
522 /* Since we will turn this into a no conflict block, we must ensure that
523 the source does not overlap the target, so force it into an isolated
524 register when it might. Likewise for any MEM input, since the
525 conversion sequence might require several references to it and we
526 must ensure we're getting the same value every time. */
527
528 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
529 from = force_reg (from_mode, from);
530
531 /* Get a copy of FROM widened to a word, if necessary. */
532 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
533 lowpart_mode = word_mode;
534 else
535 lowpart_mode = from_mode;
536
537 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
538
539 lowpart = gen_lowpart (lowpart_mode, to);
540 emit_move_insn (lowpart, lowfrom);
541
542 /* Compute the value to put in each remaining word. */
543 if (unsignedp)
544 fill_value = const0_rtx;
545 else
546 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
547 LT, lowfrom, const0_rtx,
548 lowpart_mode, 0, -1);
549
550 /* Fill the remaining words. */
551 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
552 {
553 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
554 rtx subword = operand_subword (to, index, 1, to_mode);
555
556 gcc_assert (subword);
557
558 if (fill_value != subword)
559 emit_move_insn (subword, fill_value);
560 }
561
562 insns = get_insns ();
563 end_sequence ();
564
565 emit_insn (insns);
566 return;
567 }
568
569 /* Truncating multi-word to a word or less. */
570 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
571 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
572 {
573 if (!((MEM_P (from)
574 && ! MEM_VOLATILE_P (from)
575 && direct_load[(int) to_mode]
576 && ! mode_dependent_address_p (XEXP (from, 0),
577 MEM_ADDR_SPACE (from)))
578 || REG_P (from)
579 || GET_CODE (from) == SUBREG))
580 from = force_reg (from_mode, from);
581 convert_move (to, gen_lowpart (word_mode, from), 0);
582 return;
583 }
584
585 /* Now follow all the conversions between integers
586 no more than a word long. */
587
588 /* For truncation, usually we can just refer to FROM in a narrower mode. */
589 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
590 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
591 {
592 if (!((MEM_P (from)
593 && ! MEM_VOLATILE_P (from)
594 && direct_load[(int) to_mode]
595 && ! mode_dependent_address_p (XEXP (from, 0),
596 MEM_ADDR_SPACE (from)))
597 || REG_P (from)
598 || GET_CODE (from) == SUBREG))
599 from = force_reg (from_mode, from);
600 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
601 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
602 from = copy_to_reg (from);
603 emit_move_insn (to, gen_lowpart (to_mode, from));
604 return;
605 }
606
607 /* Handle extension. */
608 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
609 {
610 /* Convert directly if that works. */
611 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
612 != CODE_FOR_nothing)
613 {
614 emit_unop_insn (code, to, from, equiv_code);
615 return;
616 }
617 else
618 {
619 machine_mode intermediate;
620 rtx tmp;
621 int shift_amount;
622
623 /* Search for a mode to convert via. */
624 for (intermediate = from_mode; intermediate != VOIDmode;
625 intermediate = GET_MODE_WIDER_MODE (intermediate))
626 if (((can_extend_p (to_mode, intermediate, unsignedp)
627 != CODE_FOR_nothing)
628 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
629 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
630 && (can_extend_p (intermediate, from_mode, unsignedp)
631 != CODE_FOR_nothing))
632 {
633 convert_move (to, convert_to_mode (intermediate, from,
634 unsignedp), unsignedp);
635 return;
636 }
637
638 /* No suitable intermediate mode.
639 Generate what we need with shifts. */
640 shift_amount = (GET_MODE_PRECISION (to_mode)
641 - GET_MODE_PRECISION (from_mode));
642 from = gen_lowpart (to_mode, force_reg (from_mode, from));
643 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
644 to, unsignedp);
645 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
646 to, unsignedp);
647 if (tmp != to)
648 emit_move_insn (to, tmp);
649 return;
650 }
651 }
652
653 /* Support special truncate insns for certain modes. */
654 if (convert_optab_handler (trunc_optab, to_mode,
655 from_mode) != CODE_FOR_nothing)
656 {
657 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
658 to, from, UNKNOWN);
659 return;
660 }
661
662 /* Handle truncation of volatile memrefs, and so on;
663 the things that couldn't be truncated directly,
664 and for which there was no special instruction.
665
666 ??? Code above formerly short-circuited this, for most integer
667 mode pairs, with a force_reg in from_mode followed by a recursive
668 call to this routine. Appears always to have been wrong. */
669 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
670 {
671 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
672 emit_move_insn (to, temp);
673 return;
674 }
675
676 /* Mode combination is not recognized. */
677 gcc_unreachable ();
678 }
679
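/* Illustrative sketch (not part of expr.c): a caller that already has an
   SImode pseudo and needs its value in a DImode pseudo would typically do
   something like the following; whether this expands to a single extend
   insn, a word-by-word sequence, or a libcall is decided above.  */
#if 0
static void
example_convert_move (void)
{
  rtx src = gen_reg_rtx (SImode);   /* 32-bit source value.  */
  rtx dst = gen_reg_rtx (DImode);   /* 64-bit destination.  */

  convert_move (dst, src, 0);       /* unsignedp == 0: sign-extend.  */
  convert_move (dst, src, 1);       /* unsignedp == 1: zero-extend.  */
}
#endif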
680 /* Return an rtx for a value that would result
681 from converting X to mode MODE.
682 Both X and MODE may be floating, or both integer.
683 UNSIGNEDP is nonzero if X is an unsigned value.
684 This can be done by referring to a part of X in place
685 or by copying to a new temporary with conversion. */
686
687 rtx
688 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
689 {
690 return convert_modes (mode, VOIDmode, x, unsignedp);
691 }
692
693 /* Return an rtx for a value that would result
694 from converting X from mode OLDMODE to mode MODE.
695 Both modes may be floating, or both integer.
696 UNSIGNEDP is nonzero if X is an unsigned value.
697
698 This can be done by referring to a part of X in place
699 or by copying to a new temporary with conversion.
700
701 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
702
703 rtx
704 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
705 {
706 rtx temp;
707
708 /* If X is a SUBREG that indicates that we have already done at least
709 the required extension, strip it. */
710
711 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
712 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
713 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
714 x = gen_lowpart (mode, SUBREG_REG (x));
715
716 if (GET_MODE (x) != VOIDmode)
717 oldmode = GET_MODE (x);
718
719 if (mode == oldmode)
720 return x;
721
722 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
723 {
724 /* If the caller did not tell us the old mode, then there is not
725 much to do with respect to canonicalization. We have to
726 assume that all the bits are significant. */
727 if (GET_MODE_CLASS (oldmode) != MODE_INT)
728 oldmode = MAX_MODE_INT;
729 wide_int w = wide_int::from (std::make_pair (x, oldmode),
730 GET_MODE_PRECISION (mode),
731 unsignedp ? UNSIGNED : SIGNED);
732 return immed_wide_int_const (w, mode);
733 }
734
735 /* We can do this with a gen_lowpart if both desired and current modes
736 are integer, and this is either a constant integer, a register, or a
737 non-volatile MEM. */
738 if (GET_MODE_CLASS (mode) == MODE_INT
739 && GET_MODE_CLASS (oldmode) == MODE_INT
740 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
741 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
742 || (REG_P (x)
743 && (!HARD_REGISTER_P (x)
744 || HARD_REGNO_MODE_OK (REGNO (x), mode))
745 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
746
747 return gen_lowpart (mode, x);
748
749 /* Converting an integer constant into a vector mode is always equivalent
750 to a subreg operation. */
751 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
752 {
753 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
754 return simplify_gen_subreg (mode, x, oldmode, 0);
755 }
756
757 temp = gen_reg_rtx (mode);
758 convert_move (temp, x, unsignedp);
759 return temp;
760 }
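/* Illustrative sketch (not part of expr.c): convert_modes is handy when the
   operand may be a VOIDmode constant, for which GET_MODE gives no useful
   answer; the caller supplies OLDMODE explicitly.  */
#if 0
static rtx
example_convert_modes (rtx op)
{
  /* Treat OP as an SImode value (even if it is a CONST_INT) and return
     an rtx holding the same value zero-extended to DImode.  */
  return convert_modes (DImode, SImode, op, 1);
}
#endif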
761 \f
762 /* Return the largest alignment we can use for doing a move (or store)
763 of MAX_PIECES. ALIGN is the largest alignment we could use. */
764
765 static unsigned int
766 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
767 {
768 machine_mode tmode;
769
770 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
771 if (align >= GET_MODE_ALIGNMENT (tmode))
772 align = GET_MODE_ALIGNMENT (tmode);
773 else
774 {
775 machine_mode tmode, xmode;
776
777 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
778 tmode != VOIDmode;
779 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
780 if (GET_MODE_SIZE (tmode) > max_pieces
781 || SLOW_UNALIGNED_ACCESS (tmode, align))
782 break;
783
784 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
785 }
786
787 return align;
788 }
789
790 /* Return the widest integer mode whose size is strictly less than SIZE
791 bytes. If no such mode can be found, return VOIDmode. */
792
793 static machine_mode
794 widest_int_mode_for_size (unsigned int size)
795 {
796 machine_mode tmode, mode = VOIDmode;
797
798 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
799 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
800 if (GET_MODE_SIZE (tmode) < size)
801 mode = tmode;
802
803 return mode;
804 }
805
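/* Illustrative sketch (not part of expr.c): because the comparison above is
   strict, callers pass an upper bound one byte larger than the widest move
   they are willing to emit, e.g. MOVE_MAX_PIECES + 1.  */
#if 0
static void
example_widest_int_mode (void)
{
  /* Widest integer mode strictly narrower than 5 bytes; on a typical
     target with a 4-byte SImode this yields SImode.  */
  machine_mode m = widest_int_mode_for_size (5);
  (void) m;
}
#endif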
806 /* Determine whether LEN bytes can be moved by using several move
807 instructions. Return nonzero if a call to move_by_pieces should
808 succeed. */
809
810 int
811 can_move_by_pieces (unsigned HOST_WIDE_INT len,
812 unsigned int align)
813 {
814 return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
815 optimize_insn_for_speed_p ());
816 }
817
818 /* Generate several move instructions to copy LEN bytes from block FROM to
819 block TO. (These are MEM rtx's with BLKmode).
820
821 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
822 used to push FROM to the stack.
823
824 ALIGN is maximum stack alignment we can assume.
825
826 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
827 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
828 stpcpy. */
829
830 rtx
831 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
832 unsigned int align, int endp)
833 {
834 struct move_by_pieces_d data;
835 machine_mode to_addr_mode;
836 machine_mode from_addr_mode = get_address_mode (from);
837 rtx to_addr, from_addr = XEXP (from, 0);
838 unsigned int max_size = MOVE_MAX_PIECES + 1;
839 enum insn_code icode;
840
841 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
842
843 data.offset = 0;
844 data.from_addr = from_addr;
845 if (to)
846 {
847 to_addr_mode = get_address_mode (to);
848 to_addr = XEXP (to, 0);
849 data.to = to;
850 data.autinc_to
851 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
852 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
853 data.reverse
854 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
855 }
856 else
857 {
858 to_addr_mode = VOIDmode;
859 to_addr = NULL_RTX;
860 data.to = NULL_RTX;
861 data.autinc_to = 1;
862 #ifdef STACK_GROWS_DOWNWARD
863 data.reverse = 1;
864 #else
865 data.reverse = 0;
866 #endif
867 }
868 data.to_addr = to_addr;
869 data.from = from;
870 data.autinc_from
871 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
872 || GET_CODE (from_addr) == POST_INC
873 || GET_CODE (from_addr) == POST_DEC);
874
875 data.explicit_inc_from = 0;
876 data.explicit_inc_to = 0;
877 if (data.reverse) data.offset = len;
878 data.len = len;
879
880 /* If copying requires more than two move insns,
881 copy addresses to registers (to make displacements shorter)
882 and use post-increment if available. */
883 if (!(data.autinc_from && data.autinc_to)
884 && move_by_pieces_ninsns (len, align, max_size) > 2)
885 {
886 /* Find the mode of the largest move...
887 MODE might not be used depending on the definitions of the
888 USE_* macros below. */
889 machine_mode mode ATTRIBUTE_UNUSED
890 = widest_int_mode_for_size (max_size);
891
892 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
893 {
894 data.from_addr = copy_to_mode_reg (from_addr_mode,
895 plus_constant (from_addr_mode,
896 from_addr, len));
897 data.autinc_from = 1;
898 data.explicit_inc_from = -1;
899 }
900 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
901 {
902 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
903 data.autinc_from = 1;
904 data.explicit_inc_from = 1;
905 }
906 if (!data.autinc_from && CONSTANT_P (from_addr))
907 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
908 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
909 {
910 data.to_addr = copy_to_mode_reg (to_addr_mode,
911 plus_constant (to_addr_mode,
912 to_addr, len));
913 data.autinc_to = 1;
914 data.explicit_inc_to = -1;
915 }
916 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
917 {
918 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
919 data.autinc_to = 1;
920 data.explicit_inc_to = 1;
921 }
922 if (!data.autinc_to && CONSTANT_P (to_addr))
923 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
924 }
925
926 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
927
928 /* First move what we can in the largest integer mode, then go to
929 successively smaller modes. */
930
931 while (max_size > 1 && data.len > 0)
932 {
933 machine_mode mode = widest_int_mode_for_size (max_size);
934
935 if (mode == VOIDmode)
936 break;
937
938 icode = optab_handler (mov_optab, mode);
939 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
940 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
941
942 max_size = GET_MODE_SIZE (mode);
943 }
944
945 /* The code above should have handled everything. */
946 gcc_assert (!data.len);
947
948 if (endp)
949 {
950 rtx to1;
951
952 gcc_assert (!data.reverse);
953 if (data.autinc_to)
954 {
955 if (endp == 2)
956 {
957 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
958 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
959 else
960 data.to_addr = copy_to_mode_reg (to_addr_mode,
961 plus_constant (to_addr_mode,
962 data.to_addr,
963 -1));
964 }
965 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
966 data.offset);
967 }
968 else
969 {
970 if (endp == 2)
971 --data.offset;
972 to1 = adjust_address (data.to, QImode, data.offset);
973 }
974 return to1;
975 }
976 else
977 return data.to;
978 }
979
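/* Illustrative sketch (not part of expr.c): a guarded piecewise copy of a
   small fixed-size block.  The MEMs, their alignment and the 16-byte length
   are hypothetical; real callers such as emit_block_move_hints make the same
   can_move_by_pieces check before committing to this strategy.  */
#if 0
static void
example_move_by_pieces (rtx dst_addr, rtx src_addr)
{
  rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
  rtx src = gen_rtx_MEM (BLKmode, src_addr);
  unsigned HOST_WIDE_INT len = 16;
  unsigned int align = GET_MODE_ALIGNMENT (word_mode);

  if (can_move_by_pieces (len, align))
    move_by_pieces (dst, src, len, align, 0);   /* ENDP == 0: return DST.  */
}
#endif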
980 /* Return number of insns required to move L bytes by pieces.
981 ALIGN (in bits) is maximum alignment we can assume. */
982
983 unsigned HOST_WIDE_INT
984 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
985 unsigned int max_size)
986 {
987 unsigned HOST_WIDE_INT n_insns = 0;
988
989 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
990
991 while (max_size > 1 && l > 0)
992 {
993 machine_mode mode;
994 enum insn_code icode;
995
996 mode = widest_int_mode_for_size (max_size);
997
998 if (mode == VOIDmode)
999 break;
1000
1001 icode = optab_handler (mov_optab, mode);
1002 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1003 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1004
1005 max_size = GET_MODE_SIZE (mode);
1006 }
1007
1008 gcc_assert (!l);
1009 return n_insns;
1010 }
1011
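/* Illustrative sketch (not part of expr.c): on a hypothetical target whose
   widest piece is 4 bytes, an 11-byte copy at word alignment would be
   counted as 2 x SImode + 1 x HImode + 1 x QImode = 4 insns.  */
#if 0
static unsigned HOST_WIDE_INT
example_ninsns (void)
{
  return move_by_pieces_ninsns (11, GET_MODE_ALIGNMENT (word_mode),
				MOVE_MAX_PIECES + 1);
}
#endif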
1012 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1013 with move instructions for mode MODE. GENFUN is the gen_... function
1014 to make a move insn for that mode. DATA has all the other info. */
1015
1016 static void
1017 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1018 struct move_by_pieces_d *data)
1019 {
1020 unsigned int size = GET_MODE_SIZE (mode);
1021 rtx to1 = NULL_RTX, from1;
1022
1023 while (data->len >= size)
1024 {
1025 if (data->reverse)
1026 data->offset -= size;
1027
1028 if (data->to)
1029 {
1030 if (data->autinc_to)
1031 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1032 data->offset);
1033 else
1034 to1 = adjust_address (data->to, mode, data->offset);
1035 }
1036
1037 if (data->autinc_from)
1038 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1039 data->offset);
1040 else
1041 from1 = adjust_address (data->from, mode, data->offset);
1042
1043 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1044 emit_insn (gen_add2_insn (data->to_addr,
1045 gen_int_mode (-(HOST_WIDE_INT) size,
1046 GET_MODE (data->to_addr))));
1047 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1048 emit_insn (gen_add2_insn (data->from_addr,
1049 gen_int_mode (-(HOST_WIDE_INT) size,
1050 GET_MODE (data->from_addr))));
1051
1052 if (data->to)
1053 emit_insn ((*genfun) (to1, from1));
1054 else
1055 {
1056 #ifdef PUSH_ROUNDING
1057 emit_single_push_insn (mode, from1, NULL);
1058 #else
1059 gcc_unreachable ();
1060 #endif
1061 }
1062
1063 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1064 emit_insn (gen_add2_insn (data->to_addr,
1065 gen_int_mode (size,
1066 GET_MODE (data->to_addr))));
1067 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1068 emit_insn (gen_add2_insn (data->from_addr,
1069 gen_int_mode (size,
1070 GET_MODE (data->from_addr))));
1071
1072 if (! data->reverse)
1073 data->offset += size;
1074
1075 data->len -= size;
1076 }
1077 }
1078 \f
1079 /* Emit code to move a block Y to a block X. This may be done with
1080 string-move instructions, with multiple scalar move instructions,
1081 or with a library call.
1082
1083 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1084 SIZE is an rtx that says how long they are.
1085 ALIGN is the maximum alignment we can assume they have.
1086 METHOD describes what kind of copy this is, and what mechanisms may be used.
1087 MIN_SIZE is the minimal size of the block to move.
1088 MAX_SIZE is the maximal size of the block to move; if it cannot be
1089 represented in unsigned HOST_WIDE_INT, it is a mask of all ones.
1090
1091 Return the address of the new block, if memcpy is called and returns it,
1092 0 otherwise. */
1093
1094 rtx
1095 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1096 unsigned int expected_align, HOST_WIDE_INT expected_size,
1097 unsigned HOST_WIDE_INT min_size,
1098 unsigned HOST_WIDE_INT max_size,
1099 unsigned HOST_WIDE_INT probable_max_size)
1100 {
1101 bool may_use_call;
1102 rtx retval = 0;
1103 unsigned int align;
1104
1105 gcc_assert (size);
1106 if (CONST_INT_P (size)
1107 && INTVAL (size) == 0)
1108 return 0;
1109
1110 switch (method)
1111 {
1112 case BLOCK_OP_NORMAL:
1113 case BLOCK_OP_TAILCALL:
1114 may_use_call = true;
1115 break;
1116
1117 case BLOCK_OP_CALL_PARM:
1118 may_use_call = block_move_libcall_safe_for_call_parm ();
1119
1120 /* Make inhibit_defer_pop nonzero around the library call
1121 to force it to pop the arguments right away. */
1122 NO_DEFER_POP;
1123 break;
1124
1125 case BLOCK_OP_NO_LIBCALL:
1126 may_use_call = false;
1127 break;
1128
1129 default:
1130 gcc_unreachable ();
1131 }
1132
1133 gcc_assert (MEM_P (x) && MEM_P (y));
1134 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1135 gcc_assert (align >= BITS_PER_UNIT);
1136
1137 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1138 block copy is more efficient for other large modes, e.g. DCmode. */
1139 x = adjust_address (x, BLKmode, 0);
1140 y = adjust_address (y, BLKmode, 0);
1141
1142 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1143 can be incorrect is coming from __builtin_memcpy. */
1144 if (CONST_INT_P (size))
1145 {
1146 x = shallow_copy_rtx (x);
1147 y = shallow_copy_rtx (y);
1148 set_mem_size (x, INTVAL (size));
1149 set_mem_size (y, INTVAL (size));
1150 }
1151
1152 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1153 move_by_pieces (x, y, INTVAL (size), align, 0);
1154 else if (emit_block_move_via_movmem (x, y, size, align,
1155 expected_align, expected_size,
1156 min_size, max_size, probable_max_size))
1157 ;
1158 else if (may_use_call
1159 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1160 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1161 {
1162 /* Since x and y are passed to a libcall, mark the corresponding
1163 tree EXPR as addressable. */
1164 tree y_expr = MEM_EXPR (y);
1165 tree x_expr = MEM_EXPR (x);
1166 if (y_expr)
1167 mark_addressable (y_expr);
1168 if (x_expr)
1169 mark_addressable (x_expr);
1170 retval = emit_block_move_via_libcall (x, y, size,
1171 method == BLOCK_OP_TAILCALL);
1172 }
1173
1174 else
1175 emit_block_move_via_loop (x, y, size, align);
1176
1177 if (method == BLOCK_OP_CALL_PARM)
1178 OK_DEFER_POP;
1179
1180 return retval;
1181 }
1182
1183 rtx
1184 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1185 {
1186 unsigned HOST_WIDE_INT max, min = 0;
1187 if (GET_CODE (size) == CONST_INT)
1188 min = max = UINTVAL (size);
1189 else
1190 max = GET_MODE_MASK (GET_MODE (size));
1191 return emit_block_move_hints (x, y, size, method, 0, -1,
1192 min, max, max);
1193 }
1194
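/* Illustrative sketch (not part of expr.c): copying a 64-byte BLKmode
   object through the public entry point.  The addresses and the alignment
   are hypothetical; emit_block_move picks move_by_pieces, a movmem pattern,
   a memcpy libcall or an explicit loop as appropriate.  */
#if 0
static void
example_emit_block_move (rtx dst_addr, rtx src_addr)
{
  rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
  rtx src = gen_rtx_MEM (BLKmode, src_addr);

  set_mem_align (dst, BITS_PER_WORD);
  set_mem_align (src, BITS_PER_WORD);
  emit_block_move (dst, src, GEN_INT (64), BLOCK_OP_NORMAL);
}
#endif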
1195 /* A subroutine of emit_block_move. Returns true if calling the
1196 block move libcall will not clobber any parameters which may have
1197 already been placed on the stack. */
1198
1199 static bool
1200 block_move_libcall_safe_for_call_parm (void)
1201 {
1202 #if defined (REG_PARM_STACK_SPACE)
1203 tree fn;
1204 #endif
1205
1206 /* If arguments are pushed on the stack, then they're safe. */
1207 if (PUSH_ARGS)
1208 return true;
1209
1210 /* If registers go on the stack anyway, any argument is sure to clobber
1211 an outgoing argument. */
1212 #if defined (REG_PARM_STACK_SPACE)
1213 fn = emit_block_move_libcall_fn (false);
1214 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1215 depend on its argument. */
1216 (void) fn;
1217 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1218 && REG_PARM_STACK_SPACE (fn) != 0)
1219 return false;
1220 #endif
1221
1222 /* If any argument goes in memory, then it might clobber an outgoing
1223 argument. */
1224 {
1225 CUMULATIVE_ARGS args_so_far_v;
1226 cumulative_args_t args_so_far;
1227 tree fn, arg;
1228
1229 fn = emit_block_move_libcall_fn (false);
1230 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1231 args_so_far = pack_cumulative_args (&args_so_far_v);
1232
1233 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1234 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1235 {
1236 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1237 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1238 NULL_TREE, true);
1239 if (!tmp || !REG_P (tmp))
1240 return false;
1241 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1242 return false;
1243 targetm.calls.function_arg_advance (args_so_far, mode,
1244 NULL_TREE, true);
1245 }
1246 }
1247 return true;
1248 }
1249
1250 /* A subroutine of emit_block_move. Expand a movmem pattern;
1251 return true if successful. */
1252
1253 static bool
1254 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1255 unsigned int expected_align, HOST_WIDE_INT expected_size,
1256 unsigned HOST_WIDE_INT min_size,
1257 unsigned HOST_WIDE_INT max_size,
1258 unsigned HOST_WIDE_INT probable_max_size)
1259 {
1260 int save_volatile_ok = volatile_ok;
1261 machine_mode mode;
1262
1263 if (expected_align < align)
1264 expected_align = align;
1265 if (expected_size != -1)
1266 {
1267 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1268 expected_size = probable_max_size;
1269 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1270 expected_size = min_size;
1271 }
1272
1273 /* Since this is a move insn, we don't care about volatility. */
1274 volatile_ok = 1;
1275
1276 /* Try the most limited insn first, because there's no point
1277 including more than one in the machine description unless
1278 the more limited one has some advantage. */
1279
1280 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1281 mode = GET_MODE_WIDER_MODE (mode))
1282 {
1283 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1284
1285 if (code != CODE_FOR_nothing
1286 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1287 here because if SIZE is less than the mode mask, as it is
1288 returned by the macro, it will definitely be less than the
1289 actual mode mask. Since SIZE is within the Pmode address
1290 space, we limit MODE to Pmode. */
1291 && ((CONST_INT_P (size)
1292 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1293 <= (GET_MODE_MASK (mode) >> 1)))
1294 || max_size <= (GET_MODE_MASK (mode) >> 1)
1295 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1296 {
1297 struct expand_operand ops[9];
1298 unsigned int nops;
1299
1300 /* ??? When called via emit_block_move_for_call, it'd be
1301 nice if there were some way to inform the backend, so
1302 that it doesn't fail the expansion because it thinks
1303 emitting the libcall would be more efficient. */
1304 nops = insn_data[(int) code].n_generator_args;
1305 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1306
1307 create_fixed_operand (&ops[0], x);
1308 create_fixed_operand (&ops[1], y);
1309 /* The check above guarantees that this size conversion is valid. */
1310 create_convert_operand_to (&ops[2], size, mode, true);
1311 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1312 if (nops >= 6)
1313 {
1314 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1315 create_integer_operand (&ops[5], expected_size);
1316 }
1317 if (nops >= 8)
1318 {
1319 create_integer_operand (&ops[6], min_size);
1320 /* If we cannot represent the maximal size,
1321 make the parameter NULL. */
1322 if ((HOST_WIDE_INT) max_size != -1)
1323 create_integer_operand (&ops[7], max_size);
1324 else
1325 create_fixed_operand (&ops[7], NULL);
1326 }
1327 if (nops == 9)
1328 {
1329 /* If we cannot represent the maximal size,
1330 make the parameter NULL. */
1331 if ((HOST_WIDE_INT) probable_max_size != -1)
1332 create_integer_operand (&ops[8], probable_max_size);
1333 else
1334 create_fixed_operand (&ops[8], NULL);
1335 }
1336 if (maybe_expand_insn (code, nops, ops))
1337 {
1338 volatile_ok = save_volatile_ok;
1339 return true;
1340 }
1341 }
1342 }
1343
1344 volatile_ok = save_volatile_ok;
1345 return false;
1346 }
1347
1348 /* A subroutine of emit_block_move. Expand a call to memcpy.
1349 Return the return value from memcpy, 0 otherwise. */
1350
1351 rtx
1352 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1353 {
1354 rtx dst_addr, src_addr;
1355 tree call_expr, fn, src_tree, dst_tree, size_tree;
1356 machine_mode size_mode;
1357 rtx retval;
1358
1359 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1360 pseudos. We can then place those new pseudos into a VAR_DECL and
1361 use them later. */
1362
1363 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1364 src_addr = copy_addr_to_reg (XEXP (src, 0));
1365
1366 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1367 src_addr = convert_memory_address (ptr_mode, src_addr);
1368
1369 dst_tree = make_tree (ptr_type_node, dst_addr);
1370 src_tree = make_tree (ptr_type_node, src_addr);
1371
1372 size_mode = TYPE_MODE (sizetype);
1373
1374 size = convert_to_mode (size_mode, size, 1);
1375 size = copy_to_mode_reg (size_mode, size);
1376
1377 /* It is incorrect to use the libcall calling conventions to call
1378 memcpy in this context. This could be a user call to memcpy and
1379 the user may wish to examine the return value from memcpy. For
1380 targets where libcalls and normal calls have different conventions
1381 for returning pointers, we could end up generating incorrect code. */
1382
1383 size_tree = make_tree (sizetype, size);
1384
1385 fn = emit_block_move_libcall_fn (true);
1386 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1387 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1388
1389 retval = expand_normal (call_expr);
1390
1391 return retval;
1392 }
1393
1394 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1395 for the function we use for block copies. */
1396
1397 static GTY(()) tree block_move_fn;
1398
1399 void
1400 init_block_move_fn (const char *asmspec)
1401 {
1402 if (!block_move_fn)
1403 {
1404 tree args, fn, attrs, attr_args;
1405
1406 fn = get_identifier ("memcpy");
1407 args = build_function_type_list (ptr_type_node, ptr_type_node,
1408 const_ptr_type_node, sizetype,
1409 NULL_TREE);
1410
1411 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1412 DECL_EXTERNAL (fn) = 1;
1413 TREE_PUBLIC (fn) = 1;
1414 DECL_ARTIFICIAL (fn) = 1;
1415 TREE_NOTHROW (fn) = 1;
1416 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1417 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1418
1419 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1420 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1421
1422 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1423
1424 block_move_fn = fn;
1425 }
1426
1427 if (asmspec)
1428 set_user_assembler_name (block_move_fn, asmspec);
1429 }
1430
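/* Illustrative sketch (not part of expr.c): a target or front end that wants
   block copies routed through a differently named routine can register that
   name once; passing NULL keeps the default "memcpy" declaration.  The asm
   name below is hypothetical.  */
#if 0
static void
example_init_block_move_fn (void)
{
  init_block_move_fn ("__example_memcpy");
}
#endif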
1431 static tree
1432 emit_block_move_libcall_fn (int for_call)
1433 {
1434 static bool emitted_extern;
1435
1436 if (!block_move_fn)
1437 init_block_move_fn (NULL);
1438
1439 if (for_call && !emitted_extern)
1440 {
1441 emitted_extern = true;
1442 make_decl_rtl (block_move_fn);
1443 }
1444
1445 return block_move_fn;
1446 }
1447
1448 /* A subroutine of emit_block_move. Copy the data via an explicit
1449 loop. This is used only when libcalls are forbidden. */
1450 /* ??? It'd be nice to copy in hunks larger than QImode. */
1451
1452 static void
1453 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1454 unsigned int align ATTRIBUTE_UNUSED)
1455 {
1456 rtx_code_label *cmp_label, *top_label;
1457 rtx iter, x_addr, y_addr, tmp;
1458 machine_mode x_addr_mode = get_address_mode (x);
1459 machine_mode y_addr_mode = get_address_mode (y);
1460 machine_mode iter_mode;
1461
1462 iter_mode = GET_MODE (size);
1463 if (iter_mode == VOIDmode)
1464 iter_mode = word_mode;
1465
1466 top_label = gen_label_rtx ();
1467 cmp_label = gen_label_rtx ();
1468 iter = gen_reg_rtx (iter_mode);
1469
1470 emit_move_insn (iter, const0_rtx);
1471
1472 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1473 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1474 do_pending_stack_adjust ();
1475
1476 emit_jump (cmp_label);
1477 emit_label (top_label);
1478
1479 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1480 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1481
1482 if (x_addr_mode != y_addr_mode)
1483 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1484 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1485
1486 x = change_address (x, QImode, x_addr);
1487 y = change_address (y, QImode, y_addr);
1488
1489 emit_move_insn (x, y);
1490
1491 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1492 true, OPTAB_LIB_WIDEN);
1493 if (tmp != iter)
1494 emit_move_insn (iter, tmp);
1495
1496 emit_label (cmp_label);
1497
1498 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1499 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1500 }
1501 \f
1502 /* Copy all or part of a value X into registers starting at REGNO.
1503 The number of registers to be filled is NREGS. */
1504
1505 void
1506 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1507 {
1508 int i;
1509 #ifdef HAVE_load_multiple
1510 rtx pat;
1511 rtx_insn *last;
1512 #endif
1513
1514 if (nregs == 0)
1515 return;
1516
1517 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1518 x = validize_mem (force_const_mem (mode, x));
1519
1520 /* See if the machine can do this with a load multiple insn. */
1521 #ifdef HAVE_load_multiple
1522 if (HAVE_load_multiple)
1523 {
1524 last = get_last_insn ();
1525 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1526 GEN_INT (nregs));
1527 if (pat)
1528 {
1529 emit_insn (pat);
1530 return;
1531 }
1532 else
1533 delete_insns_since (last);
1534 }
1535 #endif
1536
1537 for (i = 0; i < nregs; i++)
1538 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1539 operand_subword_force (x, i, mode));
1540 }
1541
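/* Illustrative sketch (not part of expr.c): spreading a BLKmode value held
   in memory across two consecutive word-sized hard registers, e.g. when
   loading an argument that is passed in registers.  The register number 0
   is hypothetical.  */
#if 0
static void
example_move_block_to_reg (rtx mem_value)
{
  /* Fill hard regs 0 and 1 from MEM_VALUE, which has mode BLKmode.  */
  move_block_to_reg (0, mem_value, 2, BLKmode);
}
#endif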
1542 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1543 The number of registers to be filled is NREGS. */
1544
1545 void
1546 move_block_from_reg (int regno, rtx x, int nregs)
1547 {
1548 int i;
1549
1550 if (nregs == 0)
1551 return;
1552
1553 /* See if the machine can do this with a store multiple insn. */
1554 #ifdef HAVE_store_multiple
1555 if (HAVE_store_multiple)
1556 {
1557 rtx_insn *last = get_last_insn ();
1558 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1559 GEN_INT (nregs));
1560 if (pat)
1561 {
1562 emit_insn (pat);
1563 return;
1564 }
1565 else
1566 delete_insns_since (last);
1567 }
1568 #endif
1569
1570 for (i = 0; i < nregs; i++)
1571 {
1572 rtx tem = operand_subword (x, i, 1, BLKmode);
1573
1574 gcc_assert (tem);
1575
1576 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1577 }
1578 }
1579
1580 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1581 ORIG, where ORIG is a non-consecutive group of registers represented by
1582 a PARALLEL. The clone is identical to the original except in that the
1583 original set of registers is replaced by a new set of pseudo registers.
1584 The new set has the same modes as the original set. */
1585
1586 rtx
1587 gen_group_rtx (rtx orig)
1588 {
1589 int i, length;
1590 rtx *tmps;
1591
1592 gcc_assert (GET_CODE (orig) == PARALLEL);
1593
1594 length = XVECLEN (orig, 0);
1595 tmps = XALLOCAVEC (rtx, length);
1596
1597 /* Skip a NULL entry in first slot. */
1598 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1599
1600 if (i)
1601 tmps[0] = 0;
1602
1603 for (; i < length; i++)
1604 {
1605 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1606 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1607
1608 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1609 }
1610
1611 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1612 }
1613
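/* Illustrative sketch (not part of expr.c): a PARALLEL describing a value
   whose first 8 bytes live in hard register 0 and next 8 bytes in hard
   register 1 (register numbers hypothetical), and a pseudo-register clone
   of it produced by gen_group_rtx.  */
#if 0
static void
example_gen_group_rtx (void)
{
  rtx group
    = gen_rtx_PARALLEL (BLKmode,
			gen_rtvec (2,
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, 0),
						      GEN_INT (0)),
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, 1),
						      GEN_INT (8))));
  rtx pseudos = gen_group_rtx (group);
  (void) pseudos;
}
#endif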
1614 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1615 except that values are placed in TMPS[i], and must later be moved
1616 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1617
1618 static void
1619 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1620 {
1621 rtx src;
1622 int start, i;
1623 machine_mode m = GET_MODE (orig_src);
1624
1625 gcc_assert (GET_CODE (dst) == PARALLEL);
1626
1627 if (m != VOIDmode
1628 && !SCALAR_INT_MODE_P (m)
1629 && !MEM_P (orig_src)
1630 && GET_CODE (orig_src) != CONCAT)
1631 {
1632 machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1633 if (imode == BLKmode)
1634 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1635 else
1636 src = gen_reg_rtx (imode);
1637 if (imode != BLKmode)
1638 src = gen_lowpart (GET_MODE (orig_src), src);
1639 emit_move_insn (src, orig_src);
1640 /* ...and back again. */
1641 if (imode != BLKmode)
1642 src = gen_lowpart (imode, src);
1643 emit_group_load_1 (tmps, dst, src, type, ssize);
1644 return;
1645 }
1646
1647 /* Check for a NULL entry, used to indicate that the parameter goes
1648 both on the stack and in registers. */
1649 if (XEXP (XVECEXP (dst, 0, 0), 0))
1650 start = 0;
1651 else
1652 start = 1;
1653
1654 /* Process the pieces. */
1655 for (i = start; i < XVECLEN (dst, 0); i++)
1656 {
1657 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1658 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1659 unsigned int bytelen = GET_MODE_SIZE (mode);
1660 int shift = 0;
1661
1662 /* Handle trailing fragments that run over the size of the struct. */
1663 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1664 {
1665 /* Arrange to shift the fragment to where it belongs.
1666 extract_bit_field loads to the lsb of the reg. */
1667 if (
1668 #ifdef BLOCK_REG_PADDING
1669 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1670 == (BYTES_BIG_ENDIAN ? upward : downward)
1671 #else
1672 BYTES_BIG_ENDIAN
1673 #endif
1674 )
1675 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1676 bytelen = ssize - bytepos;
1677 gcc_assert (bytelen > 0);
1678 }
1679
1680 /* If we won't be loading directly from memory, protect the real source
1681 from strange tricks we might play; but make sure that the source can
1682 be loaded directly into the destination. */
1683 src = orig_src;
1684 if (!MEM_P (orig_src)
1685 && (!CONSTANT_P (orig_src)
1686 || (GET_MODE (orig_src) != mode
1687 && GET_MODE (orig_src) != VOIDmode)))
1688 {
1689 if (GET_MODE (orig_src) == VOIDmode)
1690 src = gen_reg_rtx (mode);
1691 else
1692 src = gen_reg_rtx (GET_MODE (orig_src));
1693
1694 emit_move_insn (src, orig_src);
1695 }
1696
1697 /* Optimize the access just a bit. */
1698 if (MEM_P (src)
1699 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1700 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1701 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1702 && bytelen == GET_MODE_SIZE (mode))
1703 {
1704 tmps[i] = gen_reg_rtx (mode);
1705 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1706 }
1707 else if (COMPLEX_MODE_P (mode)
1708 && GET_MODE (src) == mode
1709 && bytelen == GET_MODE_SIZE (mode))
1710 /* Let emit_move_complex do the bulk of the work. */
1711 tmps[i] = src;
1712 else if (GET_CODE (src) == CONCAT)
1713 {
1714 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1715 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1716
1717 if ((bytepos == 0 && bytelen == slen0)
1718 || (bytepos != 0 && bytepos + bytelen <= slen))
1719 {
1720 /* The following assumes that the concatenated objects all
1721 have the same size. In this case, a simple calculation
1722 can be used to determine the object and the bit field
1723 to be extracted. */
1724 tmps[i] = XEXP (src, bytepos / slen0);
1725 if (! CONSTANT_P (tmps[i])
1726 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1727 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1728 (bytepos % slen0) * BITS_PER_UNIT,
1729 1, NULL_RTX, mode, mode);
1730 }
1731 else
1732 {
1733 rtx mem;
1734
1735 gcc_assert (!bytepos);
1736 mem = assign_stack_temp (GET_MODE (src), slen);
1737 emit_move_insn (mem, src);
1738 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1739 0, 1, NULL_RTX, mode, mode);
1740 }
1741 }
1742 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1743 SIMD register, which is currently broken. Until we get GCC
1744 to emit proper RTL for these cases, let's dump to memory. */
1745 else if (VECTOR_MODE_P (GET_MODE (dst))
1746 && REG_P (src))
1747 {
1748 int slen = GET_MODE_SIZE (GET_MODE (src));
1749 rtx mem;
1750
1751 mem = assign_stack_temp (GET_MODE (src), slen);
1752 emit_move_insn (mem, src);
1753 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1754 }
1755 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1756 && XVECLEN (dst, 0) > 1)
1757 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1758 else if (CONSTANT_P (src))
1759 {
1760 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1761
1762 if (len == ssize)
1763 tmps[i] = src;
1764 else
1765 {
1766 rtx first, second;
1767
1768 /* TODO: const_wide_int can have sizes other than this... */
1769 gcc_assert (2 * len == ssize);
1770 split_double (src, &first, &second);
1771 if (i)
1772 tmps[i] = second;
1773 else
1774 tmps[i] = first;
1775 }
1776 }
1777 else if (REG_P (src) && GET_MODE (src) == mode)
1778 tmps[i] = src;
1779 else
1780 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1781 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1782 mode, mode);
1783
1784 if (shift)
1785 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1786 shift, tmps[i], 0);
1787 }
1788 }
1789
1790 /* Emit code to move a block SRC of type TYPE to a block DST,
1791 where DST is non-consecutive registers represented by a PARALLEL.
1792 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1793 if not known. */
1794
1795 void
1796 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1797 {
1798 rtx *tmps;
1799 int i;
1800
1801 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1802 emit_group_load_1 (tmps, dst, src, type, ssize);
1803
1804 /* Copy the extracted pieces into the proper (probable) hard regs. */
1805 for (i = 0; i < XVECLEN (dst, 0); i++)
1806 {
1807 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1808 if (d == NULL)
1809 continue;
1810 emit_move_insn (d, tmps[i]);
1811 }
1812 }
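
/* A minimal usage sketch for emit_group_load (not tied to any real ABI):
   build a PARALLEL that says "this aggregate lives in two DImode
   registers at byte offsets 0 and 8" and load it from a MEM.  The hard
   register numbers 0 and 1 are hypothetical placeholders.  */

static void
emit_group_load_sketch (rtx src_mem, tree type)
{
  /* Each element pairs a destination register with the byte offset of
     the piece it receives.  */
  rtx dst = gen_rtx_PARALLEL
    (BLKmode,
     gen_rtvec (2,
                gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 0),
                                   GEN_INT (0)),
                gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 1),
                                   GEN_INT (GET_MODE_SIZE (DImode)))));

  /* Extract both pieces from SRC_MEM and move them into the registers
     named by DST.  */
  emit_group_load (dst, src_mem, type, int_size_in_bytes (type));
}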
1813
1814 /* Similar, but load SRC into new pseudos in a format that looks like
1815 PARALLEL. This can later be fed to emit_group_move to get things
1816 in the right place. */
1817
1818 rtx
1819 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1820 {
1821 rtvec vec;
1822 int i;
1823
1824 vec = rtvec_alloc (XVECLEN (parallel, 0));
1825 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1826
1827 /* Convert the vector to look just like the original PARALLEL, except
1828 with the computed values. */
1829 for (i = 0; i < XVECLEN (parallel, 0); i++)
1830 {
1831 rtx e = XVECEXP (parallel, 0, i);
1832 rtx d = XEXP (e, 0);
1833
1834 if (d)
1835 {
1836 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1837 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1838 }
1839 RTVEC_ELT (vec, i) = e;
1840 }
1841
1842 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1843 }
1844
1845 /* Emit code to move a block SRC to block DST, where SRC and DST are
1846 non-consecutive groups of registers, each represented by a PARALLEL. */
1847
1848 void
1849 emit_group_move (rtx dst, rtx src)
1850 {
1851 int i;
1852
1853 gcc_assert (GET_CODE (src) == PARALLEL
1854 && GET_CODE (dst) == PARALLEL
1855 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1856
1857 /* Skip first entry if NULL. */
1858 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1859 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1860 XEXP (XVECEXP (src, 0, i), 0));
1861 }
1862
1863 /* Move a group of registers represented by a PARALLEL into pseudos. */
1864
1865 rtx
1866 emit_group_move_into_temps (rtx src)
1867 {
1868 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1869 int i;
1870
1871 for (i = 0; i < XVECLEN (src, 0); i++)
1872 {
1873 rtx e = XVECEXP (src, 0, i);
1874 rtx d = XEXP (e, 0);
1875
1876 if (d)
1877 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1878 RTVEC_ELT (vec, i) = e;
1879 }
1880
1881 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1882 }
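
/* A sketch of the two-step pattern the *_into_temps helpers enable:
   read the source into fresh pseudos first, then copy those pseudos
   into the (probable) hard registers once it is safe to clobber them.
   ARG_PARALLEL and SRC_MEM are hypothetical argument locations; the
   bookkeeping a real call expander would do is omitted.  */

static void
group_move_sketch (rtx arg_parallel, rtx src_mem, tree type)
{
  /* Step 1: pull the value out of SRC_MEM into new pseudos laid out
     like ARG_PARALLEL.  */
  rtx temps = emit_group_load_into_temps (arg_parallel, src_mem, type,
                                          int_size_in_bytes (type));

  /* ... other expansion work can happen here without touching the
     destination registers ...  */

  /* Step 2: copy the pseudos into the registers ARG_PARALLEL names.  */
  emit_group_move (arg_parallel, temps);
}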
1883
1884 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1885 where SRC is non-consecutive registers represented by a PARALLEL.
1886 SSIZE represents the total size of block ORIG_DST, or -1 if not
1887 known. */
1888
1889 void
1890 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1891 {
1892 rtx *tmps, dst;
1893 int start, finish, i;
1894 machine_mode m = GET_MODE (orig_dst);
1895
1896 gcc_assert (GET_CODE (src) == PARALLEL);
1897
1898 if (!SCALAR_INT_MODE_P (m)
1899 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1900 {
1901 machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1902 if (imode == BLKmode)
1903 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1904 else
1905 dst = gen_reg_rtx (imode);
1906 emit_group_store (dst, src, type, ssize);
1907 if (imode != BLKmode)
1908 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1909 emit_move_insn (orig_dst, dst);
1910 return;
1911 }
1912
1913 /* Check for a NULL entry, used to indicate that the parameter goes
1914 both on the stack and in registers. */
1915 if (XEXP (XVECEXP (src, 0, 0), 0))
1916 start = 0;
1917 else
1918 start = 1;
1919 finish = XVECLEN (src, 0);
1920
1921 tmps = XALLOCAVEC (rtx, finish);
1922
1923 /* Copy the (probable) hard regs into pseudos. */
1924 for (i = start; i < finish; i++)
1925 {
1926 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1927 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1928 {
1929 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1930 emit_move_insn (tmps[i], reg);
1931 }
1932 else
1933 tmps[i] = reg;
1934 }
1935
1936 /* If we won't be storing directly into memory, protect the real destination
1937 from strange tricks we might play. */
1938 dst = orig_dst;
1939 if (GET_CODE (dst) == PARALLEL)
1940 {
1941 rtx temp;
1942
1943 /* We can get a PARALLEL dst if there is a conditional expression in
1944 a return statement. In that case, the dst and src are the same,
1945 so no action is necessary. */
1946 if (rtx_equal_p (dst, src))
1947 return;
1948
1949 /* It is unclear if we can ever reach here, but we may as well handle
1950 it. Allocate a temporary, and split this into a store/load to/from
1951 the temporary. */
1952 temp = assign_stack_temp (GET_MODE (dst), ssize);
1953 emit_group_store (temp, src, type, ssize);
1954 emit_group_load (dst, temp, type, ssize);
1955 return;
1956 }
1957 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1958 {
1959 machine_mode outer = GET_MODE (dst);
1960 machine_mode inner;
1961 HOST_WIDE_INT bytepos;
1962 bool done = false;
1963 rtx temp;
1964
1965 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1966 dst = gen_reg_rtx (outer);
1967
1968 /* Make life a bit easier for combine. */
1969 /* If the first element of the vector is the low part
1970 of the destination mode, use a paradoxical subreg to
1971 initialize the destination. */
1972 if (start < finish)
1973 {
1974 inner = GET_MODE (tmps[start]);
1975 bytepos = subreg_lowpart_offset (inner, outer);
1976 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1977 {
1978 temp = simplify_gen_subreg (outer, tmps[start],
1979 inner, 0);
1980 if (temp)
1981 {
1982 emit_move_insn (dst, temp);
1983 done = true;
1984 start++;
1985 }
1986 }
1987 }
1988
1989 /* If the first element wasn't the low part, try the last. */
1990 if (!done
1991 && start < finish - 1)
1992 {
1993 inner = GET_MODE (tmps[finish - 1]);
1994 bytepos = subreg_lowpart_offset (inner, outer);
1995 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1996 {
1997 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1998 inner, 0);
1999 if (temp)
2000 {
2001 emit_move_insn (dst, temp);
2002 done = true;
2003 finish--;
2004 }
2005 }
2006 }
2007
2008 /* Otherwise, simply initialize the result to zero. */
2009 if (!done)
2010 emit_move_insn (dst, CONST0_RTX (outer));
2011 }
2012
2013 /* Process the pieces. */
2014 for (i = start; i < finish; i++)
2015 {
2016 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2017 machine_mode mode = GET_MODE (tmps[i]);
2018 unsigned int bytelen = GET_MODE_SIZE (mode);
2019 unsigned int adj_bytelen;
2020 rtx dest = dst;
2021
2022 /* Handle trailing fragments that run over the size of the struct. */
2023 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2024 adj_bytelen = ssize - bytepos;
2025 else
2026 adj_bytelen = bytelen;
2027
2028 if (GET_CODE (dst) == CONCAT)
2029 {
2030 if (bytepos + adj_bytelen
2031 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2032 dest = XEXP (dst, 0);
2033 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2034 {
2035 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2036 dest = XEXP (dst, 1);
2037 }
2038 else
2039 {
2040 machine_mode dest_mode = GET_MODE (dest);
2041 machine_mode tmp_mode = GET_MODE (tmps[i]);
2042
2043 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2044
2045 if (GET_MODE_ALIGNMENT (dest_mode)
2046 >= GET_MODE_ALIGNMENT (tmp_mode))
2047 {
2048 dest = assign_stack_temp (dest_mode,
2049 GET_MODE_SIZE (dest_mode));
2050 emit_move_insn (adjust_address (dest,
2051 tmp_mode,
2052 bytepos),
2053 tmps[i]);
2054 dst = dest;
2055 }
2056 else
2057 {
2058 dest = assign_stack_temp (tmp_mode,
2059 GET_MODE_SIZE (tmp_mode));
2060 emit_move_insn (dest, tmps[i]);
2061 dst = adjust_address (dest, dest_mode, bytepos);
2062 }
2063 break;
2064 }
2065 }
2066
2067 /* Handle trailing fragments that run over the size of the struct. */
2068 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2069 {
2070 /* store_bit_field always takes its value from the lsb.
2071 Move the fragment to the lsb if it's not already there. */
2072 if (
2073 #ifdef BLOCK_REG_PADDING
2074 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2075 == (BYTES_BIG_ENDIAN ? upward : downward)
2076 #else
2077 BYTES_BIG_ENDIAN
2078 #endif
2079 )
2080 {
2081 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2082 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2083 shift, tmps[i], 0);
2084 }
2085
2086 /* Make sure not to write past the end of the struct. */
2087 store_bit_field (dest,
2088 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2089 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2090 VOIDmode, tmps[i]);
2091 }
2092
2093 /* Optimize the access just a bit. */
2094 else if (MEM_P (dest)
2095 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2096 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2097 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2098 && bytelen == GET_MODE_SIZE (mode))
2099 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2100
2101 else
2102 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2103 0, 0, mode, tmps[i]);
2104 }
2105
2106 /* Copy from the pseudo into the (probable) hard reg. */
2107 if (orig_dst != dst)
2108 emit_move_insn (orig_dst, dst);
2109 }
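
/* A usage sketch for emit_group_store: spill a multi-register value
   described by a PARALLEL into a freshly allocated stack slot so the
   rest of the expander can address it as ordinary memory.  RETVAL and
   TYPE are assumed to describe the same aggregate.  */

static rtx
group_store_sketch (rtx retval, tree type)
{
  HOST_WIDE_INT size = int_size_in_bytes (type);
  rtx slot = assign_stack_temp (TYPE_MODE (type), size);

  set_mem_attributes (slot, type, 1);
  emit_group_store (slot, retval, type, size);
  return slot;
}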
2110
2111 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2112 of the value stored in X. */
2113
2114 rtx
2115 maybe_emit_group_store (rtx x, tree type)
2116 {
2117 machine_mode mode = TYPE_MODE (type);
2118 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2119 if (GET_CODE (x) == PARALLEL)
2120 {
2121 rtx result = gen_reg_rtx (mode);
2122 emit_group_store (result, x, type, int_size_in_bytes (type));
2123 return result;
2124 }
2125 return x;
2126 }
2127
2128 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2129
2130 This is used on targets that return BLKmode values in registers. */
2131
2132 void
2133 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2134 {
2135 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2136 rtx src = NULL, dst = NULL;
2137 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2138 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2139 machine_mode mode = GET_MODE (srcreg);
2140 machine_mode tmode = GET_MODE (target);
2141 machine_mode copy_mode;
2142
2143 /* BLKmode registers created in the back-end shouldn't have survived. */
2144 gcc_assert (mode != BLKmode);
2145
2146 /* If the structure doesn't take up a whole number of words, see whether
2147 SRCREG is padded on the left or on the right. If it's on the left,
2148 set PADDING_CORRECTION to the number of bits to skip.
2149
2150 In most ABIs, the structure will be returned at the least significant
2151 end of the register, which translates to right padding on little-endian
2152 targets and left padding on big-endian targets. The opposite
2153 holds if the structure is returned at the most significant
2154 end of the register. */
2155 if (bytes % UNITS_PER_WORD != 0
2156 && (targetm.calls.return_in_msb (type)
2157 ? !BYTES_BIG_ENDIAN
2158 : BYTES_BIG_ENDIAN))
2159 padding_correction
2160 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2161
2162 /* We can use a single move if we have an exact mode for the size. */
2163 else if (MEM_P (target)
2164 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2165 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2166 && bytes == GET_MODE_SIZE (mode))
2167 {
2168 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2169 return;
2170 }
2171
2172 /* And if we additionally have the same mode for a register. */
2173 else if (REG_P (target)
2174 && GET_MODE (target) == mode
2175 && bytes == GET_MODE_SIZE (mode))
2176 {
2177 emit_move_insn (target, srcreg);
2178 return;
2179 }
2180
2181 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2182 into a new pseudo which is a full word. */
2183 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2184 {
2185 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2186 mode = word_mode;
2187 }
2188
2189 /* Copy the structure BITSIZE bits at a time. If the target lives in
2190 memory, take care of not reading/writing past its end by selecting
2191 a copy mode suited to BITSIZE. This should always be possible given
2192 how it is computed.
2193
2194 If the target lives in register, make sure not to select a copy mode
2195 larger than the mode of the register.
2196
2197 We could probably emit more efficient code for machines which do not use
2198 strict alignment, but it doesn't seem worth the effort at the current
2199 time. */
2200
2201 copy_mode = word_mode;
2202 if (MEM_P (target))
2203 {
2204 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2205 if (mem_mode != BLKmode)
2206 copy_mode = mem_mode;
2207 }
2208 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2209 copy_mode = tmode;
2210
2211 for (bitpos = 0, xbitpos = padding_correction;
2212 bitpos < bytes * BITS_PER_UNIT;
2213 bitpos += bitsize, xbitpos += bitsize)
2214 {
2215 /* We need a new source operand each time xbitpos is on a
2216 word boundary and when xbitpos == padding_correction
2217 (the first time through). */
2218 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2219 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2220
2221 /* We need a new destination operand each time bitpos is on
2222 a word boundary. */
2223 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2224 dst = target;
2225 else if (bitpos % BITS_PER_WORD == 0)
2226 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2227
2228 /* Use xbitpos for the source extraction (right justified) and
2229 bitpos for the destination store (left justified). */
2230 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2231 extract_bit_field (src, bitsize,
2232 xbitpos % BITS_PER_WORD, 1,
2233 NULL_RTX, copy_mode, copy_mode));
2234 }
2235 }
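
/* A usage sketch for copy_blkmode_from_reg: VALUE_REG is assumed to be
   the (hypothetical) hard register holding a BLKmode return value;
   copying it into a stack temporary lets the caller treat the value as
   memory from then on.  */

static rtx
copy_blkmode_from_reg_sketch (rtx value_reg, tree type)
{
  rtx slot = assign_stack_temp (BLKmode, int_size_in_bytes (type));

  set_mem_attributes (slot, type, 1);
  copy_blkmode_from_reg (slot, value_reg, type);
  return slot;
}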
2236
2237 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2238 register if it contains any data, otherwise return null.
2239
2240 This is used on targets that return BLKmode values in registers. */
2241
2242 rtx
2243 copy_blkmode_to_reg (machine_mode mode, tree src)
2244 {
2245 int i, n_regs;
2246 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2247 unsigned int bitsize;
2248 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2249 machine_mode dst_mode;
2250
2251 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2252
2253 x = expand_normal (src);
2254
2255 bytes = int_size_in_bytes (TREE_TYPE (src));
2256 if (bytes == 0)
2257 return NULL_RTX;
2258
2259 /* If the structure doesn't take up a whole number of words, see
2260 whether the register value should be padded on the left or on
2261 the right. Set PADDING_CORRECTION to the number of padding
2262 bits needed on the left side.
2263
2264 In most ABIs, the structure will be returned at the least significant
2265 end of the register, which translates to right padding on little-endian
2266 targets and left padding on big-endian targets. The opposite
2267 holds if the structure is returned at the most significant
2268 end of the register. */
2269 if (bytes % UNITS_PER_WORD != 0
2270 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2271 ? !BYTES_BIG_ENDIAN
2272 : BYTES_BIG_ENDIAN))
2273 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2274 * BITS_PER_UNIT));
2275
2276 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2277 dst_words = XALLOCAVEC (rtx, n_regs);
2278 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2279
2280 /* Copy the structure BITSIZE bits at a time. */
2281 for (bitpos = 0, xbitpos = padding_correction;
2282 bitpos < bytes * BITS_PER_UNIT;
2283 bitpos += bitsize, xbitpos += bitsize)
2284 {
2285 /* We need a new destination pseudo each time xbitpos is
2286 on a word boundary and when xbitpos == padding_correction
2287 (the first time through). */
2288 if (xbitpos % BITS_PER_WORD == 0
2289 || xbitpos == padding_correction)
2290 {
2291 /* Generate an appropriate register. */
2292 dst_word = gen_reg_rtx (word_mode);
2293 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2294
2295 /* Clear the destination before we move anything into it. */
2296 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2297 }
2298
2299 /* We need a new source operand each time bitpos is on a word
2300 boundary. */
2301 if (bitpos % BITS_PER_WORD == 0)
2302 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2303
2304 /* Use bitpos for the source extraction (left justified) and
2305 xbitpos for the destination store (right justified). */
2306 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2307 0, 0, word_mode,
2308 extract_bit_field (src_word, bitsize,
2309 bitpos % BITS_PER_WORD, 1,
2310 NULL_RTX, word_mode, word_mode));
2311 }
2312
2313 if (mode == BLKmode)
2314 {
2315 /* Find the smallest integer mode large enough to hold the
2316 entire structure. */
2317 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2318 mode != VOIDmode;
2319 mode = GET_MODE_WIDER_MODE (mode))
2320 /* Have we found a large enough mode? */
2321 if (GET_MODE_SIZE (mode) >= bytes)
2322 break;
2323
2324 /* A suitable mode should have been found. */
2325 gcc_assert (mode != VOIDmode);
2326 }
2327
2328 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2329 dst_mode = word_mode;
2330 else
2331 dst_mode = mode;
2332 dst = gen_reg_rtx (dst_mode);
2333
2334 for (i = 0; i < n_regs; i++)
2335 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2336
2337 if (mode != dst_mode)
2338 dst = gen_lowpart (mode, dst);
2339
2340 return dst;
2341 }
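
/* A usage sketch for copy_blkmode_to_reg, mirroring how a `return' of a
   small BLKmode aggregate can be expanded: RESULT_REG is assumed to be
   the register (or register pair) in which the current function returns
   its value, and RETVAL the BLKmode expression being returned.  */

static void
copy_blkmode_to_reg_sketch (rtx result_reg, tree retval)
{
  rtx value = copy_blkmode_to_reg (GET_MODE (result_reg), retval);

  /* NULL means the aggregate is empty and there is nothing to move.  */
  if (value != NULL_RTX)
    emit_move_insn (result_reg, value);
}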
2342
2343 /* Add a USE expression for REG to the (possibly empty) list pointed
2344 to by CALL_FUSAGE. REG must denote a hard register. */
2345
2346 void
2347 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2348 {
2349 gcc_assert (REG_P (reg));
2350
2351 if (!HARD_REGISTER_P (reg))
2352 return;
2353
2354 *call_fusage
2355 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2356 }
2357
2358 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2359 to by CALL_FUSAGE. REG must denote a hard register. */
2360
2361 void
2362 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2363 {
2364 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2365
2366 *call_fusage
2367 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2368 }
2369
2370 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2371 starting at REGNO. All of these registers must be hard registers. */
2372
2373 void
2374 use_regs (rtx *call_fusage, int regno, int nregs)
2375 {
2376 int i;
2377
2378 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2379
2380 for (i = 0; i < nregs; i++)
2381 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2382 }
2383
2384 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2385 PARALLEL REGS. This is for calls that pass values in multiple
2386 non-contiguous locations. The Irix 6 ABI has examples of this. */
2387
2388 void
2389 use_group_regs (rtx *call_fusage, rtx regs)
2390 {
2391 int i;
2392
2393 for (i = 0; i < XVECLEN (regs, 0); i++)
2394 {
2395 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2396
2397 /* A NULL entry means the parameter goes both on the stack and in
2398 registers. This can also be a MEM for targets that pass values
2399 partially on the stack and partially in registers. */
2400 if (reg != 0 && REG_P (reg))
2401 use_reg (call_fusage, reg);
2402 }
2403 }
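
/* A sketch of how the use_* helpers above combine in a call expander:
   every hard register (or PARALLEL of registers) carrying an argument
   is recorded in a list that later becomes the emitted call's
   CALL_INSN_FUNCTION_USAGE.  ARG_REG and ARG_PARALLEL are hypothetical
   argument locations.  */

static rtx
call_fusage_sketch (rtx arg_reg, rtx arg_parallel)
{
  rtx call_fusage = NULL_RTX;

  /* An argument passed in a single hard register.  */
  use_reg (&call_fusage, arg_reg);

  /* An argument spread over several non-contiguous registers.  */
  use_group_regs (&call_fusage, arg_parallel);

  return call_fusage;
}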
2404
2405 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2406 assignment and the code of the expression on the RHS is CODE. Return
2407 NULL otherwise. */
2408
2409 static gimple
2410 get_def_for_expr (tree name, enum tree_code code)
2411 {
2412 gimple def_stmt;
2413
2414 if (TREE_CODE (name) != SSA_NAME)
2415 return NULL;
2416
2417 def_stmt = get_gimple_for_ssa_name (name);
2418 if (!def_stmt
2419 || gimple_assign_rhs_code (def_stmt) != code)
2420 return NULL;
2421
2422 return def_stmt;
2423 }
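
/* A sketch of the intended use of get_def_for_expr: while expanding an
   addition, peek at one operand's SSA definition to see whether it is
   itself a multiplication, e.g. to consider a fused multiply-add.  OP
   is assumed to be an operand of the expression being expanded; the
   helper name is illustrative only.  */

static gimple
defining_mult_sketch (tree op)
{
  /* Non-NULL only if OP is an SSA_NAME whose defining statement has the
     form OP = A * B; the factors are then available through
     gimple_assign_rhs1/rhs2 on the returned statement.  */
  return get_def_for_expr (op, MULT_EXPR);
}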
2424
2425 #ifdef HAVE_conditional_move
2426 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2427 assignment and the class of the expression on the RHS is CLASS. Return
2428 NULL otherwise. */
2429
2430 static gimple
2431 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2432 {
2433 gimple def_stmt;
2434
2435 if (TREE_CODE (name) != SSA_NAME)
2436 return NULL;
2437
2438 def_stmt = get_gimple_for_ssa_name (name);
2439 if (!def_stmt
2440 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2441 return NULL;
2442
2443 return def_stmt;
2444 }
2445 #endif
2446 \f
2447
2448 /* Determine whether the LEN bytes generated by CONSTFUN can be
2449 stored to memory using several move instructions. CONSTFUNDATA is
2450 a pointer which will be passed as argument in every CONSTFUN call.
2451 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2452 a memset operation and false if it's a copy of a constant string.
2453 Return nonzero if a call to store_by_pieces should succeed. */
2454
2455 int
2456 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2457 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2458 void *constfundata, unsigned int align, bool memsetp)
2459 {
2460 unsigned HOST_WIDE_INT l;
2461 unsigned int max_size;
2462 HOST_WIDE_INT offset = 0;
2463 machine_mode mode;
2464 enum insn_code icode;
2465 int reverse;
2466 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2467 rtx cst ATTRIBUTE_UNUSED;
2468
2469 if (len == 0)
2470 return 1;
2471
2472 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2473 memsetp
2474 ? SET_BY_PIECES
2475 : STORE_BY_PIECES,
2476 optimize_insn_for_speed_p ()))
2477 return 0;
2478
2479 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2480
2481 /* We would first store what we can in the largest integer mode, then go to
2482 successively smaller modes. */
2483
2484 for (reverse = 0;
2485 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2486 reverse++)
2487 {
2488 l = len;
2489 max_size = STORE_MAX_PIECES + 1;
2490 while (max_size > 1 && l > 0)
2491 {
2492 mode = widest_int_mode_for_size (max_size);
2493
2494 if (mode == VOIDmode)
2495 break;
2496
2497 icode = optab_handler (mov_optab, mode);
2498 if (icode != CODE_FOR_nothing
2499 && align >= GET_MODE_ALIGNMENT (mode))
2500 {
2501 unsigned int size = GET_MODE_SIZE (mode);
2502
2503 while (l >= size)
2504 {
2505 if (reverse)
2506 offset -= size;
2507
2508 cst = (*constfun) (constfundata, offset, mode);
2509 if (!targetm.legitimate_constant_p (mode, cst))
2510 return 0;
2511
2512 if (!reverse)
2513 offset += size;
2514
2515 l -= size;
2516 }
2517 }
2518
2519 max_size = GET_MODE_SIZE (mode);
2520 }
2521
2522 /* The code above should have handled everything. */
2523 gcc_assert (!l);
2524 }
2525
2526 return 1;
2527 }
2528
2529 /* Generate several move instructions to store LEN bytes generated by
2530 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2531 pointer which will be passed as argument in every CONSTFUN call.
2532 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2533 a memset operation and false if it's a copy of a constant string.
2534 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2535 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
2536 stpcpy. */
2537
2538 rtx
2539 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2540 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2541 void *constfundata, unsigned int align, bool memsetp, int endp)
2542 {
2543 machine_mode to_addr_mode = get_address_mode (to);
2544 struct store_by_pieces_d data;
2545
2546 if (len == 0)
2547 {
2548 gcc_assert (endp != 2);
2549 return to;
2550 }
2551
2552 gcc_assert (targetm.use_by_pieces_infrastructure_p
2553 (len, align,
2554 memsetp
2555 ? SET_BY_PIECES
2556 : STORE_BY_PIECES,
2557 optimize_insn_for_speed_p ()));
2558
2559 data.constfun = constfun;
2560 data.constfundata = constfundata;
2561 data.len = len;
2562 data.to = to;
2563 store_by_pieces_1 (&data, align);
2564 if (endp)
2565 {
2566 rtx to1;
2567
2568 gcc_assert (!data.reverse);
2569 if (data.autinc_to)
2570 {
2571 if (endp == 2)
2572 {
2573 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2574 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2575 else
2576 data.to_addr = copy_to_mode_reg (to_addr_mode,
2577 plus_constant (to_addr_mode,
2578 data.to_addr,
2579 -1));
2580 }
2581 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2582 data.offset);
2583 }
2584 else
2585 {
2586 if (endp == 2)
2587 --data.offset;
2588 to1 = adjust_address (data.to, QImode, data.offset);
2589 }
2590 return to1;
2591 }
2592 else
2593 return data.to;
2594 }
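
/* A usage sketch pairing can_store_by_pieces with store_by_pieces to
   expand a copy of a short constant string into DEST.  The callback
   mirrors the c_readstr-based readers used by the builtin expanders;
   STR is assumed to be at least LEN bytes long, and nothing here is
   tied to a particular target.  */

static rtx
read_str_sketch (void *data, HOST_WIDE_INT offset, machine_mode mode)
{
  /* Return the MODE-sized chunk of the string starting at OFFSET.  */
  return c_readstr ((const char *) data + offset, mode);
}

static rtx
store_const_str_sketch (rtx dest, const char *str, unsigned HOST_WIDE_INT len)
{
  unsigned int align = MEM_ALIGN (dest);

  /* Bail out if the target cannot do this with plain stores.  */
  if (!can_store_by_pieces (len, read_str_sketch,
                            CONST_CAST (char *, str), align, false))
    return NULL_RTX;

  /* ENDP == 0: return DEST itself rather than a pointer past the end.  */
  return store_by_pieces (dest, len, read_str_sketch,
                          CONST_CAST (char *, str), align, false, 0);
}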
2595
2596 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2597 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2598
2599 static void
2600 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2601 {
2602 struct store_by_pieces_d data;
2603
2604 if (len == 0)
2605 return;
2606
2607 data.constfun = clear_by_pieces_1;
2608 data.constfundata = NULL;
2609 data.len = len;
2610 data.to = to;
2611 store_by_pieces_1 (&data, align);
2612 }
2613
2614 /* Callback routine for clear_by_pieces.
2615 Return const0_rtx unconditionally. */
2616
2617 static rtx
2618 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2619 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2620 machine_mode mode ATTRIBUTE_UNUSED)
2621 {
2622 return const0_rtx;
2623 }
2624
2625 /* Subroutine of clear_by_pieces and store_by_pieces.
2626 Generate several move instructions to store LEN bytes of block TO. (A MEM
2627 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2628
2629 static void
2630 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2631 unsigned int align ATTRIBUTE_UNUSED)
2632 {
2633 machine_mode to_addr_mode = get_address_mode (data->to);
2634 rtx to_addr = XEXP (data->to, 0);
2635 unsigned int max_size = STORE_MAX_PIECES + 1;
2636 enum insn_code icode;
2637
2638 data->offset = 0;
2639 data->to_addr = to_addr;
2640 data->autinc_to
2641 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2642 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2643
2644 data->explicit_inc_to = 0;
2645 data->reverse
2646 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2647 if (data->reverse)
2648 data->offset = data->len;
2649
2650 /* If storing requires more than two move insns,
2651 copy addresses to registers (to make displacements shorter)
2652 and use post-increment if available. */
2653 if (!data->autinc_to
2654 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2655 {
2656 /* Determine the main mode we'll be using.
2657 MODE might not be used depending on the definitions of the
2658 USE_* macros below. */
2659 machine_mode mode ATTRIBUTE_UNUSED
2660 = widest_int_mode_for_size (max_size);
2661
2662 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2663 {
2664 data->to_addr = copy_to_mode_reg (to_addr_mode,
2665 plus_constant (to_addr_mode,
2666 to_addr,
2667 data->len));
2668 data->autinc_to = 1;
2669 data->explicit_inc_to = -1;
2670 }
2671
2672 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2673 && ! data->autinc_to)
2674 {
2675 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2676 data->autinc_to = 1;
2677 data->explicit_inc_to = 1;
2678 }
2679
2680 if ( !data->autinc_to && CONSTANT_P (to_addr))
2681 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2682 }
2683
2684 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2685
2686 /* First store what we can in the largest integer mode, then go to
2687 successively smaller modes. */
2688
2689 while (max_size > 1 && data->len > 0)
2690 {
2691 machine_mode mode = widest_int_mode_for_size (max_size);
2692
2693 if (mode == VOIDmode)
2694 break;
2695
2696 icode = optab_handler (mov_optab, mode);
2697 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2698 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2699
2700 max_size = GET_MODE_SIZE (mode);
2701 }
2702
2703 /* The code above should have handled everything. */
2704 gcc_assert (!data->len);
2705 }
2706
2707 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2708 with move instructions for mode MODE. GENFUN is the gen_... function
2709 to make a move insn for that mode. DATA has all the other info. */
2710
2711 static void
2712 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2713 struct store_by_pieces_d *data)
2714 {
2715 unsigned int size = GET_MODE_SIZE (mode);
2716 rtx to1, cst;
2717
2718 while (data->len >= size)
2719 {
2720 if (data->reverse)
2721 data->offset -= size;
2722
2723 if (data->autinc_to)
2724 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2725 data->offset);
2726 else
2727 to1 = adjust_address (data->to, mode, data->offset);
2728
2729 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2730 emit_insn (gen_add2_insn (data->to_addr,
2731 gen_int_mode (-(HOST_WIDE_INT) size,
2732 GET_MODE (data->to_addr))));
2733
2734 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2735 emit_insn ((*genfun) (to1, cst));
2736
2737 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2738 emit_insn (gen_add2_insn (data->to_addr,
2739 gen_int_mode (size,
2740 GET_MODE (data->to_addr))));
2741
2742 if (! data->reverse)
2743 data->offset += size;
2744
2745 data->len -= size;
2746 }
2747 }
2748 \f
2749 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2750 its length in bytes. */
2751
2752 rtx
2753 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2754 unsigned int expected_align, HOST_WIDE_INT expected_size,
2755 unsigned HOST_WIDE_INT min_size,
2756 unsigned HOST_WIDE_INT max_size,
2757 unsigned HOST_WIDE_INT probable_max_size)
2758 {
2759 machine_mode mode = GET_MODE (object);
2760 unsigned int align;
2761
2762 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2763
2764 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2765 just move a zero. Otherwise, do this a piece at a time. */
2766 if (mode != BLKmode
2767 && CONST_INT_P (size)
2768 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2769 {
2770 rtx zero = CONST0_RTX (mode);
2771 if (zero != NULL)
2772 {
2773 emit_move_insn (object, zero);
2774 return NULL;
2775 }
2776
2777 if (COMPLEX_MODE_P (mode))
2778 {
2779 zero = CONST0_RTX (GET_MODE_INNER (mode));
2780 if (zero != NULL)
2781 {
2782 write_complex_part (object, zero, 0);
2783 write_complex_part (object, zero, 1);
2784 return NULL;
2785 }
2786 }
2787 }
2788
2789 if (size == const0_rtx)
2790 return NULL;
2791
2792 align = MEM_ALIGN (object);
2793
2794 if (CONST_INT_P (size)
2795 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2796 CLEAR_BY_PIECES,
2797 optimize_insn_for_speed_p ()))
2798 clear_by_pieces (object, INTVAL (size), align);
2799 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2800 expected_align, expected_size,
2801 min_size, max_size, probable_max_size))
2802 ;
2803 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2804 return set_storage_via_libcall (object, size, const0_rtx,
2805 method == BLOCK_OP_TAILCALL);
2806 else
2807 gcc_unreachable ();
2808
2809 return NULL;
2810 }
2811
2812 rtx
2813 clear_storage (rtx object, rtx size, enum block_op_methods method)
2814 {
2815 unsigned HOST_WIDE_INT max, min = 0;
2816 if (GET_CODE (size) == CONST_INT)
2817 min = max = UINTVAL (size);
2818 else
2819 max = GET_MODE_MASK (GET_MODE (size));
2820 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2821 }
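
/* A usage sketch for clear_storage: zero a freshly allocated BLKmode
   stack temporary of SIZE bytes.  BLOCK_OP_NORMAL simply allows a
   libcall to memset when no cheaper expansion applies.  */

static rtx
clear_temp_sketch (HOST_WIDE_INT size)
{
  rtx slot = assign_stack_temp (BLKmode, size);

  clear_storage (slot, GEN_INT (size), BLOCK_OP_NORMAL);
  return slot;
}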
2822
2823
2824 /* A subroutine of clear_storage. Expand a call to memset.
2825 Return the return value of memset, 0 otherwise. */
2826
2827 rtx
2828 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2829 {
2830 tree call_expr, fn, object_tree, size_tree, val_tree;
2831 machine_mode size_mode;
2832 rtx retval;
2833
2834 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2835 place those new pseudos into a VAR_DECL and use them later. */
2836
2837 object = copy_addr_to_reg (XEXP (object, 0));
2838
2839 size_mode = TYPE_MODE (sizetype);
2840 size = convert_to_mode (size_mode, size, 1);
2841 size = copy_to_mode_reg (size_mode, size);
2842
2843 /* It is incorrect to use the libcall calling conventions to call
2844 memset in this context. This could be a user call to memset and
2845 the user may wish to examine the return value from memset. For
2846 targets where libcalls and normal calls have different conventions
2847 for returning pointers, we could end up generating incorrect code. */
2848
2849 object_tree = make_tree (ptr_type_node, object);
2850 if (!CONST_INT_P (val))
2851 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2852 size_tree = make_tree (sizetype, size);
2853 val_tree = make_tree (integer_type_node, val);
2854
2855 fn = clear_storage_libcall_fn (true);
2856 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2857 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2858
2859 retval = expand_normal (call_expr);
2860
2861 return retval;
2862 }
2863
2864 /* A subroutine of set_storage_via_libcall. Create the tree node
2865 for the function we use for block clears. */
2866
2867 tree block_clear_fn;
2868
2869 void
2870 init_block_clear_fn (const char *asmspec)
2871 {
2872 if (!block_clear_fn)
2873 {
2874 tree fn, args;
2875
2876 fn = get_identifier ("memset");
2877 args = build_function_type_list (ptr_type_node, ptr_type_node,
2878 integer_type_node, sizetype,
2879 NULL_TREE);
2880
2881 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2882 DECL_EXTERNAL (fn) = 1;
2883 TREE_PUBLIC (fn) = 1;
2884 DECL_ARTIFICIAL (fn) = 1;
2885 TREE_NOTHROW (fn) = 1;
2886 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2887 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2888
2889 block_clear_fn = fn;
2890 }
2891
2892 if (asmspec)
2893 set_user_assembler_name (block_clear_fn, asmspec);
2894 }
2895
2896 static tree
2897 clear_storage_libcall_fn (int for_call)
2898 {
2899 static bool emitted_extern;
2900
2901 if (!block_clear_fn)
2902 init_block_clear_fn (NULL);
2903
2904 if (for_call && !emitted_extern)
2905 {
2906 emitted_extern = true;
2907 make_decl_rtl (block_clear_fn);
2908 }
2909
2910 return block_clear_fn;
2911 }
2912 \f
2913 /* Expand a setmem pattern; return true if successful. */
2914
2915 bool
2916 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2917 unsigned int expected_align, HOST_WIDE_INT expected_size,
2918 unsigned HOST_WIDE_INT min_size,
2919 unsigned HOST_WIDE_INT max_size,
2920 unsigned HOST_WIDE_INT probable_max_size)
2921 {
2922 /* Try the most limited insn first, because there's no point
2923 including more than one in the machine description unless
2924 the more limited one has some advantage. */
2925
2926 machine_mode mode;
2927
2928 if (expected_align < align)
2929 expected_align = align;
2930 if (expected_size != -1)
2931 {
2932 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2933 expected_size = max_size;
2934 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2935 expected_size = min_size;
2936 }
2937
2938 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2939 mode = GET_MODE_WIDER_MODE (mode))
2940 {
2941 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2942
2943 if (code != CODE_FOR_nothing
2944 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2945 here because if SIZE is less than the mode mask, as it is
2946 returned by the macro, it will definitely be less than the
2947 actual mode mask. Since SIZE is within the Pmode address
2948 space, we limit MODE to Pmode. */
2949 && ((CONST_INT_P (size)
2950 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2951 <= (GET_MODE_MASK (mode) >> 1)))
2952 || max_size <= (GET_MODE_MASK (mode) >> 1)
2953 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2954 {
2955 struct expand_operand ops[9];
2956 unsigned int nops;
2957
2958 nops = insn_data[(int) code].n_generator_args;
2959 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2960
2961 create_fixed_operand (&ops[0], object);
2962 /* The check above guarantees that this size conversion is valid. */
2963 create_convert_operand_to (&ops[1], size, mode, true);
2964 create_convert_operand_from (&ops[2], val, byte_mode, true);
2965 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2966 if (nops >= 6)
2967 {
2968 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2969 create_integer_operand (&ops[5], expected_size);
2970 }
2971 if (nops >= 8)
2972 {
2973 create_integer_operand (&ops[6], min_size);
2974 /* If we cannot represent the maximal size,
2975 make the parameter NULL. */
2976 if ((HOST_WIDE_INT) max_size != -1)
2977 create_integer_operand (&ops[7], max_size);
2978 else
2979 create_fixed_operand (&ops[7], NULL);
2980 }
2981 if (nops == 9)
2982 {
2983 /* If we cannot represent the maximal size,
2984 make the parameter NULL. */
2985 if ((HOST_WIDE_INT) probable_max_size != -1)
2986 create_integer_operand (&ops[8], probable_max_size);
2987 else
2988 create_fixed_operand (&ops[8], NULL);
2989 }
2990 if (maybe_expand_insn (code, nops, ops))
2991 return true;
2992 }
2993 }
2994
2995 return false;
2996 }
2997
2998 \f
2999 /* Write to one of the components of the complex value CPLX. Write VAL to
3000 the real part if IMAG_P is false, and the imaginary part if it's true. */
3001
3002 void
3003 write_complex_part (rtx cplx, rtx val, bool imag_p)
3004 {
3005 machine_mode cmode;
3006 machine_mode imode;
3007 unsigned ibitsize;
3008
3009 if (GET_CODE (cplx) == CONCAT)
3010 {
3011 emit_move_insn (XEXP (cplx, imag_p), val);
3012 return;
3013 }
3014
3015 cmode = GET_MODE (cplx);
3016 imode = GET_MODE_INNER (cmode);
3017 ibitsize = GET_MODE_BITSIZE (imode);
3018
3019 /* For MEMs simplify_gen_subreg may generate an invalid new address
3020 because, e.g., the original address is considered mode-dependent
3021 by the target, which restricts simplify_subreg from invoking
3022 adjust_address_nv. Instead of preparing fallback support for an
3023 invalid address, we call adjust_address_nv directly. */
3024 if (MEM_P (cplx))
3025 {
3026 emit_move_insn (adjust_address_nv (cplx, imode,
3027 imag_p ? GET_MODE_SIZE (imode) : 0),
3028 val);
3029 return;
3030 }
3031
3032 /* If the sub-object is at least word sized, then we know that subregging
3033 will work. This special case is important, since store_bit_field
3034 wants to operate on integer modes, and there's rarely an OImode to
3035 correspond to TCmode. */
3036 if (ibitsize >= BITS_PER_WORD
3037 /* For hard regs we have exact predicates. Assume we can split
3038 the original object if it spans an even number of hard regs.
3039 This special case is important for SCmode on 64-bit platforms
3040 where the natural size of floating-point regs is 32-bit. */
3041 || (REG_P (cplx)
3042 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3043 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3044 {
3045 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3046 imag_p ? GET_MODE_SIZE (imode) : 0);
3047 if (part)
3048 {
3049 emit_move_insn (part, val);
3050 return;
3051 }
3052 else
3053 /* simplify_gen_subreg may fail for sub-word MEMs. */
3054 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3055 }
3056
3057 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3058 }
3059
3060 /* Extract one of the components of the complex value CPLX. Extract the
3061 real part if IMAG_P is false, and the imaginary part if it's true. */
3062
3063 static rtx
3064 read_complex_part (rtx cplx, bool imag_p)
3065 {
3066 machine_mode cmode, imode;
3067 unsigned ibitsize;
3068
3069 if (GET_CODE (cplx) == CONCAT)
3070 return XEXP (cplx, imag_p);
3071
3072 cmode = GET_MODE (cplx);
3073 imode = GET_MODE_INNER (cmode);
3074 ibitsize = GET_MODE_BITSIZE (imode);
3075
3076 /* Special case reads from complex constants that got spilled to memory. */
3077 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3078 {
3079 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3080 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3081 {
3082 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3083 if (CONSTANT_CLASS_P (part))
3084 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3085 }
3086 }
3087
3088 /* For MEMs simplify_gen_subreg may generate an invalid new address
3089 because, e.g., the original address is considered mode-dependent
3090 by the target, which restricts simplify_subreg from invoking
3091 adjust_address_nv. Instead of preparing fallback support for an
3092 invalid address, we call adjust_address_nv directly. */
3093 if (MEM_P (cplx))
3094 return adjust_address_nv (cplx, imode,
3095 imag_p ? GET_MODE_SIZE (imode) : 0);
3096
3097 /* If the sub-object is at least word sized, then we know that subregging
3098 will work. This special case is important, since extract_bit_field
3099 wants to operate on integer modes, and there's rarely an OImode to
3100 correspond to TCmode. */
3101 if (ibitsize >= BITS_PER_WORD
3102 /* For hard regs we have exact predicates. Assume we can split
3103 the original object if it spans an even number of hard regs.
3104 This special case is important for SCmode on 64-bit platforms
3105 where the natural size of floating-point regs is 32-bit. */
3106 || (REG_P (cplx)
3107 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3108 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3109 {
3110 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3111 imag_p ? GET_MODE_SIZE (imode) : 0);
3112 if (ret)
3113 return ret;
3114 else
3115 /* simplify_gen_subreg may fail for sub-word MEMs. */
3116 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3117 }
3118
3119 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3120 true, NULL_RTX, imode, imode);
3121 }
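
/* A sketch using read_complex_part and write_complex_part to expand a
   complex conjugate by hand: the real part is copied through and the
   imaginary part negated.  TARGET and OP are assumed to share the same
   complex mode; the negation goes through expand_unop from optabs.  */

static void
expand_conj_sketch (rtx target, rtx op)
{
  machine_mode imode = GET_MODE_INNER (GET_MODE (op));
  rtx re = read_complex_part (op, false);
  rtx im = read_complex_part (op, true);

  write_complex_part (target, re, false);
  write_complex_part (target,
                      expand_unop (imode, neg_optab, im, NULL_RTX, 0),
                      true);
}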
3122 \f
3123 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3124 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3125 represented in NEW_MODE. If FORCE is true, this will never happen, as
3126 we'll force-create a SUBREG if needed. */
3127
3128 static rtx
3129 emit_move_change_mode (machine_mode new_mode,
3130 machine_mode old_mode, rtx x, bool force)
3131 {
3132 rtx ret;
3133
3134 if (push_operand (x, GET_MODE (x)))
3135 {
3136 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3137 MEM_COPY_ATTRIBUTES (ret, x);
3138 }
3139 else if (MEM_P (x))
3140 {
3141 /* We don't have to worry about changing the address since the
3142 size in bytes is supposed to be the same. */
3143 if (reload_in_progress)
3144 {
3145 /* Copy the MEM to change the mode and move any
3146 substitutions from the old MEM to the new one. */
3147 ret = adjust_address_nv (x, new_mode, 0);
3148 copy_replacements (x, ret);
3149 }
3150 else
3151 ret = adjust_address (x, new_mode, 0);
3152 }
3153 else
3154 {
3155 /* Note that we do want simplify_subreg's behavior of validating
3156 that the new mode is ok for a hard register. If we were to use
3157 simplify_gen_subreg, we would create the subreg, but would
3158 probably run into the target not being able to implement it. */
3159 /* Except, of course, when FORCE is true, when this is exactly what
3160 we want. Which is needed for CCmodes on some targets. */
3161 if (force)
3162 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3163 else
3164 ret = simplify_subreg (new_mode, x, old_mode, 0);
3165 }
3166
3167 return ret;
3168 }
3169
3170 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3171 an integer mode of the same size as MODE. Returns the instruction
3172 emitted, or NULL if such a move could not be generated. */
3173
3174 static rtx_insn *
3175 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3176 {
3177 machine_mode imode;
3178 enum insn_code code;
3179
3180 /* There must exist a mode of the exact size we require. */
3181 imode = int_mode_for_mode (mode);
3182 if (imode == BLKmode)
3183 return NULL;
3184
3185 /* The target must support moves in this mode. */
3186 code = optab_handler (mov_optab, imode);
3187 if (code == CODE_FOR_nothing)
3188 return NULL;
3189
3190 x = emit_move_change_mode (imode, mode, x, force);
3191 if (x == NULL_RTX)
3192 return NULL;
3193 y = emit_move_change_mode (imode, mode, y, force);
3194 if (y == NULL_RTX)
3195 return NULL;
3196 return emit_insn (GEN_FCN (code) (x, y));
3197 }
3198
3199 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3200 Return an equivalent MEM that does not use an auto-increment. */
3201
3202 rtx
3203 emit_move_resolve_push (machine_mode mode, rtx x)
3204 {
3205 enum rtx_code code = GET_CODE (XEXP (x, 0));
3206 HOST_WIDE_INT adjust;
3207 rtx temp;
3208
3209 adjust = GET_MODE_SIZE (mode);
3210 #ifdef PUSH_ROUNDING
3211 adjust = PUSH_ROUNDING (adjust);
3212 #endif
3213 if (code == PRE_DEC || code == POST_DEC)
3214 adjust = -adjust;
3215 else if (code == PRE_MODIFY || code == POST_MODIFY)
3216 {
3217 rtx expr = XEXP (XEXP (x, 0), 1);
3218 HOST_WIDE_INT val;
3219
3220 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3221 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3222 val = INTVAL (XEXP (expr, 1));
3223 if (GET_CODE (expr) == MINUS)
3224 val = -val;
3225 gcc_assert (adjust == val || adjust == -val);
3226 adjust = val;
3227 }
3228
3229 /* Do not use anti_adjust_stack, since we don't want to update
3230 stack_pointer_delta. */
3231 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3232 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3233 0, OPTAB_LIB_WIDEN);
3234 if (temp != stack_pointer_rtx)
3235 emit_move_insn (stack_pointer_rtx, temp);
3236
3237 switch (code)
3238 {
3239 case PRE_INC:
3240 case PRE_DEC:
3241 case PRE_MODIFY:
3242 temp = stack_pointer_rtx;
3243 break;
3244 case POST_INC:
3245 case POST_DEC:
3246 case POST_MODIFY:
3247 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3248 break;
3249 default:
3250 gcc_unreachable ();
3251 }
3252
3253 return replace_equiv_address (x, temp);
3254 }
3255
3256 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3257 X is known to satisfy push_operand, and MODE is known to be complex.
3258 Returns the last instruction emitted. */
3259
3260 rtx_insn *
3261 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3262 {
3263 machine_mode submode = GET_MODE_INNER (mode);
3264 bool imag_first;
3265
3266 #ifdef PUSH_ROUNDING
3267 unsigned int submodesize = GET_MODE_SIZE (submode);
3268
3269 /* If we are pushing to the stack but the size is smaller than what the
3270 machine can push exactly, we need to use move instructions. */
3271 if (PUSH_ROUNDING (submodesize) != submodesize)
3272 {
3273 x = emit_move_resolve_push (mode, x);
3274 return emit_move_insn (x, y);
3275 }
3276 #endif
3277
3278 /* Note that the real part always precedes the imag part in memory
3279 regardless of the machine's endianness. */
3280 switch (GET_CODE (XEXP (x, 0)))
3281 {
3282 case PRE_DEC:
3283 case POST_DEC:
3284 imag_first = true;
3285 break;
3286 case PRE_INC:
3287 case POST_INC:
3288 imag_first = false;
3289 break;
3290 default:
3291 gcc_unreachable ();
3292 }
3293
3294 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3295 read_complex_part (y, imag_first));
3296 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3297 read_complex_part (y, !imag_first));
3298 }
3299
3300 /* A subroutine of emit_move_complex. Perform the move from Y to X
3301 via two moves of the parts. Returns the last instruction emitted. */
3302
3303 rtx_insn *
3304 emit_move_complex_parts (rtx x, rtx y)
3305 {
3306 /* Show that the output dies here. This is necessary for SUBREGs
3307 of pseudos since we cannot track their lifetimes correctly;
3308 hard regs shouldn't appear here except as return values. */
3309 if (!reload_completed && !reload_in_progress
3310 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3311 emit_clobber (x);
3312
3313 write_complex_part (x, read_complex_part (y, false), false);
3314 write_complex_part (x, read_complex_part (y, true), true);
3315
3316 return get_last_insn ();
3317 }
3318
3319 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3320 MODE is known to be complex. Returns the last instruction emitted. */
3321
3322 static rtx_insn *
3323 emit_move_complex (machine_mode mode, rtx x, rtx y)
3324 {
3325 bool try_int;
3326
3327 /* Need to take special care for pushes, to maintain proper ordering
3328 of the data, and possibly extra padding. */
3329 if (push_operand (x, mode))
3330 return emit_move_complex_push (mode, x, y);
3331
3332 /* See if we can coerce the target into moving both values at once, except
3333 for floating point where we favor moving as parts if this is easy. */
3334 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3335 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3336 && !(REG_P (x)
3337 && HARD_REGISTER_P (x)
3338 && hard_regno_nregs[REGNO (x)][mode] == 1)
3339 && !(REG_P (y)
3340 && HARD_REGISTER_P (y)
3341 && hard_regno_nregs[REGNO (y)][mode] == 1))
3342 try_int = false;
3343 /* Not possible if the values are inherently not adjacent. */
3344 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3345 try_int = false;
3346 /* It is possible if both are registers (or subregs of registers). */
3347 else if (register_operand (x, mode) && register_operand (y, mode))
3348 try_int = true;
3349 /* If one of the operands is a memory, and alignment constraints
3350 are friendly enough, we may be able to do combined memory operations.
3351 We do not attempt this if Y is a constant because that combination is
3352 usually better with the by-parts thing below. */
3353 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3354 && (!STRICT_ALIGNMENT
3355 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3356 try_int = true;
3357 else
3358 try_int = false;
3359
3360 if (try_int)
3361 {
3362 rtx_insn *ret;
3363
3364 /* For memory to memory moves, optimal behavior can be had with the
3365 existing block move logic. */
3366 if (MEM_P (x) && MEM_P (y))
3367 {
3368 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3369 BLOCK_OP_NO_LIBCALL);
3370 return get_last_insn ();
3371 }
3372
3373 ret = emit_move_via_integer (mode, x, y, true);
3374 if (ret)
3375 return ret;
3376 }
3377
3378 return emit_move_complex_parts (x, y);
3379 }
3380
3381 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3382 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3383
3384 static rtx_insn *
3385 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3386 {
3387 rtx_insn *ret;
3388
3389 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3390 if (mode != CCmode)
3391 {
3392 enum insn_code code = optab_handler (mov_optab, CCmode);
3393 if (code != CODE_FOR_nothing)
3394 {
3395 x = emit_move_change_mode (CCmode, mode, x, true);
3396 y = emit_move_change_mode (CCmode, mode, y, true);
3397 return emit_insn (GEN_FCN (code) (x, y));
3398 }
3399 }
3400
3401 /* Otherwise, find the MODE_INT mode of the same width. */
3402 ret = emit_move_via_integer (mode, x, y, false);
3403 gcc_assert (ret != NULL);
3404 return ret;
3405 }
3406
3407 /* Return true if word I of OP lies entirely in the
3408 undefined bits of a paradoxical subreg. */
3409
3410 static bool
3411 undefined_operand_subword_p (const_rtx op, int i)
3412 {
3413 machine_mode innermode, innermostmode;
3414 int offset;
3415 if (GET_CODE (op) != SUBREG)
3416 return false;
3417 innermode = GET_MODE (op);
3418 innermostmode = GET_MODE (SUBREG_REG (op));
3419 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3420 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3421 memory, except for a paradoxical subreg where we define
3422 SUBREG_BYTE to be 0; undo this exception as in
3423 simplify_subreg. */
3424 if (SUBREG_BYTE (op) == 0
3425 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3426 {
3427 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3428 if (WORDS_BIG_ENDIAN)
3429 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3430 if (BYTES_BIG_ENDIAN)
3431 offset += difference % UNITS_PER_WORD;
3432 }
3433 if (offset >= GET_MODE_SIZE (innermostmode)
3434 || offset <= -GET_MODE_SIZE (word_mode))
3435 return true;
3436 return false;
3437 }
3438
3439 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3440 MODE is any multi-word or full-word mode that lacks a move_insn
3441 pattern. Note that you will get better code if you define such
3442 patterns, even if they must turn into multiple assembler instructions. */
3443
3444 static rtx_insn *
3445 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3446 {
3447 rtx_insn *last_insn = 0;
3448 rtx_insn *seq;
3449 rtx inner;
3450 bool need_clobber;
3451 int i;
3452
3453 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3454
3455 /* If X is a push on the stack, do the push now and replace
3456 X with a reference to the stack pointer. */
3457 if (push_operand (x, mode))
3458 x = emit_move_resolve_push (mode, x);
3459
3460 /* If we are in reload, see if either operand is a MEM whose address
3461 is scheduled for replacement. */
3462 if (reload_in_progress && MEM_P (x)
3463 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3464 x = replace_equiv_address_nv (x, inner);
3465 if (reload_in_progress && MEM_P (y)
3466 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3467 y = replace_equiv_address_nv (y, inner);
3468
3469 start_sequence ();
3470
3471 need_clobber = false;
3472 for (i = 0;
3473 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3474 i++)
3475 {
3476 rtx xpart = operand_subword (x, i, 1, mode);
3477 rtx ypart;
3478
3479 /* Do not generate code for a move if it would come entirely
3480 from the undefined bits of a paradoxical subreg. */
3481 if (undefined_operand_subword_p (y, i))
3482 continue;
3483
3484 ypart = operand_subword (y, i, 1, mode);
3485
3486 /* If we can't get a part of Y, put Y into memory if it is a
3487 constant. Otherwise, force it into a register. Then we must
3488 be able to get a part of Y. */
3489 if (ypart == 0 && CONSTANT_P (y))
3490 {
3491 y = use_anchored_address (force_const_mem (mode, y));
3492 ypart = operand_subword (y, i, 1, mode);
3493 }
3494 else if (ypart == 0)
3495 ypart = operand_subword_force (y, i, mode);
3496
3497 gcc_assert (xpart && ypart);
3498
3499 need_clobber |= (GET_CODE (xpart) == SUBREG);
3500
3501 last_insn = emit_move_insn (xpart, ypart);
3502 }
3503
3504 seq = get_insns ();
3505 end_sequence ();
3506
3507 /* Show that the output dies here. This is necessary for SUBREGs
3508 of pseudos since we cannot track their lifetimes correctly;
3509 hard regs shouldn't appear here except as return values.
3510 We never want to emit such a clobber after reload. */
3511 if (x != y
3512 && ! (reload_in_progress || reload_completed)
3513 && need_clobber != 0)
3514 emit_clobber (x);
3515
3516 emit_insn (seq);
3517
3518 return last_insn;
3519 }
3520
3521 /* Low level part of emit_move_insn.
3522 Called just like emit_move_insn, but assumes X and Y
3523 are basically valid. */
3524
3525 rtx_insn *
3526 emit_move_insn_1 (rtx x, rtx y)
3527 {
3528 machine_mode mode = GET_MODE (x);
3529 enum insn_code code;
3530
3531 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3532
3533 code = optab_handler (mov_optab, mode);
3534 if (code != CODE_FOR_nothing)
3535 return emit_insn (GEN_FCN (code) (x, y));
3536
3537 /* Expand complex moves by moving real part and imag part. */
3538 if (COMPLEX_MODE_P (mode))
3539 return emit_move_complex (mode, x, y);
3540
3541 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3542 || ALL_FIXED_POINT_MODE_P (mode))
3543 {
3544 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3545
3546 /* If we can't find an integer mode, use multi words. */
3547 if (result)
3548 return result;
3549 else
3550 return emit_move_multi_word (mode, x, y);
3551 }
3552
3553 if (GET_MODE_CLASS (mode) == MODE_CC)
3554 return emit_move_ccmode (mode, x, y);
3555
3556 /* Try using a move pattern for the corresponding integer mode. This is
3557 only safe when simplify_subreg can convert MODE constants into integer
3558 constants. At present, it can only do this reliably if the value
3559 fits within a HOST_WIDE_INT. */
3560 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3561 {
3562 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3563
3564 if (ret)
3565 {
3566 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3567 return ret;
3568 }
3569 }
3570
3571 return emit_move_multi_word (mode, x, y);
3572 }
3573
3574 /* Generate code to copy Y into X.
3575 Both Y and X must have the same mode, except that
3576 Y can be a constant with VOIDmode.
3577 This mode cannot be BLKmode; use emit_block_move for that.
3578
3579 Return the last instruction emitted. */
3580
3581 rtx_insn *
3582 emit_move_insn (rtx x, rtx y)
3583 {
3584 machine_mode mode = GET_MODE (x);
3585 rtx y_cst = NULL_RTX;
3586 rtx_insn *last_insn;
3587 rtx set;
3588
3589 gcc_assert (mode != BLKmode
3590 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3591
3592 if (CONSTANT_P (y))
3593 {
3594 if (optimize
3595 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3596 && (last_insn = compress_float_constant (x, y)))
3597 return last_insn;
3598
3599 y_cst = y;
3600
3601 if (!targetm.legitimate_constant_p (mode, y))
3602 {
3603 y = force_const_mem (mode, y);
3604
3605 /* If the target's cannot_force_const_mem prevented the spill,
3606 assume that the target's move expanders will also take care
3607 of the non-legitimate constant. */
3608 if (!y)
3609 y = y_cst;
3610 else
3611 y = use_anchored_address (y);
3612 }
3613 }
3614
3615 /* If X or Y are memory references, verify that their addresses are valid
3616 for the machine. */
3617 if (MEM_P (x)
3618 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3619 MEM_ADDR_SPACE (x))
3620 && ! push_operand (x, GET_MODE (x))))
3621 x = validize_mem (x);
3622
3623 if (MEM_P (y)
3624 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3625 MEM_ADDR_SPACE (y)))
3626 y = validize_mem (y);
3627
3628 gcc_assert (mode != BLKmode);
3629
3630 last_insn = emit_move_insn_1 (x, y);
3631
3632 if (y_cst && REG_P (x)
3633 && (set = single_set (last_insn)) != NULL_RTX
3634 && SET_DEST (set) == x
3635 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3636 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3637
3638 return last_insn;
3639 }
3640
3641 /* Generate the body of an instruction to copy Y into X.
3642 It may be a list of insns, if one insn isn't enough. */
3643
3644 rtx
3645 gen_move_insn (rtx x, rtx y)
3646 {
3647 rtx_insn *seq;
3648
3649 start_sequence ();
3650 emit_move_insn_1 (x, y);
3651 seq = get_insns ();
3652 end_sequence ();
3653 return seq;
3654 }
3655
3656 /* If Y is representable exactly in a narrower mode, and the target can
3657 perform the extension directly from constant or memory, then emit the
3658 move as an extension. */
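/* For illustration: when storing the DFmode constant 1.0 into a DFmode
   register, 1.0 is exactly representable in SFmode, so on a target whose
   extendsfdf2 pattern can extend directly from a constant or from memory
   this may emit something like

	(set (reg:DF x) (float_extend:DF (mem:SF <constant pool entry>)))

   which is usually cheaper than materializing the full DFmode constant.  */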
3659
3660 static rtx_insn *
3661 compress_float_constant (rtx x, rtx y)
3662 {
3663 machine_mode dstmode = GET_MODE (x);
3664 machine_mode orig_srcmode = GET_MODE (y);
3665 machine_mode srcmode;
3666 REAL_VALUE_TYPE r;
3667 int oldcost, newcost;
3668 bool speed = optimize_insn_for_speed_p ();
3669
3670 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3671
3672 if (targetm.legitimate_constant_p (dstmode, y))
3673 oldcost = set_src_cost (y, speed);
3674 else
3675 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3676
3677 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3678 srcmode != orig_srcmode;
3679 srcmode = GET_MODE_WIDER_MODE (srcmode))
3680 {
3681 enum insn_code ic;
3682 rtx trunc_y;
3683 rtx_insn *last_insn;
3684
3685 /* Skip if the target can't extend this way. */
3686 ic = can_extend_p (dstmode, srcmode, 0);
3687 if (ic == CODE_FOR_nothing)
3688 continue;
3689
3690 /* Skip if the narrowed value isn't exact. */
3691 if (! exact_real_truncate (srcmode, &r))
3692 continue;
3693
3694 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3695
3696 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3697 {
3698 /* Skip if the target needs extra instructions to perform
3699 the extension. */
3700 if (!insn_operand_matches (ic, 1, trunc_y))
3701 continue;
3702 /* This is valid, but may not be cheaper than the original. */
3703 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3704 speed);
3705 if (oldcost < newcost)
3706 continue;
3707 }
3708 else if (float_extend_from_mem[dstmode][srcmode])
3709 {
3710 trunc_y = force_const_mem (srcmode, trunc_y);
3711 /* This is valid, but may not be cheaper than the original. */
3712 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3713 speed);
3714 if (oldcost < newcost)
3715 continue;
3716 trunc_y = validize_mem (trunc_y);
3717 }
3718 else
3719 continue;
3720
3721 /* For CSE's benefit, force the compressed constant pool entry
3722 into a new pseudo. This constant may be used in different modes,
3723 and if not, combine will put things back together for us. */
3724 trunc_y = force_reg (srcmode, trunc_y);
3725
3726 /* If x is a hard register, perform the extension into a pseudo,
3727 so that e.g. stack realignment code is aware of it. */
3728 rtx target = x;
3729 if (REG_P (x) && HARD_REGISTER_P (x))
3730 target = gen_reg_rtx (dstmode);
3731
3732 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3733 last_insn = get_last_insn ();
3734
3735 if (REG_P (target))
3736 set_unique_reg_note (last_insn, REG_EQUAL, y);
3737
3738 if (target != x)
3739 return emit_move_insn (x, target);
3740 return last_insn;
3741 }
3742
3743 return NULL;
3744 }
3745 \f
3746 /* Pushing data onto the stack. */
3747
3748 /* Push a block of length SIZE (perhaps variable)
3749 and return an rtx to address the beginning of the block.
3750 The value may be virtual_outgoing_args_rtx.
3751
3752 EXTRA is the number of bytes of padding to push in addition to SIZE.
3753 BELOW nonzero means this padding comes at low addresses;
3754 otherwise, the padding comes at high addresses. */
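/* For illustration, assuming a downward-growing stack: push_block
   (GEN_INT (32), 8, 1) adjusts the stack by 40 bytes and returns an
   address 8 bytes above the new bottom, so the padding occupies the
   lower addresses and the 32-byte block sits above it.  */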
3755
3756 rtx
3757 push_block (rtx size, int extra, int below)
3758 {
3759 rtx temp;
3760
3761 size = convert_modes (Pmode, ptr_mode, size, 1);
3762 if (CONSTANT_P (size))
3763 anti_adjust_stack (plus_constant (Pmode, size, extra));
3764 else if (REG_P (size) && extra == 0)
3765 anti_adjust_stack (size);
3766 else
3767 {
3768 temp = copy_to_mode_reg (Pmode, size);
3769 if (extra != 0)
3770 temp = expand_binop (Pmode, add_optab, temp,
3771 gen_int_mode (extra, Pmode),
3772 temp, 0, OPTAB_LIB_WIDEN);
3773 anti_adjust_stack (temp);
3774 }
3775
3776 #ifndef STACK_GROWS_DOWNWARD
3777 if (0)
3778 #else
3779 if (1)
3780 #endif
3781 {
3782 temp = virtual_outgoing_args_rtx;
3783 if (extra != 0 && below)
3784 temp = plus_constant (Pmode, temp, extra);
3785 }
3786 else
3787 {
3788 if (CONST_INT_P (size))
3789 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3790 -INTVAL (size) - (below ? 0 : extra));
3791 else if (extra != 0 && !below)
3792 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3793 negate_rtx (Pmode, plus_constant (Pmode, size,
3794 extra)));
3795 else
3796 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3797 negate_rtx (Pmode, size));
3798 }
3799
3800 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3801 }
3802
3803 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
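/* E.g. for (mem:SI (post_inc (reg sp))) this returns (reg sp); for a MEM
   whose address is not an auto-inc, or for a non-MEM, it returns NULL.  */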
3804
3805 static rtx
3806 mem_autoinc_base (rtx mem)
3807 {
3808 if (MEM_P (mem))
3809 {
3810 rtx addr = XEXP (mem, 0);
3811 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3812 return XEXP (addr, 0);
3813 }
3814 return NULL;
3815 }
3816
3817 /* A utility routine used here, in reload, and in try_split. The insns
3818 after PREV up to and including LAST are known to adjust the stack,
3819 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3820 placing notes as appropriate. PREV may be NULL, indicating the
3821 entire insn sequence prior to LAST should be scanned.
3822
3823 The set of allowed stack pointer modifications is small:
3824 (1) One or more auto-inc style memory references (aka pushes),
3825 (2) One or more addition/subtraction with the SP as destination,
3826 (3) A single move insn with the SP as destination,
3827 (4) A call_pop insn,
3828 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3829
3830 Insns in the sequence that do not modify the SP are ignored,
3831 except for noreturn calls.
3832
3833 The return value is the amount of adjustment that can be trivially
3834 verified, via immediate operand or auto-inc. If the adjustment
3835 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
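/* For illustration: an insn whose single SET is
	(set (reg sp) (plus (reg sp) (const_int -16)))
   yields -16; a push such as
	(set (mem:SI (pre_dec (reg sp))) (reg r0))
   yields minus the size of the pushed mode (-4 on a typical target); and
   a copy of some unrelated register into the stack pointer yields
   HOST_WIDE_INT_MIN.  */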
3836
3837 HOST_WIDE_INT
3838 find_args_size_adjust (rtx_insn *insn)
3839 {
3840 rtx dest, set, pat;
3841 int i;
3842
3843 pat = PATTERN (insn);
3844 set = NULL;
3845
3846 /* Look for a call_pop pattern. */
3847 if (CALL_P (insn))
3848 {
3849 /* We have to allow non-call_pop patterns for the case
3850 of emit_single_push_insn of a TLS address. */
3851 if (GET_CODE (pat) != PARALLEL)
3852 return 0;
3853
3854 /* All call_pop patterns have a stack pointer adjust in the parallel.
3855 The call itself is always first, and the stack adjust is
3856 usually last, so search from the end. */
3857 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3858 {
3859 set = XVECEXP (pat, 0, i);
3860 if (GET_CODE (set) != SET)
3861 continue;
3862 dest = SET_DEST (set);
3863 if (dest == stack_pointer_rtx)
3864 break;
3865 }
3866 /* We'd better have found the stack pointer adjust. */
3867 if (i == 0)
3868 return 0;
3869 /* Fall through to process the extracted SET and DEST
3870 as if it was a standalone insn. */
3871 }
3872 else if (GET_CODE (pat) == SET)
3873 set = pat;
3874 else if ((set = single_set (insn)) != NULL)
3875 ;
3876 else if (GET_CODE (pat) == PARALLEL)
3877 {
3878 /* ??? Some older ports use a parallel with a stack adjust
3879 and a store for a PUSH_ROUNDING pattern, rather than a
3880 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3881 /* ??? See h8300 and m68k, pushqi1. */
3882 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3883 {
3884 set = XVECEXP (pat, 0, i);
3885 if (GET_CODE (set) != SET)
3886 continue;
3887 dest = SET_DEST (set);
3888 if (dest == stack_pointer_rtx)
3889 break;
3890
3891 /* We do not expect an auto-inc of the sp in the parallel. */
3892 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3893 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3894 != stack_pointer_rtx);
3895 }
3896 if (i < 0)
3897 return 0;
3898 }
3899 else
3900 return 0;
3901
3902 dest = SET_DEST (set);
3903
3904 /* Look for direct modifications of the stack pointer. */
3905 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3906 {
3907 /* Look for a trivial adjustment, otherwise assume nothing. */
3908 /* Note that the SPU restore_stack_block pattern refers to
3909 the stack pointer in V4SImode. Consider that non-trivial. */
3910 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3911 && GET_CODE (SET_SRC (set)) == PLUS
3912 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3913 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3914 return INTVAL (XEXP (SET_SRC (set), 1));
3915 /* ??? Reload can generate no-op moves, which will be cleaned
3916 up later. Recognize it and continue searching. */
3917 else if (rtx_equal_p (dest, SET_SRC (set)))
3918 return 0;
3919 else
3920 return HOST_WIDE_INT_MIN;
3921 }
3922 else
3923 {
3924 rtx mem, addr;
3925
3926 /* Otherwise only think about autoinc patterns. */
3927 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3928 {
3929 mem = dest;
3930 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3931 != stack_pointer_rtx);
3932 }
3933 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3934 mem = SET_SRC (set);
3935 else
3936 return 0;
3937
3938 addr = XEXP (mem, 0);
3939 switch (GET_CODE (addr))
3940 {
3941 case PRE_INC:
3942 case POST_INC:
3943 return GET_MODE_SIZE (GET_MODE (mem));
3944 case PRE_DEC:
3945 case POST_DEC:
3946 return -GET_MODE_SIZE (GET_MODE (mem));
3947 case PRE_MODIFY:
3948 case POST_MODIFY:
3949 addr = XEXP (addr, 1);
3950 gcc_assert (GET_CODE (addr) == PLUS);
3951 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3952 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3953 return INTVAL (XEXP (addr, 1));
3954 default:
3955 gcc_unreachable ();
3956 }
3957 }
3958 }
3959
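/* Walk the insns after PREV up to and including LAST, attaching a
   REG_ARGS_SIZE note that records the argument-area size after each
   stack adjustment, given that the size after LAST is END_ARGS_SIZE.
   Return the size in effect just after PREV, or INT_MIN if some
   adjustment in the range could not be identified.  */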
3960 int
3961 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3962 {
3963 int args_size = end_args_size;
3964 bool saw_unknown = false;
3965 rtx_insn *insn;
3966
3967 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3968 {
3969 HOST_WIDE_INT this_delta;
3970
3971 if (!NONDEBUG_INSN_P (insn))
3972 continue;
3973
3974 this_delta = find_args_size_adjust (insn);
3975 if (this_delta == 0)
3976 {
3977 if (!CALL_P (insn)
3978 || ACCUMULATE_OUTGOING_ARGS
3979 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3980 continue;
3981 }
3982
3983 gcc_assert (!saw_unknown);
3984 if (this_delta == HOST_WIDE_INT_MIN)
3985 saw_unknown = true;
3986
3987 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3988 #ifdef STACK_GROWS_DOWNWARD
3989 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3990 #endif
3991 args_size -= this_delta;
3992 }
3993
3994 return saw_unknown ? INT_MIN : args_size;
3995 }
3996
3997 #ifdef PUSH_ROUNDING
3998 /* Emit single push insn. */
3999
4000 static void
4001 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4002 {
4003 rtx dest_addr;
4004 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4005 rtx dest;
4006 enum insn_code icode;
4007
4008 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4009 /* If there is a push pattern, use it. Otherwise fall back to the old way
4010 of handing a MEM that represents the push operation to the move expander. */
4011 icode = optab_handler (push_optab, mode);
4012 if (icode != CODE_FOR_nothing)
4013 {
4014 struct expand_operand ops[1];
4015
4016 create_input_operand (&ops[0], x, mode);
4017 if (maybe_expand_insn (icode, 1, ops))
4018 return;
4019 }
4020 if (GET_MODE_SIZE (mode) == rounded_size)
4021 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4022 /* If we are to pad downward, adjust the stack pointer first and
4023 then store X into the stack location using an offset. This is
4024 because emit_move_insn does not know how to pad; it does not have
4025 access to the type. */
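/* For illustration (hypothetical target, downward-growing stack, PRE_DEC
   pushes): if GET_MODE_SIZE (mode) is 1 and PUSH_ROUNDING rounds it up to
   4, padding_size is 3; the code below drops the stack pointer by 4 and
   stores the byte at (sp + 3), i.e. at the high end of the slot with the
   3 padding bytes below it.  */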
4026 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4027 {
4028 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4029 HOST_WIDE_INT offset;
4030
4031 emit_move_insn (stack_pointer_rtx,
4032 expand_binop (Pmode,
4033 #ifdef STACK_GROWS_DOWNWARD
4034 sub_optab,
4035 #else
4036 add_optab,
4037 #endif
4038 stack_pointer_rtx,
4039 gen_int_mode (rounded_size, Pmode),
4040 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4041
4042 offset = (HOST_WIDE_INT) padding_size;
4043 #ifdef STACK_GROWS_DOWNWARD
4044 if (STACK_PUSH_CODE == POST_DEC)
4045 /* We have already decremented the stack pointer, so get the
4046 previous value. */
4047 offset += (HOST_WIDE_INT) rounded_size;
4048 #else
4049 if (STACK_PUSH_CODE == POST_INC)
4050 /* We have already incremented the stack pointer, so get the
4051 previous value. */
4052 offset -= (HOST_WIDE_INT) rounded_size;
4053 #endif
4054 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4055 gen_int_mode (offset, Pmode));
4056 }
4057 else
4058 {
4059 #ifdef STACK_GROWS_DOWNWARD
4060 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4061 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4062 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4063 Pmode));
4064 #else
4065 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4066 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4067 gen_int_mode (rounded_size, Pmode));
4068 #endif
4069 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4070 }
4071
4072 dest = gen_rtx_MEM (mode, dest_addr);
4073
4074 if (type != 0)
4075 {
4076 set_mem_attributes (dest, type, 1);
4077
4078 if (cfun->tail_call_marked)
4079 /* Function incoming arguments may overlap with sibling call
4080 outgoing arguments and we cannot allow reordering of reads
4081 from function arguments with stores to outgoing arguments
4082 of sibling calls. */
4083 set_mem_alias_set (dest, 0);
4084 }
4085 emit_move_insn (dest, x);
4086 }
4087
4088 /* Emit and annotate a single push insn. */
4089
4090 static void
4091 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4092 {
4093 int delta, old_delta = stack_pointer_delta;
4094 rtx_insn *prev = get_last_insn ();
4095 rtx_insn *last;
4096
4097 emit_single_push_insn_1 (mode, x, type);
4098
4099 last = get_last_insn ();
4100
4101 /* Notice the common case where we emitted exactly one insn. */
4102 if (PREV_INSN (last) == prev)
4103 {
4104 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4105 return;
4106 }
4107
4108 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4109 gcc_assert (delta == INT_MIN || delta == old_delta);
4110 }
4111 #endif
4112
4113 /* Generate code to push X onto the stack, assuming it has mode MODE and
4114 type TYPE.
4115 MODE is redundant except when X is a CONST_INT (since they don't
4116 carry mode info).
4117 SIZE is an rtx for the size of data to be copied (in bytes),
4118 needed only if X is BLKmode.
4119
4120 ALIGN (in bits) is the maximum alignment we can assume.
4121
4122 If PARTIAL and REG are both nonzero, then copy that many of the first
4123 bytes of X into registers starting with REG, and push the rest of X.
4124 The amount of space pushed is decreased by PARTIAL bytes.
4125 REG must be a hard register in this case.
4126 If REG is zero but PARTIAL is not, take all other actions for an
4127 argument partially in registers, but do not actually load any
4128 registers.
4129
4130 EXTRA is the amount in bytes of extra space to leave next to this arg.
4131 This is ignored if an argument block has already been allocated.
4132
4133 On a machine that lacks real push insns, ARGS_ADDR is the address of
4134 the bottom of the argument block for this call. We use indexing off there
4135 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4136 argument block has not been preallocated.
4137
4138 ARGS_SO_FAR is the size of args previously pushed for this call.
4139
4140 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4141 for arguments passed in registers. If nonzero, it will be the number
4142 of bytes required. */
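/* For illustration (hypothetical 32-bit target, UNITS_PER_WORD == 4):
   pushing a 12-byte BLKmode value with PARTIAL == 8 and REG a hard
   register copies the first two words of X into REG and REG+1 and pushes
   only the remaining 4 bytes.  */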
4143
4144 void
4145 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4146 unsigned int align, int partial, rtx reg, int extra,
4147 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4148 rtx alignment_pad)
4149 {
4150 rtx xinner;
4151 enum direction stack_direction
4152 #ifdef STACK_GROWS_DOWNWARD
4153 = downward;
4154 #else
4155 = upward;
4156 #endif
4157
4158 /* Decide where to pad the argument: `downward' for below,
4159 `upward' for above, or `none' for don't pad it.
4160 Default is below for small data on big-endian machines; else above. */
4161 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4162
4163 /* Invert direction if stack is post-decrement.
4164 FIXME: why? */
4165 if (STACK_PUSH_CODE == POST_DEC)
4166 if (where_pad != none)
4167 where_pad = (where_pad == downward ? upward : downward);
4168
4169 xinner = x;
4170
4171 if (mode == BLKmode
4172 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4173 {
4174 /* Copy a block into the stack, entirely or partially. */
4175
4176 rtx temp;
4177 int used;
4178 int offset;
4179 int skip;
4180
4181 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4182 used = partial - offset;
4183
4184 if (mode != BLKmode)
4185 {
4186 /* A value is to be stored in an insufficiently aligned
4187 stack slot; copy via a suitably aligned slot if
4188 necessary. */
4189 size = GEN_INT (GET_MODE_SIZE (mode));
4190 if (!MEM_P (xinner))
4191 {
4192 temp = assign_temp (type, 1, 1);
4193 emit_move_insn (temp, xinner);
4194 xinner = temp;
4195 }
4196 }
4197
4198 gcc_assert (size);
4199
4200 /* USED is now the # of bytes we need not copy to the stack
4201 because registers will take care of them. */
4202
4203 if (partial != 0)
4204 xinner = adjust_address (xinner, BLKmode, used);
4205
4206 /* If the partial register-part of the arg counts in its stack size,
4207 skip the part of stack space corresponding to the registers.
4208 Otherwise, start copying to the beginning of the stack space,
4209 by setting SKIP to 0. */
4210 skip = (reg_parm_stack_space == 0) ? 0 : used;
4211
4212 #ifdef PUSH_ROUNDING
4213 /* Do it with several push insns if that doesn't take lots of insns
4214 and if there is no difficulty with push insns that skip bytes
4215 on the stack for alignment purposes. */
4216 if (args_addr == 0
4217 && PUSH_ARGS
4218 && CONST_INT_P (size)
4219 && skip == 0
4220 && MEM_ALIGN (xinner) >= align
4221 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4222 /* Here we avoid the case of a structure whose weak alignment
4223 forces many pushes of a small amount of data,
4224 and such small pushes do rounding that causes trouble. */
4225 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4226 || align >= BIGGEST_ALIGNMENT
4227 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4228 == (align / BITS_PER_UNIT)))
4229 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4230 {
4231 /* Push padding now if padding above and stack grows down,
4232 or if padding below and stack grows up.
4233 But if space already allocated, this has already been done. */
4234 if (extra && args_addr == 0
4235 && where_pad != none && where_pad != stack_direction)
4236 anti_adjust_stack (GEN_INT (extra));
4237
4238 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4239 }
4240 else
4241 #endif /* PUSH_ROUNDING */
4242 {
4243 rtx target;
4244
4245 /* Otherwise make space on the stack and copy the data
4246 to the address of that space. */
4247
4248 /* Deduct words put into registers from the size we must copy. */
4249 if (partial != 0)
4250 {
4251 if (CONST_INT_P (size))
4252 size = GEN_INT (INTVAL (size) - used);
4253 else
4254 size = expand_binop (GET_MODE (size), sub_optab, size,
4255 gen_int_mode (used, GET_MODE (size)),
4256 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4257 }
4258
4259 /* Get the address of the stack space.
4260 In this case, we do not deal with EXTRA separately.
4261 A single stack adjust will do. */
4262 if (! args_addr)
4263 {
4264 temp = push_block (size, extra, where_pad == downward);
4265 extra = 0;
4266 }
4267 else if (CONST_INT_P (args_so_far))
4268 temp = memory_address (BLKmode,
4269 plus_constant (Pmode, args_addr,
4270 skip + INTVAL (args_so_far)));
4271 else
4272 temp = memory_address (BLKmode,
4273 plus_constant (Pmode,
4274 gen_rtx_PLUS (Pmode,
4275 args_addr,
4276 args_so_far),
4277 skip));
4278
4279 if (!ACCUMULATE_OUTGOING_ARGS)
4280 {
4281 /* If the source is referenced relative to the stack pointer,
4282 copy it to another register to stabilize it. We do not need
4283 to do this if we know that we won't be changing sp. */
4284
4285 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4286 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4287 temp = copy_to_reg (temp);
4288 }
4289
4290 target = gen_rtx_MEM (BLKmode, temp);
4291
4292 /* We do *not* set_mem_attributes here, because incoming arguments
4293 may overlap with sibling call outgoing arguments and we cannot
4294 allow reordering of reads from function arguments with stores
4295 to outgoing arguments of sibling calls. We do, however, want
4296 to record the alignment of the stack slot. */
4297 /* ALIGN may well be better aligned than TYPE, e.g. due to
4298 PARM_BOUNDARY. Assume the caller isn't lying. */
4299 set_mem_align (target, align);
4300
4301 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4302 }
4303 }
4304 else if (partial > 0)
4305 {
4306 /* Scalar partly in registers. */
4307
4308 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4309 int i;
4310 int not_stack;
4311 /* Number of bytes at the start of the argument that we must
4312 make space for but need not store. */
4313 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4314 int args_offset = INTVAL (args_so_far);
4315 int skip;
4316
4317 /* Push padding now if padding above and stack grows down,
4318 or if padding below and stack grows up.
4319 But if space already allocated, this has already been done. */
4320 if (extra && args_addr == 0
4321 && where_pad != none && where_pad != stack_direction)
4322 anti_adjust_stack (GEN_INT (extra));
4323
4324 /* If we make space by pushing it, we might as well push
4325 the real data. Otherwise, we can leave OFFSET nonzero
4326 and leave the space uninitialized. */
4327 if (args_addr == 0)
4328 offset = 0;
4329
4330 /* Now NOT_STACK gets the number of words that we don't need to
4331 allocate on the stack. Convert OFFSET to words too. */
4332 not_stack = (partial - offset) / UNITS_PER_WORD;
4333 offset /= UNITS_PER_WORD;
4334
4335 /* If the partial register-part of the arg counts in its stack size,
4336 skip the part of stack space corresponding to the registers.
4337 Otherwise, start copying to the beginning of the stack space,
4338 by setting SKIP to 0. */
4339 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4340
4341 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4342 x = validize_mem (force_const_mem (mode, x));
4343
4344 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4345 SUBREGs of such registers are not allowed. */
4346 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4347 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4348 x = copy_to_reg (x);
4349
4350 /* Loop over all the words allocated on the stack for this arg. */
4351 /* We can do it by words, because any scalar bigger than a word
4352 has a size that is a multiple of a word. */
4353 for (i = size - 1; i >= not_stack; i--)
4354 if (i >= not_stack + offset)
4355 emit_push_insn (operand_subword_force (x, i, mode),
4356 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4357 0, args_addr,
4358 GEN_INT (args_offset + ((i - not_stack + skip)
4359 * UNITS_PER_WORD)),
4360 reg_parm_stack_space, alignment_pad);
4361 }
4362 else
4363 {
4364 rtx addr;
4365 rtx dest;
4366
4367 /* Push padding now if padding above and stack grows down,
4368 or if padding below and stack grows up.
4369 But if space already allocated, this has already been done. */
4370 if (extra && args_addr == 0
4371 && where_pad != none && where_pad != stack_direction)
4372 anti_adjust_stack (GEN_INT (extra));
4373
4374 #ifdef PUSH_ROUNDING
4375 if (args_addr == 0 && PUSH_ARGS)
4376 emit_single_push_insn (mode, x, type);
4377 else
4378 #endif
4379 {
4380 if (CONST_INT_P (args_so_far))
4381 addr
4382 = memory_address (mode,
4383 plus_constant (Pmode, args_addr,
4384 INTVAL (args_so_far)));
4385 else
4386 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4387 args_so_far));
4388 dest = gen_rtx_MEM (mode, addr);
4389
4390 /* We do *not* set_mem_attributes here, because incoming arguments
4391 may overlap with sibling call outgoing arguments and we cannot
4392 allow reordering of reads from function arguments with stores
4393 to outgoing arguments of sibling calls. We do, however, want
4394 to record the alignment of the stack slot. */
4395 /* ALIGN may well be better aligned than TYPE, e.g. due to
4396 PARM_BOUNDARY. Assume the caller isn't lying. */
4397 set_mem_align (dest, align);
4398
4399 emit_move_insn (dest, x);
4400 }
4401 }
4402
4403 /* If part should go in registers, copy that part
4404 into the appropriate registers. Do this now, at the end,
4405 since mem-to-mem copies above may do function calls. */
4406 if (partial > 0 && reg != 0)
4407 {
4408 /* Handle calls that pass values in multiple non-contiguous locations.
4409 The Irix 6 ABI has examples of this. */
4410 if (GET_CODE (reg) == PARALLEL)
4411 emit_group_load (reg, x, type, -1);
4412 else
4413 {
4414 gcc_assert (partial % UNITS_PER_WORD == 0);
4415 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4416 }
4417 }
4418
4419 if (extra && args_addr == 0 && where_pad == stack_direction)
4420 anti_adjust_stack (GEN_INT (extra));
4421
4422 if (alignment_pad && args_addr == 0)
4423 anti_adjust_stack (alignment_pad);
4424 }
4425 \f
4426 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4427 operations. */
4428
4429 static rtx
4430 get_subtarget (rtx x)
4431 {
4432 return (optimize
4433 || x == 0
4434 /* Only registers can be subtargets. */
4435 || !REG_P (x)
4436 /* Don't use hard regs to avoid extending their life. */
4437 || REGNO (x) < FIRST_PSEUDO_REGISTER
4438 ? 0 : x);
4439 }
4440
4441 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4442 FIELD is a bitfield. Returns true if the optimization was successful,
4443 and there's nothing else to do. */
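/* For illustration: given "struct { unsigned f : 3; } s;", the assignment
   "s.f |= 2" can be handled by the BIT_IOR_EXPR case below as a single OR
   of a masked, shifted constant into the word holding the bit field,
   instead of extracting, modifying and re-inserting the field.  */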
4444
4445 static bool
4446 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4447 unsigned HOST_WIDE_INT bitpos,
4448 unsigned HOST_WIDE_INT bitregion_start,
4449 unsigned HOST_WIDE_INT bitregion_end,
4450 machine_mode mode1, rtx str_rtx,
4451 tree to, tree src)
4452 {
4453 machine_mode str_mode = GET_MODE (str_rtx);
4454 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4455 tree op0, op1;
4456 rtx value, result;
4457 optab binop;
4458 gimple srcstmt;
4459 enum tree_code code;
4460
4461 if (mode1 != VOIDmode
4462 || bitsize >= BITS_PER_WORD
4463 || str_bitsize > BITS_PER_WORD
4464 || TREE_SIDE_EFFECTS (to)
4465 || TREE_THIS_VOLATILE (to))
4466 return false;
4467
4468 STRIP_NOPS (src);
4469 if (TREE_CODE (src) != SSA_NAME)
4470 return false;
4471 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4472 return false;
4473
4474 srcstmt = get_gimple_for_ssa_name (src);
4475 if (!srcstmt
4476 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4477 return false;
4478
4479 code = gimple_assign_rhs_code (srcstmt);
4480
4481 op0 = gimple_assign_rhs1 (srcstmt);
4482
4483 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4484 to find its initialization. Hopefully the initialization will
4485 be from a bitfield load. */
4486 if (TREE_CODE (op0) == SSA_NAME)
4487 {
4488 gimple op0stmt = get_gimple_for_ssa_name (op0);
4489
4490 /* We want to eventually have OP0 be the same as TO, which
4491 should be a bitfield. */
4492 if (!op0stmt
4493 || !is_gimple_assign (op0stmt)
4494 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4495 return false;
4496 op0 = gimple_assign_rhs1 (op0stmt);
4497 }
4498
4499 op1 = gimple_assign_rhs2 (srcstmt);
4500
4501 if (!operand_equal_p (to, op0, 0))
4502 return false;
4503
4504 if (MEM_P (str_rtx))
4505 {
4506 unsigned HOST_WIDE_INT offset1;
4507
4508 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4509 str_mode = word_mode;
4510 str_mode = get_best_mode (bitsize, bitpos,
4511 bitregion_start, bitregion_end,
4512 MEM_ALIGN (str_rtx), str_mode, 0);
4513 if (str_mode == VOIDmode)
4514 return false;
4515 str_bitsize = GET_MODE_BITSIZE (str_mode);
4516
4517 offset1 = bitpos;
4518 bitpos %= str_bitsize;
4519 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4520 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4521 }
4522 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4523 return false;
4524
4525 /* If the bit field covers the whole REG/MEM, store_field
4526 will likely generate better code. */
4527 if (bitsize >= str_bitsize)
4528 return false;
4529
4530 /* We can't handle fields split across multiple entities. */
4531 if (bitpos + bitsize > str_bitsize)
4532 return false;
4533
4534 if (BYTES_BIG_ENDIAN)
4535 bitpos = str_bitsize - bitpos - bitsize;
4536
4537 switch (code)
4538 {
4539 case PLUS_EXPR:
4540 case MINUS_EXPR:
4541 /* For now, just optimize the case of the topmost bitfield
4542 where we don't need to do any masking and also
4543 1 bit bitfields where xor can be used.
4544 We might win by one instruction for the other bitfields
4545 too if insv/extv instructions aren't used, so that
4546 can be added later. */
4547 if (bitpos + bitsize != str_bitsize
4548 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4549 break;
4550
4551 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4552 value = convert_modes (str_mode,
4553 TYPE_MODE (TREE_TYPE (op1)), value,
4554 TYPE_UNSIGNED (TREE_TYPE (op1)));
4555
4556 /* We may be accessing data outside the field, which means
4557 we can alias adjacent data. */
4558 if (MEM_P (str_rtx))
4559 {
4560 str_rtx = shallow_copy_rtx (str_rtx);
4561 set_mem_alias_set (str_rtx, 0);
4562 set_mem_expr (str_rtx, 0);
4563 }
4564
4565 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4566 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4567 {
4568 value = expand_and (str_mode, value, const1_rtx, NULL);
4569 binop = xor_optab;
4570 }
4571 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4572 result = expand_binop (str_mode, binop, str_rtx,
4573 value, str_rtx, 1, OPTAB_WIDEN);
4574 if (result != str_rtx)
4575 emit_move_insn (str_rtx, result);
4576 return true;
4577
4578 case BIT_IOR_EXPR:
4579 case BIT_XOR_EXPR:
4580 if (TREE_CODE (op1) != INTEGER_CST)
4581 break;
4582 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4583 value = convert_modes (str_mode,
4584 TYPE_MODE (TREE_TYPE (op1)), value,
4585 TYPE_UNSIGNED (TREE_TYPE (op1)));
4586
4587 /* We may be accessing data outside the field, which means
4588 we can alias adjacent data. */
4589 if (MEM_P (str_rtx))
4590 {
4591 str_rtx = shallow_copy_rtx (str_rtx);
4592 set_mem_alias_set (str_rtx, 0);
4593 set_mem_expr (str_rtx, 0);
4594 }
4595
4596 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4597 if (bitpos + bitsize != str_bitsize)
4598 {
4599 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4600 str_mode);
4601 value = expand_and (str_mode, value, mask, NULL_RTX);
4602 }
4603 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4604 result = expand_binop (str_mode, binop, str_rtx,
4605 value, str_rtx, 1, OPTAB_WIDEN);
4606 if (result != str_rtx)
4607 emit_move_insn (str_rtx, result);
4608 return true;
4609
4610 default:
4611 break;
4612 }
4613
4614 return false;
4615 }
4616
4617 /* In the C++ memory model, consecutive bit fields in a structure are
4618 considered one memory location.
4619
4620 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4621 returns the bit range of consecutive bits in which this COMPONENT_REF
4622 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4623 and *OFFSET may be adjusted in the process.
4624
4625 If the access does not need to be restricted, 0 is returned in both
4626 *BITSTART and *BITEND. */
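/* For illustration: in "struct { char a; int b : 7; int c : 9; } s;" the
   bit fields b and c share one DECL_BIT_FIELD_REPRESENTATIVE, so a store
   to s.c must stay within the representative's bits; the range returned
   here covers b and c together but excludes a.  */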
4627
4628 static void
4629 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4630 unsigned HOST_WIDE_INT *bitend,
4631 tree exp,
4632 HOST_WIDE_INT *bitpos,
4633 tree *offset)
4634 {
4635 HOST_WIDE_INT bitoffset;
4636 tree field, repr;
4637
4638 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4639
4640 field = TREE_OPERAND (exp, 1);
4641 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4642 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4643 need to limit the range we can access. */
4644 if (!repr)
4645 {
4646 *bitstart = *bitend = 0;
4647 return;
4648 }
4649
4650 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4651 part of a larger bit field, then the representative does not serve any
4652 useful purpose. This can occur in Ada. */
4653 if (handled_component_p (TREE_OPERAND (exp, 0)))
4654 {
4655 machine_mode rmode;
4656 HOST_WIDE_INT rbitsize, rbitpos;
4657 tree roffset;
4658 int unsignedp;
4659 int volatilep = 0;
4660 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4661 &roffset, &rmode, &unsignedp, &volatilep, false);
4662 if ((rbitpos % BITS_PER_UNIT) != 0)
4663 {
4664 *bitstart = *bitend = 0;
4665 return;
4666 }
4667 }
4668
4669 /* Compute the adjustment to bitpos from the offset of the field
4670 relative to the representative. DECL_FIELD_OFFSET of field and
4671 repr are the same by construction if they are not constants,
4672 see finish_bitfield_layout. */
4673 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4674 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4675 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4676 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4677 else
4678 bitoffset = 0;
4679 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4680 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4681
4682 /* If the adjustment is larger than bitpos, we would have a negative bit
4683 position for the lower bound and this may wreak havoc later. Adjust
4684 offset and bitpos to make the lower bound non-negative in that case. */
4685 if (bitoffset > *bitpos)
4686 {
4687 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4688 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4689
4690 *bitpos += adjust;
4691 if (*offset == NULL_TREE)
4692 *offset = size_int (-adjust / BITS_PER_UNIT);
4693 else
4694 *offset
4695 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4696 *bitstart = 0;
4697 }
4698 else
4699 *bitstart = *bitpos - bitoffset;
4700
4701 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4702 }
4703
4704 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4705 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4706 DECL_RTL was not set yet, return NORTL. */
4707
4708 static inline bool
4709 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4710 {
4711 if (TREE_CODE (addr) != ADDR_EXPR)
4712 return false;
4713
4714 tree base = TREE_OPERAND (addr, 0);
4715
4716 if (!DECL_P (base)
4717 || TREE_ADDRESSABLE (base)
4718 || DECL_MODE (base) == BLKmode)
4719 return false;
4720
4721 if (!DECL_RTL_SET_P (base))
4722 return nortl;
4723
4724 return (!MEM_P (DECL_RTL (base)));
4725 }
4726
4727 /* Returns true if the MEM_REF REF refers to an object that does not
4728 reside in memory and has non-BLKmode. */
4729
4730 static inline bool
4731 mem_ref_refers_to_non_mem_p (tree ref)
4732 {
4733 tree base = TREE_OPERAND (ref, 0);
4734 return addr_expr_of_non_mem_decl_p_1 (base, false);
4735 }
4736
4737 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4738 is true, try generating a nontemporal store. */
4739
4740 void
4741 expand_assignment (tree to, tree from, bool nontemporal)
4742 {
4743 rtx to_rtx = 0;
4744 rtx result;
4745 machine_mode mode;
4746 unsigned int align;
4747 enum insn_code icode;
4748
4749 /* Don't crash if the lhs of the assignment was erroneous. */
4750 if (TREE_CODE (to) == ERROR_MARK)
4751 {
4752 expand_normal (from);
4753 return;
4754 }
4755
4756 /* Optimize away no-op moves without side-effects. */
4757 if (operand_equal_p (to, from, 0))
4758 return;
4759
4760 /* Handle misaligned stores. */
4761 mode = TYPE_MODE (TREE_TYPE (to));
4762 if ((TREE_CODE (to) == MEM_REF
4763 || TREE_CODE (to) == TARGET_MEM_REF)
4764 && mode != BLKmode
4765 && !mem_ref_refers_to_non_mem_p (to)
4766 && ((align = get_object_alignment (to))
4767 < GET_MODE_ALIGNMENT (mode))
4768 && (((icode = optab_handler (movmisalign_optab, mode))
4769 != CODE_FOR_nothing)
4770 || SLOW_UNALIGNED_ACCESS (mode, align)))
4771 {
4772 rtx reg, mem;
4773
4774 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4775 reg = force_not_mem (reg);
4776 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4777
4778 if (icode != CODE_FOR_nothing)
4779 {
4780 struct expand_operand ops[2];
4781
4782 create_fixed_operand (&ops[0], mem);
4783 create_input_operand (&ops[1], reg, mode);
4784 /* The movmisalign<mode> pattern cannot fail, else the assignment
4785 would silently be omitted. */
4786 expand_insn (icode, 2, ops);
4787 }
4788 else
4789 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4790 return;
4791 }
4792
4793 /* Assignment of a structure component needs special treatment
4794 if the structure component's rtx is not simply a MEM.
4795 Assignment of an array element at a constant index, and assignment of
4796 an array element in an unaligned packed structure field, has the same
4797 problem. Same for (partially) storing into a non-memory object. */
4798 if (handled_component_p (to)
4799 || (TREE_CODE (to) == MEM_REF
4800 && mem_ref_refers_to_non_mem_p (to))
4801 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4802 {
4803 machine_mode mode1;
4804 HOST_WIDE_INT bitsize, bitpos;
4805 unsigned HOST_WIDE_INT bitregion_start = 0;
4806 unsigned HOST_WIDE_INT bitregion_end = 0;
4807 tree offset;
4808 int unsignedp;
4809 int volatilep = 0;
4810 tree tem;
4811
4812 push_temp_slots ();
4813 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4814 &unsignedp, &volatilep, true);
4815
4816 /* Make sure bitpos is not negative, it can wreak havoc later. */
4817 if (bitpos < 0)
4818 {
4819 gcc_assert (offset == NULL_TREE);
4820 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4821 ? 3 : exact_log2 (BITS_PER_UNIT)));
4822 bitpos &= BITS_PER_UNIT - 1;
4823 }
4824
4825 if (TREE_CODE (to) == COMPONENT_REF
4826 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4827 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4828 /* The C++ memory model naturally applies to byte-aligned fields.
4829 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4830 BITSIZE are not byte-aligned, there is no need to limit the range
4831 we can access. This can occur with packed structures in Ada. */
4832 else if (bitsize > 0
4833 && bitsize % BITS_PER_UNIT == 0
4834 && bitpos % BITS_PER_UNIT == 0)
4835 {
4836 bitregion_start = bitpos;
4837 bitregion_end = bitpos + bitsize - 1;
4838 }
4839
4840 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4841
4842 /* If the field has a mode, we want to access it in the
4843 field's mode, not the computed mode.
4844 If a MEM has VOIDmode (external with incomplete type),
4845 use BLKmode for it instead. */
4846 if (MEM_P (to_rtx))
4847 {
4848 if (mode1 != VOIDmode)
4849 to_rtx = adjust_address (to_rtx, mode1, 0);
4850 else if (GET_MODE (to_rtx) == VOIDmode)
4851 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4852 }
4853
4854 if (offset != 0)
4855 {
4856 machine_mode address_mode;
4857 rtx offset_rtx;
4858
4859 if (!MEM_P (to_rtx))
4860 {
4861 /* We can get constant negative offsets into arrays with broken
4862 user code. Translate this to a trap instead of ICEing. */
4863 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4864 expand_builtin_trap ();
4865 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4866 }
4867
4868 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4869 address_mode = get_address_mode (to_rtx);
4870 if (GET_MODE (offset_rtx) != address_mode)
4871 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4872
4873 /* If we have an expression in OFFSET_RTX and a non-zero
4874 byte offset in BITPOS, adding the byte offset before the
4875 OFFSET_RTX results in better intermediate code, which makes
4876 later rtl optimization passes perform better.
4877
4878 We prefer intermediate code like this:
4879
4880 r124:DI=r123:DI+0x18
4881 [r124:DI]=r121:DI
4882
4883 ... instead of ...
4884
4885 r124:DI=r123:DI+0x10
4886 [r124:DI+0x8]=r121:DI
4887
4888 This is only done for aligned data values, as these can
4889 be expected to result in single move instructions. */
4890 if (mode1 != VOIDmode
4891 && bitpos != 0
4892 && bitsize > 0
4893 && (bitpos % bitsize) == 0
4894 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4895 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4896 {
4897 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4898 bitregion_start = 0;
4899 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4900 bitregion_end -= bitpos;
4901 bitpos = 0;
4902 }
4903
4904 to_rtx = offset_address (to_rtx, offset_rtx,
4905 highest_pow2_factor_for_target (to,
4906 offset));
4907 }
4908
4909 /* No action is needed if the target is not a memory and the field
4910 lies completely outside that target. This can occur if the source
4911 code contains an out-of-bounds access to a small array. */
4912 if (!MEM_P (to_rtx)
4913 && GET_MODE (to_rtx) != BLKmode
4914 && (unsigned HOST_WIDE_INT) bitpos
4915 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4916 {
4917 expand_normal (from);
4918 result = NULL;
4919 }
4920 /* Handle expand_expr of a complex value returning a CONCAT. */
4921 else if (GET_CODE (to_rtx) == CONCAT)
4922 {
4923 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4924 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4925 && bitpos == 0
4926 && bitsize == mode_bitsize)
4927 result = store_expr (from, to_rtx, false, nontemporal);
4928 else if (bitsize == mode_bitsize / 2
4929 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4930 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4931 nontemporal);
4932 else if (bitpos + bitsize <= mode_bitsize / 2)
4933 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4934 bitregion_start, bitregion_end,
4935 mode1, from,
4936 get_alias_set (to), nontemporal);
4937 else if (bitpos >= mode_bitsize / 2)
4938 result = store_field (XEXP (to_rtx, 1), bitsize,
4939 bitpos - mode_bitsize / 2,
4940 bitregion_start, bitregion_end,
4941 mode1, from,
4942 get_alias_set (to), nontemporal);
4943 else if (bitpos == 0 && bitsize == mode_bitsize)
4944 {
4945 rtx from_rtx;
4946 result = expand_normal (from);
4947 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4948 TYPE_MODE (TREE_TYPE (from)), 0);
4949 emit_move_insn (XEXP (to_rtx, 0),
4950 read_complex_part (from_rtx, false));
4951 emit_move_insn (XEXP (to_rtx, 1),
4952 read_complex_part (from_rtx, true));
4953 }
4954 else
4955 {
4956 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4957 GET_MODE_SIZE (GET_MODE (to_rtx)));
4958 write_complex_part (temp, XEXP (to_rtx, 0), false);
4959 write_complex_part (temp, XEXP (to_rtx, 1), true);
4960 result = store_field (temp, bitsize, bitpos,
4961 bitregion_start, bitregion_end,
4962 mode1, from,
4963 get_alias_set (to), nontemporal);
4964 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4965 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4966 }
4967 }
4968 else
4969 {
4970 if (MEM_P (to_rtx))
4971 {
4972 /* If the field is at offset zero, we could have been given the
4973 DECL_RTX of the parent struct. Don't munge it. */
4974 to_rtx = shallow_copy_rtx (to_rtx);
4975 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4976 if (volatilep)
4977 MEM_VOLATILE_P (to_rtx) = 1;
4978 }
4979
4980 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4981 bitregion_start, bitregion_end,
4982 mode1,
4983 to_rtx, to, from))
4984 result = NULL;
4985 else
4986 result = store_field (to_rtx, bitsize, bitpos,
4987 bitregion_start, bitregion_end,
4988 mode1, from,
4989 get_alias_set (to), nontemporal);
4990 }
4991
4992 if (result)
4993 preserve_temp_slots (result);
4994 pop_temp_slots ();
4995 return;
4996 }
4997
4998 /* If the rhs is a function call and its value is not an aggregate,
4999 call the function before we start to compute the lhs.
5000 This is needed for correct code for cases such as
5001 val = setjmp (buf) on machines where reference to val
5002 requires loading up part of an address in a separate insn.
5003
5004 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5005 since it might be a promoted variable where the zero- or sign-extension
5006 needs to be done. Handling this in the normal way is safe because no
5007 computation is done before the call. The same is true for SSA names. */
5008 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5009 && COMPLETE_TYPE_P (TREE_TYPE (from))
5010 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5011 && ! (((TREE_CODE (to) == VAR_DECL
5012 || TREE_CODE (to) == PARM_DECL
5013 || TREE_CODE (to) == RESULT_DECL)
5014 && REG_P (DECL_RTL (to)))
5015 || TREE_CODE (to) == SSA_NAME))
5016 {
5017 rtx value;
5018 rtx bounds;
5019
5020 push_temp_slots ();
5021 value = expand_normal (from);
5022
5023 /* Split value and bounds to store them separately. */
5024 chkp_split_slot (value, &value, &bounds);
5025
5026 if (to_rtx == 0)
5027 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5028
5029 /* Handle calls that return values in multiple non-contiguous locations.
5030 The Irix 6 ABI has examples of this. */
5031 if (GET_CODE (to_rtx) == PARALLEL)
5032 {
5033 if (GET_CODE (value) == PARALLEL)
5034 emit_group_move (to_rtx, value);
5035 else
5036 emit_group_load (to_rtx, value, TREE_TYPE (from),
5037 int_size_in_bytes (TREE_TYPE (from)));
5038 }
5039 else if (GET_CODE (value) == PARALLEL)
5040 emit_group_store (to_rtx, value, TREE_TYPE (from),
5041 int_size_in_bytes (TREE_TYPE (from)));
5042 else if (GET_MODE (to_rtx) == BLKmode)
5043 {
5044 /* Handle calls that return BLKmode values in registers. */
5045 if (REG_P (value))
5046 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5047 else
5048 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5049 }
5050 else
5051 {
5052 if (POINTER_TYPE_P (TREE_TYPE (to)))
5053 value = convert_memory_address_addr_space
5054 (GET_MODE (to_rtx), value,
5055 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5056
5057 emit_move_insn (to_rtx, value);
5058 }
5059
5060 /* Store bounds if required. */
5061 if (bounds
5062 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5063 {
5064 gcc_assert (MEM_P (to_rtx));
5065 chkp_emit_bounds_store (bounds, value, to_rtx);
5066 }
5067
5068 preserve_temp_slots (to_rtx);
5069 pop_temp_slots ();
5070 return;
5071 }
5072
5073 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5074 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5075
5076 /* Don't move directly into a return register. */
5077 if (TREE_CODE (to) == RESULT_DECL
5078 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5079 {
5080 rtx temp;
5081
5082 push_temp_slots ();
5083
5084 /* If the source is itself a return value, it still is in a pseudo at
5085 this point so we can move it back to the return register directly. */
5086 if (REG_P (to_rtx)
5087 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5088 && TREE_CODE (from) != CALL_EXPR)
5089 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5090 else
5091 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5092
5093 /* Handle calls that return values in multiple non-contiguous locations.
5094 The Irix 6 ABI has examples of this. */
5095 if (GET_CODE (to_rtx) == PARALLEL)
5096 {
5097 if (GET_CODE (temp) == PARALLEL)
5098 emit_group_move (to_rtx, temp);
5099 else
5100 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5101 int_size_in_bytes (TREE_TYPE (from)));
5102 }
5103 else if (temp)
5104 emit_move_insn (to_rtx, temp);
5105
5106 preserve_temp_slots (to_rtx);
5107 pop_temp_slots ();
5108 return;
5109 }
5110
5111 /* In case we are returning the contents of an object which overlaps
5112 the place the value is being stored, use a safe function when copying
5113 a value through a pointer into a structure value return block. */
5114 if (TREE_CODE (to) == RESULT_DECL
5115 && TREE_CODE (from) == INDIRECT_REF
5116 && ADDR_SPACE_GENERIC_P
5117 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5118 && refs_may_alias_p (to, from)
5119 && cfun->returns_struct
5120 && !cfun->returns_pcc_struct)
5121 {
5122 rtx from_rtx, size;
5123
5124 push_temp_slots ();
5125 size = expr_size (from);
5126 from_rtx = expand_normal (from);
5127
5128 emit_library_call (memmove_libfunc, LCT_NORMAL,
5129 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5130 XEXP (from_rtx, 0), Pmode,
5131 convert_to_mode (TYPE_MODE (sizetype),
5132 size, TYPE_UNSIGNED (sizetype)),
5133 TYPE_MODE (sizetype));
5134
5135 preserve_temp_slots (to_rtx);
5136 pop_temp_slots ();
5137 return;
5138 }
5139
5140 /* Compute FROM and store the value in the rtx we got. */
5141
5142 push_temp_slots ();
5143 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, to);
5144 preserve_temp_slots (result);
5145 pop_temp_slots ();
5146 return;
5147 }
5148
5149 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5150 succeeded, false otherwise. */
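/* For illustration: on x86-64 this would use the target's storent<mode>
   pattern, typically emitting a non-temporal store such as MOVNTI that
   bypasses the cache; when no such pattern exists we return false and the
   caller falls back to an ordinary move.  */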
5151
5152 bool
5153 emit_storent_insn (rtx to, rtx from)
5154 {
5155 struct expand_operand ops[2];
5156 machine_mode mode = GET_MODE (to);
5157 enum insn_code code = optab_handler (storent_optab, mode);
5158
5159 if (code == CODE_FOR_nothing)
5160 return false;
5161
5162 create_fixed_operand (&ops[0], to);
5163 create_input_operand (&ops[1], from, mode);
5164 return maybe_expand_insn (code, 2, ops);
5165 }
5166
5167 /* Generate code for computing expression EXP,
5168 and storing the value into TARGET.
5169
5170 If the mode is BLKmode then we may return TARGET itself.
5171 It turns out that in BLKmode it doesn't cause a problem,
5172 because C has no operators that could combine two different
5173 assignments into the same BLKmode object with different values
5174 with no sequence point. Will other languages need this to
5175 be more thorough?
5176
5177 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5178 stack, and block moves may need to be treated specially.
5179
5180 If NONTEMPORAL is true, try using a nontemporal store instruction.
5181
5182 If BTARGET is not NULL then computed bounds of EXP are
5183 associated with BTARGET. */
5184
5185 rtx
5186 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5187 bool nontemporal, tree btarget)
5188 {
5189 rtx temp;
5190 rtx alt_rtl = NULL_RTX;
5191 location_t loc = curr_insn_location ();
5192
5193 if (VOID_TYPE_P (TREE_TYPE (exp)))
5194 {
5195 /* C++ can generate ?: expressions with a throw expression in one
5196 branch and an rvalue in the other. Here, we resolve attempts to
5197 store the throw expression's nonexistent result. */
5198 gcc_assert (!call_param_p);
5199 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5200 return NULL_RTX;
5201 }
5202 if (TREE_CODE (exp) == COMPOUND_EXPR)
5203 {
5204 /* Perform first part of compound expression, then assign from second
5205 part. */
5206 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5207 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5208 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5209 call_param_p, nontemporal, btarget);
5210 }
5211 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5212 {
5213 /* For conditional expression, get safe form of the target. Then
5214 test the condition, doing the appropriate assignment on either
5215 side. This avoids the creation of unnecessary temporaries.
5216 For non-BLKmode, it is more efficient not to do this. */
5217
5218 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5219
5220 do_pending_stack_adjust ();
5221 NO_DEFER_POP;
5222 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5223 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5224 nontemporal, btarget);
5225 emit_jump_insn (gen_jump (lab2));
5226 emit_barrier ();
5227 emit_label (lab1);
5228 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5229 nontemporal, btarget);
5230 emit_label (lab2);
5231 OK_DEFER_POP;
5232
5233 return NULL_RTX;
5234 }
5235 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5236 /* If this is a scalar in a register that is stored in a wider mode
5237 than the declared mode, compute the result into its declared mode
5238 and then convert to the wider mode. Our value is the computed
5239 expression. */
5240 {
5241 rtx inner_target = 0;
5242
5243 /* We can do the conversion inside EXP, which will often result
5244 in some optimizations. Do the conversion in two steps: first
5245 change the signedness, if needed, then the extension. But don't
5246 do this if the type of EXP is a subtype of something else
5247 since then the conversion might involve more than just
5248 converting modes. */
5249 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5250 && TREE_TYPE (TREE_TYPE (exp)) == 0
5251 && GET_MODE_PRECISION (GET_MODE (target))
5252 == TYPE_PRECISION (TREE_TYPE (exp)))
5253 {
5254 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5255 TYPE_UNSIGNED (TREE_TYPE (exp))))
5256 {
5257 /* Some types, e.g. Fortran's logical*4, won't have a signed
5258 version, so use the mode instead. */
5259 tree ntype
5260 = (signed_or_unsigned_type_for
5261 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5262 if (ntype == NULL)
5263 ntype = lang_hooks.types.type_for_mode
5264 (TYPE_MODE (TREE_TYPE (exp)),
5265 SUBREG_PROMOTED_SIGN (target));
5266
5267 exp = fold_convert_loc (loc, ntype, exp);
5268 }
5269
5270 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5271 (GET_MODE (SUBREG_REG (target)),
5272 SUBREG_PROMOTED_SIGN (target)),
5273 exp);
5274
5275 inner_target = SUBREG_REG (target);
5276 }
5277
5278 temp = expand_expr (exp, inner_target, VOIDmode,
5279 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5280
5281 /* Handle bounds returned by call. */
5282 if (TREE_CODE (exp) == CALL_EXPR)
5283 {
5284 rtx bounds;
5285 chkp_split_slot (temp, &temp, &bounds);
5286 if (bounds && btarget)
5287 {
5288 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5289 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5290 chkp_set_rtl_bounds (btarget, tmp);
5291 }
5292 }
5293
5294 /* If TEMP is a VOIDmode constant, use convert_modes to make
5295 sure that we properly convert it. */
5296 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5297 {
5298 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5299 temp, SUBREG_PROMOTED_SIGN (target));
5300 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5301 GET_MODE (target), temp,
5302 SUBREG_PROMOTED_SIGN (target));
5303 }
5304
5305 convert_move (SUBREG_REG (target), temp,
5306 SUBREG_PROMOTED_SIGN (target));
5307
5308 return NULL_RTX;
5309 }
5310 else if ((TREE_CODE (exp) == STRING_CST
5311 || (TREE_CODE (exp) == MEM_REF
5312 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5313 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5314 == STRING_CST
5315 && integer_zerop (TREE_OPERAND (exp, 1))))
5316 && !nontemporal && !call_param_p
5317 && MEM_P (target))
5318 {
5319 /* Optimize initialization of an array with a STRING_CST. */
5320 HOST_WIDE_INT exp_len, str_copy_len;
5321 rtx dest_mem;
5322 tree str = TREE_CODE (exp) == STRING_CST
5323 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5324
5325 exp_len = int_expr_size (exp);
5326 if (exp_len <= 0)
5327 goto normal_expr;
5328
5329 if (TREE_STRING_LENGTH (str) <= 0)
5330 goto normal_expr;
5331
5332 str_copy_len = strlen (TREE_STRING_POINTER (str));
5333 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5334 goto normal_expr;
5335
5336 str_copy_len = TREE_STRING_LENGTH (str);
5337 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5338 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5339 {
5340 str_copy_len += STORE_MAX_PIECES - 1;
5341 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5342 }
5343 str_copy_len = MIN (str_copy_len, exp_len);
5344 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5345 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5346 MEM_ALIGN (target), false))
5347 goto normal_expr;
5348
5349 dest_mem = target;
5350
5351 dest_mem = store_by_pieces (dest_mem,
5352 str_copy_len, builtin_strncpy_read_str,
5353 CONST_CAST (char *,
5354 TREE_STRING_POINTER (str)),
5355 MEM_ALIGN (target), false,
5356 exp_len > str_copy_len ? 1 : 0);
5357 if (exp_len > str_copy_len)
5358 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5359 GEN_INT (exp_len - str_copy_len),
5360 BLOCK_OP_NORMAL);
5361 return NULL_RTX;
5362 }
5363 else
5364 {
5365 rtx tmp_target;
5366
5367 normal_expr:
5368 /* If we want to use a nontemporal store, force the value into a
5369 register first. */
5370 tmp_target = nontemporal ? NULL_RTX : target;
5371 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5372 (call_param_p
5373 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5374 &alt_rtl, false);
5375
5376 /* Handle bounds returned by call. */
5377 if (TREE_CODE (exp) == CALL_EXPR)
5378 {
5379 rtx bounds;
5380 chkp_split_slot (temp, &temp, &bounds);
5381 if (bounds && btarget)
5382 {
5383 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5384 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5385 chkp_set_rtl_bounds (btarget, tmp);
5386 }
5387 }
5388 }
5389
5390 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5391 the same as that of TARGET, adjust the constant. This is needed, for
5392 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5393 only a word-sized value. */
5394 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5395 && TREE_CODE (exp) != ERROR_MARK
5396 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5397 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5398 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5399
5400 /* If value was not generated in the target, store it there.
5401 Convert the value to TARGET's type first if necessary and emit the
5402 pending increments that have been queued while expanding EXP.
5403 Note that we cannot emit the whole queue blindly because this will
5404 effectively disable the POST_INC optimization later.
5405
5406 If TEMP and TARGET compare equal according to rtx_equal_p, but
5407 one or both of them are volatile memory refs, we have to distinguish
5408 two cases:
5409 - expand_expr has used TARGET. In this case, we must not generate
5410 another copy. This can be detected by TARGET being equal according
5411 to == .
5412 - expand_expr has not used TARGET - that means that the source just
5413 happens to have the same RTX form. Since temp will have been created
5414 by expand_expr, it will compare unequal according to == .
5415 We must generate a copy in this case, to reach the correct number
5416 of volatile memory references. */
5417
5418 if ((! rtx_equal_p (temp, target)
5419 || (temp != target && (side_effects_p (temp)
5420 || side_effects_p (target))))
5421 && TREE_CODE (exp) != ERROR_MARK
5422 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5423 but TARGET is not valid memory reference, TEMP will differ
5424 from TARGET although it is really the same location. */
5425 && !(alt_rtl
5426 && rtx_equal_p (alt_rtl, target)
5427 && !side_effects_p (alt_rtl)
5428 && !side_effects_p (target))
5429 /* If there's nothing to copy, don't bother. Don't call
5430 expr_size unless necessary, because some front ends' (e.g. C++)
5431 expr_size hook must not be given objects that are not
5432 supposed to be bit-copied or bit-initialized. */
5433 && expr_size (exp) != const0_rtx)
5434 {
5435 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5436 {
5437 if (GET_MODE (target) == BLKmode)
5438 {
5439 /* Handle calls that return BLKmode values in registers. */
5440 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5441 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5442 else
5443 store_bit_field (target,
5444 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5445 0, 0, 0, GET_MODE (temp), temp);
5446 }
5447 else
5448 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5449 }
5450
5451 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5452 {
5453 /* Handle copying a string constant into an array. The string
5454 constant may be shorter than the array. So copy just the string's
5455 actual length, and clear the rest. First get the size of the data
5456 type of the string, which is actually the size of the target. */
5457 rtx size = expr_size (exp);
5458
5459 if (CONST_INT_P (size)
5460 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5461 emit_block_move (target, temp, size,
5462 (call_param_p
5463 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5464 else
5465 {
5466 machine_mode pointer_mode
5467 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5468 machine_mode address_mode = get_address_mode (target);
5469
5470 /* Compute the size of the data to copy from the string. */
5471 tree copy_size
5472 = size_binop_loc (loc, MIN_EXPR,
5473 make_tree (sizetype, size),
5474 size_int (TREE_STRING_LENGTH (exp)));
5475 rtx copy_size_rtx
5476 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5477 (call_param_p
5478 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5479 rtx_code_label *label = 0;
5480
5481 /* Copy that much. */
5482 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5483 TYPE_UNSIGNED (sizetype));
5484 emit_block_move (target, temp, copy_size_rtx,
5485 (call_param_p
5486 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5487
5488 /* Figure out how much is left in TARGET that we have to clear.
5489 Do all calculations in pointer_mode. */
5490 if (CONST_INT_P (copy_size_rtx))
5491 {
5492 size = plus_constant (address_mode, size,
5493 -INTVAL (copy_size_rtx));
5494 target = adjust_address (target, BLKmode,
5495 INTVAL (copy_size_rtx));
5496 }
5497 else
5498 {
5499 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5500 copy_size_rtx, NULL_RTX, 0,
5501 OPTAB_LIB_WIDEN);
5502
5503 if (GET_MODE (copy_size_rtx) != address_mode)
5504 copy_size_rtx = convert_to_mode (address_mode,
5505 copy_size_rtx,
5506 TYPE_UNSIGNED (sizetype));
5507
5508 target = offset_address (target, copy_size_rtx,
5509 highest_pow2_factor (copy_size));
5510 label = gen_label_rtx ();
5511 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5512 GET_MODE (size), 0, label);
5513 }
5514
5515 if (size != const0_rtx)
5516 clear_storage (target, size, BLOCK_OP_NORMAL);
5517
5518 if (label)
5519 emit_label (label);
5520 }
5521 }
5522 /* Handle calls that return values in multiple non-contiguous locations.
5523 The Irix 6 ABI has examples of this. */
5524 else if (GET_CODE (target) == PARALLEL)
5525 {
5526 if (GET_CODE (temp) == PARALLEL)
5527 emit_group_move (target, temp);
5528 else
5529 emit_group_load (target, temp, TREE_TYPE (exp),
5530 int_size_in_bytes (TREE_TYPE (exp)));
5531 }
5532 else if (GET_CODE (temp) == PARALLEL)
5533 emit_group_store (target, temp, TREE_TYPE (exp),
5534 int_size_in_bytes (TREE_TYPE (exp)));
5535 else if (GET_MODE (temp) == BLKmode)
5536 emit_block_move (target, temp, expr_size (exp),
5537 (call_param_p
5538 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5539 /* If we emit a nontemporal store, there is nothing else to do. */
5540 else if (nontemporal && emit_storent_insn (target, temp))
5541 ;
5542 else
5543 {
5544 temp = force_operand (temp, target);
5545 if (temp != target)
5546 emit_move_insn (target, temp);
5547 }
5548 }
5549
5550 return NULL_RTX;
5551 }
5552
5553 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5554 rtx
5555 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5556 {
5557 return store_expr_with_bounds (exp, target, call_param_p, nontemporal, NULL);
5558 }
5559 \f
5560 /* Return true if field F of structure TYPE is a flexible array. */
5561
5562 static bool
5563 flexible_array_member_p (const_tree f, const_tree type)
5564 {
5565 const_tree tf;
5566
5567 tf = TREE_TYPE (f);
5568 return (DECL_CHAIN (f) == NULL
5569 && TREE_CODE (tf) == ARRAY_TYPE
5570 && TYPE_DOMAIN (tf)
5571 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5572 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5573 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5574 && int_size_in_bytes (type) >= 0);
5575 }
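
/* A worked example (an illustrative sketch, not from the sources): given
     struct msg { int len; char data[]; };
   the field DATA satisfies flexible_array_member_p: it is the last
   field, its type is an ARRAY_TYPE whose domain starts at zero and has
   no upper bound, and the enclosing struct still has a known constant
   size.  A trailing "char data[4];" would not qualify, because its
   domain has a maximum value.  */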
5576
5577 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5578 must have in order for it to completely initialize a value of type TYPE.
5579 Return -1 if the number isn't known.
5580
5581 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5582
5583 static HOST_WIDE_INT
5584 count_type_elements (const_tree type, bool for_ctor_p)
5585 {
5586 switch (TREE_CODE (type))
5587 {
5588 case ARRAY_TYPE:
5589 {
5590 tree nelts;
5591
5592 nelts = array_type_nelts (type);
5593 if (nelts && tree_fits_uhwi_p (nelts))
5594 {
5595 unsigned HOST_WIDE_INT n;
5596
5597 n = tree_to_uhwi (nelts) + 1;
5598 if (n == 0 || for_ctor_p)
5599 return n;
5600 else
5601 return n * count_type_elements (TREE_TYPE (type), false);
5602 }
5603 return for_ctor_p ? -1 : 1;
5604 }
5605
5606 case RECORD_TYPE:
5607 {
5608 unsigned HOST_WIDE_INT n;
5609 tree f;
5610
5611 n = 0;
5612 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5613 if (TREE_CODE (f) == FIELD_DECL)
5614 {
5615 if (!for_ctor_p)
5616 n += count_type_elements (TREE_TYPE (f), false);
5617 else if (!flexible_array_member_p (f, type))
5618 /* Don't count flexible arrays, which are not supposed
5619 to be initialized. */
5620 n += 1;
5621 }
5622
5623 return n;
5624 }
5625
5626 case UNION_TYPE:
5627 case QUAL_UNION_TYPE:
5628 {
5629 tree f;
5630 HOST_WIDE_INT n, m;
5631
5632 gcc_assert (!for_ctor_p);
5633 /* Estimate the number of scalars in each field and pick the
5634 maximum. Other estimates would do instead; the idea is simply
5635 to make sure that the estimate is not sensitive to the ordering
5636 of the fields. */
5637 n = 1;
5638 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5639 if (TREE_CODE (f) == FIELD_DECL)
5640 {
5641 m = count_type_elements (TREE_TYPE (f), false);
5642 /* If the field doesn't span the whole union, add an extra
5643 scalar for the rest. */
5644 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5645 TYPE_SIZE (type)) != 1)
5646 m++;
5647 if (n < m)
5648 n = m;
5649 }
5650 return n;
5651 }
5652
5653 case COMPLEX_TYPE:
5654 return 2;
5655
5656 case VECTOR_TYPE:
5657 return TYPE_VECTOR_SUBPARTS (type);
5658
5659 case INTEGER_TYPE:
5660 case REAL_TYPE:
5661 case FIXED_POINT_TYPE:
5662 case ENUMERAL_TYPE:
5663 case BOOLEAN_TYPE:
5664 case POINTER_TYPE:
5665 case OFFSET_TYPE:
5666 case REFERENCE_TYPE:
5667 case NULLPTR_TYPE:
5668 return 1;
5669
5670 case ERROR_MARK:
5671 return 0;
5672
5673 case VOID_TYPE:
5674 case METHOD_TYPE:
5675 case FUNCTION_TYPE:
5676 case LANG_TYPE:
5677 default:
5678 gcc_unreachable ();
5679 }
5680 }
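
/* A worked example (illustrative only): for
     struct s { int a; float b[4]; };
   count_type_elements (s, true) returns 2, one top-level constructor
   element per field, while count_type_elements (s, false) returns 5,
   counting one scalar for A plus four scalars for the elements of B.  */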
5681
5682 /* Helper for categorize_ctor_elements. Identical interface. */
5683
5684 static bool
5685 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5686 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5687 {
5688 unsigned HOST_WIDE_INT idx;
5689 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5690 tree value, purpose, elt_type;
5691
5692 /* Whether CTOR is a valid constant initializer, in accordance with what
5693 initializer_constant_valid_p does. If inferred from the constructor
5694 elements, true until proven otherwise. */
5695 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5696 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5697
5698 nz_elts = 0;
5699 init_elts = 0;
5700 num_fields = 0;
5701 elt_type = NULL_TREE;
5702
5703 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5704 {
5705 HOST_WIDE_INT mult = 1;
5706
5707 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5708 {
5709 tree lo_index = TREE_OPERAND (purpose, 0);
5710 tree hi_index = TREE_OPERAND (purpose, 1);
5711
5712 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5713 mult = (tree_to_uhwi (hi_index)
5714 - tree_to_uhwi (lo_index) + 1);
5715 }
5716 num_fields += mult;
5717 elt_type = TREE_TYPE (value);
5718
5719 switch (TREE_CODE (value))
5720 {
5721 case CONSTRUCTOR:
5722 {
5723 HOST_WIDE_INT nz = 0, ic = 0;
5724
5725 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5726 p_complete);
5727
5728 nz_elts += mult * nz;
5729 init_elts += mult * ic;
5730
5731 if (const_from_elts_p && const_p)
5732 const_p = const_elt_p;
5733 }
5734 break;
5735
5736 case INTEGER_CST:
5737 case REAL_CST:
5738 case FIXED_CST:
5739 if (!initializer_zerop (value))
5740 nz_elts += mult;
5741 init_elts += mult;
5742 break;
5743
5744 case STRING_CST:
5745 nz_elts += mult * TREE_STRING_LENGTH (value);
5746 init_elts += mult * TREE_STRING_LENGTH (value);
5747 break;
5748
5749 case COMPLEX_CST:
5750 if (!initializer_zerop (TREE_REALPART (value)))
5751 nz_elts += mult;
5752 if (!initializer_zerop (TREE_IMAGPART (value)))
5753 nz_elts += mult;
5754 init_elts += mult;
5755 break;
5756
5757 case VECTOR_CST:
5758 {
5759 unsigned i;
5760 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5761 {
5762 tree v = VECTOR_CST_ELT (value, i);
5763 if (!initializer_zerop (v))
5764 nz_elts += mult;
5765 init_elts += mult;
5766 }
5767 }
5768 break;
5769
5770 default:
5771 {
5772 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5773 nz_elts += mult * tc;
5774 init_elts += mult * tc;
5775
5776 if (const_from_elts_p && const_p)
5777 const_p = initializer_constant_valid_p (value, elt_type)
5778 != NULL_TREE;
5779 }
5780 break;
5781 }
5782 }
5783
5784 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5785 num_fields, elt_type))
5786 *p_complete = false;
5787
5788 *p_nz_elts += nz_elts;
5789 *p_init_elts += init_elts;
5790
5791 return const_p;
5792 }
5793
5794 /* Examine CTOR to discover:
5795 * how many scalar fields are set to nonzero values,
5796 and place it in *P_NZ_ELTS;
5797 * how many scalar fields in total are in CTOR,
5798 and place it in *P_INIT_ELTS;
5799 * whether the constructor is complete -- in the sense that every
5800 meaningful byte is explicitly given a value --
5801 and place it in *P_COMPLETE.
5802
5803 Return whether or not CTOR is a valid static constant initializer, the same
5804 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5805
5806 bool
5807 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5808 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5809 {
5810 *p_nz_elts = 0;
5811 *p_init_elts = 0;
5812 *p_complete = true;
5813
5814 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5815 }
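
/* A worked example (illustrative only): for the initializer
     struct s { int a; int b[3]; } x = { 1, { 0, 2, 0 } };
   the outer CONSTRUCTOR yields *P_NZ_ELTS == 2 (the 1 and the 2),
   *P_INIT_ELTS == 4 (one scalar for A plus three for B) and
   *P_COMPLETE == true, since both levels explicitly cover every field;
   the function would normally also return true, as every element is a
   valid constant initializer.  */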
5816
5817 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5818 of which had type LAST_TYPE. Each element was itself a complete
5819 initializer, in the sense that every meaningful byte was explicitly
5820 given a value. Return true if the same is true for the constructor
5821 as a whole. */
5822
5823 bool
5824 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5825 const_tree last_type)
5826 {
5827 if (TREE_CODE (type) == UNION_TYPE
5828 || TREE_CODE (type) == QUAL_UNION_TYPE)
5829 {
5830 if (num_elts == 0)
5831 return false;
5832
5833 gcc_assert (num_elts == 1 && last_type);
5834
5835 /* ??? We could look at each element of the union and find the
5836 largest element, which would avoid comparing the size of the
5837 initialized element against any tail padding in the union.
5838 It doesn't seem worth the effort... */
5839 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5840 }
5841
5842 return count_type_elements (type, true) == num_elts;
5843 }
5844
5845 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
5846
5847 static int
5848 mostly_zeros_p (const_tree exp)
5849 {
5850 if (TREE_CODE (exp) == CONSTRUCTOR)
5851 {
5852 HOST_WIDE_INT nz_elts, init_elts;
5853 bool complete_p;
5854
5855 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5856 return !complete_p || nz_elts < init_elts / 4;
5857 }
5858
5859 return initializer_zerop (exp);
5860 }
5861
5862 /* Return 1 if EXP contains all zeros. */
5863
5864 static int
5865 all_zeros_p (const_tree exp)
5866 {
5867 if (TREE_CODE (exp) == CONSTRUCTOR)
5868 {
5869 HOST_WIDE_INT nz_elts, init_elts;
5870 bool complete_p;
5871
5872 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5873 return nz_elts == 0;
5874 }
5875
5876 return initializer_zerop (exp);
5877 }
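
/* Worked examples for the two predicates above (illustrative only):
   for "int a[8] = { 0, 0, 1, 0, 0, 0, 0, 0 };" mostly_zeros_p returns
   nonzero, either because the recorded constructor is incomplete or
   because NZ_ELTS (1) is below INIT_ELTS / 4; for "int a[8] = { 0 };"
   all_zeros_p returns nonzero because NZ_ELTS is 0.  */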
5878 \f
5879 /* Helper function for store_constructor.
5880 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5881 CLEARED is as for store_constructor.
5882 ALIAS_SET is the alias set to use for any stores.
5883
5884 This provides a recursive shortcut back to store_constructor when it isn't
5885 necessary to go through store_field. This is so that we can pass through
5886 the cleared field to let store_constructor know that we may not have to
5887 clear a substructure if the outer structure has already been cleared. */
5888
5889 static void
5890 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5891 HOST_WIDE_INT bitpos, machine_mode mode,
5892 tree exp, int cleared, alias_set_type alias_set)
5893 {
5894 if (TREE_CODE (exp) == CONSTRUCTOR
5895 /* We can only call store_constructor recursively if the size and
5896 bit position are on a byte boundary. */
5897 && bitpos % BITS_PER_UNIT == 0
5898 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5899 /* If we have a nonzero bitpos for a register target, then we just
5900 let store_field do the bitfield handling. This is unlikely to
5901 generate unnecessary clear instructions anyway. */
5902 && (bitpos == 0 || MEM_P (target)))
5903 {
5904 if (MEM_P (target))
5905 target
5906 = adjust_address (target,
5907 GET_MODE (target) == BLKmode
5908 || 0 != (bitpos
5909 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5910 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5911
5912
5913 /* Update the alias set, if required. */
5914 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5915 && MEM_ALIAS_SET (target) != 0)
5916 {
5917 target = copy_rtx (target);
5918 set_mem_alias_set (target, alias_set);
5919 }
5920
5921 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5922 }
5923 else
5924 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5925 }
5926
5927
5928 /* Returns the number of FIELD_DECLs in TYPE. */
5929
5930 static int
5931 fields_length (const_tree type)
5932 {
5933 tree t = TYPE_FIELDS (type);
5934 int count = 0;
5935
5936 for (; t; t = DECL_CHAIN (t))
5937 if (TREE_CODE (t) == FIELD_DECL)
5938 ++count;
5939
5940 return count;
5941 }
5942
5943
5944 /* Store the value of constructor EXP into the rtx TARGET.
5945 TARGET is either a REG or a MEM; we know it cannot conflict, since
5946 safe_from_p has been called.
5947 CLEARED is true if TARGET is known to have been zero'd.
5948 SIZE is the number of bytes of TARGET we are allowed to modify: this
5949 may not be the same as the size of EXP if we are assigning to a field
5950 which has been packed to exclude padding bits. */
5951
5952 static void
5953 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5954 {
5955 tree type = TREE_TYPE (exp);
5956 #ifdef WORD_REGISTER_OPERATIONS
5957 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5958 #endif
5959
5960 switch (TREE_CODE (type))
5961 {
5962 case RECORD_TYPE:
5963 case UNION_TYPE:
5964 case QUAL_UNION_TYPE:
5965 {
5966 unsigned HOST_WIDE_INT idx;
5967 tree field, value;
5968
5969 /* If size is zero or the target is already cleared, do nothing. */
5970 if (size == 0 || cleared)
5971 cleared = 1;
5972 /* We either clear the aggregate or indicate the value is dead. */
5973 else if ((TREE_CODE (type) == UNION_TYPE
5974 || TREE_CODE (type) == QUAL_UNION_TYPE)
5975 && ! CONSTRUCTOR_ELTS (exp))
5976 /* If the constructor is empty, clear the union. */
5977 {
5978 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5979 cleared = 1;
5980 }
5981
5982 /* If we are building a static constructor into a register,
5983 set the initial value as zero so we can fold the value into
5984 a constant. But if more than one register is involved,
5985 this probably loses. */
5986 else if (REG_P (target) && TREE_STATIC (exp)
5987 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5988 {
5989 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5990 cleared = 1;
5991 }
5992
5993 /* If the constructor has fewer fields than the structure or
5994 if we are initializing the structure to mostly zeros, clear
5995 the whole structure first. Don't do this if TARGET is a
5996 register whose mode size isn't equal to SIZE since
5997 clear_storage can't handle this case. */
5998 else if (size > 0
5999 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
6000 != fields_length (type))
6001 || mostly_zeros_p (exp))
6002 && (!REG_P (target)
6003 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6004 == size)))
6005 {
6006 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6007 cleared = 1;
6008 }
6009
6010 if (REG_P (target) && !cleared)
6011 emit_clobber (target);
6012
6013 /* Store each element of the constructor into the
6014 corresponding field of TARGET. */
6015 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6016 {
6017 machine_mode mode;
6018 HOST_WIDE_INT bitsize;
6019 HOST_WIDE_INT bitpos = 0;
6020 tree offset;
6021 rtx to_rtx = target;
6022
6023 /* Just ignore missing fields. We cleared the whole
6024 structure, above, if any fields are missing. */
6025 if (field == 0)
6026 continue;
6027
6028 if (cleared && initializer_zerop (value))
6029 continue;
6030
6031 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6032 bitsize = tree_to_uhwi (DECL_SIZE (field));
6033 else
6034 bitsize = -1;
6035
6036 mode = DECL_MODE (field);
6037 if (DECL_BIT_FIELD (field))
6038 mode = VOIDmode;
6039
6040 offset = DECL_FIELD_OFFSET (field);
6041 if (tree_fits_shwi_p (offset)
6042 && tree_fits_shwi_p (bit_position (field)))
6043 {
6044 bitpos = int_bit_position (field);
6045 offset = 0;
6046 }
6047 else
6048 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6049
6050 if (offset)
6051 {
6052 machine_mode address_mode;
6053 rtx offset_rtx;
6054
6055 offset
6056 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6057 make_tree (TREE_TYPE (exp),
6058 target));
6059
6060 offset_rtx = expand_normal (offset);
6061 gcc_assert (MEM_P (to_rtx));
6062
6063 address_mode = get_address_mode (to_rtx);
6064 if (GET_MODE (offset_rtx) != address_mode)
6065 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6066
6067 to_rtx = offset_address (to_rtx, offset_rtx,
6068 highest_pow2_factor (offset));
6069 }
6070
6071 #ifdef WORD_REGISTER_OPERATIONS
6072 /* If this initializes a field that is smaller than a
6073 word, at the start of a word, try to widen it to a full
6074 word. This special case allows us to output C++ member
6075 function initializations in a form that the optimizers
6076 can understand. */
6077 if (REG_P (target)
6078 && bitsize < BITS_PER_WORD
6079 && bitpos % BITS_PER_WORD == 0
6080 && GET_MODE_CLASS (mode) == MODE_INT
6081 && TREE_CODE (value) == INTEGER_CST
6082 && exp_size >= 0
6083 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6084 {
6085 tree type = TREE_TYPE (value);
6086
6087 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6088 {
6089 type = lang_hooks.types.type_for_mode
6090 (word_mode, TYPE_UNSIGNED (type));
6091 value = fold_convert (type, value);
6092 }
6093
6094 if (BYTES_BIG_ENDIAN)
6095 value
6096 = fold_build2 (LSHIFT_EXPR, type, value,
6097 build_int_cst (type,
6098 BITS_PER_WORD - bitsize));
6099 bitsize = BITS_PER_WORD;
6100 mode = word_mode;
6101 }
6102 #endif
6103
6104 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6105 && DECL_NONADDRESSABLE_P (field))
6106 {
6107 to_rtx = copy_rtx (to_rtx);
6108 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6109 }
6110
6111 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6112 value, cleared,
6113 get_alias_set (TREE_TYPE (field)));
6114 }
6115 break;
6116 }
6117 case ARRAY_TYPE:
6118 {
6119 tree value, index;
6120 unsigned HOST_WIDE_INT i;
6121 int need_to_clear;
6122 tree domain;
6123 tree elttype = TREE_TYPE (type);
6124 int const_bounds_p;
6125 HOST_WIDE_INT minelt = 0;
6126 HOST_WIDE_INT maxelt = 0;
6127
6128 domain = TYPE_DOMAIN (type);
6129 const_bounds_p = (TYPE_MIN_VALUE (domain)
6130 && TYPE_MAX_VALUE (domain)
6131 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6132 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6133
6134 /* If we have constant bounds for the range of the type, get them. */
6135 if (const_bounds_p)
6136 {
6137 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6138 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6139 }
6140
6141 /* If the constructor has fewer elements than the array, clear
6142 the whole array first. Similarly if this is a static
6143 constructor of a non-BLKmode object. */
6144 if (cleared)
6145 need_to_clear = 0;
6146 else if (REG_P (target) && TREE_STATIC (exp))
6147 need_to_clear = 1;
6148 else
6149 {
6150 unsigned HOST_WIDE_INT idx;
6151 tree index, value;
6152 HOST_WIDE_INT count = 0, zero_count = 0;
6153 need_to_clear = ! const_bounds_p;
6154
6155 /* This loop is a more accurate version of the loop in
6156 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6157 is also needed to check for missing elements. */
6158 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6159 {
6160 HOST_WIDE_INT this_node_count;
6161
6162 if (need_to_clear)
6163 break;
6164
6165 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6166 {
6167 tree lo_index = TREE_OPERAND (index, 0);
6168 tree hi_index = TREE_OPERAND (index, 1);
6169
6170 if (! tree_fits_uhwi_p (lo_index)
6171 || ! tree_fits_uhwi_p (hi_index))
6172 {
6173 need_to_clear = 1;
6174 break;
6175 }
6176
6177 this_node_count = (tree_to_uhwi (hi_index)
6178 - tree_to_uhwi (lo_index) + 1);
6179 }
6180 else
6181 this_node_count = 1;
6182
6183 count += this_node_count;
6184 if (mostly_zeros_p (value))
6185 zero_count += this_node_count;
6186 }
6187
6188 /* Clear the entire array first if there are any missing
6189 elements, or if the incidence of zero elements is >=
6190 75%. */
6191 if (! need_to_clear
6192 && (count < maxelt - minelt + 1
6193 || 4 * zero_count >= 3 * count))
6194 need_to_clear = 1;
6195 }
6196
6197 if (need_to_clear && size > 0)
6198 {
6199 if (REG_P (target))
6200 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6201 else
6202 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6203 cleared = 1;
6204 }
6205
6206 if (!cleared && REG_P (target))
6207 /* Inform later passes that the old value is dead. */
6208 emit_clobber (target);
6209
6210 /* Store each element of the constructor into the
6211 corresponding element of TARGET, determined by counting the
6212 elements. */
6213 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6214 {
6215 machine_mode mode;
6216 HOST_WIDE_INT bitsize;
6217 HOST_WIDE_INT bitpos;
6218 rtx xtarget = target;
6219
6220 if (cleared && initializer_zerop (value))
6221 continue;
6222
6223 mode = TYPE_MODE (elttype);
6224 if (mode == BLKmode)
6225 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6226 ? tree_to_uhwi (TYPE_SIZE (elttype))
6227 : -1);
6228 else
6229 bitsize = GET_MODE_BITSIZE (mode);
6230
6231 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6232 {
6233 tree lo_index = TREE_OPERAND (index, 0);
6234 tree hi_index = TREE_OPERAND (index, 1);
6235 rtx index_r, pos_rtx;
6236 HOST_WIDE_INT lo, hi, count;
6237 tree position;
6238
6239 /* If the range is constant and "small", unroll the loop. */
6240 if (const_bounds_p
6241 && tree_fits_shwi_p (lo_index)
6242 && tree_fits_shwi_p (hi_index)
6243 && (lo = tree_to_shwi (lo_index),
6244 hi = tree_to_shwi (hi_index),
6245 count = hi - lo + 1,
6246 (!MEM_P (target)
6247 || count <= 2
6248 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6249 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6250 <= 40 * 8)))))
6251 {
6252 lo -= minelt; hi -= minelt;
6253 for (; lo <= hi; lo++)
6254 {
6255 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6256
6257 if (MEM_P (target)
6258 && !MEM_KEEP_ALIAS_SET_P (target)
6259 && TREE_CODE (type) == ARRAY_TYPE
6260 && TYPE_NONALIASED_COMPONENT (type))
6261 {
6262 target = copy_rtx (target);
6263 MEM_KEEP_ALIAS_SET_P (target) = 1;
6264 }
6265
6266 store_constructor_field
6267 (target, bitsize, bitpos, mode, value, cleared,
6268 get_alias_set (elttype));
6269 }
6270 }
6271 else
6272 {
6273 rtx_code_label *loop_start = gen_label_rtx ();
6274 rtx_code_label *loop_end = gen_label_rtx ();
6275 tree exit_cond;
6276
6277 expand_normal (hi_index);
6278
6279 index = build_decl (EXPR_LOCATION (exp),
6280 VAR_DECL, NULL_TREE, domain);
6281 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6282 SET_DECL_RTL (index, index_r);
6283 store_expr (lo_index, index_r, 0, false);
6284
6285 /* Build the head of the loop. */
6286 do_pending_stack_adjust ();
6287 emit_label (loop_start);
6288
6289 /* Assign value to element index. */
6290 position =
6291 fold_convert (ssizetype,
6292 fold_build2 (MINUS_EXPR,
6293 TREE_TYPE (index),
6294 index,
6295 TYPE_MIN_VALUE (domain)));
6296
6297 position =
6298 size_binop (MULT_EXPR, position,
6299 fold_convert (ssizetype,
6300 TYPE_SIZE_UNIT (elttype)));
6301
6302 pos_rtx = expand_normal (position);
6303 xtarget = offset_address (target, pos_rtx,
6304 highest_pow2_factor (position));
6305 xtarget = adjust_address (xtarget, mode, 0);
6306 if (TREE_CODE (value) == CONSTRUCTOR)
6307 store_constructor (value, xtarget, cleared,
6308 bitsize / BITS_PER_UNIT);
6309 else
6310 store_expr (value, xtarget, 0, false);
6311
6312 /* Generate a conditional jump to exit the loop. */
6313 exit_cond = build2 (LT_EXPR, integer_type_node,
6314 index, hi_index);
6315 jumpif (exit_cond, loop_end, -1);
6316
6317 /* Update the loop counter, and jump to the head of
6318 the loop. */
6319 expand_assignment (index,
6320 build2 (PLUS_EXPR, TREE_TYPE (index),
6321 index, integer_one_node),
6322 false);
6323
6324 emit_jump (loop_start);
6325
6326 /* Build the end of the loop. */
6327 emit_label (loop_end);
6328 }
6329 }
6330 else if ((index != 0 && ! tree_fits_shwi_p (index))
6331 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6332 {
6333 tree position;
6334
6335 if (index == 0)
6336 index = ssize_int (1);
6337
6338 if (minelt)
6339 index = fold_convert (ssizetype,
6340 fold_build2 (MINUS_EXPR,
6341 TREE_TYPE (index),
6342 index,
6343 TYPE_MIN_VALUE (domain)));
6344
6345 position =
6346 size_binop (MULT_EXPR, index,
6347 fold_convert (ssizetype,
6348 TYPE_SIZE_UNIT (elttype)));
6349 xtarget = offset_address (target,
6350 expand_normal (position),
6351 highest_pow2_factor (position));
6352 xtarget = adjust_address (xtarget, mode, 0);
6353 store_expr (value, xtarget, 0, false);
6354 }
6355 else
6356 {
6357 if (index != 0)
6358 bitpos = ((tree_to_shwi (index) - minelt)
6359 * tree_to_uhwi (TYPE_SIZE (elttype)));
6360 else
6361 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6362
6363 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6364 && TREE_CODE (type) == ARRAY_TYPE
6365 && TYPE_NONALIASED_COMPONENT (type))
6366 {
6367 target = copy_rtx (target);
6368 MEM_KEEP_ALIAS_SET_P (target) = 1;
6369 }
6370 store_constructor_field (target, bitsize, bitpos, mode, value,
6371 cleared, get_alias_set (elttype));
6372 }
6373 }
6374 break;
6375 }
6376
6377 case VECTOR_TYPE:
6378 {
6379 unsigned HOST_WIDE_INT idx;
6380 constructor_elt *ce;
6381 int i;
6382 int need_to_clear;
6383 int icode = CODE_FOR_nothing;
6384 tree elttype = TREE_TYPE (type);
6385 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6386 machine_mode eltmode = TYPE_MODE (elttype);
6387 HOST_WIDE_INT bitsize;
6388 HOST_WIDE_INT bitpos;
6389 rtvec vector = NULL;
6390 unsigned n_elts;
6391 alias_set_type alias;
6392
6393 gcc_assert (eltmode != BLKmode);
6394
6395 n_elts = TYPE_VECTOR_SUBPARTS (type);
6396 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6397 {
6398 machine_mode mode = GET_MODE (target);
6399
6400 icode = (int) optab_handler (vec_init_optab, mode);
6401 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6402 if (icode != CODE_FOR_nothing)
6403 {
6404 tree value;
6405
6406 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6407 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6408 {
6409 icode = CODE_FOR_nothing;
6410 break;
6411 }
6412 }
6413 if (icode != CODE_FOR_nothing)
6414 {
6415 unsigned int i;
6416
6417 vector = rtvec_alloc (n_elts);
6418 for (i = 0; i < n_elts; i++)
6419 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6420 }
6421 }
6422
6423 /* If the constructor has fewer elements than the vector,
6424 clear the whole vector first. Similarly if this is a static
6425 constructor of a non-BLKmode object. */
6426 if (cleared)
6427 need_to_clear = 0;
6428 else if (REG_P (target) && TREE_STATIC (exp))
6429 need_to_clear = 1;
6430 else
6431 {
6432 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6433 tree value;
6434
6435 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6436 {
6437 int n_elts_here = tree_to_uhwi
6438 (int_const_binop (TRUNC_DIV_EXPR,
6439 TYPE_SIZE (TREE_TYPE (value)),
6440 TYPE_SIZE (elttype)));
6441
6442 count += n_elts_here;
6443 if (mostly_zeros_p (value))
6444 zero_count += n_elts_here;
6445 }
6446
6447 /* Clear the entire vector first if there are any missing elements,
6448 or if the incidence of zero elements is >= 75%. */
6449 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6450 }
6451
6452 if (need_to_clear && size > 0 && !vector)
6453 {
6454 if (REG_P (target))
6455 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6456 else
6457 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6458 cleared = 1;
6459 }
6460
6461 /* Inform later passes that the old value is dead. */
6462 if (!cleared && !vector && REG_P (target))
6463 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6464
6465 if (MEM_P (target))
6466 alias = MEM_ALIAS_SET (target);
6467 else
6468 alias = get_alias_set (elttype);
6469
6470 /* Store each element of the constructor into the corresponding
6471 element of TARGET, determined by counting the elements. */
6472 for (idx = 0, i = 0;
6473 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6474 idx++, i += bitsize / elt_size)
6475 {
6476 HOST_WIDE_INT eltpos;
6477 tree value = ce->value;
6478
6479 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6480 if (cleared && initializer_zerop (value))
6481 continue;
6482
6483 if (ce->index)
6484 eltpos = tree_to_uhwi (ce->index);
6485 else
6486 eltpos = i;
6487
6488 if (vector)
6489 {
6490 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6491 elements. */
6492 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6493 RTVEC_ELT (vector, eltpos)
6494 = expand_normal (value);
6495 }
6496 else
6497 {
6498 machine_mode value_mode =
6499 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6500 ? TYPE_MODE (TREE_TYPE (value))
6501 : eltmode;
6502 bitpos = eltpos * elt_size;
6503 store_constructor_field (target, bitsize, bitpos, value_mode,
6504 value, cleared, alias);
6505 }
6506 }
6507
6508 if (vector)
6509 emit_insn (GEN_FCN (icode)
6510 (target,
6511 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6512 break;
6513 }
6514
6515 default:
6516 gcc_unreachable ();
6517 }
6518 }
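
/* A sketch of how the ARRAY_TYPE path above typically plays out
   (illustrative only): for a memory TARGET initialized by
     int v[16] = { [3] = 7 };
   the constructor supplies fewer elements than the array, so
   NEED_TO_CLEAR is set, clear_storage wipes the whole object first,
   and only the single nonzero element is then written through
   store_constructor_field.  */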
6519
6520 /* Store the value of EXP (an expression tree)
6521 into a subfield of TARGET which has mode MODE and occupies
6522 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6523 If MODE is VOIDmode, it means that we are storing into a bit-field.
6524
6525 BITREGION_START is bitpos of the first bitfield in this region.
6526 BITREGION_END is the bitpos of the ending bitfield in this region.
6527 These two fields are 0 if the C++ memory model does not apply,
6528 or we are not interested in keeping track of bitfield regions.
6529
6530 Always return const0_rtx unless we have something particular to
6531 return.
6532
6533 ALIAS_SET is the alias set for the destination. This value will
6534 (in general) be different from that for TARGET, since TARGET is a
6535 reference to the containing structure.
6536
6537 If NONTEMPORAL is true, try generating a nontemporal store. */
6538
6539 static rtx
6540 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6541 unsigned HOST_WIDE_INT bitregion_start,
6542 unsigned HOST_WIDE_INT bitregion_end,
6543 machine_mode mode, tree exp,
6544 alias_set_type alias_set, bool nontemporal)
6545 {
6546 if (TREE_CODE (exp) == ERROR_MARK)
6547 return const0_rtx;
6548
6549 /* If we have nothing to store, do nothing unless the expression has
6550 side-effects. */
6551 if (bitsize == 0)
6552 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6553
6554 if (GET_CODE (target) == CONCAT)
6555 {
6556 /* We're storing into a struct containing a single __complex. */
6557
6558 gcc_assert (!bitpos);
6559 return store_expr (exp, target, 0, nontemporal);
6560 }
6561
6562 /* If the structure is in a register or if the component
6563 is a bit field, we cannot use addressing to access it.
6564 Use bit-field techniques or SUBREG to store in it. */
6565
6566 if (mode == VOIDmode
6567 || (mode != BLKmode && ! direct_store[(int) mode]
6568 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6569 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6570 || REG_P (target)
6571 || GET_CODE (target) == SUBREG
6572 /* If the field isn't aligned enough to store as an ordinary memref,
6573 store it as a bit field. */
6574 || (mode != BLKmode
6575 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6576 || bitpos % GET_MODE_ALIGNMENT (mode))
6577 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6578 || (bitpos % BITS_PER_UNIT != 0)))
6579 || (bitsize >= 0 && mode != BLKmode
6580 && GET_MODE_BITSIZE (mode) > bitsize)
6581 /* If the RHS and field are a constant size and the size of the
6582 RHS isn't the same size as the bitfield, we must use bitfield
6583 operations. */
6584 || (bitsize >= 0
6585 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6586 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6587 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6588 decl we must use bitfield operations. */
6589 || (bitsize >= 0
6590 && TREE_CODE (exp) == MEM_REF
6591 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6592 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6593 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6594 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6595 {
6596 rtx temp;
6597 gimple nop_def;
6598
6599 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6600 implies a mask operation. If the precision is the same size as
6601 the field we're storing into, that mask is redundant. This is
6602 particularly common with bit field assignments generated by the
6603 C front end. */
6604 nop_def = get_def_for_expr (exp, NOP_EXPR);
6605 if (nop_def)
6606 {
6607 tree type = TREE_TYPE (exp);
6608 if (INTEGRAL_TYPE_P (type)
6609 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6610 && bitsize == TYPE_PRECISION (type))
6611 {
6612 tree op = gimple_assign_rhs1 (nop_def);
6613 type = TREE_TYPE (op);
6614 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6615 exp = op;
6616 }
6617 }
6618
6619 temp = expand_normal (exp);
6620
6621 /* If BITSIZE is narrower than the size of the type of EXP
6622 we will be narrowing TEMP. Normally, what's wanted are the
6623 low-order bits. However, if EXP's type is a record and this is
6624 a big-endian machine, we want the upper BITSIZE bits.
6625 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6626 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6627 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6628 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6629 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6630 NULL_RTX, 1);
6631
6632 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6633 if (mode != VOIDmode && mode != BLKmode
6634 && mode != TYPE_MODE (TREE_TYPE (exp)))
6635 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6636
6637 /* If the modes of TEMP and TARGET are both BLKmode, both
6638 must be in memory and BITPOS must be aligned on a byte
6639 boundary. If so, we simply do a block copy. Likewise
6640 for a BLKmode-like TARGET. */
6641 if (GET_MODE (temp) == BLKmode
6642 && (GET_MODE (target) == BLKmode
6643 || (MEM_P (target)
6644 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6645 && (bitpos % BITS_PER_UNIT) == 0
6646 && (bitsize % BITS_PER_UNIT) == 0)))
6647 {
6648 gcc_assert (MEM_P (target) && MEM_P (temp)
6649 && (bitpos % BITS_PER_UNIT) == 0);
6650
6651 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6652 emit_block_move (target, temp,
6653 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6654 / BITS_PER_UNIT),
6655 BLOCK_OP_NORMAL);
6656
6657 return const0_rtx;
6658 }
6659
6660 /* Handle calls that return values in multiple non-contiguous locations.
6661 The Irix 6 ABI has examples of this. */
6662 if (GET_CODE (temp) == PARALLEL)
6663 {
6664 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6665 rtx temp_target;
6666 if (mode == BLKmode || mode == VOIDmode)
6667 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6668 temp_target = gen_reg_rtx (mode);
6669 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6670 temp = temp_target;
6671 }
6672 else if (mode == BLKmode)
6673 {
6674 /* Handle calls that return BLKmode values in registers. */
6675 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6676 {
6677 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6678 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6679 temp = temp_target;
6680 }
6681 else
6682 {
6683 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6684 rtx temp_target;
6685 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6686 temp_target = gen_reg_rtx (mode);
6687 temp_target
6688 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6689 temp_target, mode, mode);
6690 temp = temp_target;
6691 }
6692 }
6693
6694 /* Store the value in the bitfield. */
6695 store_bit_field (target, bitsize, bitpos,
6696 bitregion_start, bitregion_end,
6697 mode, temp);
6698
6699 return const0_rtx;
6700 }
6701 else
6702 {
6703 /* Now build a reference to just the desired component. */
6704 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6705
6706 if (to_rtx == target)
6707 to_rtx = copy_rtx (to_rtx);
6708
6709 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6710 set_mem_alias_set (to_rtx, alias_set);
6711
6712 return store_expr (exp, to_rtx, 0, nontemporal);
6713 }
6714 }
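
/* A sketch of the two main paths through store_field above
   (illustrative only): a store into a bit-field such as
     struct s { unsigned int f : 3; } x;   x.f = v;
   arrives with MODE == VOIDmode, so the value is expanded, possibly
   narrowed, and written with store_bit_field; a store into an
   ordinary aligned "int" member instead falls through to the
   adjust_address / store_expr branch at the end.  */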
6715 \f
6716 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6717 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6718 codes and find the ultimate containing object, which we return.
6719
6720 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6721 bit position, and *PUNSIGNEDP to the signedness of the field.
6722 If the position of the field is variable, we store a tree
6723 giving the variable offset (in units) in *POFFSET.
6724 This offset is in addition to the bit position.
6725 If the position is not variable, we store 0 in *POFFSET.
6726
6727 If any of the extraction expressions is volatile,
6728 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6729
6730 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6731 Otherwise, it is a mode that can be used to access the field.
6732
6733 If the field describes a variable-sized object, *PMODE is set to
6734 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6735 this case, but the address of the object can be found.
6736
6737 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6738 look through nodes that serve as markers of a greater alignment than
6739 the one that can be deduced from the expression. These nodes make it
6740 possible for front-ends to prevent temporaries from being created by
6741 the middle-end on alignment considerations. For that purpose, the
6742 normal operating mode at high-level is to always pass FALSE so that
6743 the ultimate containing object is really returned; moreover, the
6744 associated predicate handled_component_p will always return TRUE
6745 on these nodes, thus indicating that they are essentially handled
6746 by get_inner_reference. TRUE should only be passed when the caller
6747 is scanning the expression in order to build another representation
6748 and specifically knows how to handle these nodes; as such, this is
6749 the normal operating mode in the RTL expanders. */
6750
6751 tree
6752 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6753 HOST_WIDE_INT *pbitpos, tree *poffset,
6754 machine_mode *pmode, int *punsignedp,
6755 int *pvolatilep, bool keep_aligning)
6756 {
6757 tree size_tree = 0;
6758 machine_mode mode = VOIDmode;
6759 bool blkmode_bitfield = false;
6760 tree offset = size_zero_node;
6761 offset_int bit_offset = 0;
6762
6763 /* First get the mode, signedness, and size. We do this from just the
6764 outermost expression. */
6765 *pbitsize = -1;
6766 if (TREE_CODE (exp) == COMPONENT_REF)
6767 {
6768 tree field = TREE_OPERAND (exp, 1);
6769 size_tree = DECL_SIZE (field);
6770 if (flag_strict_volatile_bitfields > 0
6771 && TREE_THIS_VOLATILE (exp)
6772 && DECL_BIT_FIELD_TYPE (field)
6773 && DECL_MODE (field) != BLKmode)
6774 /* Volatile bitfields should be accessed in the mode of the
6775 field's type, not the mode computed based on the bit
6776 size. */
6777 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6778 else if (!DECL_BIT_FIELD (field))
6779 mode = DECL_MODE (field);
6780 else if (DECL_MODE (field) == BLKmode)
6781 blkmode_bitfield = true;
6782
6783 *punsignedp = DECL_UNSIGNED (field);
6784 }
6785 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6786 {
6787 size_tree = TREE_OPERAND (exp, 1);
6788 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6789 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6790
6791 /* For vector types, with the correct size of access, use the mode of
6792 inner type. */
6793 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6794 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6795 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6796 mode = TYPE_MODE (TREE_TYPE (exp));
6797 }
6798 else
6799 {
6800 mode = TYPE_MODE (TREE_TYPE (exp));
6801 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6802
6803 if (mode == BLKmode)
6804 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6805 else
6806 *pbitsize = GET_MODE_BITSIZE (mode);
6807 }
6808
6809 if (size_tree != 0)
6810 {
6811 if (! tree_fits_uhwi_p (size_tree))
6812 mode = BLKmode, *pbitsize = -1;
6813 else
6814 *pbitsize = tree_to_uhwi (size_tree);
6815 }
6816
6817 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6818 and find the ultimate containing object. */
6819 while (1)
6820 {
6821 switch (TREE_CODE (exp))
6822 {
6823 case BIT_FIELD_REF:
6824 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6825 break;
6826
6827 case COMPONENT_REF:
6828 {
6829 tree field = TREE_OPERAND (exp, 1);
6830 tree this_offset = component_ref_field_offset (exp);
6831
6832 /* If this field hasn't been filled in yet, don't go past it.
6833 This should only happen when folding expressions made during
6834 type construction. */
6835 if (this_offset == 0)
6836 break;
6837
6838 offset = size_binop (PLUS_EXPR, offset, this_offset);
6839 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6840
6841 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6842 }
6843 break;
6844
6845 case ARRAY_REF:
6846 case ARRAY_RANGE_REF:
6847 {
6848 tree index = TREE_OPERAND (exp, 1);
6849 tree low_bound = array_ref_low_bound (exp);
6850 tree unit_size = array_ref_element_size (exp);
6851
6852 /* We assume all arrays have sizes that are a multiple of a byte.
6853 First subtract the lower bound, if any, in the type of the
6854 index, then convert to sizetype and multiply by the size of
6855 the array element. */
6856 if (! integer_zerop (low_bound))
6857 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6858 index, low_bound);
6859
6860 offset = size_binop (PLUS_EXPR, offset,
6861 size_binop (MULT_EXPR,
6862 fold_convert (sizetype, index),
6863 unit_size));
6864 }
6865 break;
6866
6867 case REALPART_EXPR:
6868 break;
6869
6870 case IMAGPART_EXPR:
6871 bit_offset += *pbitsize;
6872 break;
6873
6874 case VIEW_CONVERT_EXPR:
6875 if (keep_aligning && STRICT_ALIGNMENT
6876 && (TYPE_ALIGN (TREE_TYPE (exp))
6877 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6878 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6879 < BIGGEST_ALIGNMENT)
6880 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6881 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6882 goto done;
6883 break;
6884
6885 case MEM_REF:
6886 /* Hand back the decl for MEM[&decl, off]. */
6887 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6888 {
6889 tree off = TREE_OPERAND (exp, 1);
6890 if (!integer_zerop (off))
6891 {
6892 offset_int boff, coff = mem_ref_offset (exp);
6893 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6894 bit_offset += boff;
6895 }
6896 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6897 }
6898 goto done;
6899
6900 default:
6901 goto done;
6902 }
6903
6904 /* If any reference in the chain is volatile, the effect is volatile. */
6905 if (TREE_THIS_VOLATILE (exp))
6906 *pvolatilep = 1;
6907
6908 exp = TREE_OPERAND (exp, 0);
6909 }
6910 done:
6911
6912 /* If OFFSET is constant, see if we can return the whole thing as a
6913 constant bit position. Make sure to handle overflow during
6914 this conversion. */
6915 if (TREE_CODE (offset) == INTEGER_CST)
6916 {
6917 offset_int tem = wi::sext (wi::to_offset (offset),
6918 TYPE_PRECISION (sizetype));
6919 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6920 tem += bit_offset;
6921 if (wi::fits_shwi_p (tem))
6922 {
6923 *pbitpos = tem.to_shwi ();
6924 *poffset = offset = NULL_TREE;
6925 }
6926 }
6927
6928 /* Otherwise, split it up. */
6929 if (offset)
6930 {
6931 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6932 if (wi::neg_p (bit_offset))
6933 {
6934 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6935 offset_int tem = bit_offset.and_not (mask);
6936 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6937 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6938 bit_offset -= tem;
6939 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6940 offset = size_binop (PLUS_EXPR, offset,
6941 wide_int_to_tree (sizetype, tem));
6942 }
6943
6944 *pbitpos = bit_offset.to_shwi ();
6945 *poffset = offset;
6946 }
6947
6948 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6949 if (mode == VOIDmode
6950 && blkmode_bitfield
6951 && (*pbitpos % BITS_PER_UNIT) == 0
6952 && (*pbitsize % BITS_PER_UNIT) == 0)
6953 *pmode = BLKmode;
6954 else
6955 *pmode = mode;
6956
6957 return exp;
6958 }
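
/* A worked example (illustrative, assuming a typical layout with
   32-bit "int"): for the reference "x.s" in
     struct pair { int i; short s; } x;
   get_inner_reference hands back the VAR_DECL for X and sets
   *PBITSIZE = 16, *PBITPOS = 32, *POFFSET = NULL_TREE and
   *PUNSIGNEDP = 0.  For a variable index such as "a[i]" the byte
   offset cannot be folded into *PBITPOS, so *POFFSET instead holds
   a tree of roughly the form "(sizetype) i * 4".  */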
6959
6960 /* Return a tree of sizetype representing the size, in bytes, of the element
6961 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6962
6963 tree
6964 array_ref_element_size (tree exp)
6965 {
6966 tree aligned_size = TREE_OPERAND (exp, 3);
6967 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6968 location_t loc = EXPR_LOCATION (exp);
6969
6970 /* If a size was specified in the ARRAY_REF, it's the size measured
6971 in alignment units of the element type. So multiply by that value. */
6972 if (aligned_size)
6973 {
6974 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6975 sizetype from another type of the same width and signedness. */
6976 if (TREE_TYPE (aligned_size) != sizetype)
6977 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6978 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6979 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6980 }
6981
6982 /* Otherwise, take the size from that of the element type. Substitute
6983 any PLACEHOLDER_EXPR that we have. */
6984 else
6985 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6986 }
6987
6988 /* Return a tree representing the lower bound of the array mentioned in
6989 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6990
6991 tree
6992 array_ref_low_bound (tree exp)
6993 {
6994 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6995
6996 /* If a lower bound is specified in EXP, use it. */
6997 if (TREE_OPERAND (exp, 2))
6998 return TREE_OPERAND (exp, 2);
6999
7000 /* Otherwise, if there is a domain type and it has a lower bound, use it,
7001 substituting for a PLACEHOLDER_EXPR as needed. */
7002 if (domain_type && TYPE_MIN_VALUE (domain_type))
7003 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
7004
7005 /* Otherwise, return a zero of the appropriate type. */
7006 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
7007 }
7008
7009 /* Returns true if REF is an array reference to an array at the end of
7010 a structure. If this is the case, the array may be allocated larger
7011 than its upper bound implies. */
7012
7013 bool
7014 array_at_struct_end_p (tree ref)
7015 {
7016 if (TREE_CODE (ref) != ARRAY_REF
7017 && TREE_CODE (ref) != ARRAY_RANGE_REF)
7018 return false;
7019
7020 while (handled_component_p (ref))
7021 {
7022 /* If the reference chain contains a component reference to a
7023 non-union type and another field follows it, the reference
7024 is not at the end of a structure. */
7025 if (TREE_CODE (ref) == COMPONENT_REF
7026 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
7027 {
7028 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
7029 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
7030 nextf = DECL_CHAIN (nextf);
7031 if (nextf)
7032 return false;
7033 }
7034
7035 ref = TREE_OPERAND (ref, 0);
7036 }
7037
7038 /* If the reference is based on a declared entity, the size of the array
7039 is constrained by its given domain. */
7040 if (DECL_P (ref))
7041 return false;
7042
7043 return true;
7044 }
7045
7046 /* Return a tree representing the upper bound of the array mentioned in
7047 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
7048
7049 tree
7050 array_ref_up_bound (tree exp)
7051 {
7052 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7053
7054 /* If there is a domain type and it has an upper bound, use it, substituting
7055 for a PLACEHOLDER_EXPR as needed. */
7056 if (domain_type && TYPE_MAX_VALUE (domain_type))
7057 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
7058
7059 /* Otherwise fail. */
7060 return NULL_TREE;
7061 }
7062
7063 /* Return a tree representing the offset, in bytes, of the field referenced
7064 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
7065
7066 tree
7067 component_ref_field_offset (tree exp)
7068 {
7069 tree aligned_offset = TREE_OPERAND (exp, 2);
7070 tree field = TREE_OPERAND (exp, 1);
7071 location_t loc = EXPR_LOCATION (exp);
7072
7073 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
7074 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
7075 value. */
7076 if (aligned_offset)
7077 {
7078 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
7079 sizetype from another type of the same width and signedness. */
7080 if (TREE_TYPE (aligned_offset) != sizetype)
7081 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
7082 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7083 size_int (DECL_OFFSET_ALIGN (field)
7084 / BITS_PER_UNIT));
7085 }
7086
7087 /* Otherwise, take the offset from that of the field. Substitute
7088 any PLACEHOLDER_EXPR that we have. */
7089 else
7090 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7091 }
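
/* Worked example (hypothetical numbers): if operand 2 of the COMPONENT_REF
   is 3 and the field's DECL_OFFSET_ALIGN is 64 bits, the returned byte
   offset is 3 * (64 / BITS_PER_UNIT) = 24, assuming BITS_PER_UNIT is 8.  */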
7092
7093 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7094
7095 static unsigned HOST_WIDE_INT
7096 target_align (const_tree target)
7097 {
7098 /* We might have a chain of nested references with intermediate misaligning
7099 bit-field components, so we need to recurse to find out. */
7100
7101 unsigned HOST_WIDE_INT this_align, outer_align;
7102
7103 switch (TREE_CODE (target))
7104 {
7105 case BIT_FIELD_REF:
7106 return 1;
7107
7108 case COMPONENT_REF:
7109 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7110 outer_align = target_align (TREE_OPERAND (target, 0));
7111 return MIN (this_align, outer_align);
7112
7113 case ARRAY_REF:
7114 case ARRAY_RANGE_REF:
7115 this_align = TYPE_ALIGN (TREE_TYPE (target));
7116 outer_align = target_align (TREE_OPERAND (target, 0));
7117 return MIN (this_align, outer_align);
7118
7119 CASE_CONVERT:
7120 case NON_LVALUE_EXPR:
7121 case VIEW_CONVERT_EXPR:
7122 this_align = TYPE_ALIGN (TREE_TYPE (target));
7123 outer_align = target_align (TREE_OPERAND (target, 0));
7124 return MAX (this_align, outer_align);
7125
7126 default:
7127 return TYPE_ALIGN (TREE_TYPE (target));
7128 }
7129 }
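
/* Illustration (hypothetical values): for an assignment to x.f where the
   FIELD_DECL f has DECL_ALIGN of 8 bits (say, a packed field) and x is a
   variable whose type is aligned to 32 bits, the COMPONENT_REF case above
   yields MIN (8, 32) = 8, so the target is assumed to be only byte
   aligned.  */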
7130
7131 \f
7132 /* Given an rtx VALUE that may contain additions and multiplications, return
7133 an equivalent value that just refers to a register, memory, or constant.
7134 This is done by generating instructions to perform the arithmetic and
7135 returning a pseudo-register containing the value.
7136
7137 The returned value may be a REG, SUBREG, MEM or constant. */
7138
7139 rtx
7140 force_operand (rtx value, rtx target)
7141 {
7142 rtx op1, op2;
7143 /* Use subtarget as the target for operand 0 of a binary operation. */
7144 rtx subtarget = get_subtarget (target);
7145 enum rtx_code code = GET_CODE (value);
7146
7147 /* Check for a subreg applied to an expression produced by the loop optimizer. */
7148 if (code == SUBREG
7149 && !REG_P (SUBREG_REG (value))
7150 && !MEM_P (SUBREG_REG (value)))
7151 {
7152 value
7153 = simplify_gen_subreg (GET_MODE (value),
7154 force_reg (GET_MODE (SUBREG_REG (value)),
7155 force_operand (SUBREG_REG (value),
7156 NULL_RTX)),
7157 GET_MODE (SUBREG_REG (value)),
7158 SUBREG_BYTE (value));
7159 code = GET_CODE (value);
7160 }
7161
7162 /* Check for a PIC address load. */
7163 if ((code == PLUS || code == MINUS)
7164 && XEXP (value, 0) == pic_offset_table_rtx
7165 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7166 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7167 || GET_CODE (XEXP (value, 1)) == CONST))
7168 {
7169 if (!subtarget)
7170 subtarget = gen_reg_rtx (GET_MODE (value));
7171 emit_move_insn (subtarget, value);
7172 return subtarget;
7173 }
7174
7175 if (ARITHMETIC_P (value))
7176 {
7177 op2 = XEXP (value, 1);
7178 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7179 subtarget = 0;
7180 if (code == MINUS && CONST_INT_P (op2))
7181 {
7182 code = PLUS;
7183 op2 = negate_rtx (GET_MODE (value), op2);
7184 }
7185
7186 /* Check for an addition with OP2 a constant integer and our first
7187 operand a PLUS of a virtual register and something else. In that
7188 case, we want to emit the sum of the virtual register and the
7189 constant first and then add the other value. This allows virtual
7190 register instantiation to simply modify the constant rather than
7191 creating another one around this addition. */
7192 if (code == PLUS && CONST_INT_P (op2)
7193 && GET_CODE (XEXP (value, 0)) == PLUS
7194 && REG_P (XEXP (XEXP (value, 0), 0))
7195 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7196 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7197 {
7198 rtx temp = expand_simple_binop (GET_MODE (value), code,
7199 XEXP (XEXP (value, 0), 0), op2,
7200 subtarget, 0, OPTAB_LIB_WIDEN);
7201 return expand_simple_binop (GET_MODE (value), code, temp,
7202 force_operand (XEXP (XEXP (value,
7203 0), 1), 0),
7204 target, 0, OPTAB_LIB_WIDEN);
7205 }
7206
7207 op1 = force_operand (XEXP (value, 0), subtarget);
7208 op2 = force_operand (op2, NULL_RTX);
7209 switch (code)
7210 {
7211 case MULT:
7212 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7213 case DIV:
7214 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7215 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7216 target, 1, OPTAB_LIB_WIDEN);
7217 else
7218 return expand_divmod (0,
7219 FLOAT_MODE_P (GET_MODE (value))
7220 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7221 GET_MODE (value), op1, op2, target, 0);
7222 case MOD:
7223 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7224 target, 0);
7225 case UDIV:
7226 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7227 target, 1);
7228 case UMOD:
7229 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7230 target, 1);
7231 case ASHIFTRT:
7232 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7233 target, 0, OPTAB_LIB_WIDEN);
7234 default:
7235 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7236 target, 1, OPTAB_LIB_WIDEN);
7237 }
7238 }
7239 if (UNARY_P (value))
7240 {
7241 if (!target)
7242 target = gen_reg_rtx (GET_MODE (value));
7243 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7244 switch (code)
7245 {
7246 case ZERO_EXTEND:
7247 case SIGN_EXTEND:
7248 case TRUNCATE:
7249 case FLOAT_EXTEND:
7250 case FLOAT_TRUNCATE:
7251 convert_move (target, op1, code == ZERO_EXTEND);
7252 return target;
7253
7254 case FIX:
7255 case UNSIGNED_FIX:
7256 expand_fix (target, op1, code == UNSIGNED_FIX);
7257 return target;
7258
7259 case FLOAT:
7260 case UNSIGNED_FLOAT:
7261 expand_float (target, op1, code == UNSIGNED_FLOAT);
7262 return target;
7263
7264 default:
7265 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7266 }
7267 }
7268
7269 #ifdef INSN_SCHEDULING
7270 /* On machines that have insn scheduling, we want all memory references to be
7271 explicit, so we need to deal with such paradoxical SUBREGs. */
7272 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7273 value
7274 = simplify_gen_subreg (GET_MODE (value),
7275 force_reg (GET_MODE (SUBREG_REG (value)),
7276 force_operand (SUBREG_REG (value),
7277 NULL_RTX)),
7278 GET_MODE (SUBREG_REG (value)),
7279 SUBREG_BYTE (value));
7280 #endif
7281
7282 return value;
7283 }
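
/* Sketch of the intent (hypothetical RTL): for VALUE equal to
   (plus:SI (reg:SI 100) (const_int 4)), force_operand (value, NULL_RTX)
   emits the addition and hands back a pseudo register holding the sum, so
   callers see a plain REG rather than an arithmetic expression.  */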
7284 \f
7285 /* Subroutine of expand_expr: return nonzero iff there is no way that
7286 EXP can reference X, which is being modified. TOP_P is nonzero if this
7287 call is going to be used to determine whether we need a temporary
7288 for EXP, as opposed to a recursive call to this function.
7289
7290 It is always safe for this routine to return zero since it merely
7291 searches for optimization opportunities. */
7292
7293 int
7294 safe_from_p (const_rtx x, tree exp, int top_p)
7295 {
7296 rtx exp_rtl = 0;
7297 int i, nops;
7298
7299 if (x == 0
7300 /* If EXP has varying size, we MUST use a target since we currently
7301 have no way of allocating temporaries of variable size
7302 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7303 So we assume here that something at a higher level has prevented a
7304 clash. This is somewhat bogus, but the best we can do. Only
7305 do this when X is BLKmode and when we are at the top level. */
7306 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7307 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7308 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7309 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7310 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7311 != INTEGER_CST)
7312 && GET_MODE (x) == BLKmode)
7313 /* If X is in the outgoing argument area, it is always safe. */
7314 || (MEM_P (x)
7315 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7316 || (GET_CODE (XEXP (x, 0)) == PLUS
7317 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7318 return 1;
7319
7320 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7321 find the underlying pseudo. */
7322 if (GET_CODE (x) == SUBREG)
7323 {
7324 x = SUBREG_REG (x);
7325 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7326 return 0;
7327 }
7328
7329 /* Now look at our tree code and possibly recurse. */
7330 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7331 {
7332 case tcc_declaration:
7333 exp_rtl = DECL_RTL_IF_SET (exp);
7334 break;
7335
7336 case tcc_constant:
7337 return 1;
7338
7339 case tcc_exceptional:
7340 if (TREE_CODE (exp) == TREE_LIST)
7341 {
7342 while (1)
7343 {
7344 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7345 return 0;
7346 exp = TREE_CHAIN (exp);
7347 if (!exp)
7348 return 1;
7349 if (TREE_CODE (exp) != TREE_LIST)
7350 return safe_from_p (x, exp, 0);
7351 }
7352 }
7353 else if (TREE_CODE (exp) == CONSTRUCTOR)
7354 {
7355 constructor_elt *ce;
7356 unsigned HOST_WIDE_INT idx;
7357
7358 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7359 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7360 || !safe_from_p (x, ce->value, 0))
7361 return 0;
7362 return 1;
7363 }
7364 else if (TREE_CODE (exp) == ERROR_MARK)
7365 return 1; /* An already-visited SAVE_EXPR? */
7366 else
7367 return 0;
7368
7369 case tcc_statement:
7370 /* The only case we look at here is the DECL_INITIAL inside a
7371 DECL_EXPR. */
7372 return (TREE_CODE (exp) != DECL_EXPR
7373 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7374 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7375 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7376
7377 case tcc_binary:
7378 case tcc_comparison:
7379 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7380 return 0;
7381 /* Fall through. */
7382
7383 case tcc_unary:
7384 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7385
7386 case tcc_expression:
7387 case tcc_reference:
7388 case tcc_vl_exp:
7389 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7390 the expression. If it is set, we conflict iff we are that rtx or
7391 both are in memory. Otherwise, we check all operands of the
7392 expression recursively. */
7393
7394 switch (TREE_CODE (exp))
7395 {
7396 case ADDR_EXPR:
7397 /* If the operand is static or we are static, we can't conflict.
7398 Likewise if we don't conflict with the operand at all. */
7399 if (staticp (TREE_OPERAND (exp, 0))
7400 || TREE_STATIC (exp)
7401 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7402 return 1;
7403
7404 /* Otherwise, the only way this can conflict is if we are taking
7405 the address of a DECL whose address is part of X, which is
7406 very rare. */
7407 exp = TREE_OPERAND (exp, 0);
7408 if (DECL_P (exp))
7409 {
7410 if (!DECL_RTL_SET_P (exp)
7411 || !MEM_P (DECL_RTL (exp)))
7412 return 0;
7413 else
7414 exp_rtl = XEXP (DECL_RTL (exp), 0);
7415 }
7416 break;
7417
7418 case MEM_REF:
7419 if (MEM_P (x)
7420 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7421 get_alias_set (exp)))
7422 return 0;
7423 break;
7424
7425 case CALL_EXPR:
7426 /* Assume that the call will clobber all hard registers and
7427 all of memory. */
7428 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7429 || MEM_P (x))
7430 return 0;
7431 break;
7432
7433 case WITH_CLEANUP_EXPR:
7434 case CLEANUP_POINT_EXPR:
7435 /* Lowered by gimplify.c. */
7436 gcc_unreachable ();
7437
7438 case SAVE_EXPR:
7439 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7440
7441 default:
7442 break;
7443 }
7444
7445 /* If we have an rtx, we do not need to scan our operands. */
7446 if (exp_rtl)
7447 break;
7448
7449 nops = TREE_OPERAND_LENGTH (exp);
7450 for (i = 0; i < nops; i++)
7451 if (TREE_OPERAND (exp, i) != 0
7452 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7453 return 0;
7454
7455 break;
7456
7457 case tcc_type:
7458 /* Should never get a type here. */
7459 gcc_unreachable ();
7460 }
7461
7462 /* If we have an rtl, find any enclosed object. Then see if we conflict
7463 with it. */
7464 if (exp_rtl)
7465 {
7466 if (GET_CODE (exp_rtl) == SUBREG)
7467 {
7468 exp_rtl = SUBREG_REG (exp_rtl);
7469 if (REG_P (exp_rtl)
7470 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7471 return 0;
7472 }
7473
7474 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7475 are memory and they conflict. */
7476 return ! (rtx_equal_p (x, exp_rtl)
7477 || (MEM_P (x) && MEM_P (exp_rtl)
7478 && true_dependence (exp_rtl, VOIDmode, x)));
7479 }
7480
7481 /* If we reach here, it is safe. */
7482 return 1;
7483 }
7484
7485 \f
7486 /* Return the highest power of two that EXP is known to be a multiple of.
7487 This is used in updating alignment of MEMs in array references. */
7488
7489 unsigned HOST_WIDE_INT
7490 highest_pow2_factor (const_tree exp)
7491 {
7492 unsigned HOST_WIDE_INT ret;
7493 int trailing_zeros = tree_ctz (exp);
7494 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7495 return BIGGEST_ALIGNMENT;
7496 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7497 if (ret > BIGGEST_ALIGNMENT)
7498 return BIGGEST_ALIGNMENT;
7499 return ret;
7500 }
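
/* Worked example: for EXP equal to the integer constant 24, tree_ctz
   reports 3 trailing zero bits, so the function returns 1 << 3 = 8, the
   largest power of two dividing 24, capped at BIGGEST_ALIGNMENT.  */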
7501
7502 /* Similar, except that the alignment requirements of TARGET are
7503 taken into account. Assume it is at least as aligned as its
7504 type, unless it is a COMPONENT_REF in which case the layout of
7505 the structure gives the alignment. */
7506
7507 static unsigned HOST_WIDE_INT
7508 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7509 {
7510 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7511 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7512
7513 return MAX (factor, talign);
7514 }
7515 \f
7516 #ifdef HAVE_conditional_move
7517 /* Convert the tree comparison code TCODE to the rtl one where the
7518 signedness is UNSIGNEDP. */
7519
7520 static enum rtx_code
7521 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7522 {
7523 enum rtx_code code;
7524 switch (tcode)
7525 {
7526 case EQ_EXPR:
7527 code = EQ;
7528 break;
7529 case NE_EXPR:
7530 code = NE;
7531 break;
7532 case LT_EXPR:
7533 code = unsignedp ? LTU : LT;
7534 break;
7535 case LE_EXPR:
7536 code = unsignedp ? LEU : LE;
7537 break;
7538 case GT_EXPR:
7539 code = unsignedp ? GTU : GT;
7540 break;
7541 case GE_EXPR:
7542 code = unsignedp ? GEU : GE;
7543 break;
7544 case UNORDERED_EXPR:
7545 code = UNORDERED;
7546 break;
7547 case ORDERED_EXPR:
7548 code = ORDERED;
7549 break;
7550 case UNLT_EXPR:
7551 code = UNLT;
7552 break;
7553 case UNLE_EXPR:
7554 code = UNLE;
7555 break;
7556 case UNGT_EXPR:
7557 code = UNGT;
7558 break;
7559 case UNGE_EXPR:
7560 code = UNGE;
7561 break;
7562 case UNEQ_EXPR:
7563 code = UNEQ;
7564 break;
7565 case LTGT_EXPR:
7566 code = LTGT;
7567 break;
7568
7569 default:
7570 gcc_unreachable ();
7571 }
7572 return code;
7573 }
7574 #endif
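
/* For example (only relevant when HAVE_conditional_move is defined):
   LT_EXPR maps to LTU when UNSIGNEDP is nonzero and to LT otherwise, while
   the unordered variants such as UNLT_EXPR map to their rtl counterparts
   regardless of signedness.  */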
7575
7576 /* Subroutine of expand_expr. Expand the two operands EXP0 and EXP1 of a
7577 binary expression, placing the results in OP0 and OP1.
7578 The value may be stored in TARGET if TARGET is nonzero. The
7579 MODIFIER argument is as documented by expand_expr. */
7580
7581 void
7582 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7583 enum expand_modifier modifier)
7584 {
7585 if (! safe_from_p (target, exp1, 1))
7586 target = 0;
7587 if (operand_equal_p (exp0, exp1, 0))
7588 {
7589 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7590 *op1 = copy_rtx (*op0);
7591 }
7592 else
7593 {
7594 /* If we need to preserve evaluation order, copy exp0 into its own
7595 temporary variable so that it can't be clobbered by exp1. */
7596 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7597 exp0 = save_expr (exp0);
7598 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7599 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7600 }
7601 }
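
/* Example of the operand_equal_p shortcut above (illustrative): when
   expanding x * x, both operands are the same tree, so the first operand is
   expanded once and the second rtx is simply copy_rtx of the first rather
   than the result of a second expansion.  */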
7602
7603 \f
7604 /* Return a MEM that contains constant EXP. DEFER is as for
7605 output_constant_def and MODIFIER is as for expand_expr. */
7606
7607 static rtx
7608 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7609 {
7610 rtx mem;
7611
7612 mem = output_constant_def (exp, defer);
7613 if (modifier != EXPAND_INITIALIZER)
7614 mem = use_anchored_address (mem);
7615 return mem;
7616 }
7617
7618 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7619 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7620
7621 static rtx
7622 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7623 enum expand_modifier modifier, addr_space_t as)
7624 {
7625 rtx result, subtarget;
7626 tree inner, offset;
7627 HOST_WIDE_INT bitsize, bitpos;
7628 int volatilep, unsignedp;
7629 machine_mode mode1;
7630
7631 /* If we are taking the address of a constant and are at the top level,
7632 we have to use output_constant_def since we can't call force_const_mem
7633 at top level. */
7634 /* ??? This should be considered a front-end bug. We should not be
7635 generating ADDR_EXPR of something that isn't an LVALUE. The only
7636 exception here is STRING_CST. */
7637 if (CONSTANT_CLASS_P (exp))
7638 {
7639 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7640 if (modifier < EXPAND_SUM)
7641 result = force_operand (result, target);
7642 return result;
7643 }
7644
7645 /* Everything must be something allowed by is_gimple_addressable. */
7646 switch (TREE_CODE (exp))
7647 {
7648 case INDIRECT_REF:
7649 /* This case will happen via recursion for &a->b. */
7650 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7651
7652 case MEM_REF:
7653 {
7654 tree tem = TREE_OPERAND (exp, 0);
7655 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7656 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7657 return expand_expr (tem, target, tmode, modifier);
7658 }
7659
7660 case CONST_DECL:
7661 /* Expand the initializer like constants above. */
7662 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7663 0, modifier), 0);
7664 if (modifier < EXPAND_SUM)
7665 result = force_operand (result, target);
7666 return result;
7667
7668 case REALPART_EXPR:
7669 /* The real part of the complex number is always first, therefore
7670 the address is the same as the address of the parent object. */
7671 offset = 0;
7672 bitpos = 0;
7673 inner = TREE_OPERAND (exp, 0);
7674 break;
7675
7676 case IMAGPART_EXPR:
7677 /* The imaginary part of the complex number is always second.
7678 The expression is therefore always offset by the size of the
7679 scalar type. */
7680 offset = 0;
7681 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7682 inner = TREE_OPERAND (exp, 0);
7683 break;
7684
7685 case COMPOUND_LITERAL_EXPR:
7686 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7687 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7688 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7689 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7690 the initializers aren't gimplified. */
7691 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7692 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7693 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7694 target, tmode, modifier, as);
7695 /* FALLTHRU */
7696 default:
7697 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7698 expand_expr, as that can have various side effects; LABEL_DECLs for
7699 example, may not have their DECL_RTL set yet. Expand the rtl of
7700 CONSTRUCTORs too, which should yield a memory reference for the
7701 constructor's contents. Assume language specific tree nodes can
7702 be expanded in some interesting way. */
7703 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7704 if (DECL_P (exp)
7705 || TREE_CODE (exp) == CONSTRUCTOR
7706 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7707 {
7708 result = expand_expr (exp, target, tmode,
7709 modifier == EXPAND_INITIALIZER
7710 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7711
7712 /* If the DECL isn't in memory, then the DECL wasn't properly
7713 marked TREE_ADDRESSABLE, which will be either a front-end
7714 or a tree optimizer bug. */
7715
7716 if (TREE_ADDRESSABLE (exp)
7717 && ! MEM_P (result)
7718 && ! targetm.calls.allocate_stack_slots_for_args ())
7719 {
7720 error ("local frame unavailable (naked function?)");
7721 return result;
7722 }
7723 else
7724 gcc_assert (MEM_P (result));
7725 result = XEXP (result, 0);
7726
7727 /* ??? Is this needed anymore? */
7728 if (DECL_P (exp))
7729 TREE_USED (exp) = 1;
7730
7731 if (modifier != EXPAND_INITIALIZER
7732 && modifier != EXPAND_CONST_ADDRESS
7733 && modifier != EXPAND_SUM)
7734 result = force_operand (result, target);
7735 return result;
7736 }
7737
7738 /* Pass FALSE as the last argument to get_inner_reference although
7739 we are expanding to RTL. The rationale is that we know how to
7740 handle "aligning nodes" here: we can just bypass them because
7741 they won't change the final object whose address will be returned
7742 (they actually exist only for that purpose). */
7743 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7744 &mode1, &unsignedp, &volatilep, false);
7745 break;
7746 }
7747
7748 /* We must have made progress. */
7749 gcc_assert (inner != exp);
7750
7751 subtarget = offset || bitpos ? NULL_RTX : target;
7752 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7753 inner alignment, force the inner to be sufficiently aligned. */
7754 if (CONSTANT_CLASS_P (inner)
7755 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7756 {
7757 inner = copy_node (inner);
7758 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7759 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7760 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7761 }
7762 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7763
7764 if (offset)
7765 {
7766 rtx tmp;
7767
7768 if (modifier != EXPAND_NORMAL)
7769 result = force_operand (result, NULL);
7770 tmp = expand_expr (offset, NULL_RTX, tmode,
7771 modifier == EXPAND_INITIALIZER
7772 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7773
7774 /* expand_expr is allowed to return an object in a mode other
7775 than TMODE. If it did, we need to convert. */
7776 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7777 tmp = convert_modes (tmode, GET_MODE (tmp),
7778 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7779 result = convert_memory_address_addr_space (tmode, result, as);
7780 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7781
7782 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7783 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7784 else
7785 {
7786 subtarget = bitpos ? NULL_RTX : target;
7787 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7788 1, OPTAB_LIB_WIDEN);
7789 }
7790 }
7791
7792 if (bitpos)
7793 {
7794 /* Someone beforehand should have rejected taking the address
7795 of such an object. */
7796 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7797
7798 result = convert_memory_address_addr_space (tmode, result, as);
7799 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7800 if (modifier < EXPAND_SUM)
7801 result = force_operand (result, target);
7802 }
7803
7804 return result;
7805 }
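
/* Illustration of the decomposition above (hypothetical layout): for &s.f
   where field f sits 4 bytes into s, get_inner_reference returns INNER = s
   with BITPOS = 32 and no variable OFFSET; the address of s is expanded
   recursively and plus_constant then adds 32 / BITS_PER_UNIT = 4 bytes,
   assuming BITS_PER_UNIT is 8.  */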
7806
7807 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7808 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7809
7810 static rtx
7811 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7812 enum expand_modifier modifier)
7813 {
7814 addr_space_t as = ADDR_SPACE_GENERIC;
7815 machine_mode address_mode = Pmode;
7816 machine_mode pointer_mode = ptr_mode;
7817 machine_mode rmode;
7818 rtx result;
7819
7820 /* Target mode of VOIDmode says "whatever's natural". */
7821 if (tmode == VOIDmode)
7822 tmode = TYPE_MODE (TREE_TYPE (exp));
7823
7824 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7825 {
7826 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7827 address_mode = targetm.addr_space.address_mode (as);
7828 pointer_mode = targetm.addr_space.pointer_mode (as);
7829 }
7830
7831 /* We can get called with some Weird Things if the user does silliness
7832 like "(short) &a". In that case, convert_memory_address won't do
7833 the right thing, so ignore the given target mode. */
7834 if (tmode != address_mode && tmode != pointer_mode)
7835 tmode = address_mode;
7836
7837 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7838 tmode, modifier, as);
7839
7840 /* Despite expand_expr's claims about ignoring TMODE when not
7841 strictly convenient, things break if we don't honor it. Note
7842 that combined with the above, we only do this for pointer modes. */
7843 rmode = GET_MODE (result);
7844 if (rmode == VOIDmode)
7845 rmode = tmode;
7846 if (rmode != tmode)
7847 result = convert_memory_address_addr_space (tmode, result, as);
7848
7849 return result;
7850 }
7851
7852 /* Generate code for computing CONSTRUCTOR EXP.
7853 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7854 is TRUE, instead of creating a temporary variable in memory,
7855 NULL is returned and the caller needs to handle it differently. */
7856
7857 static rtx
7858 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7859 bool avoid_temp_mem)
7860 {
7861 tree type = TREE_TYPE (exp);
7862 machine_mode mode = TYPE_MODE (type);
7863
7864 /* Try to avoid creating a temporary at all. This is possible
7865 if all of the initializer is zero.
7866 FIXME: try to handle all [0..255] initializers we can handle
7867 with memset. */
7868 if (TREE_STATIC (exp)
7869 && !TREE_ADDRESSABLE (exp)
7870 && target != 0 && mode == BLKmode
7871 && all_zeros_p (exp))
7872 {
7873 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7874 return target;
7875 }
7876
7877 /* All elts simple constants => refer to a constant in memory. But
7878 if this is a non-BLKmode mode, let it store a field at a time
7879 since that should make a CONST_INT, CONST_WIDE_INT or
7880 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7881 use, it is best to store directly into the target unless the type
7882 is large enough that memcpy will be used. If we are making an
7883 initializer and all operands are constant, put it in memory as
7884 well.
7885
7886 FIXME: Avoid trying to fill vector constructors piece-meal.
7887 Output them with output_constant_def below unless we're sure
7888 they're zeros. This should go away when vector initializers
7889 are treated like VECTOR_CST instead of arrays. */
7890 if ((TREE_STATIC (exp)
7891 && ((mode == BLKmode
7892 && ! (target != 0 && safe_from_p (target, exp, 1)))
7893 || TREE_ADDRESSABLE (exp)
7894 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7895 && (! can_move_by_pieces
7896 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7897 TYPE_ALIGN (type)))
7898 && ! mostly_zeros_p (exp))))
7899 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7900 && TREE_CONSTANT (exp)))
7901 {
7902 rtx constructor;
7903
7904 if (avoid_temp_mem)
7905 return NULL_RTX;
7906
7907 constructor = expand_expr_constant (exp, 1, modifier);
7908
7909 if (modifier != EXPAND_CONST_ADDRESS
7910 && modifier != EXPAND_INITIALIZER
7911 && modifier != EXPAND_SUM)
7912 constructor = validize_mem (constructor);
7913
7914 return constructor;
7915 }
7916
7917 /* Handle calls that pass values in multiple non-contiguous
7918 locations. The Irix 6 ABI has examples of this. */
7919 if (target == 0 || ! safe_from_p (target, exp, 1)
7920 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7921 {
7922 if (avoid_temp_mem)
7923 return NULL_RTX;
7924
7925 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7926 }
7927
7928 store_constructor (exp, target, 0, int_expr_size (exp));
7929 return target;
7930 }
7931
7932
7933 /* expand_expr: generate code for computing expression EXP.
7934 An rtx for the computed value is returned. The value is never null.
7935 In the case of a void EXP, const0_rtx is returned.
7936
7937 The value may be stored in TARGET if TARGET is nonzero.
7938 TARGET is just a suggestion; callers must assume that
7939 the rtx returned may not be the same as TARGET.
7940
7941 If TARGET is CONST0_RTX, it means that the value will be ignored.
7942
7943 If TMODE is not VOIDmode, it suggests generating the
7944 result in mode TMODE. But this is done only when convenient.
7945 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7946 TMODE is just a suggestion; callers must assume that
7947 the rtx returned may not have mode TMODE.
7948
7949 Note that TARGET may have neither TMODE nor MODE. In that case, it
7950 probably will not be used.
7951
7952 If MODIFIER is EXPAND_SUM then when EXP is an addition
7953 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7954 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7955 products as above, or REG or MEM, or constant.
7956 Ordinarily in such cases we would output mul or add instructions
7957 and then return a pseudo reg containing the sum.
7958
7959 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7960 it also marks a label as absolutely required (it can't be dead).
7961 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7962 This is used for outputting expressions used in initializers.
7963
7964 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7965 with a constant address even if that address is not normally legitimate.
7966 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7967
7968 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7969 a call parameter. Such targets require special care as we haven't yet
7970 marked TARGET so that it's safe from being trashed by libcalls. We
7971 don't want to use TARGET for anything but the final result;
7972 Intermediate values must go elsewhere. Additionally, calls to
7973 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7974
7975 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7976 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7977 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7978 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7979 recursively.
7980
7981 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7982 In this case, we don't adjust a returned MEM rtx that wouldn't be
7983 sufficiently aligned for its mode; instead, it's up to the caller
7984 to deal with it afterwards. This is used to make sure that unaligned
7985 base objects for which out-of-bounds accesses are supported, for
7986 example record types with trailing arrays, aren't realigned behind
7987 the back of the caller.
7988 The normal operating mode is to pass FALSE for this parameter. */
7989
7990 rtx
7991 expand_expr_real (tree exp, rtx target, machine_mode tmode,
7992 enum expand_modifier modifier, rtx *alt_rtl,
7993 bool inner_reference_p)
7994 {
7995 rtx ret;
7996
7997 /* Handle ERROR_MARK before anybody tries to access its type. */
7998 if (TREE_CODE (exp) == ERROR_MARK
7999 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
8000 {
8001 ret = CONST0_RTX (tmode);
8002 return ret ? ret : const0_rtx;
8003 }
8004
8005 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
8006 inner_reference_p);
8007 return ret;
8008 }
8009
8010 /* Try to expand the conditional expression which is represented by
8011 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
8012 return the rtl reg which represents the result. Otherwise return
8013 NULL_RTX. */
8014
8015 static rtx
8016 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8017 tree treeop1 ATTRIBUTE_UNUSED,
8018 tree treeop2 ATTRIBUTE_UNUSED)
8019 {
8020 #ifdef HAVE_conditional_move
8021 rtx insn;
8022 rtx op00, op01, op1, op2;
8023 enum rtx_code comparison_code;
8024 machine_mode comparison_mode;
8025 gimple srcstmt;
8026 rtx temp;
8027 tree type = TREE_TYPE (treeop1);
8028 int unsignedp = TYPE_UNSIGNED (type);
8029 machine_mode mode = TYPE_MODE (type);
8030 machine_mode orig_mode = mode;
8031
8032 /* If we cannot do a conditional move on the mode, try doing it
8033 with the promoted mode. */
8034 if (!can_conditionally_move_p (mode))
8035 {
8036 mode = promote_mode (type, mode, &unsignedp);
8037 if (!can_conditionally_move_p (mode))
8038 return NULL_RTX;
8039 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
8040 }
8041 else
8042 temp = assign_temp (type, 0, 1);
8043
8044 start_sequence ();
8045 expand_operands (treeop1, treeop2,
8046 temp, &op1, &op2, EXPAND_NORMAL);
8047
8048 if (TREE_CODE (treeop0) == SSA_NAME
8049 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8050 {
8051 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8052 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8053 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8054 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8055 comparison_mode = TYPE_MODE (type);
8056 unsignedp = TYPE_UNSIGNED (type);
8057 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8058 }
8059 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
8060 {
8061 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8062 enum tree_code cmpcode = TREE_CODE (treeop0);
8063 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8064 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8065 unsignedp = TYPE_UNSIGNED (type);
8066 comparison_mode = TYPE_MODE (type);
8067 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8068 }
8069 else
8070 {
8071 op00 = expand_normal (treeop0);
8072 op01 = const0_rtx;
8073 comparison_code = NE;
8074 comparison_mode = GET_MODE (op00);
8075 if (comparison_mode == VOIDmode)
8076 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8077 }
8078
8079 if (GET_MODE (op1) != mode)
8080 op1 = gen_lowpart (mode, op1);
8081
8082 if (GET_MODE (op2) != mode)
8083 op2 = gen_lowpart (mode, op2);
8084
8085 /* Try to emit the conditional move. */
8086 insn = emit_conditional_move (temp, comparison_code,
8087 op00, op01, comparison_mode,
8088 op1, op2, mode,
8089 unsignedp);
8090
8091 /* If we could do the conditional move, emit the sequence,
8092 and return. */
8093 if (insn)
8094 {
8095 rtx_insn *seq = get_insns ();
8096 end_sequence ();
8097 emit_insn (seq);
8098 return convert_modes (orig_mode, mode, temp, 0);
8099 }
8100
8101 /* Otherwise discard the sequence and fall back to code with
8102 branches. */
8103 end_sequence ();
8104 #endif
8105 return NULL_RTX;
8106 }
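
/* Source-level sketch of what the helper above targets (illustrative): on a
   target where can_conditionally_move_p holds for the mode of x, a statement
   such as

     x = a < b ? c : d;

   can be expanded via emit_conditional_move without a branch; if that fails,
   the recorded sequence is discarded and the ordinary branching code is used
   instead.  */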
8107
8108 rtx
8109 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8110 enum expand_modifier modifier)
8111 {
8112 rtx op0, op1, op2, temp;
8113 tree type;
8114 int unsignedp;
8115 machine_mode mode;
8116 enum tree_code code = ops->code;
8117 optab this_optab;
8118 rtx subtarget, original_target;
8119 int ignore;
8120 bool reduce_bit_field;
8121 location_t loc = ops->location;
8122 tree treeop0, treeop1, treeop2;
8123 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8124 ? reduce_to_bit_field_precision ((expr), \
8125 target, \
8126 type) \
8127 : (expr))
8128
8129 type = ops->type;
8130 mode = TYPE_MODE (type);
8131 unsignedp = TYPE_UNSIGNED (type);
8132
8133 treeop0 = ops->op0;
8134 treeop1 = ops->op1;
8135 treeop2 = ops->op2;
8136
8137 /* We should be called only on simple (binary or unary) expressions,
8138 exactly those that are valid in gimple expressions that aren't
8139 GIMPLE_SINGLE_RHS (or invalid). */
8140 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8141 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8142 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8143
8144 ignore = (target == const0_rtx
8145 || ((CONVERT_EXPR_CODE_P (code)
8146 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8147 && TREE_CODE (type) == VOID_TYPE));
8148
8149 /* We should be called only if we need the result. */
8150 gcc_assert (!ignore);
8151
8152 /* An operation in what may be a bit-field type needs the
8153 result to be reduced to the precision of the bit-field type,
8154 which is narrower than that of the type's mode. */
8155 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8156 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8157
8158 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8159 target = 0;
8160
8161 /* Use subtarget as the target for operand 0 of a binary operation. */
8162 subtarget = get_subtarget (target);
8163 original_target = target;
8164
8165 switch (code)
8166 {
8167 case NON_LVALUE_EXPR:
8168 case PAREN_EXPR:
8169 CASE_CONVERT:
8170 if (treeop0 == error_mark_node)
8171 return const0_rtx;
8172
8173 if (TREE_CODE (type) == UNION_TYPE)
8174 {
8175 tree valtype = TREE_TYPE (treeop0);
8176
8177 /* If both input and output are BLKmode, this conversion isn't doing
8178 anything except possibly changing memory attributes. */
8179 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8180 {
8181 rtx result = expand_expr (treeop0, target, tmode,
8182 modifier);
8183
8184 result = copy_rtx (result);
8185 set_mem_attributes (result, type, 0);
8186 return result;
8187 }
8188
8189 if (target == 0)
8190 {
8191 if (TYPE_MODE (type) != BLKmode)
8192 target = gen_reg_rtx (TYPE_MODE (type));
8193 else
8194 target = assign_temp (type, 1, 1);
8195 }
8196
8197 if (MEM_P (target))
8198 /* Store data into beginning of memory target. */
8199 store_expr (treeop0,
8200 adjust_address (target, TYPE_MODE (valtype), 0),
8201 modifier == EXPAND_STACK_PARM,
8202 false);
8203
8204 else
8205 {
8206 gcc_assert (REG_P (target));
8207
8208 /* Store this field into a union of the proper type. */
8209 store_field (target,
8210 MIN ((int_size_in_bytes (TREE_TYPE
8211 (treeop0))
8212 * BITS_PER_UNIT),
8213 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8214 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8215 }
8216
8217 /* Return the entire union. */
8218 return target;
8219 }
8220
8221 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8222 {
8223 op0 = expand_expr (treeop0, target, VOIDmode,
8224 modifier);
8225
8226 /* If the signedness of the conversion differs and OP0 is
8227 a promoted SUBREG, clear that indication since we now
8228 have to do the proper extension. */
8229 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8230 && GET_CODE (op0) == SUBREG)
8231 SUBREG_PROMOTED_VAR_P (op0) = 0;
8232
8233 return REDUCE_BIT_FIELD (op0);
8234 }
8235
8236 op0 = expand_expr (treeop0, NULL_RTX, mode,
8237 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8238 if (GET_MODE (op0) == mode)
8239 ;
8240
8241 /* If OP0 is a constant, just convert it into the proper mode. */
8242 else if (CONSTANT_P (op0))
8243 {
8244 tree inner_type = TREE_TYPE (treeop0);
8245 machine_mode inner_mode = GET_MODE (op0);
8246
8247 if (inner_mode == VOIDmode)
8248 inner_mode = TYPE_MODE (inner_type);
8249
8250 if (modifier == EXPAND_INITIALIZER)
8251 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8252 subreg_lowpart_offset (mode,
8253 inner_mode));
8254 else
8255 op0 = convert_modes (mode, inner_mode, op0,
8256 TYPE_UNSIGNED (inner_type));
8257 }
8258
8259 else if (modifier == EXPAND_INITIALIZER)
8260 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8261
8262 else if (target == 0)
8263 op0 = convert_to_mode (mode, op0,
8264 TYPE_UNSIGNED (TREE_TYPE
8265 (treeop0)));
8266 else
8267 {
8268 convert_move (target, op0,
8269 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8270 op0 = target;
8271 }
8272
8273 return REDUCE_BIT_FIELD (op0);
8274
8275 case ADDR_SPACE_CONVERT_EXPR:
8276 {
8277 tree treeop0_type = TREE_TYPE (treeop0);
8278 addr_space_t as_to;
8279 addr_space_t as_from;
8280
8281 gcc_assert (POINTER_TYPE_P (type));
8282 gcc_assert (POINTER_TYPE_P (treeop0_type));
8283
8284 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8285 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8286
8287 /* Conversions between pointers to the same address space should
8288 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8289 gcc_assert (as_to != as_from);
8290
8291 /* Ask target code to handle conversion between pointers
8292 to overlapping address spaces. */
8293 if (targetm.addr_space.subset_p (as_to, as_from)
8294 || targetm.addr_space.subset_p (as_from, as_to))
8295 {
8296 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8297 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8298 gcc_assert (op0);
8299 return op0;
8300 }
8301
8302 /* For disjoint address spaces, converting anything but
8303 a null pointer invokes undefined behaviour. We simply
8304 always return a null pointer here. */
8305 return CONST0_RTX (mode);
8306 }
8307
8308 case POINTER_PLUS_EXPR:
8309 /* Even though the sizetype mode and the pointer's mode can be different,
8310 expand is able to handle this correctly and get the correct result out
8311 of the PLUS_EXPR code. */
8312 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8313 if sizetype precision is smaller than pointer precision. */
8314 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8315 treeop1 = fold_convert_loc (loc, type,
8316 fold_convert_loc (loc, ssizetype,
8317 treeop1));
8318 /* If sizetype precision is larger than pointer precision, truncate the
8319 offset to have matching modes. */
8320 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8321 treeop1 = fold_convert_loc (loc, type, treeop1);
8322
8323 case PLUS_EXPR:
8324 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8325 something else, make sure we add the register to the constant and
8326 then to the other thing. This case can occur during strength
8327 reduction and doing it this way will produce better code if the
8328 frame pointer or argument pointer is eliminated.
8329
8330 fold-const.c will ensure that the constant is always in the inner
8331 PLUS_EXPR, so the only case we need to do anything about is if
8332 sp, ap, or fp is our second argument, in which case we must swap
8333 the innermost first argument and our second argument. */
8334
8335 if (TREE_CODE (treeop0) == PLUS_EXPR
8336 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8337 && TREE_CODE (treeop1) == VAR_DECL
8338 && (DECL_RTL (treeop1) == frame_pointer_rtx
8339 || DECL_RTL (treeop1) == stack_pointer_rtx
8340 || DECL_RTL (treeop1) == arg_pointer_rtx))
8341 {
8342 gcc_unreachable ();
8343 }
8344
8345 /* If the result is to be ptr_mode and we are adding an integer to
8346 something, we might be forming a constant. So try to use
8347 plus_constant. If it produces a sum and we can't accept it,
8348 use force_operand. This allows P = &ARR[const] to generate
8349 efficient code on machines where a SYMBOL_REF is not a valid
8350 address.
8351
8352 If this is an EXPAND_SUM call, always return the sum. */
8353 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8354 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8355 {
8356 if (modifier == EXPAND_STACK_PARM)
8357 target = 0;
8358 if (TREE_CODE (treeop0) == INTEGER_CST
8359 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8360 && TREE_CONSTANT (treeop1))
8361 {
8362 rtx constant_part;
8363 HOST_WIDE_INT wc;
8364 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8365
8366 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8367 EXPAND_SUM);
8368 /* Use wi::shwi to ensure that the constant is
8369 truncated according to the mode of OP1, then sign extended
8370 to a HOST_WIDE_INT. Using the constant directly can result
8371 in non-canonical RTL in a 64x32 cross compile. */
8372 wc = TREE_INT_CST_LOW (treeop0);
8373 constant_part =
8374 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8375 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8376 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8377 op1 = force_operand (op1, target);
8378 return REDUCE_BIT_FIELD (op1);
8379 }
8380
8381 else if (TREE_CODE (treeop1) == INTEGER_CST
8382 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8383 && TREE_CONSTANT (treeop0))
8384 {
8385 rtx constant_part;
8386 HOST_WIDE_INT wc;
8387 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8388
8389 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8390 (modifier == EXPAND_INITIALIZER
8391 ? EXPAND_INITIALIZER : EXPAND_SUM));
8392 if (! CONSTANT_P (op0))
8393 {
8394 op1 = expand_expr (treeop1, NULL_RTX,
8395 VOIDmode, modifier);
8396 /* Return a PLUS if modifier says it's OK. */
8397 if (modifier == EXPAND_SUM
8398 || modifier == EXPAND_INITIALIZER)
8399 return simplify_gen_binary (PLUS, mode, op0, op1);
8400 goto binop2;
8401 }
8402 /* Use wi::shwi to ensure that the constant is
8403 truncated according to the mode of OP1, then sign extended
8404 to a HOST_WIDE_INT. Using the constant directly can result
8405 in non-canonical RTL in a 64x32 cross compile. */
8406 wc = TREE_INT_CST_LOW (treeop1);
8407 constant_part
8408 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8409 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8410 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8411 op0 = force_operand (op0, target);
8412 return REDUCE_BIT_FIELD (op0);
8413 }
8414 }
8415
8416 /* Use TER to expand pointer addition of a negated value
8417 as pointer subtraction. */
8418 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8419 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8420 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8421 && TREE_CODE (treeop1) == SSA_NAME
8422 && TYPE_MODE (TREE_TYPE (treeop0))
8423 == TYPE_MODE (TREE_TYPE (treeop1)))
8424 {
8425 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8426 if (def)
8427 {
8428 treeop1 = gimple_assign_rhs1 (def);
8429 code = MINUS_EXPR;
8430 goto do_minus;
8431 }
8432 }
8433
8434 /* No sense saving up arithmetic to be done
8435 if it's all in the wrong mode to form part of an address.
8436 And force_operand won't know whether to sign-extend or
8437 zero-extend. */
8438 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8439 || mode != ptr_mode)
8440 {
8441 expand_operands (treeop0, treeop1,
8442 subtarget, &op0, &op1, EXPAND_NORMAL);
8443 if (op0 == const0_rtx)
8444 return op1;
8445 if (op1 == const0_rtx)
8446 return op0;
8447 goto binop2;
8448 }
8449
8450 expand_operands (treeop0, treeop1,
8451 subtarget, &op0, &op1, modifier);
8452 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8453
8454 case MINUS_EXPR:
8455 do_minus:
8456 /* For initializers, we are allowed to return a MINUS of two
8457 symbolic constants. Here we handle all cases when both operands
8458 are constant. */
8459 /* Handle difference of two symbolic constants,
8460 for the sake of an initializer. */
8461 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8462 && really_constant_p (treeop0)
8463 && really_constant_p (treeop1))
8464 {
8465 expand_operands (treeop0, treeop1,
8466 NULL_RTX, &op0, &op1, modifier);
8467
8468 /* If the last operand is a CONST_INT, use plus_constant of
8469 the negated constant. Else make the MINUS. */
8470 if (CONST_INT_P (op1))
8471 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8472 -INTVAL (op1)));
8473 else
8474 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8475 }
8476
8477 /* No sense saving up arithmetic to be done
8478 if it's all in the wrong mode to form part of an address.
8479 And force_operand won't know whether to sign-extend or
8480 zero-extend. */
8481 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8482 || mode != ptr_mode)
8483 goto binop;
8484
8485 expand_operands (treeop0, treeop1,
8486 subtarget, &op0, &op1, modifier);
8487
8488 /* Convert A - const to A + (-const). */
8489 if (CONST_INT_P (op1))
8490 {
8491 op1 = negate_rtx (mode, op1);
8492 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8493 }
8494
8495 goto binop2;
8496
8497 case WIDEN_MULT_PLUS_EXPR:
8498 case WIDEN_MULT_MINUS_EXPR:
8499 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8500 op2 = expand_normal (treeop2);
8501 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8502 target, unsignedp);
8503 return target;
8504
8505 case WIDEN_MULT_EXPR:
8506 /* If first operand is constant, swap them.
8507 Thus the following special case checks need only
8508 check the second operand. */
8509 if (TREE_CODE (treeop0) == INTEGER_CST)
8510 {
8511 tree t1 = treeop0;
8512 treeop0 = treeop1;
8513 treeop1 = t1;
8514 }
8515
8516 /* First, check if we have a multiplication of one signed and one
8517 unsigned operand. */
8518 if (TREE_CODE (treeop1) != INTEGER_CST
8519 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8520 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8521 {
8522 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8523 this_optab = usmul_widen_optab;
8524 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8525 != CODE_FOR_nothing)
8526 {
8527 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8528 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8529 EXPAND_NORMAL);
8530 else
8531 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8532 EXPAND_NORMAL);
8533 /* op0 and op1 might still be constant, despite the above
8534 != INTEGER_CST check. Handle it. */
8535 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8536 {
8537 op0 = convert_modes (innermode, mode, op0, true);
8538 op1 = convert_modes (innermode, mode, op1, false);
8539 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8540 target, unsignedp));
8541 }
8542 goto binop3;
8543 }
8544 }
8545 /* Check for a multiplication with matching signedness. */
8546 else if ((TREE_CODE (treeop1) == INTEGER_CST
8547 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8548 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8549 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8550 {
8551 tree op0type = TREE_TYPE (treeop0);
8552 machine_mode innermode = TYPE_MODE (op0type);
8553 bool zextend_p = TYPE_UNSIGNED (op0type);
8554 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8555 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8556
8557 if (TREE_CODE (treeop0) != INTEGER_CST)
8558 {
8559 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8560 != CODE_FOR_nothing)
8561 {
8562 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8563 EXPAND_NORMAL);
8564 /* op0 and op1 might still be constant, despite the above
8565 != INTEGER_CST check. Handle it. */
8566 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8567 {
8568 widen_mult_const:
8569 op0 = convert_modes (innermode, mode, op0, zextend_p);
8570 op1
8571 = convert_modes (innermode, mode, op1,
8572 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8573 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8574 target,
8575 unsignedp));
8576 }
8577 temp = expand_widening_mult (mode, op0, op1, target,
8578 unsignedp, this_optab);
8579 return REDUCE_BIT_FIELD (temp);
8580 }
8581 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8582 != CODE_FOR_nothing
8583 && innermode == word_mode)
8584 {
8585 rtx htem, hipart;
8586 op0 = expand_normal (treeop0);
8587 if (TREE_CODE (treeop1) == INTEGER_CST)
8588 op1 = convert_modes (innermode, mode,
8589 expand_normal (treeop1),
8590 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8591 else
8592 op1 = expand_normal (treeop1);
8593 /* op0 and op1 might still be constant, despite the above
8594 != INTEGER_CST check. Handle it. */
8595 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8596 goto widen_mult_const;
8597 temp = expand_binop (mode, other_optab, op0, op1, target,
8598 unsignedp, OPTAB_LIB_WIDEN);
8599 hipart = gen_highpart (innermode, temp);
8600 htem = expand_mult_highpart_adjust (innermode, hipart,
8601 op0, op1, hipart,
8602 zextend_p);
8603 if (htem != hipart)
8604 emit_move_insn (hipart, htem);
8605 return REDUCE_BIT_FIELD (temp);
8606 }
8607 }
8608 }
8609 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8610 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8611 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8612 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8613
8614 case FMA_EXPR:
8615 {
8616 optab opt = fma_optab;
8617 gimple def0, def2;
8618
8619 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8620 call. */
8621 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8622 {
8623 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8624 tree call_expr;
8625
8626 gcc_assert (fn != NULL_TREE);
8627 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8628 return expand_builtin (call_expr, target, subtarget, mode, false);
8629 }
8630
8631 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8632 /* The multiplication is commutative - look at its 2nd operand
8633 if the first isn't fed by a negate. */
8634 if (!def0)
8635 {
8636 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8637 /* Swap operands if the 2nd operand is fed by a negate. */
8638 if (def0)
8639 {
8640 tree tem = treeop0;
8641 treeop0 = treeop1;
8642 treeop1 = tem;
8643 }
8644 }
8645 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8646
8647 op0 = op2 = NULL;
8648
8649 if (def0 && def2
8650 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8651 {
8652 opt = fnms_optab;
8653 op0 = expand_normal (gimple_assign_rhs1 (def0));
8654 op2 = expand_normal (gimple_assign_rhs1 (def2));
8655 }
8656 else if (def0
8657 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8658 {
8659 opt = fnma_optab;
8660 op0 = expand_normal (gimple_assign_rhs1 (def0));
8661 }
8662 else if (def2
8663 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8664 {
8665 opt = fms_optab;
8666 op2 = expand_normal (gimple_assign_rhs1 (def2));
8667 }
8668
8669 if (op0 == NULL)
8670 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8671 if (op2 == NULL)
8672 op2 = expand_normal (treeop2);
8673 op1 = expand_normal (treeop1);
8674
8675 return expand_ternary_op (TYPE_MODE (type), opt,
8676 op0, op1, op2, target, 0);
8677 }
8678
8679 case MULT_EXPR:
8680 /* If this is a fixed-point operation, then we cannot use the code
8681 below because "expand_mult" doesn't support sat/no-sat fixed-point
8682 multiplications. */
8683 if (ALL_FIXED_POINT_MODE_P (mode))
8684 goto binop;
8685
8686 /* If first operand is constant, swap them.
8687 Thus the following special case checks need only
8688 check the second operand. */
8689 if (TREE_CODE (treeop0) == INTEGER_CST)
8690 {
8691 tree t1 = treeop0;
8692 treeop0 = treeop1;
8693 treeop1 = t1;
8694 }
8695
8696 /* Attempt to return something suitable for generating an
8697 indexed address, for machines that support that. */
8698
8699 if (modifier == EXPAND_SUM && mode == ptr_mode
8700 && tree_fits_shwi_p (treeop1))
8701 {
8702 tree exp1 = treeop1;
8703
8704 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8705 EXPAND_SUM);
8706
8707 if (!REG_P (op0))
8708 op0 = force_operand (op0, NULL_RTX);
8709 if (!REG_P (op0))
8710 op0 = copy_to_mode_reg (mode, op0);
8711
8712 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8713 gen_int_mode (tree_to_shwi (exp1),
8714 TYPE_MODE (TREE_TYPE (exp1)))));
8715 }
8716
8717 if (modifier == EXPAND_STACK_PARM)
8718 target = 0;
8719
8720 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8721 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8722
8723 case TRUNC_DIV_EXPR:
8724 case FLOOR_DIV_EXPR:
8725 case CEIL_DIV_EXPR:
8726 case ROUND_DIV_EXPR:
8727 case EXACT_DIV_EXPR:
8728 /* If this is a fixed-point operation, then we cannot use the code
8729 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8730 divisions. */
8731 if (ALL_FIXED_POINT_MODE_P (mode))
8732 goto binop;
8733
8734 if (modifier == EXPAND_STACK_PARM)
8735 target = 0;
8736 /* Possible optimization: compute the dividend with EXPAND_SUM;
8737 then, if the divisor is constant, we can optimize the case
8738 where some terms of the dividend have coefficients divisible by it. */
8739 expand_operands (treeop0, treeop1,
8740 subtarget, &op0, &op1, EXPAND_NORMAL);
8741 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8742
8743 case RDIV_EXPR:
8744 goto binop;
8745
8746 case MULT_HIGHPART_EXPR:
8747 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8748 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8749 gcc_assert (temp);
8750 return temp;
8751
8752 case TRUNC_MOD_EXPR:
8753 case FLOOR_MOD_EXPR:
8754 case CEIL_MOD_EXPR:
8755 case ROUND_MOD_EXPR:
8756 if (modifier == EXPAND_STACK_PARM)
8757 target = 0;
8758 expand_operands (treeop0, treeop1,
8759 subtarget, &op0, &op1, EXPAND_NORMAL);
8760 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8761
8762 case FIXED_CONVERT_EXPR:
8763 op0 = expand_normal (treeop0);
8764 if (target == 0 || modifier == EXPAND_STACK_PARM)
8765 target = gen_reg_rtx (mode);
8766
8767 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8768 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8769 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8770 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8771 else
8772 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8773 return target;
8774
8775 case FIX_TRUNC_EXPR:
8776 op0 = expand_normal (treeop0);
8777 if (target == 0 || modifier == EXPAND_STACK_PARM)
8778 target = gen_reg_rtx (mode);
8779 expand_fix (target, op0, unsignedp);
8780 return target;
8781
8782 case FLOAT_EXPR:
8783 op0 = expand_normal (treeop0);
8784 if (target == 0 || modifier == EXPAND_STACK_PARM)
8785 target = gen_reg_rtx (mode);
8786 /* expand_float can't figure out what to do if FROM has VOIDmode.
8787 So give it the correct mode. With -O, cse will optimize this. */
8788 if (GET_MODE (op0) == VOIDmode)
8789 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8790 op0);
8791 expand_float (target, op0,
8792 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8793 return target;
8794
8795 case NEGATE_EXPR:
8796 op0 = expand_expr (treeop0, subtarget,
8797 VOIDmode, EXPAND_NORMAL);
8798 if (modifier == EXPAND_STACK_PARM)
8799 target = 0;
8800 temp = expand_unop (mode,
8801 optab_for_tree_code (NEGATE_EXPR, type,
8802 optab_default),
8803 op0, target, 0);
8804 gcc_assert (temp);
8805 return REDUCE_BIT_FIELD (temp);
8806
8807 case ABS_EXPR:
8808 op0 = expand_expr (treeop0, subtarget,
8809 VOIDmode, EXPAND_NORMAL);
8810 if (modifier == EXPAND_STACK_PARM)
8811 target = 0;
8812
8813 /* ABS_EXPR is not valid for complex arguments. */
8814 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8815 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8816
8817 /* Unsigned abs is simply the operand. Testing here means we don't
8818 risk generating incorrect code below. */
8819 if (TYPE_UNSIGNED (type))
8820 return op0;
8821
8822 return expand_abs (mode, op0, target, unsignedp,
8823 safe_from_p (target, treeop0, 1));
8824
8825 case MAX_EXPR:
8826 case MIN_EXPR:
8827 target = original_target;
8828 if (target == 0
8829 || modifier == EXPAND_STACK_PARM
8830 || (MEM_P (target) && MEM_VOLATILE_P (target))
8831 || GET_MODE (target) != mode
8832 || (REG_P (target)
8833 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8834 target = gen_reg_rtx (mode);
8835 expand_operands (treeop0, treeop1,
8836 target, &op0, &op1, EXPAND_NORMAL);
8837
8838 /* First try to do it with a special MIN or MAX instruction.
8839 If that does not win, use a conditional jump to select the proper
8840 value. */
8841 this_optab = optab_for_tree_code (code, type, optab_default);
8842 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8843 OPTAB_WIDEN);
8844 if (temp != 0)
8845 return temp;
8846
8847 /* At this point, a MEM target is no longer useful; we will get better
8848 code without it. */
8849
8850 if (! REG_P (target))
8851 target = gen_reg_rtx (mode);
8852
8853 /* If op1 was placed in target, swap op0 and op1. */
8854 if (target != op0 && target == op1)
8855 {
8856 temp = op0;
8857 op0 = op1;
8858 op1 = temp;
8859 }
8860
8861 /* We generate better code and avoid problems with op1 mentioning
8862 target by forcing op1 into a pseudo if it isn't a constant. */
8863 if (! CONSTANT_P (op1))
8864 op1 = force_reg (mode, op1);
8865
8866 {
8867 enum rtx_code comparison_code;
8868 rtx cmpop1 = op1;
8869
8870 if (code == MAX_EXPR)
8871 comparison_code = unsignedp ? GEU : GE;
8872 else
8873 comparison_code = unsignedp ? LEU : LE;
8874
8875 /* Canonicalize to comparisons against 0. */
8876 if (op1 == const1_rtx)
8877 {
8878 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8879 or (a != 0 ? a : 1) for unsigned.
8880 For MIN we are safe converting (a <= 1 ? a : 1)
8881 into (a <= 0 ? a : 1) */
8882 cmpop1 = const0_rtx;
8883 if (code == MAX_EXPR)
8884 comparison_code = unsignedp ? NE : GT;
8885 }
8886 if (op1 == constm1_rtx && !unsignedp)
8887 {
8888 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8889 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8890 cmpop1 = const0_rtx;
8891 if (code == MIN_EXPR)
8892 comparison_code = LT;
8893 }
8894 #ifdef HAVE_conditional_move
8895 /* Use a conditional move if possible. */
8896 if (can_conditionally_move_p (mode))
8897 {
8898 rtx insn;
8899
8900 start_sequence ();
8901
8902 /* Try to emit the conditional move. */
8903 insn = emit_conditional_move (target, comparison_code,
8904 op0, cmpop1, mode,
8905 op0, op1, mode,
8906 unsignedp);
8907
8908 /* If we could do the conditional move, emit the sequence,
8909 and return. */
8910 if (insn)
8911 {
8912 rtx_insn *seq = get_insns ();
8913 end_sequence ();
8914 emit_insn (seq);
8915 return target;
8916 }
8917
8918 /* Otherwise discard the sequence and fall back to code with
8919 branches. */
8920 end_sequence ();
8921 }
8922 #endif
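  /* Fall back to an explicit compare-and-branch: start with OP0 in
     TARGET and skip the move of OP1 below when TARGET already satisfies
     the comparison against CMPOP1, i.e. OP0 is already the min/max.  */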
8923 if (target != op0)
8924 emit_move_insn (target, op0);
8925
8926 temp = gen_label_rtx ();
8927 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8928 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8929 -1);
8930 }
8931 emit_move_insn (target, op1);
8932 emit_label (temp);
8933 return target;
8934
8935 case BIT_NOT_EXPR:
8936 op0 = expand_expr (treeop0, subtarget,
8937 VOIDmode, EXPAND_NORMAL);
8938 if (modifier == EXPAND_STACK_PARM)
8939 target = 0;
8940 /* In case we have to reduce the result to bitfield precision
8941 for unsigned bitfield expand this as XOR with a proper constant
8942 instead. */
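  /* E.g. for a 3-bit unsigned bit-field, ~X is emitted as X ^ 0b111,
     which keeps the result within the field's precision.  */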
8943 if (reduce_bit_field && TYPE_UNSIGNED (type))
8944 {
8945 wide_int mask = wi::mask (TYPE_PRECISION (type),
8946 false, GET_MODE_PRECISION (mode));
8947
8948 temp = expand_binop (mode, xor_optab, op0,
8949 immed_wide_int_const (mask, mode),
8950 target, 1, OPTAB_LIB_WIDEN);
8951 }
8952 else
8953 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8954 gcc_assert (temp);
8955 return temp;
8956
8957 /* ??? Can optimize bitwise operations with one arg constant.
8958 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8959 and (a bitwise1 b) bitwise2 b (etc)
8960 but that is probably not worthwhile. */
8961
8962 case BIT_AND_EXPR:
8963 case BIT_IOR_EXPR:
8964 case BIT_XOR_EXPR:
8965 goto binop;
8966
8967 case LROTATE_EXPR:
8968 case RROTATE_EXPR:
8969 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8970 || (GET_MODE_PRECISION (TYPE_MODE (type))
8971 == TYPE_PRECISION (type)));
8972 /* fall through */
8973
8974 case LSHIFT_EXPR:
8975 case RSHIFT_EXPR:
8976 /* If this is a fixed-point operation, then we cannot use the code
8977 below because "expand_shift" doesn't support sat/no-sat fixed-point
8978 shifts. */
8979 if (ALL_FIXED_POINT_MODE_P (mode))
8980 goto binop;
8981
8982 if (! safe_from_p (subtarget, treeop1, 1))
8983 subtarget = 0;
8984 if (modifier == EXPAND_STACK_PARM)
8985 target = 0;
8986 op0 = expand_expr (treeop0, subtarget,
8987 VOIDmode, EXPAND_NORMAL);
8988 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8989 unsignedp);
8990 if (code == LSHIFT_EXPR)
8991 temp = REDUCE_BIT_FIELD (temp);
8992 return temp;
8993
8994 /* Could determine the answer when only additive constants differ. Also,
8995 the addition of one can be handled by changing the condition. */
8996 case LT_EXPR:
8997 case LE_EXPR:
8998 case GT_EXPR:
8999 case GE_EXPR:
9000 case EQ_EXPR:
9001 case NE_EXPR:
9002 case UNORDERED_EXPR:
9003 case ORDERED_EXPR:
9004 case UNLT_EXPR:
9005 case UNLE_EXPR:
9006 case UNGT_EXPR:
9007 case UNGE_EXPR:
9008 case UNEQ_EXPR:
9009 case LTGT_EXPR:
9010 temp = do_store_flag (ops,
9011 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9012 tmode != VOIDmode ? tmode : mode);
9013 if (temp)
9014 return temp;
9015
9016 /* Use a compare and a jump for BLKmode comparisons, or for function
9017 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
9018
9019 if ((target == 0
9020 || modifier == EXPAND_STACK_PARM
9021 || ! safe_from_p (target, treeop0, 1)
9022 || ! safe_from_p (target, treeop1, 1)
9023 /* Make sure we don't have a hard reg (such as function's return
9024 value) live across basic blocks, if not optimizing. */
9025 || (!optimize && REG_P (target)
9026 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9027 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9028
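  /* Materialize the flag value: TARGET starts out as 0 and is
     overwritten with 1 (or -1 for a signed 1-bit type) unless the
     comparison is false and the branch below is taken.  */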
9029 emit_move_insn (target, const0_rtx);
9030
9031 op1 = gen_label_rtx ();
9032 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9033
9034 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9035 emit_move_insn (target, constm1_rtx);
9036 else
9037 emit_move_insn (target, const1_rtx);
9038
9039 emit_label (op1);
9040 return target;
9041
9042 case COMPLEX_EXPR:
9043 /* Get the rtx code of the operands. */
9044 op0 = expand_normal (treeop0);
9045 op1 = expand_normal (treeop1);
9046
9047 if (!target)
9048 target = gen_reg_rtx (TYPE_MODE (type));
9049 else
9050 /* If target overlaps with op1, then either we need to force
9051 op1 into a pseudo (if target also overlaps with op0),
9052 or write the complex parts in reverse order. */
9053 switch (GET_CODE (target))
9054 {
9055 case CONCAT:
9056 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9057 {
9058 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9059 {
9060 complex_expr_force_op1:
9061 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9062 emit_move_insn (temp, op1);
9063 op1 = temp;
9064 break;
9065 }
9066 complex_expr_swap_order:
9067 /* Move the imaginary (op1) and real (op0) parts to their
9068 location. */
9069 write_complex_part (target, op1, true);
9070 write_complex_part (target, op0, false);
9071
9072 return target;
9073 }
9074 break;
9075 case MEM:
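	  /* For a MEM target, check each half of the memory location for
	     overlap with the source parts, mirroring the CONCAT logic
	     above.  */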
9076 temp = adjust_address_nv (target,
9077 GET_MODE_INNER (GET_MODE (target)), 0);
9078 if (reg_overlap_mentioned_p (temp, op1))
9079 {
9080 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9081 temp = adjust_address_nv (target, imode,
9082 GET_MODE_SIZE (imode));
9083 if (reg_overlap_mentioned_p (temp, op0))
9084 goto complex_expr_force_op1;
9085 goto complex_expr_swap_order;
9086 }
9087 break;
9088 default:
9089 if (reg_overlap_mentioned_p (target, op1))
9090 {
9091 if (reg_overlap_mentioned_p (target, op0))
9092 goto complex_expr_force_op1;
9093 goto complex_expr_swap_order;
9094 }
9095 break;
9096 }
9097
9098 /* Move the real (op0) and imaginary (op1) parts to their location. */
9099 write_complex_part (target, op0, false);
9100 write_complex_part (target, op1, true);
9101
9102 return target;
9103
9104 case WIDEN_SUM_EXPR:
9105 {
9106 tree oprnd0 = treeop0;
9107 tree oprnd1 = treeop1;
9108
9109 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9110 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9111 target, unsignedp);
9112 return target;
9113 }
9114
9115 case REDUC_MAX_EXPR:
9116 case REDUC_MIN_EXPR:
9117 case REDUC_PLUS_EXPR:
9118 {
9119 op0 = expand_normal (treeop0);
9120 this_optab = optab_for_tree_code (code, type, optab_default);
9121 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9122
9123 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9124 {
9125 struct expand_operand ops[2];
9126 enum insn_code icode = optab_handler (this_optab, vec_mode);
9127
9128 create_output_operand (&ops[0], target, mode);
9129 create_input_operand (&ops[1], op0, vec_mode);
9130 if (maybe_expand_insn (icode, 2, ops))
9131 {
9132 target = ops[0].value;
9133 if (GET_MODE (target) != mode)
9134 return gen_lowpart (tmode, target);
9135 return target;
9136 }
9137 }
9138 /* Fall back to optab with vector result, and then extract scalar. */
9139 this_optab = scalar_reduc_to_vector (this_optab, type);
9140 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9141 gcc_assert (temp);
9142 /* The tree code produces a scalar result, but (somewhat by convention)
9143 the optab produces a vector with the result in element 0 if
9144 little-endian, or element N-1 if big-endian. So pull the scalar
9145 result out of that element. */
9146 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9147 int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9148 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9149 target, mode, mode);
9150 gcc_assert (temp);
9151 return temp;
9152 }
9153
9154 case VEC_UNPACK_HI_EXPR:
9155 case VEC_UNPACK_LO_EXPR:
9156 {
9157 op0 = expand_normal (treeop0);
9158 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9159 target, unsignedp);
9160 gcc_assert (temp);
9161 return temp;
9162 }
9163
9164 case VEC_UNPACK_FLOAT_HI_EXPR:
9165 case VEC_UNPACK_FLOAT_LO_EXPR:
9166 {
9167 op0 = expand_normal (treeop0);
9168 /* The signedness is determined from the input operand. */
9169 temp = expand_widen_pattern_expr
9170 (ops, op0, NULL_RTX, NULL_RTX,
9171 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9172
9173 gcc_assert (temp);
9174 return temp;
9175 }
9176
9177 case VEC_WIDEN_MULT_HI_EXPR:
9178 case VEC_WIDEN_MULT_LO_EXPR:
9179 case VEC_WIDEN_MULT_EVEN_EXPR:
9180 case VEC_WIDEN_MULT_ODD_EXPR:
9181 case VEC_WIDEN_LSHIFT_HI_EXPR:
9182 case VEC_WIDEN_LSHIFT_LO_EXPR:
9183 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9184 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9185 target, unsignedp);
9186 gcc_assert (target);
9187 return target;
9188
9189 case VEC_PACK_TRUNC_EXPR:
9190 case VEC_PACK_SAT_EXPR:
9191 case VEC_PACK_FIX_TRUNC_EXPR:
9192 mode = TYPE_MODE (TREE_TYPE (treeop0));
9193 goto binop;
9194
9195 case VEC_PERM_EXPR:
9196 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9197 op2 = expand_normal (treeop2);
9198
9199 /* Careful here: if the target doesn't support integral vector modes,
9200 a constant selection vector could wind up smooshed into a normal
9201 integral constant. */
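      /* In that case rebuild a CONST_VECTOR by reinterpreting the constant
	 in the integer vector mode that corresponds to the selector's
	 type.  */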
9202 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9203 {
9204 tree sel_type = TREE_TYPE (treeop2);
9205 machine_mode vmode
9206 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9207 TYPE_VECTOR_SUBPARTS (sel_type));
9208 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9209 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9210 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9211 }
9212 else
9213 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9214
9215 temp = expand_vec_perm (mode, op0, op1, op2, target);
9216 gcc_assert (temp);
9217 return temp;
9218
9219 case DOT_PROD_EXPR:
9220 {
9221 tree oprnd0 = treeop0;
9222 tree oprnd1 = treeop1;
9223 tree oprnd2 = treeop2;
9224 rtx op2;
9225
9226 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9227 op2 = expand_normal (oprnd2);
9228 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9229 target, unsignedp);
9230 return target;
9231 }
9232
9233 case SAD_EXPR:
9234 {
9235 tree oprnd0 = treeop0;
9236 tree oprnd1 = treeop1;
9237 tree oprnd2 = treeop2;
9238 rtx op2;
9239
9240 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9241 op2 = expand_normal (oprnd2);
9242 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9243 target, unsignedp);
9244 return target;
9245 }
9246
9247 case REALIGN_LOAD_EXPR:
9248 {
9249 tree oprnd0 = treeop0;
9250 tree oprnd1 = treeop1;
9251 tree oprnd2 = treeop2;
9252 rtx op2;
9253
9254 this_optab = optab_for_tree_code (code, type, optab_default);
9255 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9256 op2 = expand_normal (oprnd2);
9257 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9258 target, unsignedp);
9259 gcc_assert (temp);
9260 return temp;
9261 }
9262
9263 case COND_EXPR:
9264 /* A COND_EXPR with its type being VOID_TYPE represents a
9265 conditional jump and is handled in
9266 expand_gimple_cond_expr. */
9267 gcc_assert (!VOID_TYPE_P (type));
9268
9269 /* Note that COND_EXPRs whose type is a structure or union
9270 are required to be constructed to contain assignments of
9271 a temporary variable, so that we can evaluate them here
9272 for side effect only. If type is void, we must do likewise. */
9273
9274 gcc_assert (!TREE_ADDRESSABLE (type)
9275 && !ignore
9276 && TREE_TYPE (treeop1) != void_type_node
9277 && TREE_TYPE (treeop2) != void_type_node);
9278
9279 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9280 if (temp)
9281 return temp;
9282
9283 /* If we are not to produce a result, we have no target. Otherwise,
9284 if a target was specified use it; it will not be used as an
9285 intermediate target unless it is safe. If no target, use a
9286 temporary. */
9287
9288 if (modifier != EXPAND_STACK_PARM
9289 && original_target
9290 && safe_from_p (original_target, treeop0, 1)
9291 && GET_MODE (original_target) == mode
9292 && !MEM_P (original_target))
9293 temp = original_target;
9294 else
9295 temp = assign_temp (type, 0, 1);
9296
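  /* Emit an explicit diamond: if TREEOP0 is false jump to OP0, otherwise
     store TREEOP1 into TEMP and jump to OP1; at OP0 store TREEOP2; OP1 is
     the join point.  */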
9297 do_pending_stack_adjust ();
9298 NO_DEFER_POP;
9299 op0 = gen_label_rtx ();
9300 op1 = gen_label_rtx ();
9301 jumpifnot (treeop0, op0, -1);
9302 store_expr (treeop1, temp,
9303 modifier == EXPAND_STACK_PARM,
9304 false);
9305
9306 emit_jump_insn (gen_jump (op1));
9307 emit_barrier ();
9308 emit_label (op0);
9309 store_expr (treeop2, temp,
9310 modifier == EXPAND_STACK_PARM,
9311 false);
9312
9313 emit_label (op1);
9314 OK_DEFER_POP;
9315 return temp;
9316
9317 case VEC_COND_EXPR:
9318 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9319 return target;
9320
9321 default:
9322 gcc_unreachable ();
9323 }
9324
9325 /* Here to do an ordinary binary operator. */
9326 binop:
9327 expand_operands (treeop0, treeop1,
9328 subtarget, &op0, &op1, EXPAND_NORMAL);
9329 binop2:
9330 this_optab = optab_for_tree_code (code, type, optab_default);
9331 binop3:
9332 if (modifier == EXPAND_STACK_PARM)
9333 target = 0;
9334 temp = expand_binop (mode, this_optab, op0, op1, target,
9335 unsignedp, OPTAB_LIB_WIDEN);
9336 gcc_assert (temp);
9337 /* Bitwise operations do not need bitfield reduction as we expect their
9338 operands to be properly truncated. */
9339 if (code == BIT_XOR_EXPR
9340 || code == BIT_AND_EXPR
9341 || code == BIT_IOR_EXPR)
9342 return temp;
9343 return REDUCE_BIT_FIELD (temp);
9344 }
9345 #undef REDUCE_BIT_FIELD
9346
9347
9348 /* Return TRUE if expression STMT is suitable for replacement.
9349 Never consider memory loads as replaceable, because those don't ever lead
9350 into constant expressions. */
9351
9352 static bool
9353 stmt_is_replaceable_p (gimple stmt)
9354 {
9355 if (ssa_is_replaceable_p (stmt))
9356 {
9357 /* Don't move around loads. */
9358 if (!gimple_assign_single_p (stmt)
9359 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9360 return true;
9361 }
9362 return false;
9363 }
9364
9365 rtx
9366 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9367 enum expand_modifier modifier, rtx *alt_rtl,
9368 bool inner_reference_p)
9369 {
9370 rtx op0, op1, temp, decl_rtl;
9371 tree type;
9372 int unsignedp;
9373 machine_mode mode;
9374 enum tree_code code = TREE_CODE (exp);
9375 rtx subtarget, original_target;
9376 int ignore;
9377 tree context;
9378 bool reduce_bit_field;
9379 location_t loc = EXPR_LOCATION (exp);
9380 struct separate_ops ops;
9381 tree treeop0, treeop1, treeop2;
9382 tree ssa_name = NULL_TREE;
9383 gimple g;
9384
9385 type = TREE_TYPE (exp);
9386 mode = TYPE_MODE (type);
9387 unsignedp = TYPE_UNSIGNED (type);
9388
9389 treeop0 = treeop1 = treeop2 = NULL_TREE;
9390 if (!VL_EXP_CLASS_P (exp))
9391 switch (TREE_CODE_LENGTH (code))
9392 {
9393 default:
9394 case 3: treeop2 = TREE_OPERAND (exp, 2);
9395 case 2: treeop1 = TREE_OPERAND (exp, 1);
9396 case 1: treeop0 = TREE_OPERAND (exp, 0);
9397 case 0: break;
9398 }
9399 ops.code = code;
9400 ops.type = type;
9401 ops.op0 = treeop0;
9402 ops.op1 = treeop1;
9403 ops.op2 = treeop2;
9404 ops.location = loc;
9405
9406 ignore = (target == const0_rtx
9407 || ((CONVERT_EXPR_CODE_P (code)
9408 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9409 && TREE_CODE (type) == VOID_TYPE));
9410
9411 /* An operation in what may be a bit-field type needs the
9412 result to be reduced to the precision of the bit-field type,
9413 which is narrower than that of the type's mode. */
9414 reduce_bit_field = (!ignore
9415 && INTEGRAL_TYPE_P (type)
9416 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9417
9418 /* If we are going to ignore this result, we need only do something
9419 if there is a side-effect somewhere in the expression. If there
9420 is, short-circuit the most common cases here. Note that we must
9421 not call expand_expr with anything but const0_rtx in case this
9422 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9423
9424 if (ignore)
9425 {
9426 if (! TREE_SIDE_EFFECTS (exp))
9427 return const0_rtx;
9428
9429 /* Ensure we reference a volatile object even if value is ignored, but
9430 don't do this if all we are doing is taking its address. */
9431 if (TREE_THIS_VOLATILE (exp)
9432 && TREE_CODE (exp) != FUNCTION_DECL
9433 && mode != VOIDmode && mode != BLKmode
9434 && modifier != EXPAND_CONST_ADDRESS)
9435 {
9436 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9437 if (MEM_P (temp))
9438 copy_to_reg (temp);
9439 return const0_rtx;
9440 }
9441
9442 if (TREE_CODE_CLASS (code) == tcc_unary
9443 || code == BIT_FIELD_REF
9444 || code == COMPONENT_REF
9445 || code == INDIRECT_REF)
9446 return expand_expr (treeop0, const0_rtx, VOIDmode,
9447 modifier);
9448
9449 else if (TREE_CODE_CLASS (code) == tcc_binary
9450 || TREE_CODE_CLASS (code) == tcc_comparison
9451 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9452 {
9453 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9454 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9455 return const0_rtx;
9456 }
9457
9458 target = 0;
9459 }
9460
9461 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9462 target = 0;
9463
9464 /* Use subtarget as the target for operand 0 of a binary operation. */
9465 subtarget = get_subtarget (target);
9466 original_target = target;
9467
9468 switch (code)
9469 {
9470 case LABEL_DECL:
9471 {
9472 tree function = decl_function_context (exp);
9473
9474 temp = label_rtx (exp);
9475 temp = gen_rtx_LABEL_REF (Pmode, temp);
9476
9477 if (function != current_function_decl
9478 && function != 0)
9479 LABEL_REF_NONLOCAL_P (temp) = 1;
9480
9481 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9482 return temp;
9483 }
9484
9485 case SSA_NAME:
9486 /* ??? ivopts calls the expander without any preparation from
9487 out-of-ssa. So fake instructions as if this were an access to the
9488 base variable. This unnecessarily allocates a pseudo; see whether we
9489 can reuse it if partition base vars already have one set. */
9490 if (!currently_expanding_to_rtl)
9491 {
9492 tree var = SSA_NAME_VAR (exp);
9493 if (var && DECL_RTL_SET_P (var))
9494 return DECL_RTL (var);
9495 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9496 LAST_VIRTUAL_REGISTER + 1);
9497 }
9498
9499 g = get_gimple_for_ssa_name (exp);
9500 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9501 if (g == NULL
9502 && modifier == EXPAND_INITIALIZER
9503 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9504 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9505 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9506 g = SSA_NAME_DEF_STMT (exp);
9507 if (g)
9508 {
9509 rtx r;
9510 ops.code = gimple_assign_rhs_code (g);
9511 switch (get_gimple_rhs_class (ops.code))
9512 {
9513 case GIMPLE_TERNARY_RHS:
9514 ops.op2 = gimple_assign_rhs3 (g);
9515 /* Fallthru */
9516 case GIMPLE_BINARY_RHS:
9517 ops.op1 = gimple_assign_rhs2 (g);
9518
9519 /* Try to expand a conditional compare. */
9520 if (targetm.gen_ccmp_first)
9521 {
9522 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9523 r = expand_ccmp_expr (g);
9524 if (r)
9525 break;
9526 }
9527 /* Fallthru */
9528 case GIMPLE_UNARY_RHS:
9529 ops.op0 = gimple_assign_rhs1 (g);
9530 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9531 ops.location = gimple_location (g);
9532 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9533 break;
9534 case GIMPLE_SINGLE_RHS:
9535 {
9536 location_t saved_loc = curr_insn_location ();
9537 set_curr_insn_location (gimple_location (g));
9538 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9539 tmode, modifier, NULL, inner_reference_p);
9540 set_curr_insn_location (saved_loc);
9541 break;
9542 }
9543 default:
9544 gcc_unreachable ();
9545 }
9546 if (REG_P (r) && !REG_EXPR (r))
9547 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9548 return r;
9549 }
9550
9551 ssa_name = exp;
9552 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9553 exp = SSA_NAME_VAR (ssa_name);
9554 goto expand_decl_rtl;
9555
9556 case PARM_DECL:
9557 case VAR_DECL:
9558 /* If a static var's type was incomplete when the decl was written,
9559 but the type is complete now, lay out the decl now. */
9560 if (DECL_SIZE (exp) == 0
9561 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9562 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9563 layout_decl (exp, 0);
9564
9565 /* ... fall through ... */
9566
9567 case FUNCTION_DECL:
9568 case RESULT_DECL:
9569 decl_rtl = DECL_RTL (exp);
9570 expand_decl_rtl:
9571 gcc_assert (decl_rtl);
9572 decl_rtl = copy_rtx (decl_rtl);
9573 /* Record writes to register variables. */
9574 if (modifier == EXPAND_WRITE
9575 && REG_P (decl_rtl)
9576 && HARD_REGISTER_P (decl_rtl))
9577 add_to_hard_reg_set (&crtl->asm_clobbers,
9578 GET_MODE (decl_rtl), REGNO (decl_rtl));
9579
9580 /* Ensure variable marked as used even if it doesn't go through
9581 a parser. If it hasn't been used yet, write out an external
9582 definition. */
9583 TREE_USED (exp) = 1;
9584
9585 /* Show we haven't gotten RTL for this yet. */
9586 temp = 0;
9587
9588 /* Variables inherited from containing functions should have
9589 been lowered by this point. */
9590 context = decl_function_context (exp);
9591 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9592 || context == current_function_decl
9593 || TREE_STATIC (exp)
9594 || DECL_EXTERNAL (exp)
9595 /* ??? C++ creates functions that are not TREE_STATIC. */
9596 || TREE_CODE (exp) == FUNCTION_DECL);
9597
9598 /* This is the case of an array whose size is to be determined
9599 from its initializer, while the initializer is still being parsed.
9600 ??? We aren't parsing while expanding anymore. */
9601
9602 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9603 temp = validize_mem (decl_rtl);
9604
9605 /* If DECL_RTL is memory, we are in the normal case and the
9606 address is not valid, get the address into a register. */
9607
9608 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9609 {
9610 if (alt_rtl)
9611 *alt_rtl = decl_rtl;
9612 decl_rtl = use_anchored_address (decl_rtl);
9613 if (modifier != EXPAND_CONST_ADDRESS
9614 && modifier != EXPAND_SUM
9615 && !memory_address_addr_space_p (DECL_MODE (exp),
9616 XEXP (decl_rtl, 0),
9617 MEM_ADDR_SPACE (decl_rtl)))
9618 temp = replace_equiv_address (decl_rtl,
9619 copy_rtx (XEXP (decl_rtl, 0)));
9620 }
9621
9622 /* If we got something, return it. But first, set the alignment
9623 if the address is a register. */
9624 if (temp != 0)
9625 {
9626 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9627 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9628
9629 return temp;
9630 }
9631
9632 /* If the mode of DECL_RTL does not match that of the decl,
9633 there are two cases: we are dealing with a BLKmode value
9634 that is returned in a register, or we are dealing with
9635 a promoted value. In the latter case, return a SUBREG
9636 of the wanted mode, but mark it so that we know that it
9637 was already extended. */
9638 if (REG_P (decl_rtl)
9639 && DECL_MODE (exp) != BLKmode
9640 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9641 {
9642 machine_mode pmode;
9643
9644 /* Get the signedness to be used for this variable. Ensure we get
9645 the same mode we got when the variable was declared. */
9646 if (code == SSA_NAME
9647 && (g = SSA_NAME_DEF_STMT (ssa_name))
9648 && gimple_code (g) == GIMPLE_CALL
9649 && !gimple_call_internal_p (g))
9650 pmode = promote_function_mode (type, mode, &unsignedp,
9651 gimple_call_fntype (g),
9652 2);
9653 else
9654 pmode = promote_decl_mode (exp, &unsignedp);
9655 gcc_assert (GET_MODE (decl_rtl) == pmode);
9656
9657 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9658 SUBREG_PROMOTED_VAR_P (temp) = 1;
9659 SUBREG_PROMOTED_SET (temp, unsignedp);
9660 return temp;
9661 }
9662
9663 return decl_rtl;
9664
9665 case INTEGER_CST:
9666 /* Given that TYPE_PRECISION (type) is not always equal to
9667 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9668 the former to the latter according to the signedness of the
9669 type. */
9670 temp = immed_wide_int_const (wide_int::from
9671 (exp,
9672 GET_MODE_PRECISION (TYPE_MODE (type)),
9673 TYPE_SIGN (type)),
9674 TYPE_MODE (type));
9675 return temp;
9676
9677 case VECTOR_CST:
9678 {
9679 tree tmp = NULL_TREE;
9680 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9681 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9682 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9683 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9684 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9685 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9686 return const_vector_from_tree (exp);
9687 if (GET_MODE_CLASS (mode) == MODE_INT)
9688 {
9689 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9690 if (type_for_mode)
9691 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9692 }
9693 if (!tmp)
9694 {
9695 vec<constructor_elt, va_gc> *v;
9696 unsigned i;
9697 vec_alloc (v, VECTOR_CST_NELTS (exp));
9698 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9699 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9700 tmp = build_constructor (type, v);
9701 }
9702 return expand_expr (tmp, ignore ? const0_rtx : target,
9703 tmode, modifier);
9704 }
9705
9706 case CONST_DECL:
9707 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9708
9709 case REAL_CST:
9710 /* If optimized, generate immediate CONST_DOUBLE
9711 which will be turned into memory by reload if necessary.
9712
9713 We used to force a register so that loop.c could see it. But
9714 this does not allow gen_* patterns to perform optimizations with
9715 the constants. It also produces two insns in cases like "x = 1.0;".
9716 On most machines, floating-point constants are not permitted in
9717 many insns, so we'd end up copying it to a register in any case.
9718
9719 Now, we do the copying in expand_binop, if appropriate. */
9720 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9721 TYPE_MODE (TREE_TYPE (exp)));
9722
9723 case FIXED_CST:
9724 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9725 TYPE_MODE (TREE_TYPE (exp)));
9726
9727 case COMPLEX_CST:
9728 /* Handle evaluating a complex constant in a CONCAT target. */
9729 if (original_target && GET_CODE (original_target) == CONCAT)
9730 {
9731 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9732 rtx rtarg, itarg;
9733
9734 rtarg = XEXP (original_target, 0);
9735 itarg = XEXP (original_target, 1);
9736
9737 /* Move the real and imaginary parts separately. */
9738 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9739 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9740
9741 if (op0 != rtarg)
9742 emit_move_insn (rtarg, op0);
9743 if (op1 != itarg)
9744 emit_move_insn (itarg, op1);
9745
9746 return original_target;
9747 }
9748
9749 /* ... fall through ... */
9750
9751 case STRING_CST:
9752 temp = expand_expr_constant (exp, 1, modifier);
9753
9754 /* temp contains a constant address.
9755 On RISC machines where a constant address isn't valid,
9756 make some insns to get that address into a register. */
9757 if (modifier != EXPAND_CONST_ADDRESS
9758 && modifier != EXPAND_INITIALIZER
9759 && modifier != EXPAND_SUM
9760 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9761 MEM_ADDR_SPACE (temp)))
9762 return replace_equiv_address (temp,
9763 copy_rtx (XEXP (temp, 0)));
9764 return temp;
9765
9766 case SAVE_EXPR:
9767 {
9768 tree val = treeop0;
9769 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9770 inner_reference_p);
9771
9772 if (!SAVE_EXPR_RESOLVED_P (exp))
9773 {
9774 /* We can indeed still hit this case, typically via builtin
9775 expanders calling save_expr immediately before expanding
9776 something. Assume this means that we only have to deal
9777 with non-BLKmode values. */
9778 gcc_assert (GET_MODE (ret) != BLKmode);
9779
9780 val = build_decl (curr_insn_location (),
9781 VAR_DECL, NULL, TREE_TYPE (exp));
9782 DECL_ARTIFICIAL (val) = 1;
9783 DECL_IGNORED_P (val) = 1;
9784 treeop0 = val;
9785 TREE_OPERAND (exp, 0) = treeop0;
9786 SAVE_EXPR_RESOLVED_P (exp) = 1;
9787
9788 if (!CONSTANT_P (ret))
9789 ret = copy_to_reg (ret);
9790 SET_DECL_RTL (val, ret);
9791 }
9792
9793 return ret;
9794 }
9795
9796
9797 case CONSTRUCTOR:
9798 /* If we don't need the result, just ensure we evaluate any
9799 subexpressions. */
9800 if (ignore)
9801 {
9802 unsigned HOST_WIDE_INT idx;
9803 tree value;
9804
9805 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9806 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9807
9808 return const0_rtx;
9809 }
9810
9811 return expand_constructor (exp, target, modifier, false);
9812
9813 case TARGET_MEM_REF:
9814 {
9815 addr_space_t as
9816 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9817 enum insn_code icode;
9818 unsigned int align;
9819
9820 op0 = addr_for_mem_ref (exp, as, true);
9821 op0 = memory_address_addr_space (mode, op0, as);
9822 temp = gen_rtx_MEM (mode, op0);
9823 set_mem_attributes (temp, exp, 0);
9824 set_mem_addr_space (temp, as);
9825 align = get_object_alignment (exp);
9826 if (modifier != EXPAND_WRITE
9827 && modifier != EXPAND_MEMORY
9828 && mode != BLKmode
9829 && align < GET_MODE_ALIGNMENT (mode)
9830 /* If the target does not have special handling for unaligned
9831 loads of this mode, then it can use regular moves for them. */
9832 && ((icode = optab_handler (movmisalign_optab, mode))
9833 != CODE_FOR_nothing))
9834 {
9835 struct expand_operand ops[2];
9836
9837 /* We've already validated the memory, and we're creating a
9838 new pseudo destination. The predicates really can't fail,
9839 nor can the generator. */
9840 create_output_operand (&ops[0], NULL_RTX, mode);
9841 create_fixed_operand (&ops[1], temp);
9842 expand_insn (icode, 2, ops);
9843 temp = ops[0].value;
9844 }
9845 return temp;
9846 }
9847
9848 case MEM_REF:
9849 {
9850 addr_space_t as
9851 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9852 machine_mode address_mode;
9853 tree base = TREE_OPERAND (exp, 0);
9854 gimple def_stmt;
9855 enum insn_code icode;
9856 unsigned align;
9857 /* Handle expansion of non-aliased memory with non-BLKmode. That
9858 might end up in a register. */
9859 if (mem_ref_refers_to_non_mem_p (exp))
9860 {
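	    /* The base object is not (or need not be) in memory, so rewrite
	       the access as a VIEW_CONVERT_EXPR or BIT_FIELD_REF of the base
	       (or spill just this access to a stack temporary for BLKmode)
	       instead of taking its address.  */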
9861 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9862 base = TREE_OPERAND (base, 0);
9863 if (offset == 0
9864 && tree_fits_uhwi_p (TYPE_SIZE (type))
9865 && (GET_MODE_BITSIZE (DECL_MODE (base))
9866 == tree_to_uhwi (TYPE_SIZE (type))))
9867 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9868 target, tmode, modifier);
9869 if (TYPE_MODE (type) == BLKmode)
9870 {
9871 temp = assign_stack_temp (DECL_MODE (base),
9872 GET_MODE_SIZE (DECL_MODE (base)));
9873 store_expr (base, temp, 0, false);
9874 temp = adjust_address (temp, BLKmode, offset);
9875 set_mem_size (temp, int_size_in_bytes (type));
9876 return temp;
9877 }
9878 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9879 bitsize_int (offset * BITS_PER_UNIT));
9880 return expand_expr (exp, target, tmode, modifier);
9881 }
9882 address_mode = targetm.addr_space.address_mode (as);
9883 base = TREE_OPERAND (exp, 0);
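	/* If the base address is an SSA name defined by a BIT_AND_EXPR,
	   re-expose the masking here so it is expanded as part of the
	   address computation below.  */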
9884 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9885 {
9886 tree mask = gimple_assign_rhs2 (def_stmt);
9887 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9888 gimple_assign_rhs1 (def_stmt), mask);
9889 TREE_OPERAND (exp, 0) = base;
9890 }
9891 align = get_object_alignment (exp);
9892 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9893 op0 = memory_address_addr_space (mode, op0, as);
9894 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9895 {
9896 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9897 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9898 op0 = memory_address_addr_space (mode, op0, as);
9899 }
9900 temp = gen_rtx_MEM (mode, op0);
9901 set_mem_attributes (temp, exp, 0);
9902 set_mem_addr_space (temp, as);
9903 if (TREE_THIS_VOLATILE (exp))
9904 MEM_VOLATILE_P (temp) = 1;
9905 if (modifier != EXPAND_WRITE
9906 && modifier != EXPAND_MEMORY
9907 && !inner_reference_p
9908 && mode != BLKmode
9909 && align < GET_MODE_ALIGNMENT (mode))
9910 {
9911 if ((icode = optab_handler (movmisalign_optab, mode))
9912 != CODE_FOR_nothing)
9913 {
9914 struct expand_operand ops[2];
9915
9916 /* We've already validated the memory, and we're creating a
9917 new pseudo destination. The predicates really can't fail,
9918 nor can the generator. */
9919 create_output_operand (&ops[0], NULL_RTX, mode);
9920 create_fixed_operand (&ops[1], temp);
9921 expand_insn (icode, 2, ops);
9922 temp = ops[0].value;
9923 }
9924 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9925 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9926 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9927 (modifier == EXPAND_STACK_PARM
9928 ? NULL_RTX : target),
9929 mode, mode);
9930 }
9931 return temp;
9932 }
9933
9934 case ARRAY_REF:
9935
9936 {
9937 tree array = treeop0;
9938 tree index = treeop1;
9939 tree init;
9940
9941 /* Fold an expression like: "foo"[2].
9942 This is not done in fold so it won't happen inside &.
9943 Don't fold if this is for wide characters since it's too
9944 difficult to do correctly and this is a very rare case. */
9945
9946 if (modifier != EXPAND_CONST_ADDRESS
9947 && modifier != EXPAND_INITIALIZER
9948 && modifier != EXPAND_MEMORY)
9949 {
9950 tree t = fold_read_from_constant_string (exp);
9951
9952 if (t)
9953 return expand_expr (t, target, tmode, modifier);
9954 }
9955
9956 /* If this is a constant index into a constant array,
9957 just get the value from the array. Handle both the cases when
9958 we have an explicit constructor and when our operand is a variable
9959 that was declared const. */
9960
9961 if (modifier != EXPAND_CONST_ADDRESS
9962 && modifier != EXPAND_INITIALIZER
9963 && modifier != EXPAND_MEMORY
9964 && TREE_CODE (array) == CONSTRUCTOR
9965 && ! TREE_SIDE_EFFECTS (array)
9966 && TREE_CODE (index) == INTEGER_CST)
9967 {
9968 unsigned HOST_WIDE_INT ix;
9969 tree field, value;
9970
9971 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9972 field, value)
9973 if (tree_int_cst_equal (field, index))
9974 {
9975 if (!TREE_SIDE_EFFECTS (value))
9976 return expand_expr (fold (value), target, tmode, modifier);
9977 break;
9978 }
9979 }
9980
9981 else if (optimize >= 1
9982 && modifier != EXPAND_CONST_ADDRESS
9983 && modifier != EXPAND_INITIALIZER
9984 && modifier != EXPAND_MEMORY
9985 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9986 && TREE_CODE (index) == INTEGER_CST
9987 && (TREE_CODE (array) == VAR_DECL
9988 || TREE_CODE (array) == CONST_DECL)
9989 && (init = ctor_for_folding (array)) != error_mark_node)
9990 {
9991 if (init == NULL_TREE)
9992 {
9993 tree value = build_zero_cst (type);
9994 if (TREE_CODE (value) == CONSTRUCTOR)
9995 {
9996 /* If VALUE is a CONSTRUCTOR, this optimization is only
9997 useful if this doesn't store the CONSTRUCTOR into
9998 memory. If it does, it is more efficient to just
9999 load the data from the array directly. */
10000 rtx ret = expand_constructor (value, target,
10001 modifier, true);
10002 if (ret == NULL_RTX)
10003 value = NULL_TREE;
10004 }
10005
10006 if (value)
10007 return expand_expr (value, target, tmode, modifier);
10008 }
10009 else if (TREE_CODE (init) == CONSTRUCTOR)
10010 {
10011 unsigned HOST_WIDE_INT ix;
10012 tree field, value;
10013
10014 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10015 field, value)
10016 if (tree_int_cst_equal (field, index))
10017 {
10018 if (TREE_SIDE_EFFECTS (value))
10019 break;
10020
10021 if (TREE_CODE (value) == CONSTRUCTOR)
10022 {
10023 /* If VALUE is a CONSTRUCTOR, this
10024 optimization is only useful if
10025 this doesn't store the CONSTRUCTOR
10026 into memory. If it does, it is more
10027 efficient to just load the data from
10028 the array directly. */
10029 rtx ret = expand_constructor (value, target,
10030 modifier, true);
10031 if (ret == NULL_RTX)
10032 break;
10033 }
10034
10035 return
10036 expand_expr (fold (value), target, tmode, modifier);
10037 }
10038 }
10039 else if (TREE_CODE (init) == STRING_CST)
10040 {
10041 tree low_bound = array_ref_low_bound (exp);
10042 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10043
10044 /* Optimize the special case of a zero lower bound.
10045
10046 We convert the lower bound to sizetype to avoid problems
10047 with constant folding. E.g. suppose the lower bound is
10048 1 and its mode is QI. Without the conversion
10049 (ARRAY + (INDEX - (unsigned char)1))
10050 becomes
10051 (ARRAY + (-(unsigned char)1) + INDEX)
10052 which becomes
10053 (ARRAY + 255 + INDEX). Oops! */
10054 if (!integer_zerop (low_bound))
10055 index1 = size_diffop_loc (loc, index1,
10056 fold_convert_loc (loc, sizetype,
10057 low_bound));
10058
10059 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10060 {
10061 tree type = TREE_TYPE (TREE_TYPE (init));
10062 machine_mode mode = TYPE_MODE (type);
10063
10064 if (GET_MODE_CLASS (mode) == MODE_INT
10065 && GET_MODE_SIZE (mode) == 1)
10066 return gen_int_mode (TREE_STRING_POINTER (init)
10067 [TREE_INT_CST_LOW (index1)],
10068 mode);
10069 }
10070 }
10071 }
10072 }
10073 goto normal_inner_ref;
10074
10075 case COMPONENT_REF:
10076 /* If the operand is a CONSTRUCTOR, we can just extract the
10077 appropriate field if it is present. */
10078 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10079 {
10080 unsigned HOST_WIDE_INT idx;
10081 tree field, value;
10082
10083 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10084 idx, field, value)
10085 if (field == treeop1
10086 /* We can normally use the value of the field in the
10087 CONSTRUCTOR. However, if this is a bitfield in
10088 an integral mode that we can fit in a HOST_WIDE_INT,
10089 we must mask only the number of bits in the bitfield,
10090 since this is done implicitly by the constructor. If
10091 the bitfield does not meet either of those conditions,
10092 we can't do this optimization. */
10093 && (! DECL_BIT_FIELD (field)
10094 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10095 && (GET_MODE_PRECISION (DECL_MODE (field))
10096 <= HOST_BITS_PER_WIDE_INT))))
10097 {
10098 if (DECL_BIT_FIELD (field)
10099 && modifier == EXPAND_STACK_PARM)
10100 target = 0;
10101 op0 = expand_expr (value, target, tmode, modifier);
10102 if (DECL_BIT_FIELD (field))
10103 {
10104 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10105 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10106
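	      /* The CONSTRUCTOR holds the full-width value; reduce it to the
		 bit-field's width, masking for unsigned fields and using a
		 left/right shift pair to sign-extend signed ones.  */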
10107 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10108 {
10109 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10110 imode);
10111 op0 = expand_and (imode, op0, op1, target);
10112 }
10113 else
10114 {
10115 int count = GET_MODE_PRECISION (imode) - bitsize;
10116
10117 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10118 target, 0);
10119 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10120 target, 0);
10121 }
10122 }
10123
10124 return op0;
10125 }
10126 }
10127 goto normal_inner_ref;
10128
10129 case BIT_FIELD_REF:
10130 case ARRAY_RANGE_REF:
10131 normal_inner_ref:
10132 {
10133 machine_mode mode1, mode2;
10134 HOST_WIDE_INT bitsize, bitpos;
10135 tree offset;
10136 int volatilep = 0, must_force_mem;
10137 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10138 &mode1, &unsignedp, &volatilep, true);
10139 rtx orig_op0, memloc;
10140 bool mem_attrs_from_type = false;
10141
10142 /* If we got back the original object, something is wrong. Perhaps
10143 we are evaluating an expression too early. In any event, don't
10144 infinitely recurse. */
10145 gcc_assert (tem != exp);
10146
10147 /* If TEM's type is a union of variable size, pass TARGET to the inner
10148 computation, since it will need a temporary and TARGET is known
10149 to be usable for that. This occurs in unchecked conversion in Ada. */
10150 orig_op0 = op0
10151 = expand_expr_real (tem,
10152 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10153 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10154 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10155 != INTEGER_CST)
10156 && modifier != EXPAND_STACK_PARM
10157 ? target : NULL_RTX),
10158 VOIDmode,
10159 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10160 NULL, true);
10161
10162 /* If the field has a mode, we want to access it in the
10163 field's mode, not the computed mode.
10164 If a MEM has VOIDmode (external with incomplete type),
10165 use BLKmode for it instead. */
10166 if (MEM_P (op0))
10167 {
10168 if (mode1 != VOIDmode)
10169 op0 = adjust_address (op0, mode1, 0);
10170 else if (GET_MODE (op0) == VOIDmode)
10171 op0 = adjust_address (op0, BLKmode, 0);
10172 }
10173
10174 mode2
10175 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10176
10177 /* If we have either an offset, a BLKmode result, or a reference
10178 outside the underlying object, we must force it to memory.
10179 Such a case can occur in Ada if we have unchecked conversion
10180 of an expression from a scalar type to an aggregate type or
10181 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10182 passed a partially uninitialized object or a view-conversion
10183 to a larger size. */
10184 must_force_mem = (offset
10185 || mode1 == BLKmode
10186 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10187
10188 /* Handle CONCAT first. */
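	/* A CONCAT holds the real and imaginary parts as separate rtxes; if
	   the reference covers the whole value or exactly one half, use that
	   rtx directly, otherwise force the value into memory.  */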
10189 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10190 {
10191 if (bitpos == 0
10192 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10193 return op0;
10194 if (bitpos == 0
10195 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10196 && bitsize)
10197 {
10198 op0 = XEXP (op0, 0);
10199 mode2 = GET_MODE (op0);
10200 }
10201 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10202 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10203 && bitpos
10204 && bitsize)
10205 {
10206 op0 = XEXP (op0, 1);
10207 bitpos = 0;
10208 mode2 = GET_MODE (op0);
10209 }
10210 else
10211 /* Otherwise force into memory. */
10212 must_force_mem = 1;
10213 }
10214
10215 /* If this is a constant, put it in a register if it is a legitimate
10216 constant and we don't need a memory reference. */
10217 if (CONSTANT_P (op0)
10218 && mode2 != BLKmode
10219 && targetm.legitimate_constant_p (mode2, op0)
10220 && !must_force_mem)
10221 op0 = force_reg (mode2, op0);
10222
10223 /* Otherwise, if this is a constant, try to force it to the constant
10224 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10225 is a legitimate constant. */
10226 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10227 op0 = validize_mem (memloc);
10228
10229 /* Otherwise, if this is a constant or the object is not in memory
10230 and need be, put it there. */
10231 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10232 {
10233 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10234 emit_move_insn (memloc, op0);
10235 op0 = memloc;
10236 mem_attrs_from_type = true;
10237 }
10238
10239 if (offset)
10240 {
10241 machine_mode address_mode;
10242 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10243 EXPAND_SUM);
10244
10245 gcc_assert (MEM_P (op0));
10246
10247 address_mode = get_address_mode (op0);
10248 if (GET_MODE (offset_rtx) != address_mode)
10249 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10250
10251 /* See the comment in expand_assignment for the rationale. */
10252 if (mode1 != VOIDmode
10253 && bitpos != 0
10254 && bitsize > 0
10255 && (bitpos % bitsize) == 0
10256 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10257 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10258 {
10259 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10260 bitpos = 0;
10261 }
10262
10263 op0 = offset_address (op0, offset_rtx,
10264 highest_pow2_factor (offset));
10265 }
10266
10267 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10268 record its alignment as BIGGEST_ALIGNMENT. */
10269 if (MEM_P (op0) && bitpos == 0 && offset != 0
10270 && is_aligning_offset (offset, tem))
10271 set_mem_align (op0, BIGGEST_ALIGNMENT);
10272
10273 /* Don't forget about volatility even if this is a bitfield. */
10274 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10275 {
10276 if (op0 == orig_op0)
10277 op0 = copy_rtx (op0);
10278
10279 MEM_VOLATILE_P (op0) = 1;
10280 }
10281
10282 /* In cases where an aligned union has an unaligned object
10283 as a field, we might be extracting a BLKmode value from
10284 an integer-mode (e.g., SImode) object. Handle this case
10285 by doing the extract into an object as wide as the field
10286 (which we know to be the width of a basic mode), then
10287 storing into memory, and changing the mode to BLKmode. */
10288 if (mode1 == VOIDmode
10289 || REG_P (op0) || GET_CODE (op0) == SUBREG
10290 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10291 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10292 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10293 && modifier != EXPAND_CONST_ADDRESS
10294 && modifier != EXPAND_INITIALIZER
10295 && modifier != EXPAND_MEMORY)
10296 /* If the bitfield is volatile and the bitsize
10297 is narrower than the access size of the bitfield,
10298 we need to extract bitfields from the access. */
10299 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10300 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10301 && mode1 != BLKmode
10302 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10303 /* If the field isn't aligned enough to fetch as a memref,
10304 fetch it as a bit field. */
10305 || (mode1 != BLKmode
10306 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10307 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10308 || (MEM_P (op0)
10309 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10310 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10311 && modifier != EXPAND_MEMORY
10312 && ((modifier == EXPAND_CONST_ADDRESS
10313 || modifier == EXPAND_INITIALIZER)
10314 ? STRICT_ALIGNMENT
10315 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10316 || (bitpos % BITS_PER_UNIT != 0)))
10317 /* If the type and the field are a constant size and the
10318 size of the type isn't the same size as the bitfield,
10319 we must use bitfield operations. */
10320 || (bitsize >= 0
10321 && TYPE_SIZE (TREE_TYPE (exp))
10322 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10323 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10324 bitsize)))
10325 {
10326 machine_mode ext_mode = mode;
10327
10328 if (ext_mode == BLKmode
10329 && ! (target != 0 && MEM_P (op0)
10330 && MEM_P (target)
10331 && bitpos % BITS_PER_UNIT == 0))
10332 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10333
10334 if (ext_mode == BLKmode)
10335 {
10336 if (target == 0)
10337 target = assign_temp (type, 1, 1);
10338
10339 /* ??? Unlike the similar test a few lines below, this one is
10340 very likely obsolete. */
10341 if (bitsize == 0)
10342 return target;
10343
10344 /* In this case, BITPOS must start at a byte boundary and
10345 TARGET, if specified, must be a MEM. */
10346 gcc_assert (MEM_P (op0)
10347 && (!target || MEM_P (target))
10348 && !(bitpos % BITS_PER_UNIT));
10349
10350 emit_block_move (target,
10351 adjust_address (op0, VOIDmode,
10352 bitpos / BITS_PER_UNIT),
10353 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10354 / BITS_PER_UNIT),
10355 (modifier == EXPAND_STACK_PARM
10356 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10357
10358 return target;
10359 }
10360
10361 /* If we have nothing to extract, the result will be 0 for targets
10362 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10363 return 0 for the sake of consistency, as reading a zero-sized
10364 bitfield is valid in Ada and the value is fully specified. */
10365 if (bitsize == 0)
10366 return const0_rtx;
10367
10368 op0 = validize_mem (op0);
10369
10370 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10371 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10372
10373 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10374 (modifier == EXPAND_STACK_PARM
10375 ? NULL_RTX : target),
10376 ext_mode, ext_mode);
10377
10378 /* If the result is a record type and BITSIZE is narrower than
10379 the mode of OP0, an integral mode, and this is a big endian
10380 machine, we must put the field into the high-order bits. */
10381 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10382 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10383 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10384 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10385 GET_MODE_BITSIZE (GET_MODE (op0))
10386 - bitsize, op0, 1);
10387
10388 /* If the result type is BLKmode, store the data into a temporary
10389 of the appropriate type, but with the mode corresponding to the
10390 mode for the data we have (op0's mode). */
10391 if (mode == BLKmode)
10392 {
10393 rtx new_rtx
10394 = assign_stack_temp_for_type (ext_mode,
10395 GET_MODE_BITSIZE (ext_mode),
10396 type);
10397 emit_move_insn (new_rtx, op0);
10398 op0 = copy_rtx (new_rtx);
10399 PUT_MODE (op0, BLKmode);
10400 }
10401
10402 return op0;
10403 }
10404
10405 /* If the result is BLKmode, use that to access the object
10406 now as well. */
10407 if (mode == BLKmode)
10408 mode1 = BLKmode;
10409
10410 /* Get a reference to just this component. */
10411 if (modifier == EXPAND_CONST_ADDRESS
10412 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10413 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10414 else
10415 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10416
10417 if (op0 == orig_op0)
10418 op0 = copy_rtx (op0);
10419
10420 /* If op0 is a temporary because of forcing to memory, pass only the
10421 type to set_mem_attributes so that the original expression is never
10422 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10423 if (mem_attrs_from_type)
10424 set_mem_attributes (op0, type, 0);
10425 else
10426 set_mem_attributes (op0, exp, 0);
10427
10428 if (REG_P (XEXP (op0, 0)))
10429 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10430
10431 MEM_VOLATILE_P (op0) |= volatilep;
10432 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10433 || modifier == EXPAND_CONST_ADDRESS
10434 || modifier == EXPAND_INITIALIZER)
10435 return op0;
10436
10437 if (target == 0)
10438 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10439
10440 convert_move (target, op0, unsignedp);
10441 return target;
10442 }
10443
10444 case OBJ_TYPE_REF:
10445 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10446
10447 case CALL_EXPR:
10448 /* All valid uses of __builtin_va_arg_pack () are removed during
10449 inlining. */
10450 if (CALL_EXPR_VA_ARG_PACK (exp))
10451 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10452 {
10453 tree fndecl = get_callee_fndecl (exp), attr;
10454
10455 if (fndecl
10456 && (attr = lookup_attribute ("error",
10457 DECL_ATTRIBUTES (fndecl))) != NULL)
10458 error ("%Kcall to %qs declared with attribute error: %s",
10459 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10460 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10461 if (fndecl
10462 && (attr = lookup_attribute ("warning",
10463 DECL_ATTRIBUTES (fndecl))) != NULL)
10464 warning_at (tree_nonartificial_location (exp),
10465 0, "%Kcall to %qs declared with attribute warning: %s",
10466 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10467 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10468
10469 /* Check for a built-in function. */
10470 if (fndecl && DECL_BUILT_IN (fndecl))
10471 {
10472 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10473 if (CALL_WITH_BOUNDS_P (exp))
10474 return expand_builtin_with_bounds (exp, target, subtarget,
10475 tmode, ignore);
10476 else
10477 return expand_builtin (exp, target, subtarget, tmode, ignore);
10478 }
10479 }
10480 return expand_call (exp, target, ignore);
10481
10482 case VIEW_CONVERT_EXPR:
10483 op0 = NULL_RTX;
10484
10485 /* If we are converting to BLKmode, try to avoid an intermediate
10486 temporary by fetching an inner memory reference. */
10487 if (mode == BLKmode
10488 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10489 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10490 && handled_component_p (treeop0))
10491 {
10492 machine_mode mode1;
10493 HOST_WIDE_INT bitsize, bitpos;
10494 tree offset;
10495 int unsignedp;
10496 int volatilep = 0;
10497 tree tem
10498 = get_inner_reference (treeop0, &bitsize, &bitpos,
10499 &offset, &mode1, &unsignedp, &volatilep,
10500 true);
10501 rtx orig_op0;
10502
10503 /* ??? We should work harder and deal with non-zero offsets. */
10504 if (!offset
10505 && (bitpos % BITS_PER_UNIT) == 0
10506 && bitsize >= 0
10507 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10508 {
10509 /* See the normal_inner_ref case for the rationale. */
10510 orig_op0
10511 = expand_expr_real (tem,
10512 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10513 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10514 != INTEGER_CST)
10515 && modifier != EXPAND_STACK_PARM
10516 ? target : NULL_RTX),
10517 VOIDmode,
10518 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10519 NULL, true);
10520
10521 if (MEM_P (orig_op0))
10522 {
10523 op0 = orig_op0;
10524
10525 /* Get a reference to just this component. */
10526 if (modifier == EXPAND_CONST_ADDRESS
10527 || modifier == EXPAND_SUM
10528 || modifier == EXPAND_INITIALIZER)
10529 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10530 else
10531 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10532
10533 if (op0 == orig_op0)
10534 op0 = copy_rtx (op0);
10535
10536 set_mem_attributes (op0, treeop0, 0);
10537 if (REG_P (XEXP (op0, 0)))
10538 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10539
10540 MEM_VOLATILE_P (op0) |= volatilep;
10541 }
10542 }
10543 }
10544
10545 if (!op0)
10546 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10547 NULL, inner_reference_p);
10548
10549 /* If the input and output modes are both the same, we are done. */
10550 if (mode == GET_MODE (op0))
10551 ;
10552 /* If neither mode is BLKmode, and both modes have the same
10553 precision, then we can use gen_lowpart.  */
10554 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10555 && (GET_MODE_PRECISION (mode)
10556 == GET_MODE_PRECISION (GET_MODE (op0)))
10557 && !COMPLEX_MODE_P (GET_MODE (op0)))
10558 {
10559 if (GET_CODE (op0) == SUBREG)
10560 op0 = force_reg (GET_MODE (op0), op0);
10561 temp = gen_lowpart_common (mode, op0);
10562 if (temp)
10563 op0 = temp;
10564 else
10565 {
10566 if (!REG_P (op0) && !MEM_P (op0))
10567 op0 = force_reg (GET_MODE (op0), op0);
10568 op0 = gen_lowpart (mode, op0);
10569 }
10570 }
10571 /* If both types are integral, convert from one mode to the other. */
10572 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10573 op0 = convert_modes (mode, GET_MODE (op0), op0,
10574 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10575 /* If the output type is a bit-field type, do an extraction. */
10576 else if (reduce_bit_field)
10577 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10578 TYPE_UNSIGNED (type), NULL_RTX,
10579 mode, mode);
10580 /* As a last resort, spill op0 to memory, and reload it in a
10581 different mode. */
10582 else if (!MEM_P (op0))
10583 {
10584 /* If the operand is not a MEM, force it into memory. Since we
10585 are going to be changing the mode of the MEM, don't call
10586 force_const_mem for constants because we don't allow pool
10587 constants to change mode. */
10588 tree inner_type = TREE_TYPE (treeop0);
10589
10590 gcc_assert (!TREE_ADDRESSABLE (exp));
10591
10592 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10593 target
10594 = assign_stack_temp_for_type
10595 (TYPE_MODE (inner_type),
10596 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10597
10598 emit_move_insn (target, op0);
10599 op0 = target;
10600 }
10601
10602 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10603 output type is such that the operand is known to be aligned, indicate
10604 that it is. Otherwise, we need only be concerned about alignment for
10605 non-BLKmode results. */
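/* Three strategies follow: the type is marked TYPE_ALIGN_OK, the
   target provides a movmisalign pattern for unaligned loads of MODE,
   or, on strict-alignment targets, the value is first copied into a
   suitably aligned stack temporary.  */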
10606 if (MEM_P (op0))
10607 {
10608 enum insn_code icode;
10609
10610 if (TYPE_ALIGN_OK (type))
10611 {
10612 /* ??? Copying the MEM without substantially changing it might
10613 run afoul of the code handling volatile memory references in
10614 store_expr, which assumes that TARGET is returned unmodified
10615 if it has been used. */
10616 op0 = copy_rtx (op0);
10617 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10618 }
10619 else if (modifier != EXPAND_WRITE
10620 && modifier != EXPAND_MEMORY
10621 && !inner_reference_p
10622 && mode != BLKmode
10623 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10624 {
10625 /* If the target has special handling for unaligned loads of
10626 this mode, use it.  */
10627 if ((icode = optab_handler (movmisalign_optab, mode))
10628 != CODE_FOR_nothing)
10629 {
10630 rtx reg, insn;
10631
10632 op0 = adjust_address (op0, mode, 0);
10633 /* We've already validated the memory, and we're creating a
10634 new pseudo destination. The predicates really can't
10635 fail. */
10636 reg = gen_reg_rtx (mode);
10637
10638 /* Nor can the insn generator. */
10639 insn = GEN_FCN (icode) (reg, op0);
10640 emit_insn (insn);
10641 return reg;
10642 }
10643 else if (STRICT_ALIGNMENT)
10644 {
10645 tree inner_type = TREE_TYPE (treeop0);
10646 HOST_WIDE_INT temp_size
10647 = MAX (int_size_in_bytes (inner_type),
10648 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10649 rtx new_rtx
10650 = assign_stack_temp_for_type (mode, temp_size, type);
10651 rtx new_with_op0_mode
10652 = adjust_address (new_rtx, GET_MODE (op0), 0);
10653
10654 gcc_assert (!TREE_ADDRESSABLE (exp));
10655
10656 if (GET_MODE (op0) == BLKmode)
10657 emit_block_move (new_with_op0_mode, op0,
10658 GEN_INT (GET_MODE_SIZE (mode)),
10659 (modifier == EXPAND_STACK_PARM
10660 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10661 else
10662 emit_move_insn (new_with_op0_mode, op0);
10663
10664 op0 = new_rtx;
10665 }
10666 }
10667
10668 op0 = adjust_address (op0, mode, 0);
10669 }
10670
10671 return op0;
10672
10673 case MODIFY_EXPR:
10674 {
10675 tree lhs = treeop0;
10676 tree rhs = treeop1;
10677 gcc_assert (ignore);
10678
10679 /* Check for |= or &= of a bitfield of size 1 into another bitfield
10680 of size 1.  In this case (unless we need the result of the
10681 assignment) we can do this more efficiently with a test followed
10682 by an assignment, if necessary.
10683
10684 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10685 things change so we do, this code should be enhanced to
10686 support it. */
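/* A sketch of the transformation, using hypothetical source (not
   taken from any test case):
     struct S { unsigned a : 1, b : 1; } s;
     s.a |= s.b;
   expands as a conditional jump on s.b around a store of the constant
   1 into s.a, instead of a read-modify-write of s.a.  */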
10687 if (TREE_CODE (lhs) == COMPONENT_REF
10688 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10689 || TREE_CODE (rhs) == BIT_AND_EXPR)
10690 && TREE_OPERAND (rhs, 0) == lhs
10691 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10692 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10693 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10694 {
10695 rtx_code_label *label = gen_label_rtx ();
10696 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10697 do_jump (TREE_OPERAND (rhs, 1),
10698 value ? label : 0,
10699 value ? 0 : label, -1);
10700 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10701 false);
10702 do_pending_stack_adjust ();
10703 emit_label (label);
10704 return const0_rtx;
10705 }
10706
10707 expand_assignment (lhs, rhs, false);
10708 return const0_rtx;
10709 }
10710
10711 case ADDR_EXPR:
10712 return expand_expr_addr_expr (exp, target, tmode, modifier);
10713
10714 case REALPART_EXPR:
10715 op0 = expand_normal (treeop0);
10716 return read_complex_part (op0, false);
10717
10718 case IMAGPART_EXPR:
10719 op0 = expand_normal (treeop0);
10720 return read_complex_part (op0, true);
10721
10722 case RETURN_EXPR:
10723 case LABEL_EXPR:
10724 case GOTO_EXPR:
10725 case SWITCH_EXPR:
10726 case ASM_EXPR:
10727 /* Expanded in cfgexpand.c. */
10728 gcc_unreachable ();
10729
10730 case TRY_CATCH_EXPR:
10731 case CATCH_EXPR:
10732 case EH_FILTER_EXPR:
10733 case TRY_FINALLY_EXPR:
10734 /* Lowered by tree-eh.c. */
10735 gcc_unreachable ();
10736
10737 case WITH_CLEANUP_EXPR:
10738 case CLEANUP_POINT_EXPR:
10739 case TARGET_EXPR:
10740 case CASE_LABEL_EXPR:
10741 case VA_ARG_EXPR:
10742 case BIND_EXPR:
10743 case INIT_EXPR:
10744 case CONJ_EXPR:
10745 case COMPOUND_EXPR:
10746 case PREINCREMENT_EXPR:
10747 case PREDECREMENT_EXPR:
10748 case POSTINCREMENT_EXPR:
10749 case POSTDECREMENT_EXPR:
10750 case LOOP_EXPR:
10751 case EXIT_EXPR:
10752 case COMPOUND_LITERAL_EXPR:
10753 /* Lowered by gimplify.c. */
10754 gcc_unreachable ();
10755
10756 case FDESC_EXPR:
10757 /* Function descriptors are not valid except as initialization
10758 constants, and should not be expanded.  */
10759 gcc_unreachable ();
10760
10761 case WITH_SIZE_EXPR:
10762 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10763 have pulled out the size to use in whatever context it needed. */
10764 return expand_expr_real (treeop0, original_target, tmode,
10765 modifier, alt_rtl, inner_reference_p);
10766
10767 default:
10768 return expand_expr_real_2 (&ops, target, tmode, modifier);
10769 }
10770 }
10771 \f
10772 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10773 signedness of TYPE), possibly returning the result in TARGET. */
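/* A minimal sketch of the idea with illustrative numbers: reducing an
   8-bit value X to a precision of 3 bits uses
     X & 0x7                  for an unsigned TYPE, and
     (X << 5) >> 5            (arithmetic right shift) for a signed one,
   so that bit 2 is propagated as the sign bit.  */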
10774 static rtx
10775 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10776 {
10777 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10778 if (target && GET_MODE (target) != GET_MODE (exp))
10779 target = 0;
10780 /* For constant values, reduce using build_int_cst_type. */
10781 if (CONST_INT_P (exp))
10782 {
10783 HOST_WIDE_INT value = INTVAL (exp);
10784 tree t = build_int_cst_type (type, value);
10785 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10786 }
10787 else if (TYPE_UNSIGNED (type))
10788 {
10789 machine_mode mode = GET_MODE (exp);
10790 rtx mask = immed_wide_int_const
10791 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10792 return expand_and (mode, exp, mask, target);
10793 }
10794 else
10795 {
10796 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10797 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10798 exp, count, target, 0);
10799 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10800 exp, count, target, 0);
10801 }
10802 }
10803 \f
10804 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10805 when applied to the address of EXP produces an address known to be
10806 aligned more than BIGGEST_ALIGNMENT. */
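/* In source-level terms the offset recognized here has the
   (hypothetical) shape
     (-(intptr_t) &EXP) & (ALIGN - 1)
   where ALIGN is a power of 2 larger than BIGGEST_ALIGNMENT measured
   in bytes; adding such an offset to &EXP rounds the address up to
   the next multiple of ALIGN.  */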
10807
10808 static int
10809 is_aligning_offset (const_tree offset, const_tree exp)
10810 {
10811 /* Strip off any conversions. */
10812 while (CONVERT_EXPR_P (offset))
10813 offset = TREE_OPERAND (offset, 0);
10814
10815 /* We must now have a BIT_AND_EXPR with a constant that is one less
10816 than a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
10817 if (TREE_CODE (offset) != BIT_AND_EXPR
10818 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10819 || compare_tree_int (TREE_OPERAND (offset, 1),
10820 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10821 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10822 return 0;
10823
10824 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10825 It must be NEGATE_EXPR. Then strip any more conversions. */
10826 offset = TREE_OPERAND (offset, 0);
10827 while (CONVERT_EXPR_P (offset))
10828 offset = TREE_OPERAND (offset, 0);
10829
10830 if (TREE_CODE (offset) != NEGATE_EXPR)
10831 return 0;
10832
10833 offset = TREE_OPERAND (offset, 0);
10834 while (CONVERT_EXPR_P (offset))
10835 offset = TREE_OPERAND (offset, 0);
10836
10837 /* This must now be the address of EXP. */
10838 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10839 }
10840 \f
10841 /* Return the tree node if ARG corresponds to a string constant or zero
10842 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10843 in bytes within the string that ARG is accessing. The type of the
10844 offset will be `sizetype'. */
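/* For example (hypothetical argument, not from a caller in this file),
   for the argument &"hello"[2] this returns the STRING_CST "hello"
   with *PTR_OFFSET set to 2, and for a VAR_DECL whose initializer is
   a string literal it returns that initializer instead.  */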
10845
10846 tree
10847 string_constant (tree arg, tree *ptr_offset)
10848 {
10849 tree array, offset, lower_bound;
10850 STRIP_NOPS (arg);
10851
10852 if (TREE_CODE (arg) == ADDR_EXPR)
10853 {
10854 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10855 {
10856 *ptr_offset = size_zero_node;
10857 return TREE_OPERAND (arg, 0);
10858 }
10859 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10860 {
10861 array = TREE_OPERAND (arg, 0);
10862 offset = size_zero_node;
10863 }
10864 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10865 {
10866 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10867 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10868 if (TREE_CODE (array) != STRING_CST
10869 && TREE_CODE (array) != VAR_DECL)
10870 return 0;
10871
10872 /* Check if the array has a nonzero lower bound. */
10873 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10874 if (!integer_zerop (lower_bound))
10875 {
10876 /* If the offset and base aren't both constants, return 0. */
10877 if (TREE_CODE (lower_bound) != INTEGER_CST)
10878 return 0;
10879 if (TREE_CODE (offset) != INTEGER_CST)
10880 return 0;
10881 /* Adjust offset by the lower bound. */
10882 offset = size_diffop (fold_convert (sizetype, offset),
10883 fold_convert (sizetype, lower_bound));
10884 }
10885 }
10886 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10887 {
10888 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10889 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10890 if (TREE_CODE (array) != ADDR_EXPR)
10891 return 0;
10892 array = TREE_OPERAND (array, 0);
10893 if (TREE_CODE (array) != STRING_CST
10894 && TREE_CODE (array) != VAR_DECL)
10895 return 0;
10896 }
10897 else
10898 return 0;
10899 }
10900 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10901 {
10902 tree arg0 = TREE_OPERAND (arg, 0);
10903 tree arg1 = TREE_OPERAND (arg, 1);
10904
10905 STRIP_NOPS (arg0);
10906 STRIP_NOPS (arg1);
10907
10908 if (TREE_CODE (arg0) == ADDR_EXPR
10909 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10910 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10911 {
10912 array = TREE_OPERAND (arg0, 0);
10913 offset = arg1;
10914 }
10915 else if (TREE_CODE (arg1) == ADDR_EXPR
10916 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10917 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10918 {
10919 array = TREE_OPERAND (arg1, 0);
10920 offset = arg0;
10921 }
10922 else
10923 return 0;
10924 }
10925 else
10926 return 0;
10927
10928 if (TREE_CODE (array) == STRING_CST)
10929 {
10930 *ptr_offset = fold_convert (sizetype, offset);
10931 return array;
10932 }
10933 else if (TREE_CODE (array) == VAR_DECL
10934 || TREE_CODE (array) == CONST_DECL)
10935 {
10936 int length;
10937 tree init = ctor_for_folding (array);
10938
10939 /* Variables initialized to string literals can be handled too. */
10940 if (init == error_mark_node
10941 || !init
10942 || TREE_CODE (init) != STRING_CST)
10943 return 0;
10944
10945 /* Avoid const char foo[4] = "abcde"; */
10946 if (DECL_SIZE_UNIT (array) == NULL_TREE
10947 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10948 || (length = TREE_STRING_LENGTH (init)) <= 0
10949 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10950 return 0;
10951
10952 /* If the variable is bigger than the string literal, OFFSET must be
10953 constant and within the bounds of the string literal.  */
10954 offset = fold_convert (sizetype, offset);
10955 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10956 && (! tree_fits_uhwi_p (offset)
10957 || compare_tree_int (offset, length) >= 0))
10958 return 0;
10959
10960 *ptr_offset = offset;
10961 return init;
10962 }
10963
10964 return 0;
10965 }
10966 \f
10967 /* Generate code to calculate OPS, an exploded expression,
10968 using a store-flag instruction and return an rtx for the result.
10969 OPS reflects a comparison.
10970
10971 If TARGET is nonzero, store the result there if convenient.
10972
10973 Return zero if there is no suitable set-flag instruction
10974 available on this machine.
10975
10976 Once expand_expr has been called on the arguments of the comparison,
10977 we are committed to doing the store flag, since it is not safe to
10978 re-evaluate the expression. We emit the store-flag insn by calling
10979 emit_store_flag, but only expand the arguments if we have a reason
10980 to believe that emit_store_flag will be successful. If we think that
10981 it will, but it isn't, we have to simulate the store-flag with a
10982 set/jump/set sequence. */
10983
10984 static rtx
10985 do_store_flag (sepops ops, rtx target, machine_mode mode)
10986 {
10987 enum rtx_code code;
10988 tree arg0, arg1, type;
10989 tree tem;
10990 machine_mode operand_mode;
10991 int unsignedp;
10992 rtx op0, op1;
10993 rtx subtarget = target;
10994 location_t loc = ops->location;
10995
10996 arg0 = ops->op0;
10997 arg1 = ops->op1;
10998
10999 /* Don't crash if the comparison was erroneous. */
11000 if (arg0 == error_mark_node || arg1 == error_mark_node)
11001 return const0_rtx;
11002
11003 type = TREE_TYPE (arg0);
11004 operand_mode = TYPE_MODE (type);
11005 unsignedp = TYPE_UNSIGNED (type);
11006
11007 /* We won't bother with BLKmode store-flag operations because it would mean
11008 passing a lot of information to emit_store_flag. */
11009 if (operand_mode == BLKmode)
11010 return 0;
11011
11012 /* We won't bother with store-flag operations involving function pointers
11013 when function pointers must be canonicalized before comparisons. */
11014 #ifdef HAVE_canonicalize_funcptr_for_compare
11015 if (HAVE_canonicalize_funcptr_for_compare
11016 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
11017 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
11018 == FUNCTION_TYPE))
11019 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
11020 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
11021 == FUNCTION_TYPE))))
11022 return 0;
11023 #endif
11024
11025 STRIP_NOPS (arg0);
11026 STRIP_NOPS (arg1);
11027
11028 /* For vector-typed comparisons, emit code to generate the desired
11029 all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
11030 expander for this. */
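/* E.g. a V4SI comparison a < b is expanded here as if it were
   VEC_COND_EXPR <a < b, {-1,-1,-1,-1}, {0,0,0,0}>.  */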
11031 if (TREE_CODE (ops->type) == VECTOR_TYPE)
11032 {
11033 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11034 tree if_true = constant_boolean_node (true, ops->type);
11035 tree if_false = constant_boolean_node (false, ops->type);
11036 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
11037 }
11038
11039 /* Get the rtx comparison code to use. We know that EXP is a comparison
11040 operation of some type. Some comparisons against 1 and -1 can be
11041 converted to comparisons with zero. Do so here so that the tests
11042 below will be aware that we have a comparison with zero. These
11043 tests will not catch constants in the first operand, but constants
11044 are rarely passed as the first operand. */
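/* For instance, a comparison X < 1 becomes X <= 0, and a signed
   X > -1 becomes X >= 0, so the code below only has to recognize
   comparisons against zero.  */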
11045
11046 switch (ops->code)
11047 {
11048 case EQ_EXPR:
11049 code = EQ;
11050 break;
11051 case NE_EXPR:
11052 code = NE;
11053 break;
11054 case LT_EXPR:
11055 if (integer_onep (arg1))
11056 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11057 else
11058 code = unsignedp ? LTU : LT;
11059 break;
11060 case LE_EXPR:
11061 if (! unsignedp && integer_all_onesp (arg1))
11062 arg1 = integer_zero_node, code = LT;
11063 else
11064 code = unsignedp ? LEU : LE;
11065 break;
11066 case GT_EXPR:
11067 if (! unsignedp && integer_all_onesp (arg1))
11068 arg1 = integer_zero_node, code = GE;
11069 else
11070 code = unsignedp ? GTU : GT;
11071 break;
11072 case GE_EXPR:
11073 if (integer_onep (arg1))
11074 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11075 else
11076 code = unsignedp ? GEU : GE;
11077 break;
11078
11079 case UNORDERED_EXPR:
11080 code = UNORDERED;
11081 break;
11082 case ORDERED_EXPR:
11083 code = ORDERED;
11084 break;
11085 case UNLT_EXPR:
11086 code = UNLT;
11087 break;
11088 case UNLE_EXPR:
11089 code = UNLE;
11090 break;
11091 case UNGT_EXPR:
11092 code = UNGT;
11093 break;
11094 case UNGE_EXPR:
11095 code = UNGE;
11096 break;
11097 case UNEQ_EXPR:
11098 code = UNEQ;
11099 break;
11100 case LTGT_EXPR:
11101 code = LTGT;
11102 break;
11103
11104 default:
11105 gcc_unreachable ();
11106 }
11107
11108 /* Put a constant second. */
11109 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11110 || TREE_CODE (arg0) == FIXED_CST)
11111 {
11112 tem = arg0; arg0 = arg1; arg1 = tem;
11113 code = swap_condition (code);
11114 }
11115
11116 /* If this is an equality or inequality test of a single bit, we can
11117 do this by shifting the bit being tested to the low-order bit and
11118 masking the result with the constant 1. If the condition was EQ,
11119 we xor it with 1. This does not require an scc insn and is faster
11120 than an scc insn even if we have it.
11121
11122 The code to make this transformation was moved into fold_single_bit_test,
11123 so we just call into the folder and expand its result. */
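/* Illustration only: a test such as (x & 8) != 0 can be expanded as
   (x >> 3) & 1, and (x & 8) == 0 as ((x >> 3) & 1) ^ 1, with no
   store-flag instruction needed.  */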
11124
11125 if ((code == NE || code == EQ)
11126 && integer_zerop (arg1)
11127 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11128 {
11129 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11130 if (srcstmt
11131 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11132 {
11133 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11134 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11135 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11136 gimple_assign_rhs1 (srcstmt),
11137 gimple_assign_rhs2 (srcstmt));
11138 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11139 if (temp)
11140 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11141 }
11142 }
11143
11144 if (! get_subtarget (target)
11145 || GET_MODE (subtarget) != operand_mode)
11146 subtarget = 0;
11147
11148 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11149
11150 if (target == 0)
11151 target = gen_reg_rtx (mode);
11152
11153 /* Try a cstore if possible. */
11154 return emit_store_flag_force (target, code, op0, op1,
11155 operand_mode, unsignedp,
11156 (TYPE_PRECISION (ops->type) == 1
11157 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11158 }
11159 \f
11160
11161 /* Stubs in case we haven't got a casesi insn. */
11162 #ifndef HAVE_casesi
11163 # define HAVE_casesi 0
11164 # define gen_casesi(a, b, c, d, e) (0)
11165 # define CODE_FOR_casesi CODE_FOR_nothing
11166 #endif
11167
11168 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11169 0 otherwise (i.e. if there is no casesi instruction).
11170
11171 DEFAULT_PROBABILITY is the probability of jumping to the default
11172 label. */
11173 int
11174 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11175 rtx table_label, rtx default_label, rtx fallback_label,
11176 int default_probability)
11177 {
11178 struct expand_operand ops[5];
11179 machine_mode index_mode = SImode;
11180 rtx op1, op2, index;
11181
11182 if (! HAVE_casesi)
11183 return 0;
11184
11185 /* Convert the index to SImode. */
11186 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11187 {
11188 machine_mode omode = TYPE_MODE (index_type);
11189 rtx rangertx = expand_normal (range);
11190
11191 /* We must handle the endpoints in the original mode. */
11192 index_expr = build2 (MINUS_EXPR, index_type,
11193 index_expr, minval);
11194 minval = integer_zero_node;
11195 index = expand_normal (index_expr);
11196 if (default_label)
11197 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11198 omode, 1, default_label,
11199 default_probability);
11200 /* Now we can safely truncate. */
11201 index = convert_to_mode (index_mode, index, 0);
11202 }
11203 else
11204 {
11205 if (TYPE_MODE (index_type) != index_mode)
11206 {
11207 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11208 index_expr = fold_convert (index_type, index_expr);
11209 }
11210
11211 index = expand_normal (index_expr);
11212 }
11213
11214 do_pending_stack_adjust ();
11215
11216 op1 = expand_normal (minval);
11217 op2 = expand_normal (range);
11218
11219 create_input_operand (&ops[0], index, index_mode);
11220 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11221 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11222 create_fixed_operand (&ops[3], table_label);
11223 create_fixed_operand (&ops[4], (default_label
11224 ? default_label
11225 : fallback_label));
11226 expand_jump_insn (CODE_FOR_casesi, 5, ops);
11227 return 1;
11228 }
11229
11230 /* Attempt to generate a tablejump instruction; same concept as casesi above.  */
11231 #ifndef HAVE_tablejump
11232 #define HAVE_tablejump 0
11233 #define gen_tablejump(x, y) (0)
11234 #endif
11235
11236 /* Subroutine of the next function.
11237
11238 INDEX is the value being switched on, with the lowest value
11239 in the table already subtracted.
11240 MODE is its expected mode (needed if INDEX is constant).
11241 RANGE is the length of the jump table.
11242 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11243
11244 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11245 index value is out of range.
11246 DEFAULT_PROBABILITY is the probability of jumping to
11247 the default label. */
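/* Conceptually the dispatch address is
     TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE)
   (passed through PIC_CASE_VECTOR_ADDRESS when generating PIC code),
   and the table entry loaded from that address selects the jump
   target.  */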
11248
11249 static void
11250 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11251 rtx default_label, int default_probability)
11252 {
11253 rtx temp, vector;
11254
11255 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11256 cfun->cfg->max_jumptable_ents = INTVAL (range);
11257
11258 /* Do an unsigned comparison (in the proper mode) between the index
11259 expression and the value which represents the length of the range.
11260 Since we just finished subtracting the lower bound of the range
11261 from the index expression, this comparison allows us to simultaneously
11262 check that the original index expression value is both greater than
11263 or equal to the minimum value of the range and less than or equal to
11264 the maximum value of the range. */
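/* Worked example: suppose the case values are 5 .. 10, so the caller
   passes INDEX - 5 and RANGE = 5.  An original index of 3 becomes
   (unsigned) -2, which compares above 5, so it reaches the default
   label just as an index of 12 would.  */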
11265
11266 if (default_label)
11267 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11268 default_label, default_probability);
11269
11270
11271 /* If index is in range, it must fit in Pmode.
11272 Convert to Pmode so we can index with it. */
11273 if (mode != Pmode)
11274 index = convert_to_mode (Pmode, index, 1);
11275
11276 /* Don't let a MEM slip through, because then INDEX that comes
11277 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11278 and break_out_memory_refs will go to work on it and mess it up. */
11279 #ifdef PIC_CASE_VECTOR_ADDRESS
11280 if (flag_pic && !REG_P (index))
11281 index = copy_to_mode_reg (Pmode, index);
11282 #endif
11283
11284 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11285 GET_MODE_SIZE, because this indicates how large insns are. The other
11286 uses should all be Pmode, because they are addresses. This code
11287 could fail if addresses and insns are not the same size. */
11288 index = simplify_gen_binary (MULT, Pmode, index,
11289 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11290 Pmode));
11291 index = simplify_gen_binary (PLUS, Pmode, index,
11292 gen_rtx_LABEL_REF (Pmode, table_label));
11293
11294 #ifdef PIC_CASE_VECTOR_ADDRESS
11295 if (flag_pic)
11296 index = PIC_CASE_VECTOR_ADDRESS (index);
11297 else
11298 #endif
11299 index = memory_address (CASE_VECTOR_MODE, index);
11300 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11301 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11302 convert_move (temp, vector, 0);
11303
11304 emit_jump_insn (gen_tablejump (temp, table_label));
11305
11306 /* If we are generating PIC code or if the table is PC-relative, the
11307 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11308 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11309 emit_barrier ();
11310 }
11311
11312 int
11313 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11314 rtx table_label, rtx default_label, int default_probability)
11315 {
11316 rtx index;
11317
11318 if (! HAVE_tablejump)
11319 return 0;
11320
11321 index_expr = fold_build2 (MINUS_EXPR, index_type,
11322 fold_convert (index_type, index_expr),
11323 fold_convert (index_type, minval));
11324 index = expand_normal (index_expr);
11325 do_pending_stack_adjust ();
11326
11327 do_tablejump (index, TYPE_MODE (index_type),
11328 convert_modes (TYPE_MODE (index_type),
11329 TYPE_MODE (TREE_TYPE (range)),
11330 expand_normal (range),
11331 TYPE_UNSIGNED (TREE_TYPE (range))),
11332 table_label, default_label, default_probability);
11333 return 1;
11334 }
11335
11336 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11337 static rtx
11338 const_vector_from_tree (tree exp)
11339 {
11340 rtvec v;
11341 unsigned i;
11342 int units;
11343 tree elt;
11344 machine_mode inner, mode;
11345
11346 mode = TYPE_MODE (TREE_TYPE (exp));
11347
11348 if (initializer_zerop (exp))
11349 return CONST0_RTX (mode);
11350
11351 units = GET_MODE_NUNITS (mode);
11352 inner = GET_MODE_INNER (mode);
11353
11354 v = rtvec_alloc (units);
11355
11356 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11357 {
11358 elt = VECTOR_CST_ELT (exp, i);
11359
11360 if (TREE_CODE (elt) == REAL_CST)
11361 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11362 inner);
11363 else if (TREE_CODE (elt) == FIXED_CST)
11364 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11365 inner);
11366 else
11367 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11368 }
11369
11370 return gen_rtx_CONST_VECTOR (mode, v);
11371 }
11372
11373 /* Build a decl for a personality function given a language prefix. */
11374
11375 tree
11376 build_personality_function (const char *lang)
11377 {
11378 const char *unwind_and_version;
11379 tree decl, type;
11380 char *name;
11381
11382 switch (targetm_common.except_unwind_info (&global_options))
11383 {
11384 case UI_NONE:
11385 return NULL;
11386 case UI_SJLJ:
11387 unwind_and_version = "_sj0";
11388 break;
11389 case UI_DWARF2:
11390 case UI_TARGET:
11391 unwind_and_version = "_v0";
11392 break;
11393 case UI_SEH:
11394 unwind_and_version = "_seh0";
11395 break;
11396 default:
11397 gcc_unreachable ();
11398 }
11399
11400 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
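/* For example (assuming the usual language prefixes), LANG "gxx" with
   DWARF2 unwinding yields "__gxx_personality_v0", and with SJLJ
   unwinding "__gxx_personality_sj0".  */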
11401
11402 type = build_function_type_list (integer_type_node, integer_type_node,
11403 long_long_unsigned_type_node,
11404 ptr_type_node, ptr_type_node, NULL_TREE);
11405 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11406 get_identifier (name), type);
11407 DECL_ARTIFICIAL (decl) = 1;
11408 DECL_EXTERNAL (decl) = 1;
11409 TREE_PUBLIC (decl) = 1;
11410
11411 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11412 are the flags assigned by targetm.encode_section_info. */
11413 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11414
11415 return decl;
11416 }
11417
11418 /* Extracts the personality function of DECL and returns the corresponding
11419 libfunc. */
11420
11421 rtx
11422 get_personality_function (tree decl)
11423 {
11424 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11425 enum eh_personality_kind pk;
11426
11427 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11428 if (pk == eh_personality_none)
11429 return NULL;
11430
11431 if (!personality
11432 && pk == eh_personality_any)
11433 personality = lang_hooks.eh_personality ();
11434
11435 if (pk == eh_personality_lang)
11436 gcc_assert (personality != NULL_TREE);
11437
11438 return XEXP (DECL_RTL (personality), 0);
11439 }
11440
11441 #include "gt-expr.h"